max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
370 | <gh_stars>100-1000
package tw.skyarrow.ehreader.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import tw.skyarrow.ehreader.R;
import tw.skyarrow.ehreader.util.DownloadHelper;
import tw.skyarrow.ehreader.util.NetworkHelper;
import tw.skyarrow.ehreader.util.UpdateHelper;
/**
* Created by SkyArrow on 2014/2/4.
*/
public class NetworkStateReceiver extends BroadcastReceiver {
    /**
     * Reacts to connectivity changes: resumes pending downloads (and the
     * interrupted auto-update alarm) when a usable network comes up, and
     * pauses all downloads when connectivity is lost or disallowed.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        NetworkHelper network = NetworkHelper.getInstance(context);
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
        DownloadHelper downloadHelper = DownloadHelper.getInstance(context);
        UpdateHelper updateHelper = new UpdateHelper(context);

        // User preferences: resume downloads automatically, restrict to Wi-Fi.
        boolean autoDownload = preferences.getBoolean(context.getString(R.string.pref_auto_download),
                context.getResources().getBoolean(R.bool.pref_auto_download_default));
        boolean downloadOverWifi = preferences.getBoolean(context.getString(R.string.pref_download_over_wifi),
                context.getResources().getBoolean(R.bool.pref_download_over_wifi_default));
        boolean autoUpdateInterrupted = preferences.getBoolean(context.getString(R.string.pref_update_is_interrupted), false);

        // FIX: when the user restricts downloads to Wi-Fi, a non-Wi-Fi connection
        // must NOT count as available. The previous logic fell through to
        // isAvailable() and resumed downloads over mobile data even with the
        // Wi-Fi-only preference enabled.
        boolean networkAvailable;
        if (downloadOverWifi) {
            networkAvailable = network.isWifiAvailable();
        } else {
            networkAvailable = network.isAvailable();
        }

        if (networkAvailable) {
            if (autoDownload) downloadHelper.startAll();
            if (autoUpdateInterrupted) updateHelper.setupAlarm();
        } else {
            // No acceptable network: stop all active downloads.
            downloadHelper.pauseAll();
        }
    }
}
| 669 |
702 | /*
* Copyright ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.ethereum.api.jsonrpc.internal.filter;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.api.query.PrivacyQueries;
import org.hyperledger.besu.ethereum.core.PrivacyParameters;
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPool;
import java.util.Optional;
import com.google.common.annotations.VisibleForTesting;
/**
 * Fluent builder for {@link FilterManager}.
 *
 * <p>{@code blockchainQueries} and {@code transactionPool} are mandatory; the id
 * generator and repository have defaults. {@code build()} performs no side
 * effects on the builder, so it is safe to call repeatedly.
 */
public class FilterManagerBuilder {

  private BlockchainQueries blockchainQueries;
  private TransactionPool transactionPool;
  private FilterIdGenerator filterIdGenerator = new FilterIdGenerator();
  private FilterRepository filterRepository = new FilterRepository();
  private Optional<PrivacyParameters> privacyParameters = Optional.empty();
  private Optional<PrivacyQueries> privacyQueries = Optional.empty();

  /** Overrides the default {@link FilterIdGenerator}. */
  public FilterManagerBuilder filterIdGenerator(final FilterIdGenerator filterIdGenerator) {
    this.filterIdGenerator = filterIdGenerator;
    return this;
  }

  /** Overrides the default {@link FilterRepository}. */
  public FilterManagerBuilder filterRepository(final FilterRepository filterRepository) {
    this.filterRepository = filterRepository;
    return this;
  }

  /** Sets the mandatory blockchain query service. */
  public FilterManagerBuilder blockchainQueries(final BlockchainQueries blockchainQueries) {
    this.blockchainQueries = blockchainQueries;
    return this;
  }

  /** Sets the mandatory transaction pool. */
  public FilterManagerBuilder transactionPool(final TransactionPool transactionPool) {
    this.transactionPool = transactionPool;
    return this;
  }

  /** Sets privacy parameters; {@code null} clears them. */
  public FilterManagerBuilder privacyParameters(final PrivacyParameters privacyParameters) {
    this.privacyParameters = Optional.ofNullable(privacyParameters);
    return this;
  }

  /** Test hook to inject a pre-built {@link PrivacyQueries}; {@code null} clears it. */
  @VisibleForTesting
  FilterManagerBuilder privacyQueries(final PrivacyQueries privacyQueries) {
    this.privacyQueries = Optional.ofNullable(privacyQueries);
    return this;
  }

  /**
   * Builds the {@link FilterManager}.
   *
   * @return a new FilterManager instance
   * @throws IllegalStateException if a mandatory collaborator is missing
   */
  public FilterManager build() {
    if (blockchainQueries == null) {
      throw new IllegalStateException("BlockchainQueries is required to build FilterManager");
    }
    if (transactionPool == null) {
      throw new IllegalStateException("TransactionPool is required to build FilterManager");
    }

    // Derive PrivacyQueries on demand when privacy is enabled and no explicit
    // instance was supplied. Computed into a local (instead of mutating the
    // privacyQueries field, as before) so build() leaves the builder unchanged.
    Optional<PrivacyQueries> effectivePrivacyQueries = privacyQueries;
    if (effectivePrivacyQueries.isEmpty()
        && privacyParameters.isPresent()
        && privacyParameters.get().isEnabled()) {
      effectivePrivacyQueries =
          Optional.of(
              new PrivacyQueries(
                  blockchainQueries, privacyParameters.get().getPrivateWorldStateReader()));
    }

    return new FilterManager(
        blockchainQueries,
        transactionPool,
        effectivePrivacyQueries,
        filterIdGenerator,
        filterRepository);
  }
}
| 931 |
778 | <reponame>troels/compute-runtime
/*
* Copyright (C) 2020-2021 Intel Corporation
*
* SPDX-License-Identifier: MIT
*
*/
#pragma once
extern "C" {

// Tracing wrappers for the Level Zero (ze) device API. Each function mirrors
// the signature of the corresponding zeDevice*/zeKernel* entry point and is
// exported so the tracing layer can be substituted for the real driver calls.
// NOTE(review): the ze_* handle/struct types used below come from the Level
// Zero API header, which this file does not include itself — presumably the
// consumer includes it before this header; confirm.

// Enumerates driver devices into phDevices; *pCount is in/out.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGet_Tracing(ze_driver_handle_t hDriver,
                    uint32_t *pCount,
                    ze_device_handle_t *phDevices);

// General device properties (name, type, ids, ...).
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetProperties_Tracing(ze_device_handle_t hDevice,
                              ze_device_properties_t *pDeviceProperties);

// Compute capabilities (group sizes, SIMD widths, ...).
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetComputeProperties_Tracing(ze_device_handle_t hDevice,
                                     ze_device_compute_properties_t *pComputeProperties);

// Memory module properties; *pCount is in/out.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetMemoryProperties_Tracing(ze_device_handle_t hDevice,
                                    uint32_t *pCount,
                                    ze_device_memory_properties_t *pMemProperties);

// Cache properties; *pCount is in/out.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetCacheProperties_Tracing(ze_device_handle_t hDevice,
                                   uint32_t *pCount,
                                   ze_device_cache_properties_t *pCacheProperties);

// Image support properties.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetImageProperties_Tracing(ze_device_handle_t hDevice,
                                   ze_device_image_properties_t *pImageProperties);

// Enumerates sub-devices; *pCount is in/out.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetSubDevices_Tracing(ze_device_handle_t hDevice,
                              uint32_t *pCount,
                              ze_device_handle_t *phSubdevices);

// Peer-to-peer properties between hDevice and hPeerDevice.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetP2PProperties_Tracing(ze_device_handle_t hDevice,
                                 ze_device_handle_t hPeerDevice,
                                 ze_device_p2p_properties_t *pP2PProperties);

// Writes whether hDevice can access hPeerDevice's memory into *value.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceCanAccessPeer_Tracing(ze_device_handle_t hDevice,
                              ze_device_handle_t hPeerDevice,
                              ze_bool_t *value);

// Kernel cache configuration (the one non-device entry point in this header).
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSetCacheConfig_Tracing(ze_kernel_handle_t hKernel,
                               ze_cache_config_flags_t flags);

// Memory access capabilities (host/device/shared access flags).
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetMemoryAccessProperties_Tracing(ze_device_handle_t hDevice,
                                          ze_device_memory_access_properties_t *pMemAccessProperties);

// Module/program capabilities (SPIR-V versions, flags, ...).
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetModuleProperties_Tracing(ze_device_handle_t hDevice,
                                    ze_device_module_properties_t *pModuleProperties);

// Command-queue group properties; *pCount is in/out.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetCommandQueueGroupProperties_Tracing(ze_device_handle_t hDevice,
                                               uint32_t *pCount,
                                               ze_command_queue_group_properties_t *pCommandQueueGroupProperties);

// External memory import/export capabilities.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetExternalMemoryProperties_Tracing(ze_device_handle_t hDevice,
                                            ze_device_external_memory_properties_t *pExternalMemoryProperties);

// Device availability/health query.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeDeviceGetStatus_Tracing(ze_device_handle_t hDevice);

}
| 1,636 |
2,775 | {
"title": {
"zh": "经典热力图",
"en": "heatmap"
},
"demos": [
{
"filename": "heatmap.js",
"title": "经典热力图2D",
"screenshot":"https://gw.alipayobjects.com/mdn/antv_site/afts/img/A*QstiQq4JBOIAAAAAAAAAAABkARQnAQ"
},
{
"filename": "heatmap_purple.js",
"title": "经典热力图2D",
"screenshot":"https://gw.alipayobjects.com/mdn/antv_site/afts/img/A*GD2bR7pb-yIAAAAAAAAAAABkARQnAQ"
},
{
"filename": "heatmap3d.js",
"title": "经典热力图3D",
"screenshot":"https://gw.alipayobjects.com/mdn/antv_site/afts/img/A*dmhzTqrHpR4AAAAAAAAAAABkARQnAQ"
}
]
}
| 378 |
2,248 | <gh_stars>1000+
from the_import import ProvincialClass as pc, imported_func
class Abra:
    """Tiny fixture class: constructing it immediately prints "cadabra"."""

    def __init__(self):
        # Construction delegates straight to cadabra().
        self.cadabra()

    def cadabra(self):
        print("cadabra")
def b():
    # Local function whose only job is to construct an Abra instance
    # (construction prints "cadabra" as a side effect).
    Abra()


# Module-level smoke calls exercising the local function and the imports.
b()
pc()
# NOTE(review): HiddenClass is not defined or imported anywhere visible in
# this file, so this call raises NameError at runtime — presumably a
# deliberate fixture for a static-analysis tool (see original comment); confirm.
HiddenClass() # this is probably too defensive
imported_func()
| 116 |
879 | package com.bookstore;
import com.bookstore.service.BookstoreService;
import com.vladmihalcea.sql.SQLStatementCountValidator;
import static com.vladmihalcea.sql.SQLStatementCountValidator.assertDeleteCount;
import static com.vladmihalcea.sql.SQLStatementCountValidator.assertInsertCount;
import static com.vladmihalcea.sql.SQLStatementCountValidator.assertSelectCount;
import static com.vladmihalcea.sql.SQLStatementCountValidator.assertUpdateCount;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@SpringBootApplication
public class MainApplication {

    private final BookstoreService bookstoreService;

    public MainApplication(BookstoreService bookstoreService) {
        this.bookstoreService = bookstoreService;
    }

    public static void main(String[] args) {
        SpringApplication.run(MainApplication.class, args);
    }

    /**
     * Startup runner: executes two author-update scenarios and checks, via
     * SQL statement counting, how many statements each one emits.
     */
    @Bean
    public ApplicationRunner init() {
        return args -> {
            verifyUpdateWithoutTransaction();
            verifyUpdateWithTransaction();
        };
    }

    /** Without @Transactional there is no surrounding transaction: 3 statements (2 selects + 1 update). */
    private void verifyUpdateWithoutTransaction() {
        SQLStatementCountValidator.reset();
        bookstoreService.updateAuthorWithoutTransactional();
        assertSelectCount(2);
        assertUpdateCount(1);
        assertInsertCount(0);
        assertDeleteCount(0);
    }

    /** With @Transactional the transaction commits as a unit: only 2 statements (1 select + 1 update). */
    private void verifyUpdateWithTransaction() {
        SQLStatementCountValidator.reset();
        bookstoreService.updateAuthorWithTransactional();
        assertSelectCount(1);
        assertUpdateCount(1);
        assertInsertCount(0);
        assertDeleteCount(0);
    }
}
| 678 |
1,150 | ///*
// * Copyright (c) 2016 <NAME> (<EMAIL>)
// */
//
//package com.kaedea.frontia.demo.apis;
//
//import android.content.Context;
//import moe.studio.behavior.video.LoginApi;
//
//
///**
// * Created by kaede on 2015/12/7.
// */
//public class LoginApiImpl extends LoginApi {
// @Override
// public boolean isLogined() {
// return false;
// }
//
// @Override
// public void goToLogin(Context context) {
//
// }
//}
| 162 |
357 | <reponame>minatoyuichiro/Blueqat
import cmath
from math import pi
import random
from typing import Callable, List
import pytest
import numpy as np
from blueqat import Circuit
from blueqat.gate import OneQubitGate, Mat1Gate, HGate, UGate, PhaseGate, RXGate, RYGate, RZGate
from blueqat.circuit_funcs import circuit_to_unitary
from blueqat.backends.onequbitgate_decomposer import ryrz_decomposer, u_decomposer
# A decomposer maps one single-qubit gate to an equivalent sequence of gates.
Decomposer = Callable[[OneQubitGate], List[OneQubitGate]]

# Parametrize marker: run each test against both decomposer implementations.
decomposer_test = pytest.mark.parametrize('decomposer',
                                          [ryrz_decomposer, u_decomposer])
def check_decomposed(g: OneQubitGate, d: Decomposer, ignore_global: bool):
    """Assert that decomposer ``d`` reproduces gate ``g``'s unitary.

    Builds a one-qubit circuit from the original gate and one from its
    decomposition, converts both to unitary matrices, and compares them —
    exactly, or up to a global phase when ``ignore_global`` is set.
    Prints both matrices for debugging before re-raising on mismatch.
    """
    c1 = Circuit(1, [g])
    c2 = Circuit(1, d(g))
    u1 = circuit_to_unitary(c1)
    u2 = circuit_to_unitary(c2)
    if ignore_global:
        # For a 2x2 unitary U = exp(i*phi) * S with S in SU(2),
        # det(U) = exp(2i*phi); so multiplying by exp(-i*phase(det)/2)
        # strips the global phase (up to an overall sign ambiguity of S,
        # handled below by also comparing against -su2).
        gphase1 = cmath.phase(np.linalg.det(u1))
        gphase2 = cmath.phase(np.linalg.det(u2))
        su1 = u1 * cmath.exp(-0.5j * gphase1)
        su2 = u2 * cmath.exp(-0.5j * gphase2)
        # Sanity check: both are now (approximately) special unitaries.
        assert np.isclose(np.linalg.det(su1), 1.0)
        assert np.isclose(np.linalg.det(su2), 1.0)
    else:
        su1 = su2 = np.eye(2)  # To avoid static analyzer warning.
    try:
        if ignore_global:
            # Phase stripping is only defined up to a factor of -1.
            assert np.allclose(su1, su2) or np.allclose(su1, -su2)
        else:
            assert np.allclose(u1, u2)
    except AssertionError:
        # Dump both circuits/unitaries (and their difference) to aid
        # debugging, then re-raise so pytest still reports the failure.
        print("Orig:", c1)
        print(u1)
        if ignore_global:
            print("-->")
            print(su1)
        print("Conv:", c2)
        print(u2)
        if ignore_global:
            print("-->")
            print(su2)
        if ignore_global:
            print("abs(Orig - Conv):")
            print(np.abs(su1 - su2))
            print("abs(Orig + Conv):")
            print(np.abs(su1 + su2))
        else:
            print("abs(Orig - Conv):")
            print(np.abs(u1 - u2))
        raise
@decomposer_test
def test_identity(decomposer):
    """The identity matrix must decompose to an exactly equivalent circuit."""
    gate = Mat1Gate((0, ), np.eye(2, dtype=complex))
    check_decomposed(gate, decomposer, False)
@decomposer_test
def test_identity_plus_delta(decomposer):
    """A near-identity matrix (identity + 1e-10 perturbation) is handled exactly."""
    nearly_eye = np.eye(2, dtype=complex) + np.ones((2, 2)) * 1e-10
    check_decomposed(Mat1Gate((0, ), nearly_eye), decomposer, False)
@decomposer_test
def test_hadamard(decomposer):
    """The Hadamard gate decomposes correctly up to a global phase."""
    check_decomposed(HGate((0, )), decomposer, True)
@decomposer_test
def test_random_rx(decomposer):
    """Random-angle RX gates decompose correctly up to a global phase."""
    for _ in range(20):
        angle = random.random() * pi
        check_decomposed(RXGate((0, ), angle), decomposer, True)
@decomposer_test
def test_random_ry(decomposer):
    """Random-angle RY gates decompose correctly up to a global phase."""
    for _ in range(20):
        angle = random.random() * pi
        check_decomposed(RYGate((0, ), angle), decomposer, True)
@decomposer_test
def test_random_rz(decomposer):
    """Random-angle RZ gates decompose correctly up to a global phase."""
    for _ in range(20):
        angle = random.random() * pi
        check_decomposed(RZGate((0, ), angle), decomposer, True)
@decomposer_test
def test_random_r(decomposer):
    """Random-angle phase gates decompose correctly up to a global phase."""
    for _ in range(20):
        angle = random.random() * pi
        check_decomposed(PhaseGate((0, ), angle), decomposer, True)
@decomposer_test
def test_random_u(decomposer):
    """Random four-parameter U gates decompose correctly up to a global phase."""
    for _ in range(20):
        angles = [random.random() * pi for _ in range(4)]
        check_decomposed(UGate((0, ), *angles), decomposer, True)
| 1,649 |
841 | /*
* Copyright (c) 2016 <NAME>
* Copyright (c) 2013-2016 the Civetweb developers
* Copyright (c) 2004-2013 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "httplib_main.h"
/*
* char *httplib_strndup( const char *str, size_t len );
*
* The function strndup() duplicates a string with a maximum given length to a
* new string in a newly allocated block of memory. The function is equivalent
* to the Posix function strndup() with the difference that LibHTTP memory
* allocation functions are used which allow for tracking of memory leaks
* through a monitor hook. The size of the allocated memory block is the given
* length plus one byte for the terminating NUL character.
*
* If the duplicate of the string is no longer used, the allocated memory
* should be returned to the heap with a call to httplib_free.
*
* If the function fails, the value NULL is returned, otherwise a pointer to
* the duplicate.
*/
LIBHTTP_API char *httplib_strndup( const char *str, size_t len ) {

	char *copy = NULL;

	/*
	 * Single-exit variant: allocate len+1 bytes (room for the NUL) through
	 * the tracked LibHTTP allocator and copy at most len characters. NULL
	 * input or a failed allocation both fall through and return NULL.
	 */
	if ( str != NULL ) {

		copy = httplib_malloc( len+1 );
		if ( copy != NULL ) httplib_strlcpy( copy, str, len+1 );
	}

	return copy;

}  /* httplib_strndup */
| 618 |
442 | <filename>spring-context/src/test/java/org/springframework/context/annotation/AnnotationBeanNameGeneratorTests.java
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import example.scannable.DefaultNamedComponent;
import org.junit.Test;
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
import org.springframework.beans.factory.annotation.AnnotatedGenericBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.SimpleBeanDefinitionRegistry;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Controller;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for {@link AnnotationBeanNameGenerator}.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
public class AnnotationBeanNameGeneratorTests {

	// System under test; stateless, so shared across all test methods.
	private AnnotationBeanNameGenerator beanNameGenerator = new AnnotationBeanNameGenerator();

	// An explicit @Component("walden") value is used verbatim as the bean name.
	@Test
	public void generateBeanNameWithNamedComponent() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComponentWithName.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertNotNull("The generated beanName must *never* be null.", beanName);
		assertTrue("The generated beanName must *never* be blank.", StringUtils.hasText(beanName));
		assertEquals("walden", beanName);
	}

	// The name supplied by a component declared in another package
	// (example.scannable.DefaultNamedComponent) is honored as well.
	@Test
	public void generateBeanNameWithDefaultNamedComponent() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(DefaultNamedComponent.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertNotNull("The generated beanName must *never* be null.", beanName);
		assertTrue("The generated beanName must *never* be blank.", StringUtils.hasText(beanName));
		assertEquals("thoreau", beanName);
	}

	// A blank @Component(" ") value falls back to the default generated name.
	@Test
	public void generateBeanNameWithNamedComponentWhereTheNameIsBlank() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComponentWithBlankName.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertNotNull("The generated beanName must *never* be null.", beanName);
		assertTrue("The generated beanName must *never* be blank.", StringUtils.hasText(beanName));
		String expectedGeneratedBeanName = this.beanNameGenerator.buildDefaultBeanName(bd);
		assertEquals(expectedGeneratedBeanName, beanName);
	}

	// A plain @Component with no value also falls back to the default name.
	@Test
	public void generateBeanNameWithAnonymousComponentYieldsGeneratedBeanName() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(AnonymousComponent.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertNotNull("The generated beanName must *never* be null.", beanName);
		assertTrue("The generated beanName must *never* be blank.", StringUtils.hasText(beanName));
		String expectedGeneratedBeanName = this.beanNameGenerator.buildDefaultBeanName(bd);
		assertEquals(expectedGeneratedBeanName, beanName);
	}

	// A String value on a meta-annotated stereotype (@Service("henry")) is used.
	@Test
	public void generateBeanNameFromMetaComponentWithStringValue() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComponentFromStringMeta.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertEquals("henry", beanName);
	}

	// A non-String value (long) on a meta-annotation is ignored -> default name.
	@Test
	public void generateBeanNameFromMetaComponentWithNonStringValue() {
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComponentFromNonStringMeta.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertEquals("annotationBeanNameGeneratorTests.ComponentFromNonStringMeta", beanName);
	}

	// Composed @Controller annotation with no name -> default generated name.
	@Test
	public void generateBeanNameFromComposedControllerAnnotationWithoutName() {
		// SPR-11360
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComposedControllerAnnotationWithoutName.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		String expectedGeneratedBeanName = this.beanNameGenerator.buildDefaultBeanName(bd);
		assertEquals(expectedGeneratedBeanName, beanName);
	}

	// Composed @Controller annotation with a blank name -> default generated name.
	@Test
	public void generateBeanNameFromComposedControllerAnnotationWithBlankName() {
		// SPR-11360
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(ComposedControllerAnnotationWithBlankName.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		String expectedGeneratedBeanName = this.beanNameGenerator.buildDefaultBeanName(bd);
		assertEquals(expectedGeneratedBeanName, beanName);
	}

	// Composed @Controller annotation with an explicit value -> used verbatim.
	@Test
	public void generateBeanNameFromComposedControllerAnnotationWithStringValue() {
		// SPR-11360
		BeanDefinitionRegistry registry = new SimpleBeanDefinitionRegistry();
		AnnotatedBeanDefinition bd = new AnnotatedGenericBeanDefinition(
			ComposedControllerAnnotationWithStringValue.class);
		String beanName = this.beanNameGenerator.generateBeanName(bd, registry);
		assertEquals("restController", beanName);
	}

	// ---- Fixture classes and annotations used by the tests above ----

	@Component("walden")
	private static class ComponentWithName {
	}

	// Intentionally blank name (single space).
	@Component(" ")
	private static class ComponentWithBlankName {
	}

	@Component
	private static class AnonymousComponent {
	}

	@Service("henry")
	private static class ComponentFromStringMeta {
	}

	// Meta-annotation whose value() is a long, not a String.
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.TYPE)
	@Component
	public @interface NonStringMetaComponent {
		long value();
	}

	@NonStringMetaComponent(123)
	private static class ComponentFromNonStringMeta {
	}

	/**
	 * @see org.springframework.web.bind.annotation.RestController
	 */
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.TYPE)
	@Controller
	public static @interface TestRestController {
		String value() default "";
	}

	@TestRestController
	public static class ComposedControllerAnnotationWithoutName {
	}

	@TestRestController(" ")
	public static class ComposedControllerAnnotationWithBlankName {
	}

	@TestRestController("restController")
	public static class ComposedControllerAnnotationWithStringValue {
	}
}
| 2,197 |
313 | /*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.titus.api.appscale.model;
/**
 * Immutable value object describing an auto-scaling policy: a name, a policy
 * type, and the (possibly null) step-scaling, alarm, and target-tracking
 * sub-configurations. Instances are created via {@link #newBuilder()}.
 */
public class PolicyConfiguration {
    private final String name;
    private final PolicyType policyType;
    private final StepScalingPolicyConfiguration stepScalingPolicyConfiguration;
    private final AlarmConfiguration alarmConfiguration;
    private final TargetTrackingPolicy targetTrackingPolicy;

    // Note: no null-checks are performed; any field may be null depending on
    // the policy type.
    public PolicyConfiguration(String name, PolicyType policyType, StepScalingPolicyConfiguration stepScalingPolicyConfiguration, AlarmConfiguration alarmConfiguration, TargetTrackingPolicy targetTrackingPolicy) {
        this.name = name;
        this.policyType = policyType;
        this.stepScalingPolicyConfiguration = stepScalingPolicyConfiguration;
        this.alarmConfiguration = alarmConfiguration;
        this.targetTrackingPolicy = targetTrackingPolicy;
    }

    public String getName() {
        return name;
    }

    public PolicyType getPolicyType() {
        return policyType;
    }

    public StepScalingPolicyConfiguration getStepScalingPolicyConfiguration() {
        return stepScalingPolicyConfiguration;
    }

    public AlarmConfiguration getAlarmConfiguration() {
        return alarmConfiguration;
    }

    public TargetTrackingPolicy getTargetTrackingPolicy() {
        return targetTrackingPolicy;
    }

    @Override
    public String toString() {
        return "PolicyConfiguration{" +
                "name='" + name + '\'' +
                ", policyType=" + policyType +
                ", stepScalingPolicyConfiguration=" + stepScalingPolicyConfiguration +
                ", alarmConfiguration=" + alarmConfiguration +
                ", targetTrackingPolicy=" + targetTrackingPolicy +
                '}';
    }

    /** Entry point for the fluent builder below. */
    public static Builder newBuilder() {
        return new Builder();
    }

    /** Fluent builder; all fields are optional and default to null. */
    public static class Builder {
        private String name;
        private PolicyType policyType;
        private StepScalingPolicyConfiguration stepScalingPolicyConfiguration;
        private AlarmConfiguration alarmConfiguration;
        private TargetTrackingPolicy targetTrackingPolicy;

        // Private: obtain instances via PolicyConfiguration.newBuilder().
        private Builder() {
        }

        public Builder withName(String name) {
            this.name = name;
            return this;
        }

        public Builder withPolicyType(PolicyType policyType) {
            this.policyType = policyType;
            return this;
        }

        public Builder withStepScalingPolicyConfiguration(StepScalingPolicyConfiguration stepScalingPolicyConfiguration) {
            this.stepScalingPolicyConfiguration = stepScalingPolicyConfiguration;
            return this;
        }

        public Builder withAlarmConfiguration(AlarmConfiguration alarmConfiguration) {
            this.alarmConfiguration = alarmConfiguration;
            return this;
        }

        public Builder withTargetTrackingPolicy(TargetTrackingPolicy targetTrackingPolicy) {
            this.targetTrackingPolicy = targetTrackingPolicy;
            return this;
        }

        public PolicyConfiguration build() {
            return new PolicyConfiguration(name, policyType, stepScalingPolicyConfiguration, alarmConfiguration, targetTrackingPolicy);
        }
    }
}
| 1,272 |
1,863 | //
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Copyright (c) 2018 NVIDIA Corporation. All rights reserved.
#ifndef __APEX_PHYSX_OBJECT_DESC_H__
#define __APEX_PHYSX_OBJECT_DESC_H__
#include "Apex.h"
#include "PhysXObjectDescIntl.h"
namespace nvidia
{
namespace apex
{
// Descriptor associating a PhysX object with its APEX actors. Instances live
// in arrays managed by ApexSDKImpl and are chained through mNext/mPrev indices.
class ApexPhysXObjectDesc : public PhysXObjectDescIntl
{
public:
	typedef PhysXObjectDescIntl Parent;

	// Default-constructs an unlinked descriptor with cleared state.
	ApexPhysXObjectDesc() : mNext(0), mPrev(0)
	{
		mFlags = 0;
		userData = NULL;
		mPhysXObject = NULL;
	}

	// Need a copy constructor because we contain an array, and we are in arrays
	// NOTE(review): the base subobject is copied twice here — once by the base
	// copy constructor and again inside operator= — redundant but harmless.
	ApexPhysXObjectDesc(const ApexPhysXObjectDesc& desc) : PhysXObjectDescIntl(desc)
	{
		*this = desc;
	}

	// Member-wise assignment, including the intrusive list links.
	ApexPhysXObjectDesc& operator = (const ApexPhysXObjectDesc& desc)
	{
		mFlags = desc.mFlags;
		userData = desc.userData;
		mApexActors = desc.mApexActors;
		mPhysXObject = desc.mPhysXObject;
		mNext = desc.mNext;
		mPrev = desc.mPrev;
		return *this;
	}

	// Swaps base state plus the list links (used by container maintenance).
	void swap(ApexPhysXObjectDesc& rhs)
	{
		Parent::swap(rhs);
		shdfnd::swap(mNext, rhs.mNext);
		shdfnd::swap(mPrev, rhs.mPrev);
	}

	// Hashes a pointer-sized value into a 16-bit bucket index.
	static uint16_t makeHash(size_t hashable);

	// Intrusive doubly-linked-list links (indices, not pointers).
	uint32_t mNext, mPrev;

	friend class ApexSDKImpl;

	virtual ~ApexPhysXObjectDesc(void)
	{
	}
};
}
} // end namespace nvidia::apex
#endif // __APEX_PHYSX_OBJECT_DESC_H__
| 923 |
679 | <reponame>Grosskopf/openoffice
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// Basic.h : Declaration of the CBasic
#ifndef __Foo_H_
#define __Foo_H_
#include "resource.h" // main symbols
//#include "AxTestComponents.h"
#import "AxTestComponents.tlb" no_namespace no_implementation raw_interfaces_only named_guids
//#include "AxTestComponents.h"
/////////////////////////////////////////////////////////////////////////////
// CBasic
// ATL COM test component implementing the IFoo custom interface (see the
// imported AxTestComponents type library). Single-threaded apartment model.
class ATL_NO_VTABLE CFoo :
	public CComObjectRootEx<CComSingleThreadModel>,
	public CComCoClass<CFoo, &CLSID_Foo>,
	public IFoo
//	public IDispatchImpl<IFoo, &__uuidof(IFoo), &LIBID_AXTESTCOMPONENTSLib, /* wMajor = */ 1, /* wMinor = */ 0>
{
public:
	CFoo();
	~CFoo();

// Registry script resource used for COM registration.
DECLARE_REGISTRY_RESOURCEID(IDR_BASIC)

DECLARE_PROTECT_FINAL_CONSTRUCT()

// QueryInterface map: this class only exposes IFoo.
BEGIN_COM_MAP(CFoo)
	COM_INTERFACE_ENTRY(IFoo)
END_COM_MAP()

	// IFoo method; implementation in the corresponding .cpp file.
	STDMETHOD(Foo)(IUnknown* val);

// IFoo Methods
public:
};
#endif //__BASIC_H_
| 569 |
505 | package de.rieckpil.blog;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping("/todos")
public class TodoController {

    /** Data access for Todo entities, supplied via constructor injection. */
    private final TodoRepository repository;

    public TodoController(TodoRepository todoRepository) {
        this.repository = todoRepository;
    }

    /**
     * Handles {@code GET /todos}.
     *
     * @return every stored todo item
     */
    @GetMapping
    public List<Todo> getAllTodos() {
        return this.repository.findAll();
    }
}
| 185 |
1,338 | <reponame>Kirishikesan/haiku
/*
** Copyright 2004, <NAME> (<EMAIL>).
** Distributed under the terms of the MIT License.
**
*/
#ifndef _TEST_H
#define _TEST_H
#include <stdio.h>
// Singleton that tallies assertion outcomes (passed/failed/total) for the
// test harness; Print() reports the accumulated counts.
class AssertStatistics {
private:
	AssertStatistics();  // singleton: obtain via GetInstance()

public:
	static AssertStatistics* GetInstance();

	// Both record one assertion; they differ only in which bucket they bump.
	void AssertFailed() { fAssertions++; fFailed++; }
	void AssertPassed() { fAssertions++; fPassed++; }

	void Print();

	int fAssertions;  // total assertions recorded
	int fPassed;
	int fFailed;

	static AssertStatistics* fStatistics;  // the singleton instance
};
// Test payload object: every instance gets a unique id, and a global live
// instance counter lets container tests detect leaks or missed destructions.
class Item
{
public:
	Item() { Init(); }
	Item(const Item& item) : fValue(item.fValue) { Init(); };
	Item(int value) : fValue(value) { Init(); };
	virtual ~Item() { fInstances --; }

	int Value() { return fValue; }

	// Value equality; ids are intentionally ignored.
	bool Equals(Item* item) {
		return item != NULL && fValue == item->fValue;
	}

	// Number of Item objects currently alive.
	static int GetNumberOfInstances() { return fInstances; }

	void Print() {
		fprintf(stderr, "[%d] %d", fID, fValue);
		// fprintf(stderr, "id: %d; value: %d", fID, fValue);
	}

	// qsort-style comparison callback; defined elsewhere.
	static int Compare(const void* a, const void* b);

private:
	// Assigns the next unique id and bumps the live-instance counter.
	void Init() {
		fID = fNextID ++;
		fInstances ++;
	}

	int fID;				// unique id for each created Item
	int fValue;				// the value of the item

	static int fNextID;
	static int fInstances;
};
#endif
| 480 |
650 | <filename>Userland/Utilities/chown.cpp
/*
* Copyright (c) 2018-2020, <NAME> <<EMAIL>>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/String.h>
#include <AK/Vector.h>
#include <grp.h>
#include <pwd.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>
// chown utility: parses a "uid[:gid]" spec (numeric or by name) and applies
// it to the given path. Returns 0 on success, 1 on any error.
int main(int argc, char** argv)
{
    // SerenityOS pledge: restrict this process to stdio, filesystem reads
    // (needed for passwd/group lookups), and the chown syscall.
    if (pledge("stdio rpath chown", nullptr) < 0) {
        perror("pledge");
        return 1;
    }

    if (argc < 3) {
        warnln("usage: chown <uid[:gid]> <path>");
        return 1;
    }

    // -1 means "leave unchanged" for chown(2), so a spec without a gid
    // only changes the owner.
    uid_t new_uid = -1;
    gid_t new_gid = -1;

    // NOTE(review): the 'true' presumably asks split() to keep empty parts so
    // that "uid:" is rejected below — confirm against AK::String::split.
    auto parts = String(argv[1]).split(':', true);
    if (parts.is_empty()) {
        warnln("Empty uid/gid spec");
        return 1;
    }
    if (parts[0].is_empty() || (parts.size() == 2 && parts[1].is_empty()) || parts.size() > 2) {
        warnln("Invalid uid/gid spec");
        return 1;
    }

    // Owner part: try numeric uid first, then fall back to a passwd lookup.
    auto number = parts[0].to_uint();
    if (number.has_value()) {
        new_uid = number.value();
    } else {
        auto* passwd = getpwnam(parts[0].characters());
        if (!passwd) {
            warnln("Unknown user '{}'", parts[0]);
            return 1;
        }
        new_uid = passwd->pw_uid;
    }

    // Optional group part: same numeric-then-name resolution.
    if (parts.size() == 2) {
        auto number = parts[1].to_uint();
        if (number.has_value()) {
            new_gid = number.value();
        } else {
            auto* group = getgrnam(parts[1].characters());
            if (!group) {
                warnln("Unknown group '{}'", parts[1]);
                return 1;
            }
            new_gid = group->gr_gid;
        }
    }

    int rc = chown(argv[2], new_uid, new_gid);
    if (rc < 0) {
        perror("chown");
        return 1;
    }
    return 0;
}
| 893 |
2,095 | <reponame>Mu-L/rsocket-java<filename>rsocket-core/src/main/java/io/rsocket/DuplexConnection.java
/*
* Copyright 2015-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rsocket;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import java.net.SocketAddress;
import java.nio.channels.ClosedChannelException;
import org.reactivestreams.Subscriber;
import reactor.core.publisher.Flux;
/** Represents a connection with input/output that the protocol uses. */
/** Represents a connection with input/output that the protocol uses. */
public interface DuplexConnection extends Availability, Closeable {
  /**
   * Delivers the given frame to the underlying transport connection. This method is non-blocking
   * and can be safely executed from multiple threads. This method does not provide any flow-control
   * mechanism.
   *
   * @param streamId to which the given frame relates
   * @param frame with the encoded content
   */
  void sendFrame(int streamId, ByteBuf frame);
  /**
   * Send an error frame and after it is successfully sent, close the connection.
   *
   * @param errorException to encode in the error frame
   */
  void sendErrorAndClose(RSocketErrorException errorException);
  /**
   * Returns a stream of all {@code Frame}s received on this connection.
   *
   * <p><strong>Completion</strong>
   *
   * <p>Returned {@code Publisher} <em>MUST</em> never emit a completion event ({@link
   * Subscriber#onComplete()}).
   *
   * <p><strong>Error</strong>
   *
   * <p>Returned {@code Publisher} can error with various transport errors. If the underlying
   * physical connection is closed by the peer, then the returned stream from here <em>MUST</em>
   * emit an {@link ClosedChannelException}.
   *
   * <p><strong>Multiple Subscriptions</strong>
   *
   * <p>Returned {@code Publisher} is not required to support multiple concurrent subscriptions.
   * RSocket will never have multiple subscriptions to this source. Implementations <em>MUST</em>
   * emit an {@link IllegalStateException} for subsequent concurrent subscriptions, if they do not
   * support multiple concurrent subscriptions.
   *
   * @return Stream of all {@code Frame}s received.
   */
  Flux<ByteBuf> receive();
  /**
   * Returns the assigned {@link ByteBufAllocator}.
   *
   * @return the {@link ByteBufAllocator}
   */
  ByteBufAllocator alloc();
  /**
   * Return the remote address that this connection is connected to. The returned {@link
   * SocketAddress} varies by transport type and should be downcast to obtain more detailed
   * information. For TCP and WebSocket, the address type is {@link java.net.InetSocketAddress}. For
   * local transport, it is {@link io.rsocket.transport.local.LocalSocketAddress}.
   *
   * @return the address
   * @since 1.1
   */
  SocketAddress remoteAddress();
  /**
   * A connection is considered fully available ({@code 1.0}) until it has been disposed, after
   * which it reports {@code 0.0}.
   */
  @Override
  default double availability() {
    return isDisposed() ? 0.0 : 1.0;
  }
}
| 1,003 |
2,542 | <reponame>vishnuk007/service-fabric
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#pragma once
namespace Hosting2
{
    // Interface implemented by hosts of guest service types; exposes the
    // activation manager, endpoint descriptions, dependent code packages,
    // and the identity of the hosting process / code package.
    class IGuestServiceTypeHost
    {
    public:
        // COM activation manager for guest service code packages.
        __declspec(property(get = get_ActivationManager)) Common::ComPointer<ComGuestServiceCodePackageActivationManager> & ActivationManager;
        virtual Common::ComPointer<ComGuestServiceCodePackageActivationManager> & get_ActivationManager() = 0;

        // Endpoint descriptions exposed by this host.
        __declspec(property(get = get_Endpoints)) std::vector<ServiceModel::EndpointDescription> const & Endpoints;
        virtual std::vector<ServiceModel::EndpointDescription> const & get_Endpoints() = 0;

        // Names of code packages this guest depends on.
        __declspec(property(get = get_DependentCodePackages)) std::vector<std::wstring> const & DependentCodePackages;
        virtual std::vector<std::wstring> const & get_DependentCodePackages() = 0;

        // Identity of the application host process.
        __declspec(property(get = get_HostContext)) ApplicationHostContext const & HostContext;
        virtual ApplicationHostContext const & get_HostContext() = 0;

        // Identity of the code package running in this host.
        __declspec(property(get = get_CodeContext)) CodePackageContext const & CodeContext;
        virtual CodePackageContext const & get_CodeContext() = 0;

        // Retrieves the activator used to control dependent code packages.
        virtual Common::ErrorCode GetCodePackageActivator(_Out_ Common::ComPointer<IFabricCodePackageActivator> & codePackageActivator) = 0;
    };
}
| 467 |
2,086 | #include "strm.h"
#include "khash.h"
KHASH_MAP_INIT_INT64(env, strm_value);
typedef khash_t(env) strm_env;
strm_env *globals;
/* Inserts NAME -> VAL into ENV. NAME is interned first so the table only
 * ever stores interned keys (lookups can then compare by identity).
 * Insert-only: returns STRM_NG when NAME is already bound (kh_put r=0) or
 * when the insertion fails (r=-1); STRM_OK otherwise.
 * NOTE(review): refusing to re-bind an existing name looks intentional,
 * but confirm against callers. */
static int
env_set(strm_env *env, strm_string name, strm_value val)
{
  int r;
  khiter_t k;

  /* the global environment must not change once the event loop runs */
  assert(env != globals || !strm_event_loop_started);
  if (!strm_str_intern_p(name)) {
    name = strm_str_intern_str(name);
  }
  k = kh_put(env, env, name, &r);
  if (r <= 0) return STRM_NG;   /* r=0 key is present in the hash table */
                                /* r=-1 operation failed */
  kh_value(env, k) = val;
  return STRM_OK;
}
/* Looks NAME up in ENV and stores the bound value in *VAL.
 * Returns STRM_OK when the name is bound, STRM_NG otherwise. */
int
env_get(strm_env* env, strm_string name, strm_value* val)
{
  khiter_t pos;

  /* keys are interned strings; intern before probing the table */
  name = strm_str_intern_p(name) ? name : strm_str_intern_str(name);
  pos = kh_get(env, env, name);
  if (pos == kh_end(env)) {
    return STRM_NG;
  }
  *val = kh_value(env, pos);
  return STRM_OK;
}
/* Binds NAME to VAL in STATE's own environment, or in the global
 * environment when STATE is NULL. The target environment is created
 * lazily on first use. Returns the result of env_set(). */
int
strm_var_set(strm_state* state, strm_string name, strm_value val)
{
  strm_env* env;

  if (!state) {
    if (!globals) globals = kh_init(env);
    env = globals;
  }
  else {
    if (!state->env) state->env = kh_init(env);
    env = state->env;
  }
  return env_set(env, name, val);
}
/* Convenience wrapper: binds the C string NAME to VAL in STATE's
 * environment, interning the name first. */
int
strm_var_def(strm_state* state, const char* name, strm_value val)
{
  return strm_var_set(state, strm_str_intern(name, strlen(name)), val);
}
/* Resolves NAME by walking STATE and its enclosing states (state->prev),
 * falling back to the global environment. On success stores the value in
 * *VAL and returns STRM_OK; returns STRM_NG when the name is unbound. */
int
strm_var_get(strm_state* state, strm_string name, strm_value* val)
{
  while (state) {
    if (state->env) {
      /* compare against STRM_OK rather than a bare 0, for consistency
         with the rest of this file */
      if (env_get((strm_env*)state->env, name, val) == STRM_OK)
        return STRM_OK;
    }
    state = state->prev;
  }
  if (!globals) return STRM_NG;
  return env_get(globals, name, val);
}
/* Pattern-match binding: if NAME is already bound in STATE's own
 * environment, succeed only when the bound value equals VAL; otherwise
 * create the binding. Returns STRM_OK or STRM_NG. */
int
strm_var_match(strm_state* state, strm_string name, strm_value val)
{
  if (state && state->env) {
    strm_value v0;

    /* compare against STRM_OK rather than a bare 0, for consistency
       with the rest of this file */
    if (env_get((strm_env*)state->env, name, &v0) == STRM_OK) {
      if (strm_value_eq(v0, val))
        return STRM_OK;
      return STRM_NG;
    }
  }
  return strm_var_set(state, name, val);
}
/* Copies every binding of S2's environment into S1's environment, creating
 * either environment lazily. Fails (STRM_NG) if any key is already present
 * in S1 or the hash insertion fails, STRM_OK otherwise. */
int
strm_env_copy(strm_state* s1, strm_state* s2)
{
  strm_env *e1 = s1->env;
  strm_env *e2 = s2->env;
  khiter_t k, kk;
  int r;

  if (!e1) {
    e1 = s1->env = kh_init(env);
  }
  if (!e2) {
    /* bug fix: this previously assigned to s1->env, clobbering the
       destination map just created above and leaving s2->env NULL */
    e2 = s2->env = kh_init(env);
  }
  for (k = kh_begin(e2); k != kh_end(e2); k++) {
    if (kh_exist(e2, k)) {
      kk = kh_put(env, e1, kh_key(e2, k), &r);
      if (r <= 0) return STRM_NG; /* r=0 key is present in the hash table */
                                  /* r=-1 operation failed */
      kh_value(e1, kk) = kh_value(e2, k);
    }
  }
  return STRM_OK;
}
| 1,233 |
442 | <reponame>yuweihn/jmqtt
package org.jmqtt.bus.enums;
/**
* cluster event code
*/
/**
 * Identifiers for events exchanged between cluster nodes.
 */
public enum ClusterEventCodeEnum {

    MQTT_CLEAR_SESSION("MQTT_CLEAR_SESSION"),
    DISPATCHER_CLIENT_MESSAGE("DISPATCHER_CLIENT_MESSAGE");

    /** Wire representation of this event. */
    private final String code;

    ClusterEventCodeEnum(String code) {
        this.code = code;
    }

    public String getCode() {
        return code;
    }
}
| 178 |
351 | package com.talosvfx.talos.editor.widgets.ui;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.ui.TextField;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.talosvfx.talos.editor.widgets.propertyWidgets.FloatFieldFilter;
import com.talosvfx.talos.runtime.values.NumericalValue;
/**
 * A three-component (x, y, z) numeric editor backed by a {@link NumericalValue}.
 * Each text field accepts floats only; unparsable input writes 0 into the
 * corresponding component.
 */
public class NumericalValueField extends Table {
    private final TextField x;
    private final TextField y;
    private final TextField z;

    // Value being edited; attached via setNumericalValue().
    NumericalValue value;

    public NumericalValueField(Skin skin) {
        setSkin(skin);

        // The three components were previously wired with three identical,
        // copy-pasted listeners; build them through shared helpers instead.
        x = newFloatField();
        y = newFloatField();
        z = newFloatField();

        bindComponent(x, 0);
        bindComponent(y, 1);
        bindComponent(z, 2);

        add(x).padRight(6f).prefWidth(52).minWidth(10).growX().height(25);
        add(y).prefWidth(52).minWidth(10).growX().height(25);
        add(z).padLeft(6f).prefWidth(52).minWidth(10).growX().height(25);
    }

    // Creates one float-only text field with the shared initial text/style.
    private TextField newFloatField() {
        TextField field = new TextField("0.0", getSkin(), "panel");
        field.setTextFieldFilter(new FloatFieldFilter());
        return field;
    }

    // Mirrors edits of the given field into component `index` of `value`.
    // NOTE(review): assumes setNumericalValue() is called before the user can
    // type; otherwise `value` is null here — confirm against callers.
    private void bindComponent(final TextField field, final int index) {
        field.addListener(new ChangeListener() {
            @Override
            public void changed(ChangeEvent event, Actor actor) {
                try {
                    value.set(index, Float.parseFloat(field.getText()));
                } catch (NumberFormatException e) {
                    value.set(index, 0f);
                }
            }
        });
    }

    /** Attaches the value to edit and displays its current components. */
    public void setNumericalValue (NumericalValue value) {
        this.value = value;
        this.x.setText(String.valueOf(value.get(0)));
        this.y.setText(String.valueOf(value.get(1)));
        this.z.setText(String.valueOf(value.get(2)));
    }
}
| 1,302 |
745 | /*******************************************************************************
*
* MIT License
*
* Copyright (c) 2017 Advanced Micro Devices, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*******************************************************************************/
#include <cassert>
#include <miopen/fusion.hpp>
#include <miopen/logger.hpp>
namespace miopen {
// operator args
// Default-constructed, empty argument container for fusion operators.
OperatorArgs::OperatorArgs() {}
// Registers argument V under NAME: recorded both in the name->arg map and
// in the insertion-ordered vector. Note that emplace leaves an existing
// entry for NAME unchanged, while the vector always gains a copy.
void OperatorArgs::ins_arg(std::string name, OpKernelArg v)
{
    args_map.emplace(std::make_pair(name, v));
    // args_map[name] = std::move(v);
    args_vec.push_back(v);
}
// Stream inserter for OperatorArgs. Currently a placeholder that emits
// nothing (the parameter is intentionally unnamed); the commented-out
// block preserves the intended enum/range logging for future use.
std::ostream& operator<<(std::ostream& stream, const OperatorArgs&) // x )
{
    /*MIOPEN_LOG_ENUM(stream,
                    x.mode,
                    miopenActivationPASTHRU,
                    miopenActivationLOGISTIC,
                    miopenActivationTANH,
                    miopenActivationRELU,
                    miopenActivationSOFTRELU,
                    miopenActivationABS,
                    miopenActivationPOWER,
                    miopenActivationCLIPPEDRELU,
                    miopenActivationLEAKYRELU,
                    miopenActivationELU)*/
    // LogRange(stream, x.parms, ", ") << ", ";
    return stream;
}
} // namespace miopen
| 832 |
565 |
#include "numerics/polynomial.hpp"
#include <tuple>
#include "base/macros.hpp"
#include "geometry/frame.hpp"
#include "geometry/grassmann.hpp"
#include "geometry/named_quantities.hpp"
#include "gtest/gtest.h"
#include "numerics/polynomial_evaluators.hpp"
#include "quantities/constants.hpp"
#include "quantities/named_quantities.hpp"
#include "quantities/quantities.hpp"
#include "quantities/si.hpp"
#include "serialization/geometry.pb.h"
#include "serialization/numerics.pb.h"
#include "testing_utilities/almost_equals.hpp"
#include "testing_utilities/matchers.hpp"
#define PRINCIPIA_USE_IACA 0
#if PRINCIPIA_USE_IACA
#include "intel/iacaMarks.h"
#endif
namespace principia {
using geometry::Frame;
using geometry::Displacement;
using geometry::Handedness;
using geometry::Inertial;
using geometry::Instant;
using geometry::Position;
using geometry::Vector;
using geometry::Velocity;
using quantities::Acceleration;
using quantities::Energy;
using quantities::Entropy;
using quantities::Length;
using quantities::Product;
using quantities::Quotient;
using quantities::Current;
using quantities::Temperature;
using quantities::Time;
using quantities::constants::BoltzmannConstant;
using quantities::constants::SpeedOfLight;
using quantities::si::Ampere;
using quantities::si::Joule;
using quantities::si::Kelvin;
using quantities::si::Metre;
using quantities::si::Second;
using quantities::si::Watt;
using testing_utilities::AlmostEquals;
using testing_utilities::EqualsProto;
using ::testing::Eq;
namespace numerics {
class PolynomialTest : public ::testing::Test {
 protected:
  // Inertial, right-handed test frame used for all vector quantities below.
  using World = Frame<serialization::Frame::TestTag,
                      Inertial,
                      Handedness::Right,
                      serialization::Frame::TEST>;
  // Degree-2 polynomial of a vector (Time) argument, Horner evaluation.
  using P2V = PolynomialInMonomialBasis<Displacement<World>, Time, 2,
                                        HornerEvaluator>;
  // Degree-2 polynomial of an affine (Instant) argument.
  using P2A = PolynomialInMonomialBasis<Displacement<World>, Instant, 2,
                                        HornerEvaluator>;
  // Degree-2 polynomial with an affine (Position) value.
  using P2P = PolynomialInMonomialBasis<Position<World>, Instant, 2,
                                        HornerEvaluator>;
  // High-degree polynomial, Estrin evaluation.
  using P17 = PolynomialInMonomialBasis<Displacement<World>, Time, 17,
                                        EstrinEvaluator>;

  // Coefficients of t ↦ (t², t, 1) metres: constant (0, 0, 1) m,
  // linear (0, 1, 0) m/s, quadratic (1, 0, 0) m/s².
  PolynomialTest()
      : coefficients_({
            Displacement<World>({0 * Metre,
                                 0 * Metre,
                                 1 * Metre}),
            Velocity<World>({0 * Metre / Second,
                             1 * Metre / Second,
                             0 * Metre / Second}),
            Vector<Acceleration, World>({1 * Metre / Second / Second,
                                         0 * Metre / Second / Second,
                                         0 * Metre / Second / Second})}) {}

  P2V::Coefficients const coefficients_;
};
#if PRINCIPIA_USE_IACA
// A convenient skeleton for analysing code with IACA.
TEST_F(PolynomialTest, DISABLED_IACA) {
constexpr int degree = 17;
using E = EstrinEvaluator<Displacement<World>, Time, degree>;
using P = PolynomialInMonomialBasis<Displacement<World>,
Time,
degree,
EstrinEvaluator>;
P::Coefficients const coefficients;
auto iaca = [](P::Coefficients const& c, Time const& t) {
IACA_VC64_START;
auto const result = E::Evaluate(c, t);
IACA_VC64_END;
return result;
};
CHECK_EQ(iaca(coefficients, 2 * Second), iaca(coefficients, 2 * Second));
}
#endif
// Check that coefficients can be accessed and have the right type.
TEST_F(PolynomialTest, Coefficients) {
  // std::get picks the coefficient of the matching degree:
  // 0 → constant displacement, 1 → linear velocity.
  Displacement<World> const d = std::get<0>(coefficients_);
  Velocity<World> const v = std::get<1>(coefficients_);
  EXPECT_EQ(1 * Metre, d.coordinates().z);
  EXPECT_EQ(1 * Metre / Second, v.coordinates().y);
}
// Check that a polynomial can be constructed and evaluated.
TEST_F(PolynomialTest, Evaluate2V) {
P2V const p(coefficients_);
EXPECT_EQ(2, p.degree());
Displacement<World> const d = p(0.5 * Second);
Velocity<World> const v = p.EvaluateDerivative(0.5 * Second);
EXPECT_THAT(d, AlmostEquals(Displacement<World>({0.25 * Metre,
0.5 * Metre,
1 * Metre}), 0));
EXPECT_THAT(v, AlmostEquals(Velocity<World>({1 * Metre / Second,
1 * Metre / Second,
0 * Metre / Second}), 0));
}
// Check that a polynomial can be constructed for an affine argument.
TEST_F(PolynomialTest, Evaluate2A) {
Instant const t0 = Instant() + 0.3 * Second;
P2A const p(coefficients_, t0);
EXPECT_EQ(2, p.degree());
Displacement<World> const d = p(t0 + 0.5 * Second);
Velocity<World> const v = p.EvaluateDerivative(t0 + 0.5 * Second);
EXPECT_THAT(d, AlmostEquals(Displacement<World>({0.25 * Metre,
0.5 * Metre,
1 * Metre}), 0));
EXPECT_THAT(v, AlmostEquals(Velocity<World>({1 * Metre / Second,
1 * Metre / Second,
0 * Metre / Second}), 0));
// This compiles.
p.Primitive();
}
// Check that a polynomial can return an affine value.
TEST_F(PolynomialTest, Evaluate2P) {
Instant const t0 = Instant() + 0.3 * Second;
P2P const p({World::origin + std::get<0>(coefficients_),
std::get<1>(coefficients_),
std::get<2>(coefficients_)},
t0);
EXPECT_EQ(2, p.degree());
Position<World> const d = p(t0 + 0.5 * Second);
Velocity<World> const v = p.EvaluateDerivative(t0 + 0.5 * Second);
EXPECT_THAT(d, AlmostEquals(World::origin + Displacement<World>({0.25 * Metre,
0.5 * Metre,
1 * Metre}),
0));
EXPECT_THAT(v, AlmostEquals(Velocity<World>({1 * Metre / Second,
1 * Metre / Second,
0 * Metre / Second}), 0));
// This doesn't compile (and rightly so).
#if 0
p.Primitive();
#endif
}
// Check that a polynomial of high order may be declared.
TEST_F(PolynomialTest, Evaluate17) {
P17::Coefficients const coefficients;
P17 const p(coefficients);
EXPECT_EQ(17, p.degree());
Displacement<World> const d = p(0.5 * Second);
EXPECT_THAT(d, AlmostEquals(Displacement<World>({0 * Metre,
0 * Metre,
0 * Metre}), 0));
}
// Check that a conversion to increase the degree works.
TEST_F(PolynomialTest, Conversion) {
P2V const p2v(coefficients_);
P17 const p17 = P17(p2v);
Displacement<World> const d = p17(0.5 * Second);
Velocity<World> const v = p17.EvaluateDerivative(0.5 * Second);
EXPECT_THAT(d, AlmostEquals(Displacement<World>({0.25 * Metre,
0.5 * Metre,
1 * Metre}), 0));
EXPECT_THAT(v, AlmostEquals(Velocity<World>({1 * Metre / Second,
1 * Metre / Second,
0 * Metre / Second}), 0));
}
TEST_F(PolynomialTest, VectorSpace) {
P2V const p2v(coefficients_);
{
auto const p = p2v + p2v;
auto const actual = p(0 * Second);
auto const expected =
Displacement<World>({0 * Metre, 0 * Metre, 2 * Metre});
EXPECT_THAT(actual, AlmostEquals(expected, 0));
}
{
auto const p = p2v - p2v;
auto const actual = p(0 * Second);
auto const expected =
Displacement<World>({0 * Metre, 0 * Metre, 0 * Metre});
EXPECT_THAT(actual, AlmostEquals(expected, 0));
}
{
auto const p = 3.0 * Joule * p2v;
auto const actual = p(0 * Second);
auto const expected = Vector<Product<Energy, Length>, World>(
{0 * Joule * Metre, 0 * Joule * Metre, 3 * Joule * Metre});
EXPECT_THAT(actual, AlmostEquals(expected, 0));
}
{
auto const p = p2v * (3.0 * Joule);
auto const actual = p(0 * Second);
auto const expected = Vector<Product<Length, Energy>, World>(
{0 * Joule * Metre, 0 * Joule * Metre, 3 * Joule * Metre});
EXPECT_THAT(actual, AlmostEquals(expected, 0));
}
{
auto const p = p2v / (4.0 * Joule);
auto const actual = p(0 * Second);
auto const expected = Vector<Quotient<Length, Energy>, World>(
{0 * Metre / Joule, 0 * Metre / Joule, 0.25 * Metre / Joule});
EXPECT_THAT(actual, AlmostEquals(expected, 0));
}
}
TEST_F(PolynomialTest, Ring) {
using P2 = PolynomialInMonomialBasis<Temperature, Time, 2, HornerEvaluator>;
using P3 = PolynomialInMonomialBasis<Current, Time, 3, HornerEvaluator>;
P2 const p2({1 * Kelvin, 3 * Kelvin / Second, -8 * Kelvin / Second / Second});
P3 const p3({2 * Ampere,
-4 * Ampere / Second,
3 * Ampere / Second / Second,
1 * Ampere / Second / Second / Second});
auto const p = p2 * p3;
{
auto const actual = p(0 * Second);
EXPECT_THAT(actual, AlmostEquals(2 * Ampere * Kelvin, 0));
}
{
auto const actual = p(1 * Second);
EXPECT_THAT(actual, AlmostEquals(-8 * Ampere * Kelvin, 0));
}
{
auto const actual = p(-1 * Second);
EXPECT_THAT(actual, AlmostEquals(-80 * Ampere * Kelvin, 0));
}
{
auto const actual = p(2 * Second);
EXPECT_THAT(actual, AlmostEquals(-350 * Ampere * Kelvin, 0));
}
{
auto const actual = p(-2 * Second);
EXPECT_THAT(actual, AlmostEquals(-518 * Ampere * Kelvin, 0));
}
}
TEST_F(PolynomialTest, Affine) {
using P0A = PolynomialInMonomialBasis<Instant, Time, 0, HornerEvaluator>;
using P0V = PolynomialInMonomialBasis<Time, Time, 0, HornerEvaluator>;
P0A const p0a(std::tuple{Instant() + 1 * Second});
P0V const p0v(std::tuple{2 * Second});
#if PRINCIPIA_COMPILER_MSVC_HANDLES_POLYNOMIAL_OPERATORS
{
P0A const p = p0v + Instant();
EXPECT_THAT(p(3 * Second), AlmostEquals(Instant() + 2 * Second, 0));
}
#endif
{
P0A const p = Instant() + p0v;
EXPECT_THAT(p(3 * Second), AlmostEquals(Instant() + 2 * Second, 0));
}
{
P0V const p = p0a - Instant();
EXPECT_THAT(p(3 * Second), AlmostEquals(1 * Second, 0));
}
{
P0V const p = Instant() - p0a;
EXPECT_THAT(p(3 * Second), AlmostEquals(-1 * Second, 0));
}
}
// Compose contains a fold expression which fails to compile in Clang because of
// https://bugs.llvm.org/show_bug.cgi?id=30590. That bug will be fixed post-
// 11.0.0. Since we don't use Compose as of this writing, and working around
// the bug would be hard, we ifdef out the test.
#if PRINCIPIA_COMPILER_MSVC
TEST_F(PolynomialTest, Monoid) {
using P0 =
PolynomialInMonomialBasis<Current, Temperature, 0, HornerEvaluator>;
using P2A =
PolynomialInMonomialBasis<Temperature, Instant, 2, HornerEvaluator>;
using P2V =
PolynomialInMonomialBasis<Temperature, Time, 2, HornerEvaluator>;
using P3 =
PolynomialInMonomialBasis<Current, Temperature, 3, HornerEvaluator>;
Instant const t0;
P0 const p0(std::tuple{9 * Ampere});
P2A const p2a({1 * Kelvin,
3 * Kelvin / Second,
-8 * Kelvin / Second / Second}, t0);
P2V const p2v({1 * Kelvin,
3 * Kelvin / Second,
-8 * Kelvin / Second / Second});
P3 const p3({2 * Ampere,
-4 * Ampere / Kelvin,
3 * Ampere / Kelvin / Kelvin,
1 * Ampere / Kelvin / Kelvin / Kelvin});
auto const pa = Compose(p3, p2a);
auto const pv = Compose(p3, p2v);
{
auto const actual_a = pa(t0 + 0 * Second);
auto const actual_v = pv(0 * Second);
EXPECT_THAT(actual_a, AlmostEquals(2 * Ampere, 0));
EXPECT_THAT(actual_v, AlmostEquals(2 * Ampere, 0));
}
{
auto const actual_a = pa(t0 + 1 * Second);
auto const actual_v = pv(1 * Second);
EXPECT_THAT(actual_a, AlmostEquals(2 * Ampere, 0));
EXPECT_THAT(actual_v, AlmostEquals(2 * Ampere, 0));
}
{
auto const actual_a = pa(t0 - 1 * Second);
auto const actual_v = pv(-1 * Second);
EXPECT_THAT(actual_a, AlmostEquals(-658 * Ampere, 0));
EXPECT_THAT(actual_v, AlmostEquals(-658 * Ampere, 0));
}
{
auto const actual_a = pa(t0 + 2 * Second);
auto const actual_v = pv(2 * Second);
EXPECT_THAT(actual_a, AlmostEquals(-13648 * Ampere, 0));
EXPECT_THAT(actual_v, AlmostEquals(-13648 * Ampere, 0));
}
{
auto const actual_a = pa(t0 - 2 * Second);
auto const actual_v = pv(-2 * Second);
EXPECT_THAT(actual_a, AlmostEquals(-46396 * Ampere, 0));
EXPECT_THAT(actual_v, AlmostEquals(-46396 * Ampere, 0));
}
{
auto const actual = Compose(p0, p2a)(t0);
EXPECT_THAT(actual, AlmostEquals(9 * Ampere, 0));
}
}
#endif
TEST_F(PolynomialTest, PointwiseInnerProduct) {
P2V::Coefficients const coefficients({
Displacement<World>({0 * Metre,
2 * Metre,
3 * Metre}),
Velocity<World>({-1 * Metre / Second,
1 * Metre / Second,
0 * Metre / Second}),
Vector<Acceleration, World>({1 * Metre / Second / Second,
1 * Metre / Second / Second,
-2 * Metre / Second / Second})});
P2V const p2va(coefficients_);
P2V const p2vb(coefficients);
auto const p = PointwiseInnerProduct(p2va, p2vb);
{
auto const actual = p(0 * Second);
EXPECT_THAT(actual, AlmostEquals(3 * Metre * Metre, 0));
}
{
auto const actual = p(1 * Second);
EXPECT_THAT(actual, AlmostEquals(5 * Metre * Metre, 0));
}
{
auto const actual = p(-1 * Second);
EXPECT_THAT(actual, AlmostEquals(1 * Metre * Metre, 0));
}
{
auto const actual = p(2 * Second);
EXPECT_THAT(actual, AlmostEquals(19 * Metre * Metre, 0));
}
{
auto const actual = p(-2 * Second);
EXPECT_THAT(actual, AlmostEquals(11 * Metre * Metre, 0));
}
}
// Check that rebasing a polynomial on a different origin preserves its
// values over a range of arguments (up to accumulated rounding, hence the
// wide ULP tolerance).
TEST_F(PolynomialTest, AtOrigin) {
  Instant const t0 = Instant() + 3 * Second;
  P2A const p(coefficients_, t0);
  P2A const q = p.AtOrigin(Instant() - 2 * Second);
  for (Instant t = Instant() - 10 * Second;
       t < Instant() + 10 * Second;
       t += 0.3 * Second) {
    EXPECT_THAT(q(t), AlmostEquals(p(t), 0, 942));
  }
}
// Check first and higher derivatives, evaluated at 0, of degree-2 and
// degree-3 scalar-valued polynomials.
TEST_F(PolynomialTest, Derivative) {
  using P2 = PolynomialInMonomialBasis<Temperature, Time, 2, HornerEvaluator>;
  using P3 = PolynomialInMonomialBasis<Current, Time, 3, HornerEvaluator>;
  P2 const p2({1 * Kelvin, 3 * Kelvin / Second, -8 * Kelvin / Second / Second});
  P3 const p3({2 * Ampere,
               -4 * Ampere / Second,
               3 * Ampere / Second / Second,
               1 * Ampere / Second / Second / Second});
  EXPECT_EQ(3 * Kelvin / Second,
            p2.Derivative<1>()(0 * Second));
  EXPECT_EQ(-16 * Kelvin / Second / Second,
            p2.Derivative<2>()(0 * Second));
  EXPECT_EQ(-4 * Ampere / Second,
            p3.Derivative<1>()(0 * Second));
  EXPECT_EQ(6 * Ampere / Second / Second,
            p3.Derivative<2>()(0 * Second));
  EXPECT_EQ(6 * Ampere / Second / Second / Second,
            p3.Derivative<3>()(0 * Second));
}
TEST_F(PolynomialTest, PrimitiveIntegrate) {
using P2 = PolynomialInMonomialBasis<Temperature, Time, 2, HornerEvaluator>;
P2 const p2({1 * Kelvin, 3 * Kelvin / Second, -8 * Kelvin / Second / Second});
EXPECT_THAT(p2.Primitive()(0 * Second),
AlmostEquals(0 * Kelvin * Second, 0));
EXPECT_THAT(p2.Primitive()(1 * Second),
AlmostEquals(-1.0 / 6.0 * Kelvin * Second, 5));
EXPECT_THAT(p2.Primitive()(-1 * Second),
AlmostEquals(19.0 / 6.0 * Kelvin * Second, 1));
EXPECT_THAT(p2.Primitive()(2 * Second),
AlmostEquals(-40.0 / 3.0 * Kelvin * Second, 1));
EXPECT_THAT(p2.Integrate(-1 * Second, 2 * Second),
AlmostEquals(-99.0 / 6.0 * Kelvin * Second, 3));
}
TEST_F(PolynomialTest, EvaluateConstant) {
PolynomialInMonomialBasis<Entropy, Time, 0, HornerEvaluator> const
horner_boltzmann(std::make_tuple(BoltzmannConstant));
PolynomialInMonomialBasis<Entropy, Time, 0, EstrinEvaluator> const
estrin_boltzmann(std::make_tuple(BoltzmannConstant));
EXPECT_THAT(horner_boltzmann(1729 * Second), Eq(BoltzmannConstant));
EXPECT_THAT(estrin_boltzmann(1729 * Second), Eq(BoltzmannConstant));
EXPECT_THAT(horner_boltzmann.EvaluateDerivative(1729 * Second),
Eq(0 * Watt / Kelvin));
EXPECT_THAT(estrin_boltzmann.EvaluateDerivative(1729 * Second),
Eq(0 * Watt / Kelvin));
}
TEST_F(PolynomialTest, EvaluateLinear) {
PolynomialInMonomialBasis<Length, Time, 1, HornerEvaluator> const
horner_light({0 * Metre, SpeedOfLight});
PolynomialInMonomialBasis<Length, Time, 1, EstrinEvaluator> const
estrin_light({0 * Metre, SpeedOfLight});
constexpr Length light_second = Second * SpeedOfLight;
EXPECT_THAT(horner_light(1729 * Second), Eq(1729 * light_second));
EXPECT_THAT(estrin_light(1729 * Second), Eq(1729 * light_second));
EXPECT_THAT(horner_light.EvaluateDerivative(1729 * Second), Eq(SpeedOfLight));
EXPECT_THAT(estrin_light.EvaluateDerivative(1729 * Second), Eq(SpeedOfLight));
}
// Check that polynomials may be serialized.
TEST_F(PolynomialTest, Serialization) {
{
P2V p2v(coefficients_);
serialization::Polynomial message;
p2v.WriteToMessage(&message);
EXPECT_EQ(2, message.degree());
EXPECT_TRUE(message.HasExtension(
serialization::PolynomialInMonomialBasis::extension));
auto const& extension = message.GetExtension(
serialization::PolynomialInMonomialBasis::extension);
EXPECT_EQ(3, extension.coefficient_size());
for (auto const& coefficient : extension.coefficient()) {
EXPECT_TRUE(coefficient.has_multivector());
}
EXPECT_TRUE(extension.has_quantity());
auto const polynomial_read =
Polynomial<Displacement<World>, Time>::ReadFromMessage<HornerEvaluator>(
message);
EXPECT_EQ(2, polynomial_read->degree());
EXPECT_THAT(
(*polynomial_read)(0.5 * Second),
AlmostEquals(
Displacement<World>({0.25 * Metre, 0.5 * Metre, 1 * Metre}), 0));
serialization::Polynomial message2;
polynomial_read->WriteToMessage(&message2);
EXPECT_THAT(message2, EqualsProto(message));
}
{
P2A p2a(coefficients_, Instant());
serialization::Polynomial message;
p2a.WriteToMessage(&message);
EXPECT_EQ(2, message.degree());
EXPECT_TRUE(message.HasExtension(
serialization::PolynomialInMonomialBasis::extension));
auto const& extension = message.GetExtension(
serialization::PolynomialInMonomialBasis::extension);
EXPECT_EQ(3, extension.coefficient_size());
for (auto const& coefficient : extension.coefficient()) {
EXPECT_TRUE(coefficient.has_multivector());
}
EXPECT_TRUE(extension.has_point());
EXPECT_TRUE(extension.point().has_scalar());
auto const polynomial_read =
Polynomial<Displacement<World>,
Instant>::ReadFromMessage<HornerEvaluator>(message);
EXPECT_EQ(2, polynomial_read->degree());
EXPECT_THAT(
(*polynomial_read)(Instant() + 0.5 * Second),
AlmostEquals(
Displacement<World>({0.25 * Metre, 0.5 * Metre, 1 * Metre}), 0));
serialization::Polynomial message2;
polynomial_read->WriteToMessage(&message2);
EXPECT_THAT(message2, EqualsProto(message));
}
{
P17::Coefficients const coefficients;
P17 p17(coefficients);
serialization::Polynomial message;
p17.WriteToMessage(&message);
EXPECT_EQ(17, message.degree());
EXPECT_TRUE(message.HasExtension(
serialization::PolynomialInMonomialBasis::extension));
auto const& extension = message.GetExtension(
serialization::PolynomialInMonomialBasis::extension);
EXPECT_EQ(18, extension.coefficient_size());
for (auto const& coefficient : extension.coefficient()) {
EXPECT_TRUE(coefficient.has_multivector());
}
EXPECT_TRUE(extension.has_quantity());
auto const polynomial_read =
Polynomial<Displacement<World>, Time>::ReadFromMessage<HornerEvaluator>(
message);
EXPECT_EQ(17, polynomial_read->degree());
EXPECT_THAT((*polynomial_read)(0.5 * Second),
AlmostEquals(
Displacement<World>({0 * Metre, 0 * Metre, 0 * Metre}), 0));
serialization::Polynomial message2;
polynomial_read->WriteToMessage(&message2);
EXPECT_THAT(message2, EqualsProto(message));
}
}
// Smoke test for the stream inserter: only exercises operator<< through
// LOG; makes no assertion on the formatted text.
TEST_F(PolynomialTest, Output) {
  P2V p2v(coefficients_);
  P2A p2a(coefficients_, Instant());
  P17::Coefficients const coefficients;
  P17 p17(coefficients);
  LOG(ERROR) << p2v;
  LOG(ERROR) << p2a;
  LOG(ERROR) << p17;
}
} // namespace numerics
} // namespace principia
#undef PRINCIPIA_USE_IACA
| 9,717 |
4,054 | <filename>modules/lwjgl/driftfx/src/main/java/org/eclipse/fx/drift/internal/GPUSyncUtil.java<gh_stars>1000+
package org.eclipse.fx.drift.internal;
import java.time.Duration;
/**
 * Creates GPU fence/sync objects, abstracting over the active graphics
 * pipeline: native GL fences when running on ES2, a no-op for D3D.
 */
public class GPUSyncUtil {
    /** Outcome of a client-side wait on a sync object. */
    public static enum WaitSyncResult {
        // NOTE(review): "AREADY" is a typo for "ALREADY" (GL_ALREADY_SIGNALED),
        // but renaming this public constant would break callers.
        AREADY_SIGNALED,
        TIMEOUT_EXPIRED,
        CONDITION_SATISFIED,
        WAIT_FAILED
    }
    /** Minimal fence API: client-side wait, queue-side wait, deletion. */
    public static interface GPUSync {
        public WaitSyncResult ClientWaitSync(Duration timeout);
        public void WaitSync();
        public void Delete();
    }
    /** No-op implementation used on the D3D pipeline. */
    public static class D3DSync implements GPUSync {
        // for now this is a noop
        @Override
        public WaitSyncResult ClientWaitSync(Duration timeout) {
            return WaitSyncResult.AREADY_SIGNALED;
        }
        @Override
        public void WaitSync() {
        }
        @Override
        public void Delete() {
        }
    }
    /** Creates a fence appropriate for the active pipeline (GL or D3D). */
    public static GPUSync createFence() {
        if (GraphicsPipelineUtil.isES2()) {
            return GLSync.CreateFence();
        }
        else {
            return new D3DSync();
        }
    }
    /** OpenGL fence backed by a native sync handle. */
    public static class GLSync implements GPUSync {
        // Native sync handle; reset to 0 once deleted.
        private long sync;
        private GLSync() {
            sync = nCreateFence();
        }
        // Guards against use-after-delete of the native handle.
        private void checkSync() {
            if (sync == 0) {
                throw new RuntimeException("sync object was already deleted!");
            }
        }
        public static GLSync CreateFence() {
            return new GLSync();
        }
        /** Blocks the caller up to {@code timeout} for the fence to signal. */
        public WaitSyncResult ClientWaitSync(Duration timeout) {
            checkSync();
            int r = nClientWaitSync(sync, timeout.toNanos());
            switch (r) {
            case GL_AREADY_SIGNALED: return WaitSyncResult.AREADY_SIGNALED;
            case GL_TIMEOUT_EXPIRED: return WaitSyncResult.TIMEOUT_EXPIRED;
            case GL_CONDITION_SATISFIED: return WaitSyncResult.CONDITION_SATISFIED;
            case GL_WAIT_FAILED: return WaitSyncResult.WAIT_FAILED;
            }
            System.err.println("glClientWaitSync: Unexpected result!!! " + r);
            return WaitSyncResult.WAIT_FAILED;
        }
        // Waits via the native layer; presumably maps to glWaitSync (the GL
        // command stream waits rather than the caller) — confirm in native code.
        public void WaitSync() {
            checkSync();
            nWaitSync(sync);
        }
        /** Releases the native sync object; any further use throws. */
        public void Delete() {
            checkSync();
            nDeleteSync(sync);
            sync = 0;
        }
    }
    private static native long nCreateFence();
    private static native void nDeleteSync(long sync);
    // Values of the GL client-wait result enums returned by glClientWaitSync.
    private static final int GL_AREADY_SIGNALED = 0x911A;
    private static final int GL_TIMEOUT_EXPIRED = 0x911B;
    private static final int GL_CONDITION_SATISFIED = 0x911C;
    private static final int GL_WAIT_FAILED = 0x911D;
    private static native int nClientWaitSync(long sync, long timeout);
    private static native void nWaitSync(long sync);
}
| 918 |
1,998 | from .teams_conversation_bot import TeamsConversationBot
__all__ = ["TeamsConversationBot"] | 30 |
14,668 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_EVENTS_OZONE_EVDEV_TOUCH_FILTER_HEURISTIC_STYLUS_PALM_DETECTION_FILTER_H_
#define UI_EVENTS_OZONE_EVDEV_TOUCH_FILTER_HEURISTIC_STYLUS_PALM_DETECTION_FILTER_H_
#include <bitset>
#include <vector>
#include "base/time/time.h"
#include "ui/events/ozone/evdev/event_device_info.h"
#include "ui/events/ozone/evdev/touch_evdev_types.h"
#include "ui/events/ozone/evdev/touch_filter/palm_detection_filter.h"
#include "ui/events/ozone/evdev/touch_filter/shared_palm_detection_filter_state.h"
namespace ui {
// A heuristic implementation of PalmDetectionFilter.
// Relies on firmware palm detection, but modifies behavior _after_ a stylus
// touch since our mutual-exclusion of stylus/touch means that we do not trust
// the device right after stylus.
// Configured with 3 inputs:
// 1. How many strokes to hold on to when holding.
// 2. TimeDelta for cancellation: any strokes started within this delta are
// cancelled automatically.
// 3. TimeDelta for hold: any strokes started after the cancellation and before
// this are held for the stroke count (as above). If they're cancelled
// externally, we never report them. If they terminate before the count, we
// output all items.
//
// NOTE: This filter is only intended for certain boards of hardware that have
// poor interaction between a mutually exclusive stylus and finger input:
// Turning it on for devices where is not intended will probably degrade
// performance and create a poor UX.
class COMPONENT_EXPORT(EVDEV) HeuristicStylusPalmDetectionFilter
    : public PalmDetectionFilter {
 public:
  // |hold_stroke_count|: how many strokes to hold on to when holding (input 1
  // in the file comment); |hold| / |cancel|: time windows after stylus
  // activity in which new strokes are held or cancelled (inputs 2 and 3).
  HeuristicStylusPalmDetectionFilter(
      SharedPalmDetectionFilterState* shared_palm_state,
      int hold_stroke_count,
      base::TimeDelta hold,
      base::TimeDelta cancel);
  HeuristicStylusPalmDetectionFilter(
      const HeuristicStylusPalmDetectionFilter&) = delete;
  HeuristicStylusPalmDetectionFilter& operator=(
      const HeuristicStylusPalmDetectionFilter&) = delete;
  ~HeuristicStylusPalmDetectionFilter() override;

  // Marks which touch slots to hold / suppress for the touches seen at |time|.
  void Filter(const std::vector<InProgressTouchEvdev>& touches,
              base::TimeTicks time,
              std::bitset<kNumTouchEvdevSlots>* slots_to_hold,
              std::bitset<kNumTouchEvdevSlots>* slots_to_suppress) override;

  static const char kFilterName[];
  std::string FilterNameForTesting() const override;

  // Accessors for the configured hold / cancel windows.
  base::TimeDelta HoldTime() const;
  base::TimeDelta CancelTime() const;

  // Whether |device_info| describes hardware this filter is intended for
  // (see the NOTE in the file comment about limited applicability).
  static bool CompatibleWithHeuristicStylusPalmDetectionFilter(
      const EventDeviceInfo& device_info);

 private:
  const int hold_stroke_count_;
  const base::TimeDelta time_after_stylus_to_hold_;
  const base::TimeDelta time_after_stylus_to_cancel_;

  // Per-slot start time of the current stroke — TODO confirm indexing scheme
  // against the implementation.
  std::vector<base::TimeTicks> touch_started_time_;

  // How many items have we seen in this stroke so far?
  std::vector<int> stroke_length_;
};
} // namespace ui
#endif // UI_EVENTS_OZONE_EVDEV_TOUCH_FILTER_HEURISTIC_STYLUS_PALM_DETECTION_FILTER_H_
| 1,022 |
6,156 | /*
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import <ComponentKit/CKComponent.h>
#import <ComponentKit/CKComponentController.h>
#import <ComponentKit/CKRenderComponent.h>
// Tallies how many times each component lifecycle callback fired; value type
// used by tests to assert mount/remount/unmount behavior.
struct CKLifecycleMethodCounts {
  NSUInteger willMount;
  NSUInteger didMount;
  NSUInteger willRemount;
  NSUInteger didRemount;
  NSUInteger willUnmount;
  NSUInteger didUnmount;
  NSUInteger willRelinquishView;
  NSUInteger didAcquireView;

  // Human-readable dump of all counters (useful in test-failure messages).
  NSString *description() const
  {
    return [NSString stringWithFormat:@"willMount:%lu didMount:%lu willRemount:%lu didRemount:%lu willUnmount:%lu didUnmount:%lu willRelinquishView:%lu didAcquireView:%lu",
            (unsigned long)willMount, (unsigned long)didMount, (unsigned long)willRemount,
            (unsigned long)didRemount, (unsigned long)willUnmount, (unsigned long)didUnmount,
            (unsigned long)willRelinquishView, (unsigned long)didAcquireView];
  }

  // Two instances are equal iff every individual counter matches.
  bool operator==(const CKLifecycleMethodCounts &other) const
  {
    return willMount == other.willMount && didMount == other.didMount
    && willRemount == other.willRemount && didRemount == other.didRemount
    && willUnmount == other.willUnmount && didUnmount == other.didUnmount
    && willRelinquishView == other.willRelinquishView && didAcquireView == other.didAcquireView;
  }
};
// Test controller that records which lifecycle callbacks fired (boolean flags)
// and how often the mount-related ones fired (counts struct above).
@interface CKLifecycleTestComponentController : CKComponentController
@property (nonatomic, assign, readonly) BOOL calledDidInit;
@property (nonatomic, assign) BOOL calledComponentTreeWillAppear;
@property (nonatomic, assign) BOOL calledComponentTreeDidDisappear;
@property (nonatomic, assign) BOOL calledWillUpdateComponent;
@property (nonatomic, assign) BOOL calledDidUpdateComponent;
@property (nonatomic, assign) BOOL calledInvalidateController;
@property (nonatomic, assign) BOOL calledDidPrepareLayoutForComponent;
@property (nonatomic, assign) CKLifecycleMethodCounts counts;
@end

// Component paired with the recording controller; also counts layout passes.
@interface CKLifecycleTestComponent : CKComponent
@property (nonatomic, assign, readonly) NSInteger computeLayoutCount;
- (CKLifecycleTestComponentController *)controller;
// Triggers a state update so remount behavior can be exercised.
- (void)updateStateToIncludeNewAttribute;
@end

// NOTE(review): presumably makes the component's factory early-return (skip
// creation) when YES — confirm against the implementation file.
auto CKLifecycleTestComponentSetShouldEarlyReturnNew(BOOL shouldEarlyReturnNew) -> void;

// Render-based variant that records whether its render function was invoked.
@interface CKRenderLifecycleTestComponent : CKRenderComponent
@property (nonatomic, assign, readonly) BOOL isRenderFunctionCalled;
@end
| 809 |
348 | <gh_stars>100-1000
{"nom":"Brie","circ":"1ère circonscription","dpt":"Aisne","inscrits":46,"abs":20,"votants":26,"blancs":5,"nuls":0,"exp":21,"res":[{"nuance":"REM","nom":"<NAME>","voix":15},{"nuance":"FN","nom":"<NAME>","voix":6}]} | 96 |
496 | <reponame>yuanboshe/Nebula
/*******************************************************************************
* Project: Nebula
* @file Timer.hpp
* @brief
* @author Bwar
* @date: 2018年8月5日
* @note
* Modify history:
******************************************************************************/
#ifndef SRC_ACTOR_SESSION_TIMER_HPP_
#define SRC_ACTOR_SESSION_TIMER_HPP_
#include "labor/Worker.hpp"
#include "actor/DynamicCreator.hpp"
#include "Session.hpp"
namespace neb
{
// Abstract timer session: a Session whose Timeout() hook is invoked when the
// session's timeout elapses. Subclasses implement Timeout().
class Timer: public Session
{
public:
    Timer(uint32 ulSessionId, ev_tstamp dSessionTimeout = 60.0);
    Timer(const std::string& strSessionId, ev_tstamp dSessionTimeout = 60.0);
    Timer(const Timer&) = delete;
    Timer& operator=(const Timer&) = delete;
    virtual ~Timer();

    /**
     * @brief Session timeout callback. (Translated from the original Chinese
     * comment: "会话超时回调".)
     */
    virtual E_CMD_STATUS Timeout() = 0;

protected:
    // Intentionally a no-op: a Timer's active time is never refreshed.
    // NOTE(review): presumably so the timer always fires on schedule — confirm.
    virtual void SetActiveTime(ev_tstamp dActiveTime)
    {
        ;
    }

private:
    friend class WorkerImpl;
};
} /* namespace neb */
#endif /* SRC_ACTOR_SESSION_TIMER_HPP_ */
| 415 |
1,144 | package org.adempiere.mm.attributes.countryattribute.impl;
/*
* #%L
* de.metas.swat.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.mm.attributes.countryattribute.ICountryAware;
import org.adempiere.mm.attributes.countryattribute.ICountryAwareFactory;
import org.adempiere.model.InterfaceWrapperHelper;
import org.compiere.model.I_C_BPartner_Location;
import org.compiere.model.I_C_Country;
import org.compiere.model.I_M_InOut;
import org.compiere.model.I_M_InOutLine;
import de.metas.util.Check;
/**
 * {@link ICountryAware} implementation backed by an {@link I_M_InOutLine}.
 * Client/org/SOTrx flags come from the line and its shipment/receipt header;
 * the country is resolved via the header's business partner location.
 */
public class InOutLineCountryAware implements ICountryAware
{
	public static final ICountryAwareFactory factory = new ICountryAwareFactory()
	{
		@Override
		public ICountryAware createCountryAware(final Object model)
		{
			return new InOutLineCountryAware(InterfaceWrapperHelper.create(model, I_M_InOutLine.class));
		}
	};

	private final I_M_InOutLine inoutLine;

	private InOutLineCountryAware(final I_M_InOutLine inoutLine)
	{
		Check.assumeNotNull(inoutLine, "inoutLine not null");
		this.inoutLine = inoutLine;
	}

	@Override
	public int getAD_Client_ID()
	{
		return inoutLine.getAD_Client_ID();
	}

	@Override
	public int getAD_Org_ID()
	{
		return inoutLine.getAD_Org_ID();
	}

	@Override
	public boolean isSOTrx()
	{
		return getM_InOut().isSOTrx();
	}

	@Override
	public I_C_Country getC_Country()
	{
		final int bpartnerLocationId = getM_InOut().getC_BPartner_Location_ID();
		final I_C_BPartner_Location bpartnerLocation = InterfaceWrapperHelper.load(bpartnerLocationId, I_C_BPartner_Location.class);
		if (bpartnerLocation == null)
		{
			return null;
		}
		return bpartnerLocation.getC_Location().getC_Country();
	}

	/** @return the line's shipment/receipt header; never {@code null} */
	private I_M_InOut getM_InOut()
	{
		final I_M_InOut inout = inoutLine.getM_InOut();
		if (inout != null)
		{
			return inout;
		}
		throw new AdempiereException("M_InOut_ID was not set in " + inoutLine);
	}
}
| 950 |
336 | <reponame>nattangwiwat/Mayan-EDMS-recitation
from django import forms
from django.utils.html import escape
from django.utils.translation import ugettext_lazy as _
from mayan.apps.views.forms import DetailForm
from .models import Key
class KeyDetailForm(DetailForm):
    """Read-only detail form presenting the attributes of a single Key."""

    class Meta:
        fields = ()
        model = Key

    def __init__(self, *args, **kwargs):
        key = kwargs['instance']
        # Computed/escaped values are supplied as callables; plain attribute
        # names are passed through as strings for DetailForm to resolve.
        kwargs['extra_fields'] = (
            {'label': _('Key ID'), 'field': 'key_id'},
            {
                'label': _('User ID'),
                'field': lambda x: escape(key.user_id),
            },
            {
                'label': _('Creation date'), 'field': 'creation_date',
                'widget': forms.widgets.DateInput
            },
            {
                'label': _('Expiration date'),
                'field': lambda x: key.expiration_date or _('None'),
                'widget': forms.widgets.DateInput
            },
            {'label': _('Fingerprint'), 'field': 'fingerprint'},
            {'label': _('Length'), 'field': 'length'},
            {'label': _('Algorithm'), 'field': 'algorithm'},
            {'label': _('Type'), 'field': lambda x: key.get_key_type_display()},
        )
        super().__init__(*args, **kwargs)
class KeySearchForm(forms.Form):
    """Single-field form capturing a key-server search term."""

    term = forms.CharField(
        help_text=_('Name, e-mail, key ID or key fingerprint to look for.'),
        label=_('Term'),
    )
| 711 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-pw59-4qgf-jxr8",
"modified": "2022-04-04T21:27:42Z",
"published": "2021-06-18T22:04:32Z",
"aliases": [
"CVE-2020-17522"
],
"summary": "Cache Manipulation Attack in Apache Traffic Control",
  "details": "When ORT (now via atstccfg) generates ip_allow.config files in Apache Traffic Control 3.0.0 to 3.1.0 and 4.0.0 to 4.1.0, those files include permissions that allow bad actors to push arbitrary content into and remove arbitrary content from CDN cache servers. Additionally, these permissions are potentially extended to IP addresses outside the desired range, resulting in them being granted to clients possibly outside the CDN architecture.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:N/I:L/A:N"
}
],
"affected": [
{
"package": {
"ecosystem": "Go",
"name": "github.com/apache/trafficcontrol"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "5.0.0"
}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2020-17522"
},
{
"type": "WEB",
"url": "https://github.com/apache/trafficcontrol/commit/492290d810e9608afb5d265b98cd3f3e153e776b"
},
{
"type": "WEB",
"url": "https://lists.apache.org/thread.html/r3c675031ac220b5eae64a9c84a03ee60045c6045738607dca4a96cb8@%3Ccommits.trafficcontrol.apache.org%3E"
},
{
"type": "WEB",
"url": "https://lists.apache.org/thread.html/r3de212a3da73bcf98fa2db7eafb75b2eb8e131ff466e6efc4284df09%40%3Cdev.trafficcontrol.apache.org%3E"
},
{
"type": "WEB",
"url": "https://lists.apache.org/thread.html/rc8bfd7d4f71d61e9193efcd4699eccbab3c202ec1d75ed9d502f08bf@%3Ccommits.trafficcontrol.apache.org%3E"
},
{
"type": "PACKAGE",
"url": "https://github.com/apache/trafficcontrol"
}
],
"database_specific": {
"cwe_ids": [
"CWE-525",
"CWE-732"
],
"severity": "MODERATE",
"github_reviewed": true
}
} | 1,115 |
864 | <reponame>romainreignier/control-toolbox
/**********************************************************************************************************************
This file is part of the Control Toolbox (https://github.com/ethz-adrl/control-toolbox), copyright by ETH Zurich.
Licensed under the BSD-2 license (see LICENSE file in main directory)
**********************************************************************************************************************/
#include <ct/rbd/rbd.h>
#include <memory>
#include <array>
#include <iostream>
#include <gtest/gtest.h>
#include "../models/testIrb4600/RobCoGenTestIrb4600.h"
using namespace ct;
using namespace ct::rbd;
// Smoke test: integrates the fixed-base forward dynamics of the IRB4600 arm
// for 1000 RK4 steps of 1 ms starting from the zero state. Only verifies the
// integration runs and prints states; there are no numeric assertions.
TEST(FixBaseFDSystemTest, ForwardDynamicsTest)
{
    const size_t STATE_DIM = FixBaseFDSystem<TestIrb4600::Dynamics>::STATE_DIM;
    std::shared_ptr<core::System<STATE_DIM>> dynamics(new FixBaseFDSystem<TestIrb4600::Dynamics>);
    core::Integrator<STATE_DIM> integrator(dynamics, ct::core::RK4);
    // 12 states here — presumably 6 joint positions + 6 velocities; TODO
    // confirm 12 == STATE_DIM for TestIrb4600::Dynamics.
    core::StateVector<12> state;
    state.setZero();
    std::cout << "___________________________________________________________________________" << std::endl;
    std::cout << "Init state: " << state.transpose() << std::endl;
    integrator.integrate_n_steps(state, 0, 1000, 0.001);
    std::cout << "Integrated state: " << state.transpose() << std::endl;
    std::cout << "___________________________________________________________________________" << std::endl;
}
// Couples second-order actuator dynamics with the fixed-base robot dynamics,
// applies a constant random control, integrates to (near) steady state, and
// asserts that the actuator states converge to the constant control input.
TEST(FixBaseFDSystemTest, ActuatorDynamicsTest)
{
    const size_t njoints = TestIrb4600::Dynamics::NJOINTS;
    // Second-order actuator model: two states (e.g. position + velocity) per joint.
    const size_t actuator_state_dim = 2 * njoints;
    using RobotState_t = FixBaseRobotState<njoints, actuator_state_dim>;
    const size_t state_dim = RobotState_t::NSTATE;
    // generate actuator dynamics
    const double w_n = 2;
    const double zeta = 1; // critical damping
    const double gc = 1; // select oscillator input gain such that we follow the reference input with no amplification
    std::shared_ptr<ct::rbd::SecondOrderActuatorDynamics<njoints>> actDynamics(
        new ct::rbd::SecondOrderActuatorDynamics<njoints>(w_n, zeta, gc));
    // generate fix-base dynamics
    std::shared_ptr<FixBaseFDSystem<TestIrb4600::Dynamics, actuator_state_dim>> combinedDynamics(
        new FixBaseFDSystem<TestIrb4600::Dynamics, actuator_state_dim>(actDynamics));
    // generate random control action and apply to the system
    ct::core::ControlVector<njoints> constantControl;
    constantControl.setRandom();
    std::cout << "constant control " << constantControl.transpose() << std::endl;
    std::shared_ptr<ct::core::ConstantController<state_dim, njoints>> constantController(
        new ct::core::ConstantController<state_dim, njoints>(constantControl));
    combinedDynamics->setController(constantController);
    // make integrator and integrate the combined system
    core::Integrator<state_dim> integrator(combinedDynamics, ct::core::RK4);
    core::StateVector<state_dim> state;
    state.setZero();
    std::cout << "___________________________________________________________________________" << std::endl;
    std::cout << "Init state: " << state.transpose() << std::endl;
    // 5000 steps x 5 ms = 25 s of simulated time — long enough for the
    // critically damped actuators to settle.
    size_t nSteps = 5000;
    double dt_sim = 0.005;
    integrator.integrate_n_steps(state, 0, nSteps, dt_sim);
    std::cout << "Integrated overall state: " << state.transpose() << std::endl;
    std::cout << "___________________________________________________________________________" << std::endl;
    RobotState_t::JointState_t jointState = RobotState_t::jointStateFromVector(state);
    std::cout << "Integrated joint positions: " << jointState.getPositions().transpose() << std::endl;
    std::cout << "Integrated joint velocities: " << jointState.getVelocities().transpose() << std::endl;
    RobotState_t::actuator_state_vector_t actState = RobotState_t::actuatorStateFromVector(state);
    std::cout << "Integrated actuator state: " << actState.transpose() << std::endl;
    std::cout << "___________________________________________________________________________" << std::endl;
    // in the limit, the actuator states must be identical to the constant control
    for (size_t i = 0; i < njoints; i++)
        ASSERT_NEAR(actState(i), constantControl(i), 1e-4);
}
// Standard GoogleTest entry point.
int main(int argc, char** argv)
{
    testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
| 1,452 |
427 | <reponame>sabaalmas/Java-Coding-Problems<filename>Chapter07/P161_GenericTypesOfMethodsFieldsExceptions/src/modern/challenge/Seed.java
package modern.challenge;
/**
 * Empty marker/placeholder class. Per the file path it belongs to the
 * "generic types of methods, fields, exceptions" example (P161).
 */
public class Seed {
}
| 66 |
445 | # Make sure to update package.json, too!
# version_info = (4, 5, 0, 'dev')
version_info = (4, 4, 0)
__version__ = '.'.join(map(str, version_info))
| 58 |
1,240 | package com.eventyay.organizer.core.speaker.list;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.databinding.DataBindingUtil;
import androidx.recyclerview.widget.DefaultItemAnimator;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;
import com.eventyay.organizer.R;
import com.eventyay.organizer.common.mvp.view.BaseFragment;
import com.eventyay.organizer.core.main.MainActivity;
import com.eventyay.organizer.core.speaker.details.SpeakerDetailsActivity;
import com.eventyay.organizer.data.speaker.Speaker;
import com.eventyay.organizer.databinding.SpeakersFragmentBinding;
import com.eventyay.organizer.ui.ViewUtils;
import java.util.List;
import javax.inject.Inject;
import dagger.Lazy;
/**
 * Fragment listing the speakers of an event, with pull-to-refresh and
 * navigation to the speaker-details screen. MVP view driven by
 * {@link SpeakersPresenter}.
 */
public class SpeakersFragment extends BaseFragment<SpeakersPresenter> implements SpeakersView {

    private Context context;
    // ID of the event whose speakers are shown; read from the fragment arguments.
    private long eventId;

    @Inject
    Lazy<SpeakersPresenter> speakersPresenter;

    private SpeakersAdapter speakersAdapter;
    private SpeakersFragmentBinding binding;
    private SwipeRefreshLayout refreshLayout;

    /**
     * Creates a new instance configured to show the speakers of the given event.
     *
     * @param eventId ID of the event to load speakers for
     */
    public static SpeakersFragment newInstance(long eventId) {
        SpeakersFragment fragment = new SpeakersFragment();
        Bundle args = new Bundle();
        args.putLong(MainActivity.EVENT_KEY, eventId);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = getContext();
        if (getArguments() != null)
            eventId = getArguments().getLong(MainActivity.EVENT_KEY);
    }

    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        binding = DataBindingUtil.inflate(inflater, R.layout.speakers_fragment, container, false);
        return binding.getRoot();
    }

    @Override
    public void onStart() {
        super.onStart();
        // Wire up the view first, then attach/start the presenter so it can
        // safely call back into this fragment.
        setupRecyclerView();
        setupRefreshListener();
        getPresenter().attach(eventId, this);
        getPresenter().start();
    }

    @Override
    protected int getTitle() {
        return R.string.speakers;
    }

    @Override
    public void onStop() {
        super.onStop();
        // Drop the refresh listener to avoid callbacks into a stopped fragment.
        refreshLayout.setOnRefreshListener(null);
    }

    // Configures the RecyclerView with a vertical linear layout and the adapter.
    private void setupRecyclerView() {
        speakersAdapter = new SpeakersAdapter(getPresenter());
        RecyclerView recyclerView = binding.speakersRecyclerView;
        recyclerView.setLayoutManager(new LinearLayoutManager(context));
        recyclerView.setAdapter(speakersAdapter);
        recyclerView.setItemAnimator(new DefaultItemAnimator());
    }

    // Pull-to-refresh triggers a forced reload through the presenter; the
    // spinner is dismissed immediately (progress is shown via showProgress()).
    private void setupRefreshListener() {
        refreshLayout = binding.swipeContainer;
        refreshLayout.setColorSchemeColors(getResources().getColor(R.color.color_accent));
        refreshLayout.setOnRefreshListener(() -> {
            refreshLayout.setRefreshing(false);
            getPresenter().loadSpeakers(true);
        });
    }

    @Override
    public void showError(String error) {
        ViewUtils.showSnackbar(binding.getRoot(), error);
    }

    @Override
    public void showProgress(boolean show) {
        ViewUtils.showView(binding.progressBar, show);
    }

    @Override
    public void onRefreshComplete(boolean success) {
        // Nothing to do
    }

    @Override
    public void showResults(List<Speaker> items) {
        // The adapter was constructed with the presenter; presumably it pulls
        // the data from there, so a repaint suffices — confirm in SpeakersAdapter.
        speakersAdapter.notifyDataSetChanged();
    }

    @Override
    public void showEmptyView(boolean show) {
        ViewUtils.showView(binding.emptyView, show);
    }

    @Override
    protected Lazy<SpeakersPresenter> getPresenterProvider() {
        return speakersPresenter;
    }

    @Override
    public void openSpeakersDetailFragment(long speakerId) {
        Intent intent = new Intent(context, SpeakerDetailsActivity.class);
        intent.putExtra(SpeakerDetailsActivity.SPEAKER_ID, speakerId);
        context.startActivity(intent);
    }
}
| 1,587 |
361 | {
"description": "o365-exchange-techniques - Office365/Exchange related techniques by @johnLaTwC and @inversecos",
"icon": "map",
"kill_chain_order": {
"tactics": [
"Recon",
"Initial Access",
"Discovery",
"Compromise",
"Persistence",
"Expansion",
"Actions on Intent"
]
},
"name": "o365-exchange-techniques",
"namespace": "misp",
"type": "cloud-security",
"uuid": "44574c7e-b732-4466-a7be-ef363374013a",
"version": 1
}
| 220 |
544 | from office365.runtime.paths.resource_path import ResourcePath
from office365.sharepoint.base_entity import BaseEntity
from office365.sharepoint.directory.group import Group
class GroupAndUserStatus(BaseEntity):
    @property
    def group(self):
        """Return the associated Group, falling back to a stub bound to the
        "Group" sub-path of this entity when the property is not loaded."""
        fallback = Group(self.context, ResourcePath("Group", self.resource_path))
        return self.properties.get("Group", fallback)
| 151 |
1,045 | <gh_stars>1000+
/***************************************************************************************************
Tencent is pleased to support the open source community by making RapidView available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MITLicense (the "License"); you may not use this file except in compliance
withthe License. You mayobtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions and limitations under the
License.
***************************************************************************************************/
package com.tencent.rapidview.animation;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
/**
* @Class RapidAlphaAnimation
* @Desc RapidAlphaAnimation解析器
*
* @author arlozhang
* @date 2016.08.17
*/
public class RapidAlphaAnimation extends RapidAnimation {

    public RapidAlphaAnimation(RapidAnimationCenter center){
        super(center);
    }

    /**
     * Builds an Android {@link AlphaAnimation} from the "fromalpha"/"toalpha"
     * attributes. Missing attributes default to "0" (fully transparent).
     *
     * Fix: alpha values are fractions in [0, 1] and the AlphaAnimation
     * constructor takes floats; the previous {@code Integer.parseInt} threw
     * {@link NumberFormatException} for values such as "0.5" and could never
     * express a fractional alpha.
     */
    @Override
    protected Animation createAnimation(){
        String fromAlpha = mMapAttribute.get("fromalpha");
        String toAlpha = mMapAttribute.get("toalpha");

        if( fromAlpha == null ){
            fromAlpha = "0";
        }

        if( toAlpha == null ){
            toAlpha = "0";
        }

        return new AlphaAnimation(Float.parseFloat(fromAlpha), Float.parseFloat(toAlpha));
    }
}
| 474 |
22,040 | <filename>spacy/tests/lang/tr/test_parser.py
from spacy.tokens import Doc
def test_tr_noun_chunks_amod_simple(tr_tokenizer):
text = "<NAME>"
heads = [1, 1]
deps = ["amod", "ROOT"]
pos = ["ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "sarı kedi "
def test_tr_noun_chunks_nmod_simple(tr_tokenizer):
text = "<NAME>" # my friend's cat
heads = [1, 1]
deps = ["nmod", "ROOT"]
pos = ["NOUN", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "arkadaşımın kedisi "
def test_tr_noun_chunks_determiner_simple(tr_tokenizer):
text = "O kedi" # that cat
heads = [1, 1]
deps = ["det", "ROOT"]
pos = ["DET", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "O kedi "
def test_tr_noun_chunks_nmod_amod(tr_tokenizer):
text = "okulun eski müdürü"
heads = [2, 2, 2]
deps = ["nmod", "amod", "ROOT"]
pos = ["NOUN", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "okulun eski müdürü "
def test_tr_noun_chunks_one_det_one_adj_simple(tr_tokenizer):
text = "O sarı kedi"
heads = [2, 2, 2]
deps = ["det", "amod", "ROOT"]
pos = ["DET", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "O sarı kedi "
def test_tr_noun_chunks_two_adjs_simple(tr_tokenizer):
text = "beyaz tombik kedi"
heads = [2, 2, 2]
deps = ["amod", "amod", "ROOT"]
pos = ["ADJ", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "beyaz tombik kedi "
def test_tr_noun_chunks_one_det_two_adjs_simple(tr_tokenizer):
text = "o beyaz tombik kedi"
heads = [3, 3, 3, 3]
deps = ["det", "amod", "amod", "ROOT"]
pos = ["DET", "ADJ", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "o beyaz tombik kedi "
def test_tr_noun_chunks_nmod_two(tr_tokenizer):
text = "kızın saçının rengi"
heads = [1, 2, 2]
deps = ["nmod", "nmod", "ROOT"]
pos = ["NOUN", "NOUN", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "kızın saçının rengi "
def test_tr_noun_chunks_chain_nmod_with_adj(tr_tokenizer):
text = "ev sahibinin tatlı köpeği"
heads = [1, 3, 3, 3]
deps = ["nmod", "nmod", "amod", "ROOT"]
pos = ["NOUN", "NOUN", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "ev sahibinin tatlı köpeği "
def test_tr_noun_chunks_chain_nmod_with_acl(tr_tokenizer):
text = "ev sahibinin gelen köpeği"
heads = [1, 3, 3, 3]
deps = ["nmod", "nmod", "acl", "ROOT"]
pos = ["NOUN", "NOUN", "VERB", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "ev sahibinin gelen köpeği "
def test_tr_noun_chunks_chain_nmod_head_with_amod_acl(tr_tokenizer):
text = "arabanın kırdığım sol aynası"
heads = [3, 3, 3, 3]
deps = ["nmod", "acl", "amod", "ROOT"]
pos = ["NOUN", "VERB", "ADJ", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "arabanın kırdığım sol aynası "
def test_tr_noun_chunks_nmod_three(tr_tokenizer):
text = "güney Afrika ülkelerinden Mozambik"
heads = [1, 2, 3, 3]
deps = ["nmod", "nmod", "nmod", "ROOT"]
pos = ["NOUN", "PROPN", "NOUN", "PROPN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "güney Afrika ülkelerinden Mozambik "
def test_tr_noun_chunks_det_amod_nmod(tr_tokenizer):
text = "bazı eski oyun kuralları"
heads = [3, 3, 3, 3]
deps = ["det", "nmod", "nmod", "ROOT"]
pos = ["DET", "ADJ", "NOUN", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "bazı eski oyun kuralları "
def test_tr_noun_chunks_acl_simple(tr_tokenizer):
text = "bahçesi olan okul"
heads = [2, 0, 2]
deps = ["acl", "cop", "ROOT"]
pos = ["NOUN", "AUX", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "bahçesi olan okul "
def test_tr_noun_chunks_acl_verb(tr_tokenizer):
text = "sevdiğim sanatçılar"
heads = [1, 1]
deps = ["acl", "ROOT"]
pos = ["VERB", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "sevdiğim sanatçılar "
def test_tr_noun_chunks_acl_nmod(tr_tokenizer):
text = "en sevdiğim ses sanatçısı"
heads = [1, 3, 3, 3]
deps = ["advmod", "acl", "nmod", "ROOT"]
pos = ["ADV", "VERB", "NOUN", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "en sevdiğim ses sanatçısı "
def test_tr_noun_chunks_acl_nmod2(tr_tokenizer):
text = "bildiğim bir turizm şirketi"
heads = [3, 3, 3, 3]
deps = ["acl", "det", "nmod", "ROOT"]
pos = ["VERB", "DET", "NOUN", "NOUN"]
tokens = tr_tokenizer(text)
doc = Doc(
tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
)
chunks = list(doc.noun_chunks)
assert len(chunks) == 1
assert chunks[0].text_with_ws == "bildiğim bir turizm şirketi "
# Recursive NP tests: each builds a Doc from a hand-annotated dependency tree
# (heads are absolute token indices, deps are UD labels) and checks that the
# Turkish noun-chunk iterator merges the whole relative clause into one chunk.
def test_tr_noun_chunks_np_recursive_nsubj_to_root(tr_tokenizer):
    """An acl clause whose nsubj attaches to the root noun stays in one chunk."""
    text = "Simge'nin okuduğu kitap"
    heads = [1, 2, 2]
    deps = ["nsubj", "acl", "ROOT"]
    pos = ["PROPN", "VERB", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "Simge'nin okuduğu kitap "
def test_tr_noun_chunks_np_recursive_nsubj_attached_to_pron_root(tr_tokenizer):
    """Same shape as above, but the chunk head is a PRON instead of a NOUN."""
    text = "Simge'nin konuşabileceği birisi"
    heads = [1, 2, 2]
    deps = ["nsubj", "acl", "ROOT"]
    pos = ["PROPN", "VERB", "PRON"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "Simge'nin konuşabileceği birisi "
def test_tr_noun_chunks_np_recursive_nsubj_in_subnp(tr_tokenizer):
    """An obl modifier inside the acl clause is also absorbed into the chunk."""
    text = "Simge'nin yarın gideceği yer"
    heads = [2, 2, 3, 3]
    deps = ["nsubj", "obl", "acl", "ROOT"]
    pos = ["PROPN", "NOUN", "VERB", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "Simge'nin yarın gideceği yer "
def test_tr_noun_chunks_np_recursive_two_nmods(tr_tokenizer):
    """An acl clause plus an nmod on the root noun form a single chunk."""
    text = "ustanın kapısını degiştireceği çamasır makinası"
    heads = [2, 2, 4, 4, 4]
    deps = ["nsubj", "obj", "acl", "nmod", "ROOT"]
    pos = ["NOUN", "NOUN", "VERB", "NOUN", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "ustanın kapısını degiştireceği çamasır makinası "
def test_tr_noun_chunks_np_recursive_four_nouns(tr_tokenizer):
    """A deeper acl clause (obl + nmod + obj) still yields exactly one chunk."""
    text = "kızına piyano dersi verdiğim hanım"
    heads = [3, 2, 3, 4, 4]
    deps = ["obl", "nmod", "obj", "acl", "ROOT"]
    pos = ["NOUN", "NOUN", "NOUN", "VERB", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "kızına piyano dersi verdiğim hanım "
def test_tr_noun_chunks_np_recursive_no_nmod(tr_tokenizer):
    """An acl + aux construction with determiners collapses into one chunk."""
    text = "içine birkaç çiçek konmuş olan bir vazo"
    heads = [3, 2, 3, 6, 3, 6, 6]
    deps = ["obl", "det", "nsubj", "acl", "aux", "det", "ROOT"]
    pos = ["ADP", "DET", "NOUN", "VERB", "AUX", "DET", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "içine birkaç çiçek konmuş olan bir vazo "
def test_tr_noun_chunks_np_recursive_long_two_acls(tr_tokenizer):
    """Two nested acl clauses (an acl inside an acl) still form one chunk."""
    text = "içine Simge'nin bahçesinden toplanmış birkaç çiçeğin konmuş olduğu bir vazo"
    heads = [6, 2, 3, 5, 5, 6, 9, 6, 9, 9]
    deps = ["obl", "nmod", "obl", "acl", "det", "nsubj", "acl", "aux", "det", "ROOT"]
    pos = ["ADP", "PROPN", "NOUN", "VERB", "DET", "NOUN", "VERB", "AUX", "DET", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert (
        chunks[0].text_with_ws
        == "içine Simge'nin bahçesinden toplanmış birkaç çiçeğin konmuş olduğu bir vazo "
    )
def test_tr_noun_chunks_two_nouns_in_nmod(tr_tokenizer):
    """Conjoined nouns inside an nmod modifier do not split the chunk."""
    text = "kız ve erkek çocuklar"
    heads = [3, 2, 0, 3]
    deps = ["nmod", "cc", "conj", "ROOT"]
    pos = ["NOUN", "CCONJ", "NOUN", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "kız ve erkek çocuklar "
def test_tr_noun_chunks_two_nouns_in_nmod2(tr_tokenizer):
    """Same shape with an amod instead of an nmod modifier: still one chunk."""
    text = "tatlı ve gürbüz çocuklar"
    heads = [3, 2, 0, 3]
    deps = ["amod", "cc", "conj", "ROOT"]
    pos = ["ADJ", "CCONJ", "NOUN", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "tatlı ve gürbüz çocuklar "
# Conjunction tests: coordinated NPs become separate chunks. Note the asserts
# below show the iterator yields conjuncts in reverse sentence order
# (rightmost conjunct first).
def test_tr_noun_chunks_conj_simple(tr_tokenizer):
    """Two pronouns joined by a fixed multiword conjunction -> two chunks."""
    text = "Sen ya da ben"
    heads = [0, 3, 1, 0]
    deps = ["ROOT", "cc", "fixed", "conj"]
    pos = ["PRON", "CCONJ", "CCONJ", "PRON"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 2
    assert chunks[0].text_with_ws == "ben "
    assert chunks[1].text_with_ws == "Sen "
def test_tr_noun_chunks_conj_three(tr_tokenizer):
    """Three coordinated pronouns (comma + cc) -> three chunks."""
    text = "sen, ben ve ondan"
    heads = [0, 2, 0, 4, 0]
    deps = ["ROOT", "punct", "conj", "cc", "conj"]
    pos = ["PRON", "PUNCT", "PRON", "CCONJ", "PRON"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 3
    assert chunks[0].text_with_ws == "ondan "
    assert chunks[1].text_with_ws == "ben "
    assert chunks[2].text_with_ws == "sen "
def test_tr_noun_chunks_conj_three2(tr_tokenizer):
    """Chained "ya da" coordination with nested conj heads -> three chunks."""
    text = "ben ya da sen ya da onlar"
    heads = [0, 3, 1, 0, 6, 4, 3]
    deps = ["ROOT", "cc", "fixed", "conj", "cc", "fixed", "conj"]
    pos = ["PRON", "CCONJ", "CCONJ", "PRON", "CCONJ", "CCONJ", "PRON"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 3
    assert chunks[0].text_with_ws == "onlar "
    assert chunks[1].text_with_ws == "sen "
    assert chunks[2].text_with_ws == "ben "
def test_tr_noun_chunks_conj_and_adj_phrase(tr_tokenizer):
    """The amod stays attached to its conjunct: "akıllı çocuk" is one chunk."""
    text = "ben ve akıllı çocuk"
    heads = [0, 3, 3, 0]
    deps = ["ROOT", "cc", "amod", "conj"]
    pos = ["PRON", "CCONJ", "ADJ", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 2
    assert chunks[0].text_with_ws == "akıllı çocuk "
    assert chunks[1].text_with_ws == "ben "
def test_tr_noun_chunks_conj_fixed_adj_phrase(tr_tokenizer):
    """Fixed multiword cc plus an amod on the second conjunct -> two chunks."""
    text = "ben ya da akıllı çocuk"
    heads = [0, 4, 1, 4, 0]
    deps = ["ROOT", "cc", "fixed", "amod", "conj"]
    pos = ["PRON", "CCONJ", "CCONJ", "ADJ", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 2
    assert chunks[0].text_with_ws == "akıllı çocuk "
    assert chunks[1].text_with_ws == "ben "
def test_tr_noun_chunks_conj_subject(tr_tokenizer):
    """Coordinated subject pronouns of a full clause yield two chunks."""
    text = "Sen ve ben iyi anlaşıyoruz"
    heads = [4, 2, 0, 2, 4]
    deps = ["nsubj", "cc", "conj", "adv", "ROOT"]
    pos = ["PRON", "CCONJ", "PRON", "ADV", "VERB"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 2
    assert chunks[0].text_with_ws == "ben "
    assert chunks[1].text_with_ws == "Sen "
def test_tr_noun_chunks_conj_noun_head_verb(tr_tokenizer):
    """A noun conjoined to a verb-headed clause still surfaces as a chunk."""
    text = "Simge babasını görmüyormuş, annesini değil"
    heads = [2, 2, 2, 4, 2, 4]
    deps = ["nsubj", "obj", "ROOT", "punct", "conj", "aux"]
    pos = ["PROPN", "NOUN", "VERB", "PUNCT", "NOUN", "AUX"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 3
    assert chunks[0].text_with_ws == "annesini "
    assert chunks[1].text_with_ws == "babasını "
    assert chunks[2].text_with_ws == "Simge "
def test_tr_noun_chunks_flat_simple(tr_tokenizer):
    """A two-token proper name joined by ``flat`` forms a single noun chunk."""
    tokenized = tr_tokenizer("New York")
    doc = Doc(
        tokenized.vocab,
        words=[tok.text for tok in tokenized],
        pos=["PROPN", "PROPN"],
        heads=[0, 0],
        deps=["ROOT", "flat"],
    )
    chunk_list = list(doc.noun_chunks)
    assert len(chunk_list) == 1
    assert chunk_list[0].text_with_ws == "New York "
# NOTE(review): the "<NAME>" strings below look like anonymization
# placeholders that replaced the original Turkish proper names; the expected
# chunk text in the asserts was redacted too, so the originals cannot be
# reconstructed from this file alone. As written, the texts may not tokenize
# into the number of tokens the hand-annotated heads/deps/pos assume — verify
# against the upstream test suite before relying on these tests.
def test_tr_noun_chunks_flat_names_and_title(tr_tokenizer):
    """An nmod title plus a flat name should form a single noun chunk."""
    text = "<NAME>"
    heads = [1, 1, 1]
    deps = ["nmod", "ROOT", "flat"]
    pos = ["PROPN", "PROPN", "PROPN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "<NAME> "
def test_tr_noun_chunks_flat_names_and_title2(tr_tokenizer):
    """Variant with the flat edge attached before the ROOT token."""
    text = "<NAME>"
    heads = [2, 0, 2]
    deps = ["nmod", "flat", "ROOT"]
    pos = ["PROPN", "PROPN", "PROPN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "<NAME> "
def test_tr_noun_chunks_flat_name_lastname_and_title(tr_tokenizer):
    """A common-noun title (nmod) plus a two-edge flat name -> one chunk."""
    text = "<NAME>"
    heads = [1, 1, 1, 1]
    deps = ["nmod", "ROOT", "flat", "flat"]
    pos = ["NOUN", "PROPN", "PROPN", "PROPN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "<NAME> "
def test_tr_noun_chunks_flat_in_nmod(tr_tokenizer):
    """A flat proper name embedded in an nmod ("X adında bir ögrenci") is
    absorbed into the single chunk headed by the root noun.

    The ``text`` literal had been clobbered with a "<NAME>" redaction
    placeholder, which breaks the alignment with the 5-token hand annotation
    below; it is restored from the expected chunk text in the final assert.
    """
    text = "Ahmet Sezer adında bir ögrenci"
    heads = [2, 0, 4, 4, 4]
    deps = ["nmod", "flat", "nmod", "det", "ROOT"]
    pos = ["PROPN", "PROPN", "NOUN", "DET", "NOUN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "Ahmet Sezer adında bir ögrenci "
def test_tr_noun_chunks_flat_and_chain_nmod(tr_tokenizer):
    """A chain of nmods ending in a flat name stays a single noun chunk."""
    text = "Batı Afrika ülkelerinden Sierra Leone"
    heads = [1, 2, 3, 3, 3]
    deps = ["nmod", "nmod", "nmod", "ROOT", "flat"]
    pos = ["NOUN", "PROPN", "NOUN", "PROPN", "PROPN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 1
    assert chunks[0].text_with_ws == "Batı Afrika ülkelerinden Sierra Leone "
def test_tr_noun_chunks_two_flats_conjed(tr_tokenizer):
    """Two flat names joined by a conjunction yield two separate chunks."""
    text = "New York ve Sierra Leone"
    heads = [0, 0, 3, 0, 3]
    deps = ["ROOT", "flat", "cc", "conj", "flat"]
    pos = ["PROPN", "PROPN", "CCONJ", "PROPN", "PROPN"]
    tokens = tr_tokenizer(text)
    doc = Doc(
        tokens.vocab, words=[t.text for t in tokens], pos=pos, heads=heads, deps=deps
    )
    chunks = list(doc.noun_chunks)
    assert len(chunks) == 2
    assert chunks[0].text_with_ws == "Sierra Leone "
    assert chunks[1].text_with_ws == "New York "
| 9,190 |
3,252 | <filename>tests/utils/test_text.py<gh_stars>1000+
# Copyright (c) Facebook, Inc. and its affiliates.
import os
import unittest
import mmf.utils.text as text_utils
import numpy as np
import torch
from mmf.common.registry import registry
from mmf.common.sample import Sample, SampleList
from mmf.utils.configuration import Configuration
from mmf.utils.env import setup_imports
from mmf.utils.general import get_mmf_root
from tests.test_utils import dummy_args
from tests.utils.test_model import TestDecoderModel
class TestUtilsText(unittest.TestCase):
    """Tests for mmf.utils.text: tokenization, n-grams, vocab building, and
    nucleus-sampling decoding with a butd/coco config."""

    # Expected tokenization of TOKENIZE_EXAMPLE (lowercased, '?' stripped).
    TOKENS = ["this", "will", "be", "a", "test", "of", "tokens"]
    TOKENIZE_EXAMPLE = "This will be a test of tokens?"
    # Note: adjacent string literals inside each element are implicitly
    # concatenated, so this list has three sentences-worth of entries.
    VOCAB_EXAMPLE_SENTENCES = [
        "Are there more big green things than large purple shiny cubes?"
        "How many other things are there of the same shape as the tiny "
        + "cyan matte object?",
        "Is the color of the large sphere the same as the large matte cube?"
        "What material is the big object that is right of the brown cylinder and "
        "left of the large brown sphere?",
        "How big is the brown shiny sphere? ;",
    ]
    def setUp(self):
        # Build a butd/coco nucleus-sampling configuration, fix the RNG seed
        # for deterministic decoding, and register the config globally so the
        # model under test can look it up from the registry.
        setup_imports()
        torch.manual_seed(1234)
        config_path = os.path.join(
            get_mmf_root(),
            "..",
            "projects",
            "butd",
            "configs",
            "coco",
            "nucleus_sampling.yaml",
        )
        config_path = os.path.abspath(config_path)
        args = dummy_args(model="butd", dataset="coco")
        args.opts.append(f"config={config_path}")
        configuration = Configuration(args)
        configuration.config.datasets = "coco"
        # Threshold used by nucleus sampling; the expected tokens in
        # test_nucleus_sampling below were generated with this value.
        configuration.config.model_config.butd.inference.params.sum_threshold = 0.5
        configuration.freeze()
        self.config = configuration.config
        registry.register("config", self.config)
    def test_tokenize(self):
        """tokenize() should lowercase and strip punctuation."""
        tokens = text_utils.tokenize(self.TOKENIZE_EXAMPLE)
        self.assertEqual(list(tokens), self.TOKENS)
    def test_generate_ngrams(self):
        """Bigrams and trigrams over the fixed token list."""
        ngrams = text_utils.generate_ngrams(self.TOKENS, 2)
        self.assertEqual(
            list(ngrams),
            ["this will", "will be", "be a", "a test", "test of", "of tokens"],
        )
        ngrams = text_utils.generate_ngrams(self.TOKENS, 3)
        self.assertEqual(
            list(ngrams),
            ["this will be", "will be a", "be a test", "a test of", "test of tokens"],
        )
    def test_generate_ngrams_range(self):
        # Test generation of 1grams to 3gram
        ngrams = text_utils.generate_ngrams_range(self.TOKENS, (1, 4))
        expected_ngrams = self.TOKENS + [
            "this will",
            "will be",
            "be a",
            "a test",
            "test of",
            "of tokens",
            "this will be",
            "will be a",
            "be a test",
            "a test of",
            "test of tokens",
        ]
        self.assertEqual(list(ngrams), expected_ngrams)
    def test_vocab_from_text(self):
        """VocabFromText: size, unk index, itos/stoi lookups, and the
        min_count / only_unk_extra / remove / keep filtering options."""
        vocab = text_utils.VocabFromText(self.VOCAB_EXAMPLE_SENTENCES)
        self.assertEqual(vocab.get_size(), 41)
        self.assertEqual(len(vocab), 41)
        self.assertEqual(vocab.get_unk_index(), 1)
        self.assertEqual(vocab.itos[0], vocab.DEFAULT_TOKENS[0])
        self.assertEqual(vocab.itos[34], "that")
        self.assertEqual(vocab.itos[31], "cube")
        self.assertEqual(vocab.itos[25], "cyan")
        self.assertEqual(vocab.itos[20], "the")
        self.assertEqual(vocab.itos[10], "than")
        self.assertEqual(vocab.stoi["sphere"], 30)
        self.assertEqual(vocab.stoi["shape"], 22)
        vocab = text_utils.VocabFromText(self.VOCAB_EXAMPLE_SENTENCES, min_count=10)
        self.assertEqual(vocab.get_size(), 5)
        self.assertEqual(vocab.itos[vocab.get_size() - 1], "the")
        vocab = text_utils.VocabFromText(self.VOCAB_EXAMPLE_SENTENCES, min_count=11)
        self.assertEqual(vocab.get_size(), 4)
        vocab = text_utils.VocabFromText(
            self.VOCAB_EXAMPLE_SENTENCES, min_count=11, only_unk_extra=True
        )
        self.assertEqual(vocab.get_size(), 1)
        self.assertEqual(vocab.itos[vocab.get_size() - 1], "<unk>")
        vocab = text_utils.VocabFromText(
            self.VOCAB_EXAMPLE_SENTENCES, min_count=1, remove=[";"]
        )
        self.assertEqual(vocab.get_size(), 40)
        vocab = text_utils.VocabFromText(
            self.VOCAB_EXAMPLE_SENTENCES, min_count=1, remove=[";", ",", "?"]
        )
        self.assertEqual(vocab.get_size(), 38)
        vocab = text_utils.VocabFromText(
            self.VOCAB_EXAMPLE_SENTENCES, min_count=1, keep=["?"], remove=";"
        )
        self.assertEqual(vocab.get_size(), 40)
    def test_nucleus_sampling(self):
        """Decoding a random (seeded) feature should yield the pinned caption."""
        vocab = text_utils.VocabFromText(self.VOCAB_EXAMPLE_SENTENCES)
        model_config = self.config.model_config.butd
        model = TestDecoderModel(model_config, vocab)
        model.build()
        model.eval()
        sample = Sample()
        sample.dataset_name = "coco"
        sample.dataset_type = "test"
        sample.image_feature_0 = torch.randn(100, 2048)
        sample.answers = torch.zeros((5, 10), dtype=torch.long)
        sample_list = SampleList([sample])
        tokens = model(sample_list)["captions"]
        # these are expected tokens for sum_threshold = 0.5
        expected_tokens = [1.0, 23.0, 38.0, 30.0, 5.0, 11.0, 2.0]
        self.assertEqual(tokens[0].tolist(), expected_tokens)
class TestUtilsTextBeamSearch(unittest.TestCase):
    """Beam-search decoding tests mirroring TestUtilsText but using the
    butd/coco beam_search.yaml configuration."""

    # Fixed: the second entry had been clobbered with a "<PASSWORD>" redaction
    # placeholder; restored to "will" to match TestUtilsText.TOKENS above.
    TOKENS = ["this", "will", "be", "a", "test", "of", "tokens"]
    TOKENIZE_EXAMPLE = "This will be a test of tokens?"
    # Adjacent string literals are implicitly concatenated: three entries.
    VOCAB_EXAMPLE_SENTENCES = [
        "Are there more big green things than large purple shiny cubes?"
        "How many other things are there of the same shape as the tiny "
        + "cyan matte object?",
        "Is the color of the large sphere the same as the large matte cube?"
        "What material is the big object that is right of the brown cylinder and "
        "left of the large brown sphere?",
        "How big is the brown shiny sphere? ;",
    ]
    def setUp(self):
        # Build the beam-search config, seed the RNG for determinism, and
        # register the config globally for the model under test.
        setup_imports()
        torch.manual_seed(1234)
        config_path = os.path.join(
            get_mmf_root(),
            "..",
            "projects",
            "butd",
            "configs",
            "coco",
            "beam_search.yaml",
        )
        config_path = os.path.abspath(config_path)
        args = dummy_args(model="butd", dataset="coco")
        args.opts.append(f"config={config_path}")
        configuration = Configuration(args)
        configuration.config.datasets = "coco"
        configuration.freeze()
        self.config = configuration.config
        registry.register("config", self.config)
    def test_beam_search(self):
        """Beam search over random (seeded) features is deterministic per
        batch size; compare against the pinned token sequences."""
        vocab = text_utils.VocabFromText(self.VOCAB_EXAMPLE_SENTENCES)
        model_config = self.config.model_config.butd
        model = TestDecoderModel(model_config, vocab)
        model.build()
        model.eval()
        expected_tokens = {
            1: [1.0, 23.0, 1.0, 24.0, 29.0, 37.0, 40.0, 17.0, 29.0, 2.0],
            2: [1.0, 0.0, 8.0, 1.0, 28.0, 25.0, 2.0],
            8: [1.0, 34.0, 1.0, 13.0, 1.0, 2.0],
            16: [1.0, 25.0, 18.0, 2.0],
        }
        for batch_size in [1, 2, 8, 16]:
            samples = []
            for _ in range(batch_size):
                sample = Sample()
                sample.dataset_name = "coco"
                sample.dataset_type = "test"
                sample.image_feature_0 = torch.randn(100, 2048)
                sample.answers = torch.zeros((5, 10), dtype=torch.long)
                samples.append(sample)
            sample_list = SampleList(samples)
            tokens = model(sample_list)["captions"]
            self.assertEqual(
                np.trim_zeros(tokens[0].tolist()), expected_tokens[batch_size]
            )
| 3,845 |
938 | {
"replace": false,
"values": [
"#tconstruct:slimy_nylium/ichor",
"#tconstruct:slimy_nylium/blood"
]
} | 56 |
28,899 | <gh_stars>1000+
"""Tests for Table Schema integration."""
from collections import OrderedDict
import json
import sys
import numpy as np
import pytest
from pandas.core.dtypes.dtypes import (
CategoricalDtype,
DatetimeTZDtype,
PeriodDtype,
)
import pandas as pd
from pandas import DataFrame
import pandas._testing as tm
from pandas.io.json._table_schema import (
as_json_table_type,
build_table_schema,
convert_json_field_to_pandas_type,
convert_pandas_type_to_json_field,
set_default_names,
)
class TestBuildSchema:
    """Tests for build_table_schema on DataFrames, Series, and MultiIndexes."""

    def setup_method(self, method):
        # Fixture frame covering integer, string, datetime, and timedelta
        # columns with a named integer index.
        self.df = DataFrame(
            {
                "A": [1, 2, 3, 4],
                "B": ["a", "b", "c", "c"],
                "C": pd.date_range("2016-01-01", freq="d", periods=4),
                "D": pd.timedelta_range("1H", periods=4, freq="T"),
            },
            index=pd.Index(range(4), name="idx"),
        )
    def test_build_table_schema(self):
        """Field types and primaryKey for a plain frame; version flag adds
        the pandas_version key."""
        result = build_table_schema(self.df, version=False)
        expected = {
            "fields": [
                {"name": "idx", "type": "integer"},
                {"name": "A", "type": "integer"},
                {"name": "B", "type": "string"},
                {"name": "C", "type": "datetime"},
                {"name": "D", "type": "duration"},
            ],
            "primaryKey": ["idx"],
        }
        assert result == expected
        result = build_table_schema(self.df)
        assert "pandas_version" in result
    def test_series(self):
        """A named Series contributes its name as the values field."""
        s = pd.Series([1, 2, 3], name="foo")
        result = build_table_schema(s, version=False)
        expected = {
            "fields": [
                {"name": "index", "type": "integer"},
                {"name": "foo", "type": "integer"},
            ],
            "primaryKey": ["index"],
        }
        assert result == expected
        result = build_table_schema(s)
        assert "pandas_version" in result
    def test_series_unnamed(self):
        """An unnamed Series falls back to the default "values" field name."""
        result = build_table_schema(pd.Series([1, 2, 3]), version=False)
        expected = {
            "fields": [
                {"name": "index", "type": "integer"},
                {"name": "values", "type": "integer"},
            ],
            "primaryKey": ["index"],
        }
        assert result == expected
    def test_multiindex(self):
        """MultiIndex levels become level_N fields (or their names, if set)
        and together form the primaryKey."""
        df = self.df.copy()
        idx = pd.MultiIndex.from_product([("a", "b"), (1, 2)])
        df.index = idx
        result = build_table_schema(df, version=False)
        expected = {
            "fields": [
                {"name": "level_0", "type": "string"},
                {"name": "level_1", "type": "integer"},
                {"name": "A", "type": "integer"},
                {"name": "B", "type": "string"},
                {"name": "C", "type": "datetime"},
                {"name": "D", "type": "duration"},
            ],
            "primaryKey": ["level_0", "level_1"],
        }
        assert result == expected
        df.index.names = ["idx0", None]
        expected["fields"][0]["name"] = "idx0"
        expected["primaryKey"] = ["idx0", "level_1"]
        result = build_table_schema(df, version=False)
        assert result == expected
class TestTableSchemaType:
    """Mapping of pandas/numpy data and dtypes to Table Schema field types
    via as_json_table_type."""

    @pytest.mark.parametrize("int_type", [int, np.int16, np.int32, np.int64])
    def test_as_json_table_type_int_data(self, int_type):
        int_data = [1, 2, 3]
        assert as_json_table_type(np.array(int_data, dtype=int_type).dtype) == "integer"
    @pytest.mark.parametrize("float_type", [float, np.float16, np.float32, np.float64])
    def test_as_json_table_type_float_data(self, float_type):
        float_data = [1.0, 2.0, 3.0]
        assert (
            as_json_table_type(np.array(float_data, dtype=float_type).dtype) == "number"
        )
    @pytest.mark.parametrize("bool_type", [bool, np.bool_])
    def test_as_json_table_type_bool_data(self, bool_type):
        bool_data = [True, False]
        assert (
            as_json_table_type(np.array(bool_data, dtype=bool_type).dtype) == "boolean"
        )
    @pytest.mark.parametrize(
        "date_data",
        [
            pd.to_datetime(["2016"]),
            pd.to_datetime(["2016"], utc=True),
            pd.Series(pd.to_datetime(["2016"])),
            pd.Series(pd.to_datetime(["2016"], utc=True)),
            pd.period_range("2016", freq="A", periods=3),
        ],
    )
    def test_as_json_table_type_date_data(self, date_data):
        # Datetimes and periods (naive or tz-aware) all map to "datetime".
        assert as_json_table_type(date_data.dtype) == "datetime"
    @pytest.mark.parametrize("str_data", [pd.Series(["a", "b"]), pd.Index(["a", "b"])])
    def test_as_json_table_type_string_data(self, str_data):
        assert as_json_table_type(str_data.dtype) == "string"
    @pytest.mark.parametrize(
        "cat_data",
        [
            pd.Categorical(["a"]),
            pd.Categorical([1]),
            pd.Series(pd.Categorical([1])),
            pd.CategoricalIndex([1]),
            pd.Categorical([1]),
        ],
    )
    def test_as_json_table_type_categorical_data(self, cat_data):
        # Categoricals map to "any" regardless of category dtype.
        assert as_json_table_type(cat_data.dtype) == "any"
    # ------
    # dtypes
    # ------
    @pytest.mark.parametrize("int_dtype", [int, np.int16, np.int32, np.int64])
    def test_as_json_table_type_int_dtypes(self, int_dtype):
        assert as_json_table_type(int_dtype) == "integer"
    @pytest.mark.parametrize("float_dtype", [float, np.float16, np.float32, np.float64])
    def test_as_json_table_type_float_dtypes(self, float_dtype):
        assert as_json_table_type(float_dtype) == "number"
    @pytest.mark.parametrize("bool_dtype", [bool, np.bool_])
    def test_as_json_table_type_bool_dtypes(self, bool_dtype):
        assert as_json_table_type(bool_dtype) == "boolean"
    @pytest.mark.parametrize(
        "date_dtype",
        [
            np.datetime64,
            np.dtype("<M8[ns]"),
            PeriodDtype("D"),
            DatetimeTZDtype("ns", "US/Central"),
        ],
    )
    def test_as_json_table_type_date_dtypes(self, date_dtype):
        # TODO: datedate.date? datetime.time?
        assert as_json_table_type(date_dtype) == "datetime"
    @pytest.mark.parametrize("td_dtype", [np.timedelta64, np.dtype("<m8[ns]")])
    def test_as_json_table_type_timedelta_dtypes(self, td_dtype):
        assert as_json_table_type(td_dtype) == "duration"
    @pytest.mark.parametrize("str_dtype", [object])  # TODO
    def test_as_json_table_type_string_dtypes(self, str_dtype):
        assert as_json_table_type(str_dtype) == "string"
    def test_as_json_table_type_categorical_dtypes(self):
        # TODO: I think before is_categorical_dtype(Categorical)
        # returned True, but now it's False. Figure out why or
        # if it matters
        assert as_json_table_type(pd.Categorical(["a"]).dtype) == "any"
        assert as_json_table_type(CategoricalDtype()) == "any"
class TestTableOrient:
    """Round-trip and serialization tests for to_json(orient="table"):
    schema layout, data payloads, field conversion helpers, and index-name
    handling.

    Fix applied: the "name_en" value in test_read_json_from_to_json_results
    had been clobbered with a "<NAME>" redaction placeholder; restored to the
    upstream GH32383 fixture value.
    """

    def setup_method(self, method):
        # Fixture frame covering int, string, datetime, timedelta, unordered
        # and ordered categoricals, float, and tz-aware datetime columns.
        self.df = DataFrame(
            {
                "A": [1, 2, 3, 4],
                "B": ["a", "b", "c", "c"],
                "C": pd.date_range("2016-01-01", freq="d", periods=4),
                "D": pd.timedelta_range("1H", periods=4, freq="T"),
                "E": pd.Series(pd.Categorical(["a", "b", "c", "c"])),
                "F": pd.Series(pd.Categorical(["a", "b", "c", "c"], ordered=True)),
                "G": [1.0, 2.0, 3, 4.0],
                "H": pd.date_range("2016-01-01", freq="d", periods=4, tz="US/Central"),
            },
            index=pd.Index(range(4), name="idx"),
        )
    def test_build_series(self):
        """Schema and data payload for a simple named Series."""
        s = pd.Series([1, 2], name="a")
        s.index.name = "id"
        result = s.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        assert "pandas_version" in result["schema"]
        result["schema"].pop("pandas_version")
        fields = [{"name": "id", "type": "integer"}, {"name": "a", "type": "integer"}]
        schema = {"fields": fields, "primaryKey": ["id"]}
        expected = OrderedDict(
            [
                ("schema", schema),
                (
                    "data",
                    [
                        OrderedDict([("id", 0), ("a", 1)]),
                        OrderedDict([("id", 1), ("a", 2)]),
                    ],
                ),
            ]
        )
        assert result == expected
    def test_read_json_from_to_json_results(self):
        # GH32383
        df = DataFrame(
            {
                "_id": {"row_0": 0},
                "category": {"row_0": "Goods"},
                "recommender_id": {"row_0": 3},
                "recommender_name_jp": {"row_0": "浦田"},
                "recommender_name_en": {"row_0": "Urata"},
                "name_jp": {"row_0": "博多人形(松尾吉将まつお よしまさ)"},
                # Restored from the upstream GH32383 fixture (was "<NAME>").
                "name_en": {"row_0": "Hakata Doll Matsuo"},
            }
        )
        result1 = pd.read_json(df.to_json())
        result2 = DataFrame.from_dict(json.loads(df.to_json()))
        tm.assert_frame_equal(result1, df)
        tm.assert_frame_equal(result2, df)
    @pytest.mark.filterwarnings(
        "ignore:an integer is required (got type float)*:DeprecationWarning"
    )
    def test_to_json(self):
        """Full schema + data payload for the mixed-dtype fixture frame."""
        df = self.df.copy()
        df.index.name = "idx"
        result = df.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        assert "pandas_version" in result["schema"]
        result["schema"].pop("pandas_version")
        fields = [
            {"name": "idx", "type": "integer"},
            {"name": "A", "type": "integer"},
            {"name": "B", "type": "string"},
            {"name": "C", "type": "datetime"},
            {"name": "D", "type": "duration"},
            {
                "constraints": {"enum": ["a", "b", "c"]},
                "name": "E",
                "ordered": False,
                "type": "any",
            },
            {
                "constraints": {"enum": ["a", "b", "c"]},
                "name": "F",
                "ordered": True,
                "type": "any",
            },
            {"name": "G", "type": "number"},
            {"name": "H", "type": "datetime", "tz": "US/Central"},
        ]
        schema = {"fields": fields, "primaryKey": ["idx"]}
        data = [
            OrderedDict(
                [
                    ("idx", 0),
                    ("A", 1),
                    ("B", "a"),
                    ("C", "2016-01-01T00:00:00.000Z"),
                    ("D", "P0DT1H0M0S"),
                    ("E", "a"),
                    ("F", "a"),
                    ("G", 1.0),
                    ("H", "2016-01-01T06:00:00.000Z"),
                ]
            ),
            OrderedDict(
                [
                    ("idx", 1),
                    ("A", 2),
                    ("B", "b"),
                    ("C", "2016-01-02T00:00:00.000Z"),
                    ("D", "P0DT1H1M0S"),
                    ("E", "b"),
                    ("F", "b"),
                    ("G", 2.0),
                    ("H", "2016-01-02T06:00:00.000Z"),
                ]
            ),
            OrderedDict(
                [
                    ("idx", 2),
                    ("A", 3),
                    ("B", "c"),
                    ("C", "2016-01-03T00:00:00.000Z"),
                    ("D", "P0DT1H2M0S"),
                    ("E", "c"),
                    ("F", "c"),
                    ("G", 3.0),
                    ("H", "2016-01-03T06:00:00.000Z"),
                ]
            ),
            OrderedDict(
                [
                    ("idx", 3),
                    ("A", 4),
                    ("B", "c"),
                    ("C", "2016-01-04T00:00:00.000Z"),
                    ("D", "P0DT1H3M0S"),
                    ("E", "c"),
                    ("F", "c"),
                    ("G", 4.0),
                    ("H", "2016-01-04T06:00:00.000Z"),
                ]
            ),
        ]
        expected = OrderedDict([("schema", schema), ("data", data)])
        assert result == expected
    def test_to_json_float_index(self):
        """A float index serializes as a "number" primary-key field."""
        data = pd.Series(1, index=[1.0, 2.0])
        result = data.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result["schema"].pop("pandas_version")
        expected = OrderedDict(
            [
                (
                    "schema",
                    {
                        "fields": [
                            {"name": "index", "type": "number"},
                            {"name": "values", "type": "integer"},
                        ],
                        "primaryKey": ["index"],
                    },
                ),
                (
                    "data",
                    [
                        OrderedDict([("index", 1.0), ("values", 1)]),
                        OrderedDict([("index", 2.0), ("values", 1)]),
                    ],
                ),
            ]
        )
        assert result == expected
    def test_to_json_period_index(self):
        """A PeriodIndex serializes as datetime with the freq recorded."""
        idx = pd.period_range("2016", freq="Q-JAN", periods=2)
        data = pd.Series(1, idx)
        result = data.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result["schema"].pop("pandas_version")
        fields = [
            {"freq": "Q-JAN", "name": "index", "type": "datetime"},
            {"name": "values", "type": "integer"},
        ]
        schema = {"fields": fields, "primaryKey": ["index"]}
        data = [
            OrderedDict([("index", "2015-11-01T00:00:00.000Z"), ("values", 1)]),
            OrderedDict([("index", "2016-02-01T00:00:00.000Z"), ("values", 1)]),
        ]
        expected = OrderedDict([("schema", schema), ("data", data)])
        assert result == expected
    def test_to_json_categorical_index(self):
        """A CategoricalIndex serializes as "any" with enum constraints."""
        data = pd.Series(1, pd.CategoricalIndex(["a", "b"]))
        result = data.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result["schema"].pop("pandas_version")
        expected = OrderedDict(
            [
                (
                    "schema",
                    {
                        "fields": [
                            {
                                "name": "index",
                                "type": "any",
                                "constraints": {"enum": ["a", "b"]},
                                "ordered": False,
                            },
                            {"name": "values", "type": "integer"},
                        ],
                        "primaryKey": ["index"],
                    },
                ),
                (
                    "data",
                    [
                        OrderedDict([("index", "a"), ("values", 1)]),
                        OrderedDict([("index", "b"), ("values", 1)]),
                    ],
                ),
            ]
        )
        assert result == expected
    @pytest.mark.filterwarnings(
        "ignore:an integer is required (got type float)*:DeprecationWarning"
    )
    def test_date_format_raises(self):
        """orient="table" requires ISO dates; epoch must raise."""
        msg = (
            "Trying to write with `orient='table'` and `date_format='epoch'`. Table "
            "Schema requires dates to be formatted with `date_format='iso'`"
        )
        with pytest.raises(ValueError, match=msg):
            self.df.to_json(orient="table", date_format="epoch")
        # others work
        self.df.to_json(orient="table", date_format="iso")
        self.df.to_json(orient="table")
    def test_convert_pandas_type_to_json_field_int(self, index_or_series):
        kind = index_or_series
        data = [1, 2, 3]
        result = convert_pandas_type_to_json_field(kind(data, name="name"))
        expected = {"name": "name", "type": "integer"}
        assert result == expected
    def test_convert_pandas_type_to_json_field_float(self, index_or_series):
        kind = index_or_series
        data = [1.0, 2.0, 3.0]
        result = convert_pandas_type_to_json_field(kind(data, name="name"))
        expected = {"name": "name", "type": "number"}
        assert result == expected
    @pytest.mark.parametrize(
        "dt_args,extra_exp", [({}, {}), ({"utc": True}, {"tz": "UTC"})]
    )
    @pytest.mark.parametrize("wrapper", [None, pd.Series])
    def test_convert_pandas_type_to_json_field_datetime(
        self, dt_args, extra_exp, wrapper
    ):
        # tz-aware input adds a "tz" key to the field.
        data = [1.0, 2.0, 3.0]
        data = pd.to_datetime(data, **dt_args)
        if wrapper is pd.Series:
            data = pd.Series(data, name="values")
        result = convert_pandas_type_to_json_field(data)
        expected = {"name": "values", "type": "datetime"}
        expected.update(extra_exp)
        assert result == expected
    def test_convert_pandas_type_to_json_period_range(self):
        arr = pd.period_range("2016", freq="A-DEC", periods=4)
        result = convert_pandas_type_to_json_field(arr)
        expected = {"name": "values", "type": "datetime", "freq": "A-DEC"}
        assert result == expected
    @pytest.mark.parametrize("kind", [pd.Categorical, pd.CategoricalIndex])
    @pytest.mark.parametrize("ordered", [True, False])
    def test_convert_pandas_type_to_json_field_categorical(self, kind, ordered):
        data = ["a", "b", "c"]
        if kind is pd.Categorical:
            arr = pd.Series(kind(data, ordered=ordered), name="cats")
        elif kind is pd.CategoricalIndex:
            arr = kind(data, ordered=ordered, name="cats")
        result = convert_pandas_type_to_json_field(arr)
        expected = {
            "name": "cats",
            "type": "any",
            "constraints": {"enum": data},
            "ordered": ordered,
        }
        assert result == expected
    @pytest.mark.parametrize(
        "inp,exp",
        [
            ({"type": "integer"}, "int64"),
            ({"type": "number"}, "float64"),
            ({"type": "boolean"}, "bool"),
            ({"type": "duration"}, "timedelta64"),
            ({"type": "datetime"}, "datetime64[ns]"),
            ({"type": "datetime", "tz": "US/Hawaii"}, "datetime64[ns, US/Hawaii]"),
            ({"type": "any"}, "object"),
            (
                {
                    "type": "any",
                    "constraints": {"enum": ["a", "b", "c"]},
                    "ordered": False,
                },
                CategoricalDtype(categories=["a", "b", "c"], ordered=False),
            ),
            (
                {
                    "type": "any",
                    "constraints": {"enum": ["a", "b", "c"]},
                    "ordered": True,
                },
                CategoricalDtype(categories=["a", "b", "c"], ordered=True),
            ),
            ({"type": "string"}, "object"),
        ],
    )
    def test_convert_json_field_to_pandas_type(self, inp, exp):
        # Reverse mapping: Table Schema field -> pandas dtype.
        field = {"name": "foo"}
        field.update(inp)
        assert convert_json_field_to_pandas_type(field) == exp
    @pytest.mark.parametrize("inp", ["geopoint", "geojson", "fake_type"])
    def test_convert_json_field_to_pandas_type_raises(self, inp):
        field = {"type": inp}
        with pytest.raises(
            ValueError, match=f"Unsupported or invalid field type: {inp}"
        ):
            convert_json_field_to_pandas_type(field)
    def test_categorical(self):
        """Categorical values round-trip through the schema + data payload."""
        s = pd.Series(pd.Categorical(["a", "b", "a"]))
        s.index.name = "idx"
        result = s.to_json(orient="table", date_format="iso")
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result["schema"].pop("pandas_version")
        fields = [
            {"name": "idx", "type": "integer"},
            {
                "constraints": {"enum": ["a", "b"]},
                "name": "values",
                "ordered": False,
            "type": "any",
            },
        ]
        expected = OrderedDict(
            [
                ("schema", {"fields": fields, "primaryKey": ["idx"]}),
                (
                    "data",
                    [
                        OrderedDict([("idx", 0), ("values", "a")]),
                        OrderedDict([("idx", 1), ("values", "b")]),
                        OrderedDict([("idx", 2), ("values", "a")]),
                    ],
                ),
            ]
        )
        assert result == expected
    @pytest.mark.parametrize(
        "idx,nm,prop",
        [
            (pd.Index([1]), "index", "name"),
            (pd.Index([1], name="myname"), "myname", "name"),
            (
                pd.MultiIndex.from_product([("a", "b"), ("c", "d")]),
                ["level_0", "level_1"],
                "names",
            ),
            (
                pd.MultiIndex.from_product(
                    [("a", "b"), ("c", "d")], names=["n1", "n2"]
                ),
                ["n1", "n2"],
                "names",
            ),
            (
                pd.MultiIndex.from_product(
                    [("a", "b"), ("c", "d")], names=["n1", None]
                ),
                ["n1", "level_1"],
                "names",
            ),
        ],
    )
    def test_set_names_unset(self, idx, nm, prop):
        # set_default_names fills in missing index names (level_N defaults).
        data = pd.Series(1, idx)
        result = set_default_names(data)
        assert getattr(result.index, prop) == nm
    @pytest.mark.parametrize(
        "idx",
        [
            pd.Index([], name="index"),
            pd.MultiIndex.from_arrays([["foo"], ["bar"]], names=("level_0", "level_1")),
            pd.MultiIndex.from_arrays([["foo"], ["bar"]], names=("foo", "level_1")),
        ],
    )
    def test_warns_non_roundtrippable_names(self, idx):
        # GH 19130
        df = DataFrame(index=idx)
        df.index.name = "index"
        with tm.assert_produces_warning():
            set_default_names(df)
    def test_timestamp_in_columns(self):
        """Timestamp/Timedelta column labels serialize to ISO strings."""
        df = DataFrame(
            [[1, 2]], columns=[pd.Timestamp("2016"), pd.Timedelta(10, unit="s")]
        )
        result = df.to_json(orient="table")
        js = json.loads(result)
        assert js["schema"]["fields"][1]["name"] == "2016-01-01T00:00:00.000Z"
        assert js["schema"]["fields"][2]["name"] == "P0DT0H0M10S"
    @pytest.mark.parametrize(
        "case",
        [
            pd.Series([1], index=pd.Index([1], name="a"), name="a"),
            DataFrame({"A": [1]}, index=pd.Index([1], name="A")),
            DataFrame(
                {"A": [1]},
                index=pd.MultiIndex.from_arrays([["a"], [1]], names=["A", "a"]),
            ),
        ],
    )
    def test_overlapping_names(self, case):
        # Index names colliding with column names cannot round-trip.
        with pytest.raises(ValueError, match="Overlapping"):
            case.to_json(orient="table")
    def test_mi_falsey_name(self):
        # GH 16203
        df = DataFrame(
            np.random.randn(4, 4),
            index=pd.MultiIndex.from_product([("A", "B"), ("a", "b")]),
        )
        result = [x["name"] for x in build_table_schema(df)["fields"]]
        assert result == ["level_0", "level_1", 0, 1, 2, 3]
class TestTableOrientReader:
    """Round-trip tests for read_json(..., orient="table"): a frame written
    with to_json(orient="table") should come back equal via read_json.
    """

    @pytest.mark.parametrize(
        "index_nm",
        [None, "idx", pytest.param("index", marks=pytest.mark.xfail), "level_0"],
    )
    @pytest.mark.parametrize(
        "vals",
        [
            {"ints": [1, 2, 3, 4]},
            {"objects": ["a", "b", "c", "d"]},
            {"objects": ["1", "2", "3", "4"]},
            {"date_ranges": pd.date_range("2016-01-01", freq="d", periods=4)},
            {"categoricals": pd.Series(pd.Categorical(["a", "b", "c", "c"]))},
            {
                "ordered_cats": pd.Series(
                    pd.Categorical(["a", "b", "c", "c"], ordered=True)
                )
            },
            {"floats": [1.0, 2.0, 3.0, 4.0]},
            {"floats": [1.1, 2.2, 3.3, 4.4]},
            {"bools": [True, False, False, True]},
            {
                "timezones": pd.date_range(
                    "2016-01-01", freq="d", periods=4, tz="US/Central"
                )  # added in # GH 35973
            },
        ],
    )
    @pytest.mark.skipif(sys.version_info[:3] == (3, 7, 0), reason="GH-35309")
    def test_read_json_table_orient(self, index_nm, vals, recwarn):
        """Round trip succeeds for each supported dtype / index-name combo."""
        df = DataFrame(vals, index=pd.Index(range(4), name=index_nm))
        out = df.to_json(orient="table")
        result = pd.read_json(out, orient="table")
        tm.assert_frame_equal(df, result)

    @pytest.mark.parametrize("index_nm", [None, "idx", "index"])
    @pytest.mark.parametrize(
        "vals",
        [{"timedeltas": pd.timedelta_range("1H", periods=4, freq="T")}],
    )
    def test_read_json_table_orient_raises(self, index_nm, vals, recwarn):
        """Timedelta columns are not yet readable with orient="table"."""
        df = DataFrame(vals, index=pd.Index(range(4), name=index_nm))
        out = df.to_json(orient="table")
        with pytest.raises(NotImplementedError, match="can not yet read "):
            pd.read_json(out, orient="table")

    @pytest.mark.parametrize(
        "idx",
        [
            pd.Index(range(4)),
            pd.date_range(
                "2020-08-30",
                freq="d",
                periods=4,
            )._with_freq(None),
            pd.date_range(
                "2020-08-30", freq="d", periods=4, tz="US/Central"
            )._with_freq(None),
            pd.MultiIndex.from_product(
                [
                    pd.date_range("2020-08-30", freq="d", periods=2, tz="US/Central"),
                    ["x", "y"],
                ],
            ),
        ],
    )
    @pytest.mark.parametrize(
        "vals",
        [
            {"floats": [1.1, 2.2, 3.3, 4.4]},
            {"dates": pd.date_range("2020-08-30", freq="d", periods=4)},
            {
                "timezones": pd.date_range(
                    "2020-08-30", freq="d", periods=4, tz="Europe/London"
                )
            },
        ],
    )
    @pytest.mark.skipif(sys.version_info[:3] == (3, 7, 0), reason="GH-35309")
    def test_read_json_table_timezones_orient(self, idx, vals, recwarn):
        """Timezone-aware indexes and columns survive the round trip."""
        # GH 35973
        df = DataFrame(vals, index=idx)
        out = df.to_json(orient="table")
        result = pd.read_json(out, orient="table")
        tm.assert_frame_equal(df, result)

    @pytest.mark.filterwarnings(
        "ignore:an integer is required (got type float)*:DeprecationWarning"
    )
    def test_comprehensive(self):
        """One frame mixing most supported dtypes round-trips intact."""
        df = DataFrame(
            {
                "A": [1, 2, 3, 4],
                "B": ["a", "b", "c", "c"],
                "C": pd.date_range("2016-01-01", freq="d", periods=4),
                # 'D': pd.timedelta_range('1H', periods=4, freq='T'),
                "E": pd.Series(pd.Categorical(["a", "b", "c", "c"])),
                "F": pd.Series(pd.Categorical(["a", "b", "c", "c"], ordered=True)),
                "G": [1.1, 2.2, 3.3, 4.4],
                "H": pd.date_range("2016-01-01", freq="d", periods=4, tz="US/Central"),
                "I": [True, False, False, True],
            },
            index=pd.Index(range(4), name="idx"),
        )
        out = df.to_json(orient="table")
        result = pd.read_json(out, orient="table")
        tm.assert_frame_equal(df, result)

    @pytest.mark.parametrize(
        "index_names",
        [[None, None], ["foo", "bar"], ["foo", None], [None, "foo"], ["index", "foo"]],
    )
    def test_multiindex(self, index_names):
        """MultiIndex frames round-trip for various level-name combinations."""
        # GH 18912
        df = DataFrame(
            [["Arr", "alpha", [1, 2, 3, 4]], ["Bee", "Beta", [10, 20, 30, 40]]],
            index=[["A", "B"], ["Null", "Eins"]],
            columns=["Aussprache", "Griechisch", "Args"],
        )
        df.index.names = index_names
        out = df.to_json(orient="table")
        result = pd.read_json(out, orient="table")
        tm.assert_frame_equal(df, result)

    def test_empty_frame_roundtrip(self):
        """An empty frame (columns only) round-trips without error."""
        # GH 21287
        df = DataFrame(columns=["a", "b", "c"])
        expected = df.copy()
        out = df.to_json(orient="table")
        result = pd.read_json(out, orient="table")
        tm.assert_frame_equal(expected, result)
| 15,680 |
432 | package us.parr.bookish.model;
import us.parr.bookish.entity.EntityDef;
import java.util.List;
/**
 * A document section: a titled container that may hold nested subsections.
 * Numbering and parent links are established by {@link #connectContainerTree}.
 */
public class Section extends ContainerWithTitle {
	/** Builds a section from its content elements and (possibly null) subsections. */
	public Section(EntityDef def,
				   String title,
				   String anchor,
				   List<OutputModelObject> elements,
				   List<ContainerWithTitle> subsections)
	{
		super(def, title, anchor, elements);
		this.subcontainers = subsections;
	}

	/**
	 * Links this section into the container tree: records its parent and
	 * section number {@code n}, then numbers its children 1..k recursively.
	 */
	public void connectContainerTree(ContainerWithTitle parent, int n) {
		this.parent = parent;
		sectionNumber = n;
		if ( subcontainers==null ) {
			return; // leaf section; nothing to number
		}
		int childNumber = 1;
		for (ContainerWithTitle child : subcontainers) {
			child.connectContainerTree(this, childNumber);
			childNumber++;
		}
	}

	/** Returns the explicit anchor if one was set, else a generated "secN" anchor. */
	public String getAnchor() {
		return anchor!=null ? anchor : "sec"+sectionNumber;
	}
}
| 312 |
678 | <reponame>bzxy/cydia<gh_stars>100-1000
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/IMCore.framework/IMCore
*/
/* iOSOpenDev: commented-out (since file not found)
#import <Marco/_FZServicePropertyRequiredEmailDomain.h>
*/
__attribute__((visibility("hidden")))
/* iOSOpenDev: replaced with next line (since file not found)
@interface IMServiceAgentCenter : _FZServicePropertyRequiredEmailDomain {
*/
// Notification-center-style hub for IMCore service events: observers register
// for named notifications and a single delegate can be attached.
// (class-dump-generated header; offsets/addresses in trailing comments.)
@interface IMServiceAgentCenter : NSObject {
	id _delegate;	// 56 = 0x38
}
@property(assign) id delegate;	// G=0x2d00d; S=0x2cffd; converted property
// Unregister `observer` for the given notification name/object pair.
- (void)removeObserver:(id)observer name:(id)name object:(id)object;	// 0x2d09d
// Register `observer` so `selector` fires for notifications matching name/object.
- (void)addObserver:(id)observer selector:(SEL)selector name:(id)name object:(id)object;	// 0x2d01d
// converted property getter: - (id)delegate; // 0x2d00d
// converted property setter: - (void)setDelegate:(id)delegate; // 0x2cffd
- (void)finalize;	// 0x2cfbd
- (void)dealloc;	// 0x2cf7d
@end
| 362 |
348 | {"nom":"Dargnies","circ":"3ème circonscription","dpt":"Somme","inscrits":898,"abs":506,"votants":392,"blancs":51,"nuls":25,"exp":316,"res":[{"nuance":"LR","nom":"M. <NAME>","voix":224},{"nuance":"REM","nom":"M. <NAME>","voix":92}]} | 95 |
318 | package testng.groupThreadParallel;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.testng.Assert;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Tests grouping/threading/parallel functionality of TestNG.
*
* @author jkuhnert
*/
public class TestNGTest
{
    // Shared across the three parallel test threads; volatile for visibility,
    // incremented only via the synchronized incrementTestCount().
    private static volatile int m_testCount = 0;
    /**
     * Sets up testObject
     */
    @BeforeClass( groups = "functional" )
    public void configureTest()
    {
        testObject = new Object();
    }
    // Runs once after the whole suite: all three tests must have executed.
    @AfterSuite( alwaysRun = true, groups = "functional" )
    public void check_Test_Count()
    {
        System.out.println( "check_Test_Count(): " + m_testCount );
        Assert.assertEquals( m_testCount, 3 );
    }
    Object testObject;
    @Test( groups = { "functional", "notincluded" } )
    public void test1()
        throws InterruptedException
    {
        doTest( "test1" );
    }
    // Common body for the three parallel tests: count this execution, verify
    // @BeforeClass ran, then block until all three tests have started.
    private void doTest( String test )
        throws InterruptedException
    {
        incrementTestCount();
        System.out.println( "running " + test );
        Assert.assertNotNull( testObject, "testObject" );
        waitForTestCountToBeThree();
    }
    private static synchronized void incrementTestCount()
    {
        m_testCount++;
    }
    @Test( groups = { "functional", "notincluded" } )
    public void test2()
        throws InterruptedException
    {
        doTest( "test2" );
    }
    @Test( groups = { "functional", "notincluded" } )
    public void test3()
        throws InterruptedException
    {
        doTest( "test3" );
    }
    // Poll (10ms steps, 5s timeout) until all three tests have incremented the
    // counter — proves the tests really ran in parallel threads.
    private void waitForTestCountToBeThree()
        throws InterruptedException
    {
        if ( m_testCount == 3 )
            return;
        long now = System.currentTimeMillis();
        long timeout = 5 * 1000;
        long finish = now + timeout;
        while ( m_testCount < 3 && System.currentTimeMillis() < finish )
        {
            Thread.sleep( 10 );
        }
        Assert.assertTrue( m_testCount >= 3, "Expected TestCount >= 3, but was: " + m_testCount );
    }
    /**
     * Sample method that shouldn't be run by test suite.
     */
    @Test( groups = "notincluded" )
    public void shouldNotRun()
    {
        Assert.fail( "Group specified by test shouldnt be run." );
    }
}
717 | # -*- coding: utf-8 -*-
import struct
from PyQt5.QtGui import QQuaternion, QVector3D
class VmdBoneFrame():
    """One bone keyframe of a VMD (Vocaloid Motion Data) file."""

    def __init__(self, frame=0):
        # NOTE(review): name defaults to a str but write() passes it straight
        # to a binary stream — callers presumably assign bytes; confirm.
        self.name = ''
        self.frame = frame  # frame number (uint32 on disk)
        self.position = QVector3D(0, 0, 0)  # bone translation
        self.rotation = QQuaternion()  # bone rotation
    def write(self, fout):
        """Serialize this keyframe to the binary stream ``fout``."""
        fout.write(self.name)
        fout.write(bytearray([0 for i in range(len(self.name), 15)])) # pad bone name to 15 bytes with \0
        fout.write(struct.pack('<L', self.frame))
        fout.write(struct.pack('<f', self.position.x()))
        fout.write(struct.pack('<f', self.position.y()))
        fout.write(struct.pack('<f', self.position.z()))
        # quaternion is stored as x, y, z, w
        v = self.rotation.toVector4D()
        fout.write(struct.pack('<f', v.x()))
        fout.write(struct.pack('<f', v.y()))
        fout.write(struct.pack('<f', v.z()))
        fout.write(struct.pack('<f', v.w()))
        fout.write(bytearray([0 for i in range(0, 64)])) # interpolation parameters (64 bytes)
class VmdInfoIk():
    """One IK-bone on/off entry inside a show-IK keyframe.

    name  -- IK bone name (bytes; padded to 20 bytes when written)
    onoff -- 1 if IK is enabled for this bone, 0 otherwise
    """

    def __init__(self, name='', onoff=0):
        self.name, self.onoff = name, onoff
class VmdShowIkFrame():
    """A VMD model-show / IK on-off keyframe."""

    def __init__(self):
        self.frame = 0  # frame number (uint32 on disk)
        self.show = 0   # model visibility flag (int8 on disk)
        self.ik = []    # entries with .name / .onoff attributes

    def write(self, fout):
        """Serialize this keyframe to the binary stream ``fout``."""
        fout.write(struct.pack('<L', self.frame))
        fout.write(struct.pack('b', self.show))
        fout.write(struct.pack('<L', len(self.ik)))
        for entry in self.ik:
            fout.write(entry.name)
            # pad IK bone name to 20 bytes with \0
            fout.write(b'\x00' * (20 - len(entry.name)))
            fout.write(struct.pack('b', entry.onoff))
class VmdWriter():
    """Writes VMD (Vocaloid Motion Data, version 0002) motion files."""

    def __init__(self):
        pass

    def write_vmd_file(self, filename, bone_frames, showik_frames):
        """Write VMD data to a file.

        filename      -- output path, opened in binary mode
        bone_frames   -- iterable of VmdBoneFrame objects
        showik_frames -- iterable of VmdShowIkFrame objects, or None for none

        Fix: use a context manager so the file handle is closed even when a
        frame's write() raises (the original leaked the handle on error).
        """
        with open(filename, "wb") as fout:
            # header: 30-byte signature + 20-byte model name
            fout.write(b'Vocaloid Motion Data 0002\x00\x00\x00\x00\x00')
            fout.write(b'Dummy Model Name    ')
            # bone frames
            fout.write(struct.pack('<L', len(bone_frames)))  # number of bone frames
            for bf in bone_frames:
                bf.write(fout)
            fout.write(struct.pack('<L', 0))  # number of morph (expression) keyframes
            fout.write(struct.pack('<L', 0))  # number of camera keyframes
            fout.write(struct.pack('<L', 0))  # number of lighting keyframes
            fout.write(struct.pack('<L', 0))  # number of self-shadow keyframes
            if showik_frames is None:
                fout.write(struct.pack('<L', 0))  # number of show/IK keyframes
            else:
                fout.write(struct.pack('<L', len(showik_frames)))  # number of show/IK keyframes
                for sf in showik_frames:
                    sf.write(fout)
| 1,407 |
567 | # Generated by Django 2.0.3 on 2018-03-28 22:40
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the todo ``Item`` model to ``Task``."""

    # Non-atomic migration — presumably because the table rename cannot run
    # inside a transaction on the target backend (e.g. SQLite); confirm.
    atomic = False
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("todo", "0005_auto_20180212_2325"),
    ]
    operations = [migrations.RenameModel(old_name="Item", new_name="Task")]
| 150 |
3,508 | <reponame>Anshul1507/Leetcode
package com.fishercoder;
import com.fishercoder.solutions._165;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/** Unit tests for LeetCode 165 "Compare Version Numbers" (_165.Solution1). */
public class _165Test {
    private static _165.Solution1 solution1;
    @BeforeClass
    public static void setup() {
        solution1 = new _165.Solution1();
    }
    @Test
    public void test1() {
        // "1.1" < "1.2" -> -1
        assertEquals(-1, solution1.compareVersion("1.1", "1.2"));
    }
    @Test
    public void test2() {
        // "1.0.1" > "1" -> 1
        assertEquals(1, solution1.compareVersion("1.0.1", "1"));
    }
    @Test
    public void test3() {
        // trailing ".0" is not significant; -0 is just the int literal 0
        assertEquals(-0, solution1.compareVersion("1.0", "1"));
    }
}
| 250 |
354 | <filename>CalendarFXView/src/main/java/impl/com/calendarfx/view/MonthSheetViewSkin.java<gh_stars>100-1000
/*
* Copyright (C) 2017 <NAME> Software & Consulting (dlsc.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package impl.com.calendarfx.view;
import com.calendarfx.model.Calendar;
import com.calendarfx.model.CalendarEvent;
import com.calendarfx.model.CalendarSource;
import com.calendarfx.model.Entry;
import com.calendarfx.util.LoggingDomain;
import com.calendarfx.view.DateControl;
import com.calendarfx.view.DateSelectionModel;
import com.calendarfx.view.MonthSheetView;
import com.calendarfx.view.MonthSheetView.DateCell;
import com.calendarfx.view.MonthSheetView.WeekDayLayoutStrategy;
import javafx.beans.InvalidationListener;
import javafx.event.EventHandler;
import javafx.event.WeakEventHandler;
import javafx.geometry.Bounds;
import javafx.scene.Node;
import javafx.scene.control.Control;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Priority;
import javafx.scene.layout.Region;
import javafx.scene.layout.RowConstraints;
import javafx.util.Callback;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.YearMonth;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
/**
 * Skin for {@link MonthSheetView}: lays out one column per visible month in a
 * {@link GridPane} (header cell on top, one {@link DateCell} per day below),
 * and keeps the cells in sync with calendar entries, date selection, keyboard
 * navigation, and the "today" marker.
 */
public class MonthSheetViewSkin extends DateControlSkin<MonthSheetView> implements LoadDataSettingsProvider {
    // One column per month; row 0 holds the month header cells.
    private final GridPane grid = new GridPane();
    // Loads calendar entries for the visible date range (see LoadDataSettingsProvider).
    private final DataLoader dataLoader = new DataLoader(this);
    // Entries per day, refilled on every updateEntries() pass.
    private final Map<LocalDate, List<Entry<?>>> dataMap = new HashMap<>();
    // Lookup tables kept in sync by buildCells(): date <-> cell <-> grid position.
    private final Map<LocalDate, DateCell> cellMap = new HashMap<>();
    private final Map<Position, DateCell> positionToDateCellMap = new HashMap<>();
    private final Map<LocalDate, Position> dateToPositionMap = new HashMap<>();
    public MonthSheetViewSkin(MonthSheetView control) {
        super(control);
        grid.getStyleClass().add("container");
        grid.setMaxSize(Double.MAX_VALUE, Double.MAX_VALUE);
        updateRowConstraints();
        control.weekDayLayoutProperty().addListener(it -> updateRowConstraints());
        // Any of these properties invalidates the whole cell grid.
        InvalidationListener builder = obs -> buildCells();
        control.dateProperty().addListener(builder);
        control.viewUnitProperty().addListener(builder);
        control.extendedViewUnitProperty().addListener(builder);
        control.extendedUnitsForwardProperty().addListener(builder);
        control.extendedUnitsBackwardProperty().addListener(builder);
        control.weekDayLayoutProperty().addListener(builder);
        control.weekFieldsProperty().addListener(builder);
        control.cellFactoryProperty().addListener(builder);
        control.headerCellFactoryProperty().addListener(builder);
        control.enableHyperlinksProperty().addListener(builder);
        control.getCalendars().addListener((javafx.beans.Observable obs) -> updateEntries("list of calendars changed"));
        control.clickBehaviourProperty().addListener(it -> control.getDateSelectionModel().clear());
        InvalidationListener selectedUpdater = obs -> updateSelected();
        control.getDateSelectionModel().getSelectedDates().addListener(selectedUpdater);
        InvalidationListener todayUpdater = obs -> updateToday();
        control.todayProperty().addListener(todayUpdater);
        control.showTodayProperty().addListener(todayUpdater);
        // Arrow-key navigation relative to the most recently selected date;
        // shift extends the selection, shortcut (Ctrl/Cmd) adds to it.
        EventHandler<KeyEvent> keyPressedHandler = evt -> {
            DateSelectionModel selectionModel = getSkinnable().getDateSelectionModel();
            LocalDate lastSelected = selectionModel.getLastSelected();
            if (lastSelected != null) {
                Position lastPosition = dateToPositionMap.get(lastSelected);
                LocalDate newSelection = null;
                boolean isNavigationKey = true;
                switch (evt.getCode()) {
                    case UP:
                        newSelection = lastSelected.plusDays(-1);
                        break;
                    case DOWN:
                        newSelection = lastSelected.plusDays(1);
                        break;
                    case LEFT:
                        // move one month column to the left, same row
                        Position newPosition = new Position(Math.max(0, lastPosition.getColumn() - 1), lastPosition.getRow());
                        DateCell newCell = positionToDateCellMap.get(newPosition);
                        if (newCell != null) {
                            newSelection = newCell.getDate();
                        }
                        break;
                    case RIGHT:
                        // move one month column to the right, same row
                        newPosition = new Position(lastPosition.getColumn() + 1, lastPosition.getRow());
                        newCell = positionToDateCellMap.get(newPosition);
                        if (newCell != null) {
                            newSelection = newCell.getDate();
                        }
                        break;
                    default:
                        isNavigationKey = false;
                        break;
                }
                if (getSkinnable().isVisibleDate(newSelection)) {
                    if (evt.isShiftDown()) {
                        selectionModel.selectUntil(newSelection);
                    } else if (evt.isShortcutDown()) {
                        selectionModel.select(newSelection);
                    } else {
                        selectionModel.clearAndSelect(newSelection);
                    }
                }
                if (isNavigationKey) {
                    evt.consume();
                }
            }
        };
        control.addEventFilter(KeyEvent.KEY_PRESSED, keyPressedHandler);
        control.setFocusTraversable(true);
        buildCells();
        getChildren().add(grid);
        updateEntries("initial load");
        updateToday();
    }
    // Header row never grows; every day row grows equally.
    private void updateRowConstraints() {
        int rowCount = 32; // header + 31 days
        if (getSkinnable().getWeekDayLayout() == WeekDayLayoutStrategy.ALIGNED) {
            rowCount += 6; // 6 = max number of empty slots / cells at the top
        }
        List<RowConstraints> rowConstraints = new ArrayList<>();
        for (int i = 0; i <= rowCount; i++) {
            RowConstraints con = new RowConstraints();
            con.setFillHeight(true);
            con.setPrefHeight(Region.USE_COMPUTED_SIZE);
            con.setMinHeight(Region.USE_PREF_SIZE);
            con.setMaxHeight(Double.MAX_VALUE);
            con.setVgrow(i == 0 ? Priority.NEVER : Priority.ALWAYS);
            rowConstraints.add(con);
        }
        grid.getRowConstraints().setAll(rowConstraints);
    }
    @Override
    protected void calendarVisibilityChanged() {
        updateEntries("calendar visibility changed");
    }
    // Rebuilds every month column and all lookup maps from scratch.
    private void buildCells() {
        positionToDateCellMap.clear();
        dateToPositionMap.clear();
        cellMap.clear();
        YearMonth start = getSkinnable().getExtendedStartMonth();
        YearMonth end = getSkinnable().getExtendedEndMonth();
        int colIndex = 0;
        grid.getColumnConstraints().clear();
        grid.getChildren().clear();
        while (!start.isAfter(end)) {
            ColumnConstraints columnConstraints = new ColumnConstraints();
            columnConstraints.setFillWidth(true);
            columnConstraints.setMinWidth(Region.USE_PREF_SIZE);
            columnConstraints.setMaxWidth(Double.MAX_VALUE);
            grid.getColumnConstraints().add(columnConstraints);
            buildCells(start, colIndex);
            start = start.plusMonths(1);
            colIndex++;
        }
        // distribute width evenly across all month columns
        grid.getColumnConstraints().forEach(con -> con.setPercentWidth(100d / (double) grid.getColumnConstraints().size()));
        updateEntries("cells were rebuild");
        updateToday();
        updateSelected();
    }
    // Builds one month column: header, optional alignment filler cells,
    // one cell per day, then bottom filler so all columns are equal height.
    private void buildCells(YearMonth yearMonth, int colIndex) {
        List<Node> cells = new ArrayList<>();
        Node header = buildHeaderCell(yearMonth);
        header.getStyleClass().add("month-header");
        cells.add(header);
        LocalDate start = yearMonth.atDay(1);
        LocalDate end = yearMonth.atEndOfMonth();
        if (getSkinnable().getWeekDayLayout() == WeekDayLayoutStrategy.ALIGNED) {
            // pad with empty cells so weekdays line up across columns
            DayOfWeek firstDayOfWeek = getSkinnable().getFirstDayOfWeek();
            DayOfWeek startDayOfWeek = start.getDayOfWeek();
            int distanceDays = Math.abs(firstDayOfWeek.getValue() - startDayOfWeek.getValue());
            while (distanceDays-- > 0) {
                cells.add(buildCell(null));
            }
        }
        while (start.isBefore(end) || start.isEqual(end)) {
            cells.add(buildCell(start));
            start = start.plusDays(1);
        }
        buildEmptyCellBottom(cells);
        final YearMonth extendedStart = getSkinnable().getExtendedStartMonth();
        final YearMonth extendedEnd = getSkinnable().getExtendedEndMonth();
        cells.forEach(cell -> {
            if (extendedStart.equals(yearMonth)) {
                cell.getStyleClass().add("first-month");
            } else if (extendedEnd.equals(yearMonth)) {
                cell.getStyleClass().add("last-month");
            } else {
                cell.getStyleClass().add("middle-month");
            }
        });
        for (int i = 0; i < cells.size(); i++) {
            Node node = cells.get(i);
            grid.add(node, colIndex, i + 1);
            if (node instanceof DateCell) {
                final Position position = new Position(colIndex, i);
                final DateCell dateCell = (DateCell) node;
                final LocalDate date = dateCell.getDate();
                cellMap.put(date, dateCell);
                positionToDateCellMap.put(position, dateCell);
                dateToPositionMap.put(date, position);
            }
        }
    }
    // date may be null for filler cells.
    private DateCell buildCell(LocalDate date) {
        DateCell cell = getSkinnable().getCellFactory().call(new MonthSheetView.DateParameter(getSkinnable(), date));
        cell.setMaxSize(Double.MAX_VALUE, Double.MAX_VALUE);
        cell.setOnMouseClicked(weakCellClickedHandler);
        return cell;
    }
    private Node buildHeaderCell(YearMonth yearMonth) {
        return getSkinnable().getHeaderCellFactory().
                call(new MonthSheetView.HeaderParameter(getSkinnable(), yearMonth));
    }
    // Appends empty cells so every column has the same number of rows.
    private void buildEmptyCellBottom(List<Node> cells) {
        int maximumCells = 31;
        if (getSkinnable().getWeekDayLayout().equals(WeekDayLayoutStrategy.ALIGNED)) {
            maximumCells = 37;
        }
        int cellsNumber = cells.size() - 1;
        if (cellsNumber < maximumCells) {
            while (cellsNumber < maximumCells) {
                cells.add(buildCell(null));
                cellsNumber++;
            }
        }
    }
    // Pushes the selection model state into every date cell.
    private void updateSelected() {
        List<LocalDate> selectedDates = getSkinnable().getDateSelectionModel().getSelectedDates();
        grid.getChildren().stream()
                .filter(child -> child instanceof DateCell)
                .map(child -> (DateCell) child)
                .forEach(cell -> cell.setSelected(selectedDates.contains(cell.getDate())));
    }
    // Marks the cell matching the control's "today" date (if shown).
    private void updateToday() {
        LocalDate today = getSkinnable().getToday();
        grid.getChildren().stream()
                .filter(child -> child instanceof DateCell)
                .map(child -> (DateCell) child)
                .forEach(cell -> cell.setToday(getSkinnable().isShowToday() && today.equals(cell.getDate())));
    }
    // Immutable (column, row) key used to map grid slots to date cells.
    private static final class Position {
        int column;
        int row;
        private Position(int column, int row) {
            this.column = column;
            this.row = row;
        }
        public int getColumn() {
            return column;
        }
        public int getRow() {
            return row;
        }
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Position position = (Position) o;
            if (column != position.column) {
                return false;
            }
            return row == position.row;
        }
        @Override
        public int hashCode() {
            int result = column;
            result = 31 * result + row;
            return result;
        }
    }
    // Mouse handler for every date cell; behavior depends on the control's
    // click behaviour (selection vs. detail popup).
    private final EventHandler<MouseEvent> cellClickedHandler = evt -> {
        if (!(evt.getSource() instanceof DateCell)) {
            return;
        }
        DateCell cell = (DateCell) evt.getSource();
        cell.requestFocus();
        LocalDate date = cell.getDate();
        if (date != null) {
            switch (getSkinnable().getClickBehaviour()) {
                case NONE:
                    break;
                case PERFORM_SELECTION:
                    DateSelectionModel selectionModel = getSkinnable().getDateSelectionModel();
                    if (selectionModel.isSelected(date)) {
                        /*
                         * Only deselect if the user uses the left / the primary button.
                         * The right button for the context menu will not deselect the cell.
                         */
                        if (evt.getButton() == MouseButton.PRIMARY) {
                            if (evt.isShortcutDown()) {
                                selectionModel.deselect(date);
                            } else {
                                selectionModel.clear();
                            }
                        }
                    } else {
                        if (evt.isShiftDown() && evt.getButton() == MouseButton.PRIMARY) {
                            selectionModel.selectUntil(date);
                        } else if (evt.isShortcutDown() && evt.getButton() == MouseButton.PRIMARY) {
                            selectionModel.select(date);
                        } else {
                            selectionModel.clearAndSelect(date);
                        }
                    }
                    break;
                case SHOW_DETAILS:
                    showDateDetails(date);
                    break;
            }
        }
    };
    // Opens the date-details callback anchored at the clicked cell's bounds.
    private void showDateDetails(LocalDate date) {
        DateCell cell = cellMap.get(date);
        Bounds bounds = cell.localToScreen(cell.getLayoutBounds());
        Callback<DateControl.DateDetailsParameter, Boolean> callback = getSkinnable().getDateDetailsCallback();
        DateControl.DateDetailsParameter param = new DateControl.DateDetailsParameter(null, getSkinnable(), cell, date, bounds.getMinX(), bounds.getMinY());
        callback.call(param);
    }
    // Weak wrapper so cells do not keep the skin alive.
    private final WeakEventHandler<MouseEvent> weakCellClickedHandler = new WeakEventHandler<>(cellClickedHandler);
    @Override
    protected void calendarChanged(Calendar calendar) {
        updateEntries("calendar changed");
    }
    @Override
    protected void entryCalendarChanged(CalendarEvent evt) {
        updateEntries("entry calendar changed");
    }
    @Override
    protected void entryIntervalChanged(CalendarEvent evt) {
        updateEntries("entry interval changed");
    }
    @Override
    protected void entryFullDayChanged(CalendarEvent evt) {
        updateEntries("entry full day flag changed");
    }
    @Override
    protected void entryRecurrenceRuleChanged(CalendarEvent evt) {
        updateEntries("entry recurrence rule changed");
    }
    // Reloads all entries for the visible range and refreshes every cell.
    private void updateEntries(String reason) {
        if (LoggingDomain.VIEW.isLoggable(Level.FINE)) {
            LoggingDomain.VIEW.fine("updating entries because: " + reason);
        }
        dataMap.clear();
        dataLoader.loadEntries(dataMap);
        for (LocalDate date : cellMap.keySet()) {
            List<Entry<?>> entries = dataMap.get(date);
            DateCell cell = cellMap.get(date);
            cell.updateEntries(entries == null ? Collections.emptyList() : entries);
        }
    }
    @Override
    public String getLoaderName() {
        return "Month Sheet View";
    }
    @Override
    public List<CalendarSource> getCalendarSources() {
        return getSkinnable().getCalendarSources();
    }
    @Override
    public Control getControl() {
        return getSkinnable();
    }
    @Override
    public LocalDate getLoadStartDate() {
        return getSkinnable().getExtendedStartMonth().atDay(1);
    }
    @Override
    public LocalDate getLoadEndDate() {
        return getSkinnable().getExtendedEndMonth().atEndOfMonth();
    }
    @Override
    public ZoneId getZoneId() {
        return ZoneId.systemDefault();
    }
    @Override
    public boolean isCalendarVisible(Calendar calendar) {
        return getSkinnable().isCalendarVisible(calendar);
    }
}
| 7,809 |
14,668 | <gh_stars>1000+
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/payments/content/can_make_payment_query_factory.h"
#include "base/memory/singleton.h"
#include "components/keyed_service/content/browser_context_dependency_manager.h"
#include "components/payments/core/can_make_payment_query.h"
namespace payments {
// Returns the lazily-created, process-wide singleton factory.
// static
CanMakePaymentQueryFactory* CanMakePaymentQueryFactory::GetInstance() {
  return base::Singleton<CanMakePaymentQueryFactory>::get();
}
// Returns the CanMakePaymentQuery service for |context|, creating it on
// demand (the |true| argument asks the keyed-service machinery to create).
CanMakePaymentQuery* CanMakePaymentQueryFactory::GetForContext(
    content::BrowserContext* context) {
  return static_cast<CanMakePaymentQuery*>(
      GetInstance()->GetServiceForBrowserContext(context, true));
}
// Registers this factory with the browser-context dependency graph under the
// name "CanMakePaymentQuery".
CanMakePaymentQueryFactory::CanMakePaymentQueryFactory()
    : BrowserContextKeyedServiceFactory(
          "CanMakePaymentQuery",
          BrowserContextDependencyManager::GetInstance()) {}
// Out-of-line destructor; the base class handles service teardown.
CanMakePaymentQueryFactory::~CanMakePaymentQueryFactory() {}
// Returning |context| unchanged (rather than redirecting to the original
// profile) means each context — including Incognito — gets its own service.
content::BrowserContext* CanMakePaymentQueryFactory::GetBrowserContextToUse(
    content::BrowserContext* context) const {
  // Create a separate instance of the service for the Incognito context.
  return context;
}
// Creates the per-context service instance; ownership passes to the
// keyed-service framework.
KeyedService* CanMakePaymentQueryFactory::BuildServiceInstanceFor(
    content::BrowserContext* context) const {
  return new CanMakePaymentQuery;
}
} // namespace payments
| 443 |
1,045 | <gh_stars>1000+
import org.luaj.vm2.Globals;
import org.luaj.vm2.LuaThread;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.lib.jse.JsePlatform;
/** Example that continually launches coroutines, and illustrates how to make
* sure the orphaned coroutines are cleaned up properly.
*
* Main points:
* <ul><li>Each coroutine consumes one Java Thread while active or reference anywhere</li>
* <li>All references to a coroutine must be dropped for the coroutine to be collected</li>
* <li>Garbage collection must be run regularly to remove weak references to lua threads</li>
* <li>LuaThread.thread_orphan_check_interval must be short enough to find orphaned references quickly</li>
* </ul>
*/
public class CollectingOrphanedCoroutines {
	// Script that launches coroutines over and over in a loop.
	// Garbage collection is done periodically to find and remove orphaned threads.
	// Coroutines yield out when they are done.
	// i = coroutines created, n = coroutine bodies executed.
	static String script =
			  "i,n = 0,0\n print(i)\n"
			+ "f = function() n=n+1; coroutine.yield(false) end\n"
			+ "while true do\n"
			+ "  local cor = coroutine.wrap(f)\n"
			+ "  cor()\n"
			+ "  i = i + 1\n"
			+ "  if i % 1000 == 0 then\n"
			+ "    collectgarbage()\n"
			+ "    print('threads:', i, 'executions:', n, collectgarbage('count'))\n"
			+ "  end\n"
			+ "end\n";
	public static void main(String[] args) throws InterruptedException {
		// This timer controls how often each Java thread wakes up and checks if
		// it has been orhaned or not. A large number here will produce a long
		// delay between orphaning and colleciton, and a small number here will
		// consumer resources polling for orphaned status if there are many threads.
		LuaThread.thread_orphan_check_interval = 500;
		// Should work with standard or debug globals.
		Globals globals = JsePlatform.standardGlobals();
		// Globals globals = JsePlatform.debugGlobals();
		// Should work with plain compiler or lua-to-Java compiler.
		// org.luaj.vm2.luajc.LuaJC.install(globals);;
		// Load and run the script, which launches coroutines over and over forever.
		// Note: runs forever by design; terminate the JVM to stop it.
		LuaValue chunk = globals.load(script, "main");
		chunk.call();
	}
}
| 723 |
679 | <gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _CHART2_DLG_INSERT_ERRORBARS_HXX
#define _CHART2_DLG_INSERT_ERRORBARS_HXX
#include <vcl/dialog.hxx>
#include <vcl/button.hxx>
#include <svl/itemset.hxx>
#include <memory>
#include <com/sun/star/frame/XModel.hpp>
#include "res_ErrorBar.hxx"
//.............................................................................
namespace chart
{
//.............................................................................
// Modal dialog for inserting X/Y error bars into a chart. Wraps the shared
// ErrorBarResources panel and standard OK/Cancel/Help buttons.
class InsertErrorBarsDialog : public ModalDialog
{
public:
    InsertErrorBarsDialog( Window* pParent, const SfxItemSet& rMyAttrs,
        const ::com::sun::star::uno::Reference<
            ::com::sun::star::chart2::XChartDocument > & xChartDocument,
        ErrorBarResources::tErrorBarType eType = ErrorBarResources::ERROR_BAR_Y );
    virtual ~InsertErrorBarsDialog();
    // Forwards the axis minor step width so the error-bar spin fields can
    // choose a sensible number of decimal places.
    void SetAxisMinorStepWidthForErrorBarDecimals( double fMinorStepWidth );
    static double getAxisMinorStepWidthForErrorBarDecimals(
        const ::com::sun::star::uno::Reference<
            ::com::sun::star::frame::XModel >& xChartModel,
        const ::com::sun::star::uno::Reference<
            ::com::sun::star::uno::XInterface >& xChartView,
        const ::rtl::OUString& rSelectedObjectCID );
    // Writes the dialog's current settings into rOutAttrs.
    void FillItemSet( SfxItemSet& rOutAttrs );
    virtual void DataChanged( const DataChangedEvent& rDCEvt );
private:
    const SfxItemSet & rInAttrs;
    OKButton aBtnOK;
    CancelButton aBtnCancel;
    HelpButton aBtnHelp;
    // NOTE(review): std::auto_ptr is deprecated; pre-C++11 codebase —
    // migrating to std::unique_ptr would change the member type; confirm.
    ::std::auto_ptr< ErrorBarResources > m_apErrorBarResources;
};
//.............................................................................
} //namespace chart
//.............................................................................
#endif
| 870 |
778 | /*
* Copyright (C) 2020-2021 Intel Corporation
*
* SPDX-License-Identifier: MIT
*
*/
#pragma once
extern "C" {
// Tracing-layer wrappers for the Level Zero module/kernel APIs. Each function
// mirrors the signature of its ze* counterpart exactly; the tracing layer
// installs these to intercept calls before forwarding to the driver.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleCreate_Tracing(ze_context_handle_t hContext,
                       ze_device_handle_t hDevice,
                       const ze_module_desc_t *desc,
                       ze_module_handle_t *phModule,
                       ze_module_build_log_handle_t *phBuildLog);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleDestroy_Tracing(ze_module_handle_t hModule);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleBuildLogDestroy_Tracing(ze_module_build_log_handle_t hModuleBuildLog);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleBuildLogGetString_Tracing(ze_module_build_log_handle_t hModuleBuildLog,
                                  size_t *pSize,
                                  char *pBuildLog);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleGetNativeBinary_Tracing(ze_module_handle_t hModule,
                                size_t *pSize,
                                uint8_t *pModuleNativeBinary);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleGetGlobalPointer_Tracing(ze_module_handle_t hModule,
                                 const char *pGlobalName,
                                 size_t *pSize,
                                 void **pptr);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleDynamicLink_Tracing(uint32_t numModules,
                            ze_module_handle_t *phModules,
                            ze_module_build_log_handle_t *phLinkLog);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleGetProperties_Tracing(ze_module_handle_t hModule,
                              ze_module_properties_t *pModuleProperties);
// Kernel lifecycle and configuration wrappers.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelCreate_Tracing(ze_module_handle_t hModule,
                       const ze_kernel_desc_t *desc,
                       ze_kernel_handle_t *phFunction);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelDestroy_Tracing(ze_kernel_handle_t hKernel);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleGetFunctionPointer_Tracing(ze_module_handle_t hModule,
                                   const char *pKernelName,
                                   void **pfnFunction);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSetGroupSize_Tracing(ze_kernel_handle_t hKernel,
                             uint32_t groupSizeX,
                             uint32_t groupSizeY,
                             uint32_t groupSizeZ);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSuggestGroupSize_Tracing(ze_kernel_handle_t hKernel,
                                 uint32_t globalSizeX,
                                 uint32_t globalSizeY,
                                 uint32_t globalSizeZ,
                                 uint32_t *groupSizeX,
                                 uint32_t *groupSizeY,
                                 uint32_t *groupSizeZ);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSetArgumentValue_Tracing(ze_kernel_handle_t hKernel,
                                 uint32_t argIndex,
                                 size_t argSize,
                                 const void *pArgValue);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelGetProperties_Tracing(ze_kernel_handle_t hKernel,
                              ze_kernel_properties_t *pKernelProperties);
// Command-list kernel-launch wrappers.
ZE_APIEXPORT ze_result_t ZE_APICALL
zeCommandListAppendLaunchKernel_Tracing(ze_command_list_handle_t hCommandList,
                                        ze_kernel_handle_t hKernel,
                                        const ze_group_count_t *pLaunchFuncArgs,
                                        ze_event_handle_t hSignalEvent,
                                        uint32_t numWaitEvents,
                                        ze_event_handle_t *phWaitEvents);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeCommandListAppendLaunchKernelIndirect_Tracing(ze_command_list_handle_t hCommandList,
                                                ze_kernel_handle_t hKernel,
                                                const ze_group_count_t *pLaunchArgumentsBuffer,
                                                ze_event_handle_t hSignalEvent,
                                                uint32_t numWaitEvents,
                                                ze_event_handle_t *phWaitEvents);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeCommandListAppendLaunchMultipleKernelsIndirect_Tracing(ze_command_list_handle_t hCommandList,
uint32_t numKernels,
ze_kernel_handle_t *phKernels,
const uint32_t *pCountBuffer,
const ze_group_count_t *pLaunchArgumentsBuffer,
ze_event_handle_t hSignalEvent,
uint32_t numWaitEvents,
ze_event_handle_t *phWaitEvents);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeCommandListAppendLaunchCooperativeKernel_Tracing(ze_command_list_handle_t hCommandList,
ze_kernel_handle_t hKernel,
const ze_group_count_t *pLaunchFuncArgs,
ze_event_handle_t hSignalEvent,
uint32_t numWaitEvents,
ze_event_handle_t *phWaitEvents);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeModuleGetKernelNames_Tracing(ze_module_handle_t hModule,
uint32_t *pCount,
const char **pNames);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSuggestMaxCooperativeGroupCount_Tracing(ze_kernel_handle_t hKernel,
uint32_t *totalGroupCount);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelGetIndirectAccess_Tracing(ze_kernel_handle_t hKernel,
ze_kernel_indirect_access_flags_t *pFlags);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelGetName_Tracing(ze_kernel_handle_t hKernel,
size_t *pSize,
char *pName);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelGetSourceAttributes_Tracing(ze_kernel_handle_t hKernel,
uint32_t *pSize,
char **pString);
ZE_APIEXPORT ze_result_t ZE_APICALL
zeKernelSetIndirectAccess_Tracing(ze_kernel_handle_t hKernel,
ze_kernel_indirect_access_flags_t flags);
}
| 4,005 |
1,444 | package org.mage.test.cards.single.iko;
import mage.constants.PhaseStep;
import mage.constants.Zone;
import org.junit.Test;
import org.mage.test.serverside.base.CardTestPlayerBase;
/**
* @author JayDi85
*/
public class GenesisUltimatumTest extends CardTestPlayerBase {

    /**
     * Genesis Ultimatum: look at the top five cards of the library, put any
     * number of permanent cards from among them onto the battlefield, the rest
     * into hand, then exile the spell itself.
     */
    @Test
    public void test_Playable() {
        // Start from empty hand/library so the looked-at cards are exactly the
        // four stacked below.
        removeAllCardsFromLibrary(playerA);
        removeAllCardsFromHand(playerA);
        // Look at the top five cards of your library. Put any number of permanent cards from among them onto
        // the battlefield and the rest into your hand. Exile Genesis Ultimatum.
        addCard(Zone.HAND, playerA, "Genesis Ultimatum"); // {G}{G}{U}{U}{U}{R}{R}
        addCard(Zone.BATTLEFIELD, playerA, "Forest", 2);
        addCard(Zone.BATTLEFIELD, playerA, "Island", 3);
        addCard(Zone.BATTLEFIELD, playerA, "Mountain", 2);
        //
        addCard(Zone.LIBRARY, playerA, "Grizzly Bears", 1);
        addCard(Zone.LIBRARY, playerA, "Alpha Tyrranax", 1);
        addCard(Zone.LIBRARY, playerA, "Kitesail Corsair", 1);
        addCard(Zone.LIBRARY, playerA, "Riverglide Pathway", 1); // mdf card
        // cast spell and put 3 cards to battle
        castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Genesis Ultimatum");
        setChoice(playerA, "Grizzly Bears^Kitesail Corsair^Riverglide Pathway");
        setStrictChooseMode(true);
        setStopAt(1, PhaseStep.END_TURN);
        execute();
        assertAllCommandsUsed();
        // The spell exiles itself; the three chosen permanents (including the
        // modal double-faced land) enter the battlefield, the unchosen card
        // goes to hand, leaving the library empty.
        assertExileCount(playerA, "Genesis Ultimatum", 1);
        assertPermanentCount(playerA, "Grizzly Bears", 1);
        assertPermanentCount(playerA, "Kitesail Corsair", 1);
        assertPermanentCount(playerA, "Riverglide Pathway", 1);
        assertHandCount(playerA, "Alpha Tyrranax", 1);
        assertLibraryCount(playerA, 0);
    }
}
1,338 | <reponame>Kirishikesan/haiku
/*
* Copyright 2003-2012 Haiku, Inc. All Rights Reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef _STDIO_POST_H_
#define _STDIO_POST_H_
/* "Private"/inline functions of our BeOS compatible stdio implementation */
/* ToDo: this is a work in progress to make our stdio
 * BeOS' GNU/libio (almost) binary compatible
 * We may not yet be compatible! */
#ifndef _STDIO_H_
#	error "This file must be included from stdio.h!"
#endif
#ifdef __cplusplus
extern "C" {
#endif
extern char _single_threaded;
	/* this boolean value is true (1) if there is only the main thread
	 * running - as soon as you spawn the first thread, it's set to
	 * false (0) */
#ifdef __cplusplus
}
#endif
/* Fast path: while the process is still single-threaded, skip stream locking
 * by calling the *_unlocked variants. Note that these macros are not
 * recursive: inside its own expansion the macro name is not re-expanded
 * (C99 6.10.3.4), so the false branch calls the real getc()/putc(). */
#define getc(stream) \
	(_single_threaded ? getc_unlocked(stream) : getc(stream))
#define putc(c, stream) \
	(_single_threaded ? putc_unlocked(c, stream) : putc(c, stream))
#endif	/* _STDIO_POST_H_ */
| 335 |
778 | <gh_stars>100-1000
package org.aion.zero.impl.sync;
import java.math.BigInteger;
import org.aion.p2p.INode;
/** Facilitates passing peer information to the API without relying on the p2p module interfaces. */
/**
 * Facilitates passing peer information to the API without relying on the p2p
 * module interfaces.
 *
 * <p>Pure delegation wrapper: every accessor forwards directly to the wrapped
 * {@link INode} without caching or transformation.
 */
public class NodeWrapper {

    /**
     * The wrapped peer. Declared {@code private final} so the reference can
     * neither be reassigned nor reached from outside the wrapper (the original
     * package-private mutable field leaked the p2p type to the whole package).
     */
    private final INode peer;

    /**
     * Creates a wrapper around the given p2p node.
     *
     * @param peer the node whose information is exposed; must not be null
     */
    public NodeWrapper(INode peer) {
        this.peer = peer;
    }

    /** @return the short form of the peer's identifier */
    public String getIdShort() {
        return peer.getIdShort();
    }

    /** @return the peer's full identifier */
    public byte[] getId() {
        return peer.getId();
    }

    /** @return the hash of the peer's identifier */
    public int getIdHash() {
        return peer.getIdHash();
    }

    /** @return the peer's reported binary version string */
    public String getBinaryVersion() {
        return peer.getBinaryVersion();
    }

    /** @return the best block number known to the peer */
    public long getBestBlockNumber() {
        return peer.getBestBlockNumber();
    }

    /** @return the peer's reported total difficulty */
    public BigInteger getTotalDifficulty() {
        return peer.getTotalDifficulty();
    }

    /** @return the peer's IP address in raw form */
    public byte[] getIp() {
        return peer.getIp();
    }

    /** @return the peer's IP address as a string */
    public String getIpStr() {
        return peer.getIpStr();
    }

    /** @return the peer's port */
    public int getPort() {
        return peer.getPort();
    }

    /** @return the peer's timestamp */
    public long getTimestamp() {
        return peer.getTimestamp();
    }
}
| 436 |
1,244 | <reponame>caiohamamura/libcxx
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <memory>
// template <class T, class D>
// struct hash<unique_ptr<T, D>>
// {
// typedef unique_ptr<T, D> argument_type;
// typedef size_t result_type;
// size_t operator()(const unique_ptr<T, D>& p) const;
// };
#include <memory>
#include <cassert>
int main()
{
    // The std::hash specialization for unique_ptr must hash the managed
    // pointer exactly like std::hash<int*> hashes the raw pointer.
    int* raw = new int;
    std::unique_ptr<int> owner(raw);
    std::hash<std::unique_ptr<int> > hasher;
    std::size_t result = hasher(owner);
    assert(result == std::hash<int*>()(raw));
}
| 303 |
3,519 | <filename>LibOVRProxy/OVR_CAPI.cpp<gh_stars>1000+
#define ovr_GetRenderDesc ovr_GetRenderDesc2
#define ovr_SubmitFrame ovr_SubmitFrame2
#include "OVR_CAPI.h"
#undef ovr_GetRenderDesc
#undef ovr_SubmitFrame
#include "Extras/OVR_Math.h"
// Pre-1.17 layout of the eye render descriptor: ends in a plain
// HmdToEyeOffset vector instead of the newer HmdToEyePose. Kept for binary
// compatibility with applications built against older SDKs.
typedef struct OVR_ALIGNAS(4) ovrEyeRenderDescPre117_ {
  ovrEyeType Eye;
  ovrFovPort Fov;
  ovrRecti DistortedViewport;
  ovrVector2f PixelsPerTanAngleAtCenter;
  ovrVector3f HmdToEyeOffset;
} ovrEyeRenderDescPre117;

// Compatibility shim: calls the current ovr_GetRenderDesc2 and repackages the
// result in the pre-1.17 struct.
OVR_PUBLIC_FUNCTION(ovrEyeRenderDescPre117) ovr_GetRenderDesc(ovrSession session, ovrEyeType eyeType, ovrFovPort fov)
{
	ovrEyeRenderDescPre117 legacy = {};
	ovrEyeRenderDesc desc = ovr_GetRenderDesc2(session, eyeType, fov);
	// Prefix-copies sizeof(legacy) bytes from the new struct; this assumes the
	// leading members of both layouts coincide (TODO confirm against OVR_CAPI.h).
	// The trailing pose/offset member is then fixed up explicitly.
	memcpy(&legacy, &desc, sizeof(ovrEyeRenderDescPre117));
	legacy.HmdToEyeOffset = desc.HmdToEyePose.Position;
	return legacy;
}
// Pre-1.17 view scale descriptor: per-eye translation vectors instead of the
// newer full per-eye poses.
typedef struct OVR_ALIGNAS(4) ovrViewScaleDescPre117_ {
  ovrVector3f HmdToEyeOffset[ovrEye_Count]; ///< Translation of each eye.
  float HmdSpaceToWorldScaleInMeters; ///< Ratio of viewer units to meter units.
} ovrViewScaleDescPre117;

// Compatibility shim: widens the legacy per-eye offsets into poses with an
// identity rotation and forwards everything to ovr_SubmitFrame2.
OVR_PUBLIC_FUNCTION(ovrResult) ovr_SubmitFrame(ovrSession session, long long frameIndex, const ovrViewScaleDescPre117* viewScaleDesc,
	ovrLayerHeader const * const * layerPtrList, unsigned int layerCount)
{
	if (viewScaleDesc)
	{
		ovrViewScaleDesc desc = {};
		for (int i = 0; i < ovrEye_Count; i++)
			desc.HmdToEyePose[i] = OVR::Posef(OVR::Quatf(), viewScaleDesc->HmdToEyeOffset[i]); // identity rotation + legacy offset
		desc.HmdSpaceToWorldScaleInMeters = viewScaleDesc->HmdSpaceToWorldScaleInMeters;
		return ovr_SubmitFrame2(session, frameIndex, &desc, layerPtrList, layerCount);
	}
	// No view scale supplied: pass through unchanged.
	return ovr_SubmitFrame2(session, frameIndex, nullptr, layerPtrList, layerCount);
}
// Opaque legacy type; never dereferenced by this proxy.
struct ovrSensorData_;
typedef struct ovrSensorData_ ovrSensorData;

// Compatibility entry point: the sensorData out-parameter is ignored; callers
// only get the plain tracking state.
OVR_PUBLIC_FUNCTION(ovrTrackingState) ovr_GetTrackingStateWithSensorData(ovrSession session, double absTime, ovrBool latencyMarker, ovrSensorData* sensorData)
{
	return ovr_GetTrackingState(session, absTime, latencyMarker);
}
#include "OVR_CAPI_Vk.h"
#undef ovr_SetSynchonizationQueueVk
// Compatibility alias for the historically misspelled export name
// ("Synchonization"); forwards to the correctly spelled function.
OVR_PUBLIC_FUNCTION(ovrResult) ovr_SetSynchonizationQueueVk(ovrSession session, VkQueue queue)
{
	return ovr_SetSynchronizationQueueVk(session, queue);
}
// ---------------------------------------------------------------------------
// Stubbed legacy entry points. Capability queries report no capabilities,
// tracking configuration trivially succeeds, and the desktop-window /
// hybrid-input surface is not implemented by this proxy.
// ---------------------------------------------------------------------------
OVR_PUBLIC_FUNCTION(unsigned int) ovr_GetEnabledCaps(ovrSession session)
{
	return 0; // no HMD caps are ever reported
}

OVR_PUBLIC_FUNCTION(void) ovr_SetEnabledCaps(ovrSession session, unsigned int hmdCaps)
{
	// intentional no-op: caps cannot be enabled through this proxy
}

OVR_PUBLIC_FUNCTION(unsigned int) ovr_GetTrackingCaps(ovrSession session)
{
	return 0; // no tracking caps are ever reported
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ConfigureTracking(
    ovrSession session,
    unsigned int requestedTrackingCaps,
    unsigned int requiredTrackingCaps)
{
	return ovrSuccess; // accepted but ignored
}

// Opaque legacy types, only ever passed by pointer/handle below.
struct ovrDesktopWindowDesc_;
typedef struct ovrDesktopWindowDesc_ ovrDesktopWindowDesc;
struct ovrHybridInputFocusState_;
typedef struct ovrHybridInputFocusState_ ovrHybridInputFocusState;
typedef uint32_t ovrDesktopWindowHandle;

// All remaining legacy calls report ovrError_Unsupported.
OVR_PUBLIC_FUNCTION(ovrResult)
ovr_InitDesktopWindow(
    ovrSession session,
    ovrDesktopWindowHandle* outWindowHandle)
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ShowDesktopWindow(
    ovrSession session,
    const ovrDesktopWindowDesc* windowDesc)
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_HideDesktopWindow(
    ovrSession session,
    ovrDesktopWindowHandle windowHandle)
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_GetHybridInputFocus(
    ovrSession session,
    ovrControllerType controllerType,
    ovrHybridInputFocusState* outState)
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ShowAvatarHands(
    ovrSession session,
    ovrBool showHands)
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ShowKeyboard()
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_EnableHybridRaycast()
{
	return ovrError_Unsupported;
}

OVR_PUBLIC_FUNCTION(ovrResult)
ovr_QueryDistortion()
{
	return ovrError_Unsupported;
}
| 1,501 |
303 | #include <stdio.h>
int print_hash_value = 1;
/* Hook for platform-specific setup before the generated test body runs;
 * intentionally empty on this platform. */
static void platform_main_begin(void)
{
}
/* Lookup table and running checksum for a standard reflected CRC-32
 * (polynomial 0xEDB88320); the context starts pre-inverted. */
static unsigned crc32_tab[256];
static unsigned crc32_context = 0xFFFFFFFFUL;
/* Fill crc32_tab with the per-byte CRC-32 remainders. */
static void
crc32_gentab (void)
{
	unsigned crc;
	unsigned poly = 0xEDB88320UL;
	int i, j;
	for (i = 0; i < 256; i++) {
		crc = i;
		for (j = 8; j > 0; j--) {
			if (crc & 1) {
				crc = (crc >> 1) ^ poly;
			} else {
				crc >>= 1;
			}
		}
		crc32_tab[i] = crc;
	}
}
/* Fold one byte into the running CRC-32 context (table-driven update). */
static void
crc32_byte (unsigned char b) {
	crc32_context =
		((crc32_context >> 8) & 0x00FFFFFF) ^
		crc32_tab[(crc32_context ^ b) & 0xFF];
}
extern int strcmp ( char *, char *);
/* Fold a 32-bit value into the CRC, least-significant byte first.
 * NOTE: despite the csmith-generated name, only 4 bytes are hashed here. */
static void
crc32_8bytes (unsigned val)
{
	crc32_byte ((val>>0) & 0xff);
	crc32_byte ((val>>8) & 0xff);
	crc32_byte ((val>>16) & 0xff);
	crc32_byte ((val>>24) & 0xff);
}
/* Hash one global's value into the checksum; when 'flag' is set, also print
 * the running checksum after each variable (per-variable tracing mode). */
static void
transparent_crc (unsigned val, char* vname, int flag)
{
	crc32_8bytes(val);
	if (flag) {
		printf("...checksum after hashing %s : %X\n", vname, crc32_context ^ 0xFFFFFFFFU);
	}
}
/* Print the final (inverted) checksum unless per-variable hashes were already
 * printed during the run. */
static void
platform_main_end (int x, int flag)
{
	if (!flag) printf ("checksum = %x\n", x);
}
static long __undefined;
void csmith_compute_hash(void);
void step_hash(int stmt_id);
/* Global state read and mutated by the randomly generated functions below;
 * the final checksum is computed over exactly these variables. */
static int g_2 = (-4L);
static int *g_40 = &g_2;
static int **g_39 = &g_40;
static int g_63 = 0x7BDE5156L;
static int g_95 = 0xE1600452L;
static unsigned g_176 = 4294967295UL;
static unsigned g_221 = 0xAC3D2276L;
static unsigned char g_277 = 0xF2L;
static int g_300 = 0x6D31335AL;
static unsigned short g_359 = 0x618FL;
static int g_367 = 0xDD253935L;
static unsigned short g_374 = 0xDBE7L;
static int g_403 = (-10L);
static unsigned short g_449 = 0xDF23L;
static unsigned char g_507 = 0x61L;
/* Forward declarations of the csmith-generated functions. */
static unsigned func_1(void);
static int func_7(int p_8, unsigned p_9, unsigned p_10, int p_11, unsigned p_12);
static int func_13(unsigned short p_14, unsigned short p_15, int p_16, int p_17, unsigned char p_18);
static signed char func_23(unsigned char p_24, int p_25);
static unsigned short func_31(int p_32, unsigned short p_33);
static int func_36(int p_37, int ** p_38);
static signed char func_41(int p_42, int p_43, unsigned char p_44, int ** p_45, short p_46);
static unsigned char func_55(int p_56, short p_57, short p_58, short p_59, int p_60);
static int * func_64(int ** p_65, int p_66, int p_67, int ** p_68, short p_69);
static int * func_75(int p_76, int ** p_77);
/* Entry point of the csmith-generated test body. NOTE: the for-loop resets
 * g_2 to 0 but its condition (g_2 > 24) is immediately false, so the loop
 * body never executes; only the reset of g_2 is observable. */
static unsigned func_1(void)
{
    unsigned l_6 = 7UL;
    unsigned char l_466 = 7UL;
    int *l_472 = &g_367;
    int ***l_479 = &g_39;
    int l_491 = 0x95FC5889L;
    step_hash(294);
    for (g_2 = 0; (g_2 > 24); g_2 += 1)
    {
        short l_28 = (-10L);
        int *l_431 = (void*)0;
        unsigned char l_456 = 0xD1L;
        int ***l_467 = &g_39;
        signed char l_511 = (-8L);
    }
    step_hash(295);
    /* (x != x) is always 0, so this XOR leaves **g_39 (i.e. g_2) unchanged. */
    (**g_39) ^= ((**l_479) != (**l_479));
    step_hash(296);
    return g_63;
}
/* csmith-generated: mutates g_63/g_367 (and g_277 via the loop counter)
 * through aliasing pointers; the exact expression soup is intentional. */
static int func_7(int p_8, unsigned p_9, unsigned p_10, int p_11, unsigned p_12)
{
    int l_420 = 0x0AC639BBL;
    int ***l_425 = (void*)0;
    step_hash(232);
    if (p_8)
    {
        int *l_408 = &g_63;
        step_hash(223);
        (*l_408) &= 0x88A73BF4L;
        step_hash(228);
        /* Returns on the first iteration, leaving g_277 == 0. */
        for (g_277 = 0; (g_277 <= 5); ++g_277)
        {
            signed char l_411 = 0xC0L;
            step_hash(227);
            return l_411;
        }
    }
    else
    {
        unsigned char l_423 = 0UL;
        int ***l_424 = &g_39;
        int *l_428 = &g_367;
        step_hash(230);
        (*l_428) ^= ((signed char)p_12 / (signed char)((short)(((short)((signed char)l_420 / (signed char)(((short)l_423 * (short)p_11) | (l_423 | ((+(l_424 == l_425)) < ((((unsigned short)g_2 * (unsigned short)((void*)0 == &p_8)) & g_221) || p_9))))) << (short)3) == g_374) >> (short)15));
        step_hash(231);
        (*l_428) |= (((void*)0 == &g_40) || ((g_277 | p_10) > (((void*)0 == l_425) ^ p_9)));
    }
    step_hash(233);
    return g_367;
}
static int func_13(unsigned short p_14, unsigned short p_15, int p_16, int p_17, unsigned char p_18)
{
signed char l_344 = 0x3FL;
int *l_392 = &g_63;
step_hash(219);
for (p_14 = 0; (p_14 < 25); p_14++)
{
int **l_345 = &g_40;
short l_346 = 0L;
int *l_354 = &g_2;
short l_400 = 0xE9ACL;
step_hash(174);
g_63 = (((signed char)(4294967295UL | (((short)(((unsigned)((short)((p_15 <= g_95) <= ((-8L) >= 0xAA05L)) - (short)l_344) - (unsigned)(*g_40)) == p_18) << (short)l_346) && 0x3855L)) * (signed char)(**l_345)) ^ l_344);
step_hash(217);
for (g_221 = 0; (g_221 >= 46); g_221++)
{
unsigned l_349 = 1UL;
int **l_380 = (void*)0;
int *l_393 = &g_95;
step_hash(216);
if (l_349)
{
step_hash(193);
for (l_349 = 6; (l_349 <= 19); l_349 += 6)
{
int l_365 = (-2L);
step_hash(186);
for (p_17 = 7; (p_17 == (-29)); p_17 -= 2)
{
step_hash(185);
(*l_345) = l_354;
}
step_hash(192);
for (g_277 = 0; (g_277 > 17); g_277++)
{
int *l_360 = &g_63;
int *l_366 = &g_367;
step_hash(190);
(*l_366) ^= ((g_359 & (((l_360 != (void*)0) >= func_23(l_349, ((signed char)p_18 * (signed char)g_300))) <= ((int)((l_349 || g_2) & g_2) + (int)0x1F6F16E7L))) != l_365);
step_hash(191);
if (p_15)
continue;
}
}
}
else
{
step_hash(195);
(*l_345) = &p_16;
step_hash(215);
for (p_17 = 0; (p_17 >= 11); p_17++)
{
unsigned char l_372 = 0x82L;
int *l_383 = (void*)0;
step_hash(205);
for (g_367 = 0; (g_367 != 9); ++g_367)
{
unsigned l_381 = 0UL;
int l_382 = 0x43FA7BD6L;
step_hash(202);
l_382 = ((l_372 & 0xDDL) == (-(unsigned char)(g_63 >= 0x1894L)));
step_hash(203);
(**l_345) = (((func_55(g_2, g_367, g_300, l_344, g_300) || ((void*)0 != l_383)) != (((signed char)(p_15 ^ l_381) * (signed char)p_16) < 0xE245L)) >= p_16);
step_hash(204);
return l_344;
}
step_hash(206);
(*l_392) |= (((short)((unsigned char)((p_18 > ((short)(**l_345) - (short)g_221)) <= (l_392 == l_393)) + (unsigned char)g_367) + (short)((unsigned)((void*)0 != &p_16) - (unsigned)((signed char)p_18 / (signed char)1L))) != g_2);
step_hash(207);
(*l_345) = l_392;
step_hash(214);
if (g_403)
{
step_hash(209);
(*l_393) = (((unsigned char)(7L > ((int)(**l_345) / (int)(*l_392))) % (unsigned char)(*l_392)) && p_16);
step_hash(210);
(*l_393) = p_15;
step_hash(211);
(*l_345) = &p_16;
}
else
{
step_hash(213);
(*l_345) = &p_16;
}
}
}
}
step_hash(218);
(*l_392) |= (-2L);
}
step_hash(220);
return (*l_392);
}
/* csmith-generated: rewires g_40 through l_313 and folds p_24 into
 * g_63/g_95; p_25 is unused. Returns the (possibly updated) g_63. */
static signed char func_23(unsigned char p_24, int p_25)
{
    int **l_313 = &g_40;
    int *l_329 = &g_63;
    step_hash(165);
    if ((*g_40))
    {
        unsigned l_316 = 7UL;
        int *l_317 = &g_63;
        step_hash(154);
        (*l_317) = ((unsigned char)l_316 >> (unsigned char)2);
    }
    else
    {
        int *l_318 = &g_63;
        unsigned char l_323 = 0x56L;
        step_hash(164);
        /* &g_40 is never null, so only the first branch can run. */
        if ((&g_40 != (void*)0))
        {
            step_hash(157);
            (*l_313) = &g_63;
            step_hash(158);
            (*l_313) = l_318;
        }
        else
        {
            int l_319 = (-1L);
            int **l_328 = &l_318;
            step_hash(160);
            (*l_318) &= (p_24 < 65534UL);
            step_hash(161);
            (*l_313) = l_318;
            step_hash(162);
            (*l_318) = l_319;
            step_hash(163);
            (*l_328) = (*l_313);
        }
    }
    step_hash(166);
    (*l_313) = (*l_313);
    step_hash(167);
    (*l_329) &= 8L;
    step_hash(168);
    g_95 &= ((*l_329) || ((unsigned char)0xE9L << (unsigned char)(((*l_329) != p_24) != (g_63 | ((unsigned char)255UL >> (unsigned char)(&l_313 == (void*)0))))));
    step_hash(169);
    return g_63;
}
static unsigned short func_31(int p_32, unsigned short p_33)
{
int *l_35 = (void*)0;
int **l_34 = &l_35;
unsigned l_226 = 0x5A92DE46L;
signed char l_309 = 0L;
int l_312 = 0xEA69420AL;
step_hash(5);
(*l_34) = (void*)0;
step_hash(145);
if (func_36(g_2, g_39))
{
unsigned char l_196 = 0x45L;
step_hash(99);
if (((unsigned char)(p_33 && g_95) / (unsigned char)l_196))
{
int l_199 = 0xB0BF173BL;
step_hash(90);
for (g_176 = 0; (g_176 >= 25); g_176 += 1)
{
step_hash(89);
return l_199;
}
step_hash(91);
(*l_34) = (void*)0;
}
else
{
int **l_209 = &g_40;
step_hash(97);
for (l_196 = 0; (l_196 > 51); ++l_196)
{
unsigned l_206 = 4294967295UL;
int l_210 = (-4L);
step_hash(96);
l_210 = ((((unsigned short)g_176 / (unsigned short)(g_176 || (~((signed char)(l_206 || ((short)(-1L) >> (short)(&g_40 != l_209))) * (signed char)(l_209 == &l_35))))) ^ p_32) >= 0x12L);
}
step_hash(98);
return g_95;
}
}
else
{
int ***l_211 = &l_34;
int *l_212 = &g_95;
short l_220 = 1L;
step_hash(101);
(*l_211) = &l_35;
step_hash(102);
(*l_212) = 0x001B998DL;
step_hash(144);
for (g_95 = 0; (g_95 >= (-3)); g_95 -= 8)
{
int *l_215 = &g_63;
step_hash(106);
g_63 |= (&l_34 == &l_34);
step_hash(107);
(*l_215) &= (&g_39 != &g_39);
step_hash(108);
(*l_215) &= (g_2 < g_95);
}
}
step_hash(150);
if (p_33)
{
int l_305 = 0L;
int **l_306 = &l_35;
int l_310 = 0x284332C9L;
step_hash(147);
g_95 |= ((unsigned)((short)p_32 / (short)0xD74CL) / (unsigned)l_310);
}
else
{
unsigned l_311 = 2UL;
step_hash(149);
l_312 ^= (((((p_32 || ((((5L > l_311) || (&l_35 == &g_40)) >= l_311) & l_311)) && g_221) == l_311) | 0xBDD3L) < 1L);
}
step_hash(151);
return p_33;
}
static int func_36(int p_37, int ** p_38)
{
unsigned l_61 = 0UL;
int *l_107 = &g_95;
unsigned short l_167 = 0UL;
int *l_177 = &g_95;
unsigned l_185 = 4294967293UL;
step_hash(81);
if (((func_41((((signed char)((void*)0 != p_38) << (signed char)((unsigned short)((signed char)((unsigned char)func_55(g_2, p_37, l_61, p_37, (l_61 <= (0xC1C73B01L >= (0xF851L && l_61)))) >> (unsigned char)g_2) * (signed char)l_61) + (unsigned short)0x1C25L)) & l_61), g_2, g_2, &g_40, p_37) < 0x2FL) != l_61))
{
int **l_88 = &g_40;
int *l_89 = &g_63;
step_hash(25);
for (p_37 = 0; (p_37 >= (-19)); p_37--)
{
step_hash(24);
l_88 = p_38;
}
step_hash(26);
(*l_89) = l_61;
}
else
{
signed char l_92 = 0xB2L;
int **l_96 = &g_40;
unsigned char l_113 = 0x49L;
int *l_146 = &g_2;
int l_148 = (-3L);
int l_155 = 3L;
step_hash(28);
(*p_38) = (*g_39);
step_hash(29);
g_95 &= func_55((p_37 || func_41((**p_38), (((unsigned short)((p_37 >= l_92) || l_61) - (unsigned short)(0xF1L <= ((unsigned short)((-1L) & (&g_40 != (void*)0)) >> (unsigned short)g_2))) & (**p_38)), l_92, &g_40, p_37)), p_37, p_37, g_2, g_2);
step_hash(80);
if (((l_92 == ((g_63 & p_37) > l_61)) == g_63))
{
signed char l_116 = (-1L);
int *l_133 = (void*)0;
int l_156 = (-10L);
unsigned l_172 = 4294967295UL;
signed char l_175 = 0x41L;
step_hash(68);
if (((void*)0 == l_96))
{
unsigned char l_110 = 255UL;
int l_118 = (-1L);
int *l_126 = &g_63;
int l_130 = 0x845370BFL;
step_hash(62);
if (((signed char)(-8L) - (signed char)((unsigned short)((l_61 >= g_95) == p_37) + (unsigned short)((unsigned short)((short)func_55((g_2 != ((unsigned)((void*)0 != l_107) / (unsigned)func_41((**p_38), func_55(((signed char)(0xECD02AC2L == 4294967295UL) << (signed char)3), p_37, p_37, p_37, (**l_96)), g_95, p_38, g_95))), g_2, g_2, p_37, l_110) % (short)5L) / (unsigned short)0x812EL))))
{
unsigned l_117 = 0x86D35063L;
step_hash(33);
(*l_107) ^= (-1L);
step_hash(40);
if ((l_110 == ((**l_96) <= (l_113 != (!func_55(g_95, (func_55(func_55((**l_96), p_37, g_2, g_95, ((func_55((((*g_39) != (*g_39)) | 0UL), l_116, g_95, l_117, l_116) > g_95) > p_37)), g_95, (**l_96), g_2, l_118) >= (**p_38)), g_95, p_37, (**l_96)))))))
{
step_hash(35);
(*l_107) = (l_117 || (l_116 & (l_118 <= (func_41(l_117, ((short)((*g_39) == (void*)0) >> (short)(+p_37)), p_37, p_38, (**l_96)) || p_37))));
}
else
{
step_hash(37);
(*l_107) |= (*g_40);
step_hash(38);
(*g_39) = (*g_39);
step_hash(39);
(*l_107) = 0xB11E678BL;
}
}
else
{
int *l_123 = &g_95;
unsigned short l_129 = 0x92F2L;
step_hash(42);
(*p_38) = func_75(func_41((**l_96), ((0x3AL ^ p_37) || (((short)func_55((*l_107), (l_123 != (*g_39)), ((short)(func_55(((l_126 == (void*)0) == ((unsigned short)(+(+func_55((*l_126), (*l_123), p_37, p_37, l_129))) + (unsigned short)g_95)), g_2, p_37, g_95, p_37) ^ 1UL) >> (short)p_37), l_116, g_2) % (short)p_37) || g_63)), l_130, p_38, g_2), g_39);
step_hash(49);
for (l_92 = (-4); (l_92 != (-13)); --l_92)
{
}
step_hash(55);
for (l_110 = (-10); (l_110 > 7); ++l_110)
{
step_hash(53);
(*l_123) |= (func_55((p_38 == (void*)0), (**l_96), p_37, p_37, g_2) & (((l_96 != l_96) | 65529UL) == 0xC8L));
step_hash(54);
if ((*l_123))
break;
}
step_hash(61);
for (g_95 = 24; (g_95 != 20); g_95 -= 9)
{
int **l_147 = (void*)0;
step_hash(59);
(*l_126) ^= l_148;
step_hash(60);
(*g_39) = func_64(&g_40, ((((((unsigned)p_37 - (unsigned)0xC86C8505L) == (p_37 | (g_95 >= (7UL != g_63)))) && ((signed char)0x13L / (signed char)(-1L))) != (*l_107)) ^ p_37), (**g_39), &g_40, (*l_123));
}
}
step_hash(63);
(*l_107) |= (**g_39);
step_hash(64);
l_133 = func_75(p_37, &g_40);
step_hash(65);
l_156 |= func_55(func_55(g_95, g_2, (*l_126), (p_37 < ((signed char)(g_2 <= g_95) >> (signed char)(((void*)0 == &l_146) | (*l_133)))), g_95), p_37, l_155, p_37, p_37);
}
else
{
step_hash(67);
(*l_107) = (((signed char)((**l_96) || ((short)0x2ABAL >> (short)14)) * (signed char)p_37) && (p_38 != &g_40));
}
step_hash(69);
(*l_107) = (((short)p_37 * (short)((((signed char)(((int)l_167 - (int)(*l_107)) > (g_95 || ((unsigned char)(((int)(*g_40) - (int)(*g_40)) < g_63) * (unsigned char)((l_172 > ((signed char)(**l_96) % (signed char)0x6DL)) ^ l_175)))) << (signed char)g_2) >= g_176) || p_37)) && g_2);
step_hash(70);
(*l_107) = (p_37 ^ func_55(p_37, (*l_107), (*l_146), g_95, g_95));
}
else
{
step_hash(72);
(*g_39) = l_177;
step_hash(73);
(*g_39) = (*l_96);
step_hash(78);
for (l_148 = 0; (l_148 < (-4)); l_148--)
{
int ***l_180 = &g_39;
step_hash(77);
(*l_180) = (void*)0;
}
step_hash(79);
(*l_96) = (*p_38);
}
}
step_hash(82);
(*l_177) = (*g_40);
step_hash(83);
return (*g_40);
}
/* csmith-generated: calls func_64 (whose side effects on *&g_40 are the
 * point); the returned pointer l_85 is discarded. Returns g_2. */
static signed char func_41(int p_42, int p_43, unsigned char p_44, int ** p_45, short p_46)
{
    int l_74 = 0x07675E6DL;
    int *l_85 = (void*)0;
    step_hash(18);
    l_85 = func_64(p_45, (*g_40), (((signed char)((void*)0 == p_45) + (signed char)(g_2 != ((unsigned char)g_2 / (unsigned char)func_55(p_42, p_43, g_2, l_74, p_44)))) >= (*g_40)), &g_40, p_42);
    step_hash(19);
    return g_2;
}
/* csmith-generated: ignores all parameters, stores a constant into g_63
 * through a pointer, and returns g_2 (truncated to unsigned char). */
static unsigned char func_55(int p_56, short p_57, short p_58, short p_59, int p_60)
{
    int *l_62 = &g_63;
    step_hash(8);
    (*l_62) = 0x8592840CL;
    step_hash(9);
    return g_2;
}
/* csmith-generated: stores func_75's result through p_68 and returns it;
 * p_66/p_67/p_69 are unused. */
static int * func_64(int ** p_65, int p_66, int p_67, int ** p_68, short p_69)
{
    step_hash(16);
    (*p_68) = func_75((((*p_65) != (void*)0) & 0x02L), p_68);
    step_hash(17);
    return (*p_68);
}
/* csmith-generated: ORs an expression into g_63 and returns *g_39 (the
 * current g_40). The write to the local copy of p_77 has no caller-visible
 * effect. */
static int * func_75(int p_76, int ** p_77)
{
    unsigned l_82 = 4294967295UL;
    int **l_83 = &g_40;
    int *l_84 = &g_63;
    step_hash(13);
    (*l_84) |= (g_2 <= ((unsigned short)(p_76 && ((short)g_2 + (short)(l_82 | (((void*)0 != &g_40) != 0x3044B131L)))) % (unsigned short)((p_76 < (l_83 != l_83)) | (**l_83))));
    step_hash(14);
    p_77 = &l_84;
    step_hash(15);
    return (*g_39);
}
/* Fold every checksummed global into the running CRC, in a fixed order, so
 * any divergence in program state yields a different checksum. */
void csmith_compute_hash(void)
{
    transparent_crc(g_2, "g_2", print_hash_value);
    transparent_crc(g_63, "g_63", print_hash_value);
    transparent_crc(g_95, "g_95", print_hash_value);
    transparent_crc(g_176, "g_176", print_hash_value);
    transparent_crc(g_221, "g_221", print_hash_value);
    transparent_crc(g_277, "g_277", print_hash_value);
    transparent_crc(g_300, "g_300", print_hash_value);
    transparent_crc(g_359, "g_359", print_hash_value);
    transparent_crc(g_367, "g_367", print_hash_value);
    transparent_crc(g_374, "g_374", print_hash_value);
    transparent_crc(g_403, "g_403", print_hash_value);
    transparent_crc(g_449, "g_449", print_hash_value);
    transparent_crc(g_507, "g_507", print_hash_value);
}
/* Per-statement tracing: print the checksum of all globals before the given
 * statement id, then reset the CRC context and regenerate the table so the
 * next step starts fresh. */
void step_hash(int stmt_id)
{
    int i = 0;
    csmith_compute_hash();
    printf("before stmt(%d): checksum = %X\n", stmt_id, crc32_context ^ 0xFFFFFFFFUL);
    crc32_context = 0xFFFFFFFFUL;
    for (i = 0; i < 256; i++) {
        crc32_tab[i] = 0;
    }
    crc32_gentab();
}
int main (void)
{
    /* Shadows the file-scope print_hash_value (which is 1): the final
     * checksum is printed by platform_main_end, not per-variable. */
    int print_hash_value = 0;
    platform_main_begin();
    crc32_gentab();
    func_1();
    csmith_compute_hash();
    platform_main_end(crc32_context ^ 0xFFFFFFFFUL, print_hash_value);
    return 0;
}
| 11,795 |
611 | <filename>tensorflow_gnn/graph/schema_utils.py
"""Misc graph tensor utilities."""
from typing import Any, Iterator, Optional, Text, Tuple, Union
import tensorflow as tf
from tensorflow_gnn.graph import adjacency
from tensorflow_gnn.graph import graph_constants as gc
from tensorflow_gnn.graph import graph_tensor as gt
import tensorflow_gnn.proto.graph_schema_pb2 as schema_pb2
from google.protobuf import text_format
def parse_schema(schema_text: str) -> schema_pb2.GraphSchema:
  """Parse a schema from text-formatted protos.

  Args:
    schema_text: A string containing a text-formatted protocol buffer
      rendition of a `GraphSchema` message.

  Returns:
    A `GraphSchema` instance.
  """
  message = schema_pb2.GraphSchema()
  return text_format.Parse(schema_text, message)
def read_schema(filename: str) -> schema_pb2.GraphSchema:
  """Read a proto schema from a file with text-formatted contents.

  Args:
    filename: A string, the path to a file containing a text-formatted protocol
      buffer rendition of a `GraphSchema` message.

  Returns:
    A `GraphSchema` instance.
  """
  with tf.io.gfile.GFile(filename) as infile:
    contents = infile.read()
  return text_format.Parse(contents, schema_pb2.GraphSchema())
def write_schema(schema: schema_pb2.GraphSchema, filename: str):
  """Write a `GraphSchema` to a text-formatted proto file.

  Args:
    schema: A `GraphSchema` instance to write out.
    filename: A string, the path to a file to render a text-formatted
      rendition of the `GraphSchema` message to.
  """
  rendered = text_format.MessageToString(schema)
  with tf.io.gfile.GFile(filename, 'w') as schema_file:
    schema_file.write(rendered)
def create_graph_spec_from_schema_pb(
    schema: schema_pb2.GraphSchema,
    indices_dtype: tf.dtypes.DType = gc.default_indices_dtype
) -> gt.GraphTensorSpec:
  """Converts a graph schema proto message to a scalar GraphTensorSpec.

  A `GraphSchema` message contains shape information in a serializable format.
  The `GraphTensorSpec` is a runtime object fulfilling the type spec
  requirements, that accompanies each `GraphTensor` instance and fulfills much
  of the same goal. This function converts the proto to the corresponding type
  spec.

  Args:
    schema: An instance of the graph schema proto message.
    indices_dtype: A `tf.dtypes.DType` for GraphTensor edge set source and
      target indices, node and edge sets sizes.

  Returns:
    A `GraphTensorSpec` specification for the scalar graph tensor (of rank 0).
  """
  # Per-set size vectors are rank-1 of length 1 (scalar graph); adjacency
  # indices are variable-length rank-1 vectors.
  size_value_spec = tf.TensorSpec(shape=(1,), dtype=indices_dtype)
  index_spec = tf.TensorSpec(shape=(None,), dtype=indices_dtype)
  context_spec = gt.ContextSpec.from_field_specs(
      features_spec=_create_fields_spec_from_schema(schema.context.features, 1,
                                                    indices_dtype),
      shape=tf.TensorShape([]),
      indices_dtype=indices_dtype)
  node_sets_spec = {
      set_name: gt.NodeSetSpec.from_field_specs(
          sizes_spec=size_value_spec,
          features_spec=_create_fields_spec_from_schema(node.features, None,
                                                        indices_dtype))
      for set_name, node in schema.node_sets.items()
  }
  edge_sets_spec = {
      set_name: gt.EdgeSetSpec.from_field_specs(
          sizes_spec=size_value_spec,
          adjacency_spec=adjacency.AdjacencySpec.from_incident_node_sets(
              source_node_set=edge.source,
              target_node_set=edge.target,
              index_spec=index_spec),
          features_spec=_create_fields_spec_from_schema(edge.features, None,
                                                        indices_dtype))
      for set_name, edge in schema.edge_sets.items()
  }
  return gt.GraphTensorSpec.from_piece_specs(
      context_spec=context_spec,
      node_sets_spec=node_sets_spec,
      edge_sets_spec=edge_sets_spec)
def _is_ragged_dim(dim) -> bool:
if dim.size < -1:
raise ValueError(f'Dimension size must be >= -1, got {dim}.')
return dim.size == -1
def _get_ragged_rank(feature: schema_pb2.Feature) -> int:
  """Counts the ragged (size -1) axes in the feature's declared shape."""
  ragged_flags = [_is_ragged_dim(dim) for dim in feature.shape.dim]
  return sum(ragged_flags)
def _is_ragged(feature: schema_pb2.Feature) -> bool:
  """Returns whether any axis of the feature's declared shape is ragged.

  The return annotation was previously `int`, but `any()` always yields a
  `bool`; the annotation is corrected here (backward compatible, since
  `bool` is a subtype of `int`).
  """
  return any(_is_ragged_dim(dim) for dim in feature.shape.dim)
def _create_fields_spec_from_schema(
    features_schema: Any, dim0: Optional[int],
    indices_dtype: tf.dtypes.DType) -> gc.FieldsSpec:
  """Converts a features schema to a fields specification."""

  def _get_shape(feature: schema_pb2.Feature) -> tf.TensorShape:
    # A proto dim of size -1 means "unknown"; TensorShape encodes that as None.
    inner_dims = [None if dim.size == -1 else dim.size
                  for dim in feature.shape.dim]
    return tf.TensorShape([dim0] + inner_dims)

  result = {}
  for fname, feature in features_schema.items():
    shape = _get_shape(feature)
    dtype = tf.dtypes.as_dtype(feature.dtype)
    if _is_ragged(feature):
      spec = tf.RaggedTensorSpec(
          shape=shape,
          dtype=dtype,
          ragged_rank=_get_ragged_rank(feature),
          row_splits_dtype=indices_dtype)
    else:
      spec = tf.TensorSpec(shape=shape, dtype=dtype)
    result[fname] = spec
  return result
def iter_sets(
    schema: Union[schema_pb2.GraphSchema, gt.GraphTensor]
) -> Iterator[Tuple[str, str, Any]]:
  """Utility function to iterate over all the sets present in a graph schema.

  This function iterates over the context set, each of the node sets, and
  finally each of the edge sets.

  Args:
    schema: An instance of a `GraphSchema` proto message.

  Yields:
    Triplets of (set-type, set-name, features) where

    * set-type: A type of set, which is either of "context", "nodes" or "edges".
    * set-name: A string, the name of the set.
    * features: A dict of feature-name to feature-value.
  """
  # Only a GraphSchema proto supports HasField(); for any other accepted
  # input (e.g. a GraphTensor) the context is yielded unconditionally.
  if (not isinstance(schema, schema_pb2.GraphSchema) or
      schema.HasField('context')):
    yield (gc.CONTEXT, '', schema.context)
  for set_name, set_ in schema.node_sets.items():
    yield (gc.NODES, set_name, set_)
  for set_name, set_ in schema.edge_sets.items():
    yield (gc.EDGES, set_name, set_)
def iter_features(
    schema: Union[schema_pb2.GraphSchema, gt.GraphTensor]
) -> Iterator[Tuple[Text, Text, Text, Union[schema_pb2.Feature, gt.Field]]]:
  """Utility function to iterate over the features of a graph schema.

  This function iterates over all the feature values of each of the context set,
  each of the node sets, and each of the edge sets.

  Args:
    schema: An instance of a `GraphSchema` proto message or a `GraphTensor`.

  Yields:
    Tuples of (set-type, set-name, feature-name, feature-value) where

    * set-type: A type of set, which is either of "context", "nodes" or "edges".
    * set-name: A string, the name of the set.
    * feature-name: A string, the name of the feature in the set.
    * feature-value: A potentially ragged tensor (either a `tf.Tensor` or a
      `tf.RaggedTensor`).
  """
  # Mirror iter_sets(): only a GraphSchema proto can report an absent context
  # set via HasField(); the declared input type also admits a GraphTensor.
  if (not isinstance(schema, schema_pb2.GraphSchema) or
      schema.HasField('context')):
    for feature_name, feature in schema.context.features.items():
      yield (gc.CONTEXT, '', feature_name, feature)
  for set_name, set_ in schema.node_sets.items():
    for feature_name, feature in set_.features.items():
      yield (gc.NODES, set_name, feature_name, feature)
  for set_name, set_ in schema.edge_sets.items():
    for feature_name, feature in set_.features.items():
      yield (gc.EDGES, set_name, feature_name, feature)
443 | #pragma once
#include "inexor/vulkan-renderer/io/octree_parser.hpp"
#include <cstdint>
#include <memory>
#include <utility>
// forward declaration
namespace inexor::vulkan_renderer::world {
class Cube;
} // namespace inexor::vulkan_renderer::world
// forward declaration
namespace inexor::vulkan_renderer::io {
class ByteStream;
} // namespace inexor::vulkan_renderer::io
namespace inexor::vulkan_renderer::io {
/// Parser for the NXOC octree binary format.
/// Public serialize/deserialize dispatch to version-specific template
/// implementations.
class NXOCParser : public OctreeParser {
private:
    /// Most recent file format version this parser can produce/consume.
    static constexpr std::uint32_t LATEST_VERSION{0};

    /// Specific version serialization.
    template <std::size_t version>
    [[nodiscard]] ByteStream serialize_impl(std::shared_ptr<const world::Cube> cube);

    /// Specific version deserialization.
    template <std::size_t version>
    [[nodiscard]] std::shared_ptr<world::Cube> deserialize_impl(const ByteStream &stream);

public:
    /// Serialization of an octree.
    [[nodiscard]] ByteStream serialize(std::shared_ptr<const world::Cube> cube, std::uint32_t version) final;

    /// Deserialization of an octree.
    [[nodiscard]] std::shared_ptr<world::Cube> deserialize(const ByteStream &stream) final;
};
} // namespace inexor::vulkan_renderer::io
| 402 |
388 | <reponame>kysko/music-synthesizer-for-android
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.levien.synthesizer.core.midi;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* MessageOutputProcessor listens for midi events and writes calls onMessage() with the binary
* form of each one.
* @see MidiListener
*/
public abstract class MessageOutputProcessor implements MidiListener {
  /**
   * Creates a new MessageOutputProcessor.
   */
  public MessageOutputProcessor() {
    buffer_ = new ByteArrayOutputStream();
  }

  /**
   * Called for each new midi event.
   * @param message - The midi message in bytes.
   */
  protected abstract void onMessage(byte[] message);

  /**
   * Internal function called to flush the internal byte buffer to onMessage().
   */
  private void notifyMessage() {
    onMessage(buffer_.toByteArray());
    buffer_.reset();
  }

  //
  // The rest of these methods are just midi listener events.
  // Channel voice messages encode the channel in the low nibble of the
  // status byte.
  //

  public void onNoteOff(int channel, int note, int velocity) {
    notify3(0x80 | channel, note, velocity);
  }

  public void onNoteOn(int channel, int note, int velocity) {
    notify3(0x90 | channel, note, velocity);
  }

  public void onNoteAftertouch(int channel, int note, int aftertouch) {
    notify3(0xA0 | channel, note, aftertouch);
  }

  public void onController(int channel, int control, int value) {
    notify3(0xB0 | channel, control, value);
  }

  public void onProgramChange(int channel, int program) {
    notify2(0xC0 | channel, program);
  }

  public void onChannelAftertouch(int channel, int aftertouch) {
    notify2(0xD0 | channel, aftertouch);
  }

  public void onPitchBend(int channel, int value) {
    // The 14-bit bend value is split into two 7-bit data bytes, LSB first.
    notify3(0xE0 | channel, value & 0x7F, (value >> 7) & 0x7F);
  }

  // NOTE: the two real-time events below only append to the internal buffer;
  // the byte is emitted together with the next flushed message.
  public void onTimingClock() {
    buffer_.write(0xF8);
  }

  public void onActiveSensing() {
    buffer_.write(0xFE);
  }

  public void onSequenceNumber(int sequenceNumber) {
    try {
      buffer_.write(0xFF);
      buffer_.write(0x00);
      MidiUtil.writeVarInt(buffer_, 2);
      MidiUtil.writeWord(buffer_, sequenceNumber);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public void onText(byte[] text) {
    notifyMetaBytes(0x01, text);
  }

  public void onCopyrightNotice(byte[] text) {
    notifyMetaBytes(0x02, text);
  }

  public void onSequenceName(byte[] text) {
    notifyMetaBytes(0x03, text);
  }

  public void onInstrumentName(byte[] text) {
    notifyMetaBytes(0x04, text);
  }

  public void onLyrics(byte[] text) {
    notifyMetaBytes(0x05, text);
  }

  public void onMarker(byte[] text) {
    // Meta event type 0x06 is "Marker" in the Standard MIDI File format;
    // 0x05 (used by onLyrics above) is "Lyric". The previous code reused
    // 0x05 here, mislabeling markers as lyrics.
    notifyMetaBytes(0x06, text);
  }

  public void onCuePoint(byte[] text) {
    notifyMetaBytes(0x07, text);
  }

  public void onChannelPrefix(int channel) {
    try {
      buffer_.write(0xFF);
      buffer_.write(0x20);
      MidiUtil.writeVarInt(buffer_, 1);
      buffer_.write(channel);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public void onPort(byte[] data) {
    notifyMetaBytes(0x21, data);
  }

  public void onEndOfTrack() {
    notify3(0xFF, 0x2F, 0x00);
  }

  public void onSetTempo(int microsecondsPerQuarterNote) {
    try {
      // Tempo is a 3-byte big-endian quantity: high 16 bits via writeWord,
      // low byte appended directly.
      buffer_.write(0xFF);
      buffer_.write(0x51);
      MidiUtil.writeVarInt(buffer_, 3);
      MidiUtil.writeWord(buffer_, (microsecondsPerQuarterNote >> 8) & 0xFFFF);
      buffer_.write(microsecondsPerQuarterNote & 0xFF);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public void onSmpteOffset(byte[] data) {
    if (data.length != 5) {
      throw new RuntimeException("Invalid length for smpte offset event " + data.length + ".");
    }
    notifyMetaBytes(0x54, data);
  }

  public void onTimeSignature(int numerator, int denominator, int metronomePulse,
      int thirtySecondNotesPerQuarterNote) {
    try {
      buffer_.write(0xFF);
      buffer_.write(0x58);
      MidiUtil.writeVarInt(buffer_, 4);
      buffer_.write(numerator);
      buffer_.write(denominator);
      buffer_.write(metronomePulse);
      buffer_.write(thirtySecondNotesPerQuarterNote);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public void onKeySignature(int key, boolean isMinor) {
    try {
      buffer_.write(0xFF);
      buffer_.write(0x59);
      MidiUtil.writeVarInt(buffer_, 2);
      buffer_.write(key);
      buffer_.write(isMinor ? 1 : 0);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public void onSequencerSpecificEvent(byte[] data) {
    notifyMetaBytes(0x7f, data);
  }

  /**
   * TODO(klimt): This might be wrong. Double-check it. :)
   */
  public void onSysEx(byte[] data) {
    try {
      buffer_.write(0xF0);
      MidiUtil.writeVarInt(buffer_, data.length);
      buffer_.write(data);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Emits a 2-byte message, reusing a scratch buffer to avoid allocation. */
  private void notify2(int b0, int b1) {
    if (buf2_ == null) {
      buf2_ = new byte[2];
    }
    buf2_[0] = (byte) b0;
    buf2_[1] = (byte) b1;
    onMessage(buf2_);
  }

  /** Emits a 3-byte message, reusing a scratch buffer to avoid allocation. */
  private void notify3(int b0, int b1, int b2) {
    if (buf3_ == null) {
      buf3_ = new byte[3];
    }
    buf3_[0] = (byte) b0;
    buf3_[1] = (byte) b1;
    buf3_[2] = (byte) b2;
    onMessage(buf3_);
  }

  /** Emits a meta event: FF, type, var-int length, payload. */
  private void notifyMetaBytes(int type, byte[] data) {
    try {
      buffer_.write(0xFF);
      buffer_.write(type);
      MidiUtil.writeVarInt(buffer_, data.length);
      buffer_.write(data);
      notifyMessage();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  // An internal byte buffer to hold intermediate output.
  private ByteArrayOutputStream buffer_;
  // Lazily allocated scratch buffers for 2- and 3-byte messages.
  private byte[] buf2_;
  private byte[] buf3_;
}
| 2,452 |
379 | <reponame>joelostblom/dash-docs
"""
Original Demo: http://js.cytoscape.org/demos/cose-layout/
Note: This implementation looks different from the original implementation,
although the input parameters are exactly the same.
"""
import requests
import json
import dash
from dash.dependencies import Input, Output
import dash_html_components as html
import dash_cytoscape as cyto
def load_json(st):
    """Load JSON from a URL (http/https) or from a local file path.

    Args:
        st: A URL starting with http:// or https://, or a filesystem path.

    Returns:
        The decoded JSON object.
    """
    # Only treat explicit URL schemes as remote; the previous substring check
    # ('http' in st) misclassified any local path containing "http".
    if st.startswith(('http://', 'https://')):
        return requests.get(st).json()
    with open(st, 'rb') as f:
        return json.load(f)
app = dash.Dash(__name__)
server = app.server  # WSGI entry point for production servers (e.g. gunicorn)

# Load Data
# Graph elements and visual stylesheet come from the upstream cytoscape.js
# cola-layout demo.
elements = load_json('https://js.cytoscape.org/demos/colajs-graph/data.json')
stylesheet = load_json('https://js.cytoscape.org/demos/colajs-graph/cy-style.json')

styles = {
    # Full-viewport flex column: button row on top, graph fills the rest.
    'container': {
        'position': 'fixed',
        'display': 'flex',
        'flex-direction': 'column',
        'height': '100%',
        'width': '100%'
    },
    'cy-container': {
        'flex': '1',
        'position': 'relative'
    },
    # The Cytoscape canvas is absolutely positioned to fill its container.
    'cytoscape': {
        'position': 'absolute',
        'width': '100%',
        'height': '100%',
        'z-index': 999
    }
}

# App
app.layout = html.Div(style=styles['container'], children=[
    html.Div([
        html.Button("Responsive Toggle", id='toggle-button'),
        html.Div(id='toggle-text')
    ]),
    html.Div(className='cy-container', style=styles['cy-container'], children=[
        cyto.Cytoscape(
            id='cytoscape-responsive-layout',
            elements=elements,
            stylesheet=stylesheet,
            style=styles['cytoscape'],
            # Parameters for the cose (force-directed) layout algorithm; same
            # values as the original js.cytoscape.org demo.
            layout={
                'name': 'cose',
                'idealEdgeLength': 100,
                'nodeOverlap': 20,
                'refresh': 20,
                'fit': True,
                'padding': 30,
                'randomize': False,
                'componentSpacing': 100,
                'nodeRepulsion': 400000,
                'edgeElasticity': 100,
                'nestingFactor': 5,
                'gravity': 80,
                'numIter': 1000,
                'initialTemp': 200,
                'coolingFactor': 0.95,
                'minTemp': 1.0
            },
            responsive=True
        )
    ])
])
@app.callback(Output('cytoscape-responsive-layout', 'responsive'),
              Input('toggle-button', 'n_clicks'))
def toggle_responsive(n_clicks):
    """Flip the graph's `responsive` flag on each button click (starts on).

    Fix: the Output previously targeted id 'cytoscape', but the component in
    the layout is registered as 'cytoscape-responsive-layout', so Dash raised
    a nonexistent-id error and the callback never fired.
    """
    # n_clicks is None before the first click; treat that as "on".
    n_clicks = 2 if n_clicks is None else n_clicks
    toggle_on = n_clicks % 2 == 0
    return toggle_on
@app.callback(Output('toggle-text', 'children'),
              Input('cytoscape-responsive-layout', 'responsive'))
def update_toggle_text(responsive):
    """Show the current responsive state next to the toggle button.

    Fix: the Input previously referenced id 'cytoscape', which does not exist
    in the layout; the component id is 'cytoscape-responsive-layout'.
    """
    return '\t' + 'Responsive ' + ('On' if responsive else 'Off')
if __name__ == '__main__':
    # Run the Dash development server with hot reload and in-browser debugger.
    app.run_server(debug=True)
| 1,295 |
708 | <reponame>Edelweiss35/deep-machine-learning<filename>dml/tool/normalize.py
from __future__ import division
import numpy as np
import scipy as sp
__all__ = [
'normalize',
'disnormalize',
'normalize_by_extant',
'featurenormal'
]
def featurenormal(X,axis=0):
    '''
    Zero-mean, unit-variance feature normalization of the N*M matrix X.
    axis==0: columns
    axis==1: rows
    Returns (X_norm, mu, sigma) so the transform can be undone.
    NOTE(review): `not axis` makes numpy reduce along the *opposite* axis,
    and both reshapes assume the statistics align with rows (X.shape[0]);
    verify the axis==1 path actually matches the docstring's intent.
    '''
    # `not axis` is True/False, which numpy interprets as axis 1/0.
    mu=np.array(X).mean(not axis)
    X_norm=X-mu.reshape(X.shape[0],-1)
    # Standard deviation of the already-centered data, same flipped axis.
    sigma=np.std(X_norm,axis=not axis)
    X_norm=X_norm/sigma.reshape(X.shape[0],-1)
    return X_norm,mu,sigma
def normalize(X,Ub=0,Lb=1,order=1):
    '''
    Rescale data via x -> (x - min) / (max - min) * (Lb - Ub) - Ub.
    order = 0 : statistics over all elements
          = 1 : per row
          = 2 : per column
    Returns:
        data  : normalized array
        scale : the subtracted offset (per-element recovery)
        divd  : the divisor used (per-element recovery)
    '''
    X = np.array(X)
    if order == 0:
        MIN, MAX = X.min(), X.max()
    elif order == 1:
        MIN, MAX = X.min(1), X.max(1)
    else:
        MIN, MAX = X.min(0), X.max(0)
    scale = MIN
    divd = MAX - MIN
    if order != 0:
        # Constant rows/columns would divide by zero; shift them to 0 and
        # divide by their (non-zero) maximum instead.
        scale[divd == 0] = 0
        divd[divd == 0] = MAX[divd == 0]
    if order == 0:
        X = (X - scale) / divd * (Lb - Ub) - Ub
    elif order == 1:
        X = (X - scale.reshape(-1, 1)) / divd.reshape(-1, 1) * (Lb - Ub) - Ub
    else:
        X = (X - scale.reshape(1, -1)) / divd.reshape(1, -1) * (Lb - Ub) - Ub
    return X, scale, divd
def disnormalize(X,scale,divd,Ub=0,Lb=1,order=1):
    '''Invert normalize(): map normalized data back to the original range
    using the (scale, divd) pair that normalize() returned.'''
    shifted = (X + Ub) / (Lb - Ub)
    if order == 0:
        return shifted * divd + scale
    if order == 1:
        return shifted * divd.reshape(-1, 1) + scale.reshape(-1, 1)
    return shifted * divd.reshape(1, -1) + scale.reshape(1, -1)
def normalize_by_extant(X,scale,divd,Ub=0,Lb=1,order=1):
    '''Apply a previously computed (scale, divd) normalization to new data,
    using the same mapping as normalize().'''
    if order == 0:
        return (X - scale) / divd * (Lb - Ub) - Ub
    if order == 1:
        return (X - scale.reshape(-1, 1)) / divd.reshape(-1, 1) * (Lb - Ub) - Ub
    return (X - scale.reshape(1, -1)) / divd.reshape(1, -1) * (Lb - Ub) - Ub
1,346 | package com.ctrip.platform.dal.daogen.entity;
import com.ctrip.platform.dal.dao.DalPojo;
import com.ctrip.platform.dal.dao.annotation.Database;
import com.ctrip.platform.dal.dao.annotation.Type;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import java.sql.Types;
/**
 * JPA entity mapped to the {@code api_list} table: one generated DAL API
 * method description (per language / database type / CRUD type).
 */
@Entity
@Database(name = "dao")
@Table(name = "api_list")
public class DalApi implements Comparable<DalApi>, DalPojo {
    // Auto-generated primary key.
    @Id
    @Column(name = "id")
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Type(value = Types.INTEGER)
    private Integer id;

    // Field names intentionally mirror the snake_case column names.
    @Column(name = "language")
    @Type(value = Types.VARCHAR)
    private String language;

    @Column(name = "db_type")
    @Type(value = Types.VARCHAR)
    private String db_type;

    @Column(name = "crud_type")
    @Type(value = Types.VARCHAR)
    private String crud_type;

    @Column(name = "method_declaration")
    @Type(value = Types.VARCHAR)
    private String method_declaration;

    // LONGVARCHAR: free-form description text.
    @Column(name = "method_description")
    @Type(value = Types.LONGVARCHAR)
    private String method_description;

    @Column(name = "sp_type")
    @Type(value = Types.VARCHAR)
    private String sp_type;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }

    public String getDb_type() {
        return db_type;
    }

    public void setDb_type(String db_type) {
        this.db_type = db_type;
    }

    public String getCrud_type() {
        return crud_type;
    }

    public void setCrud_type(String crud_type) {
        this.crud_type = crud_type;
    }

    public String getMethod_declaration() {
        return method_declaration;
    }

    public void setMethod_declaration(String method_declaration) {
        this.method_declaration = method_declaration;
    }

    public String getMethod_description() {
        return method_description;
    }

    public void setMethod_description(String method_description) {
        this.method_description = method_description;
    }

    public String getSp_type() {
        return sp_type;
    }

    public void setSp_type(String sp_type) {
        this.sp_type = sp_type;
    }

    /**
     * Orders entities by the concatenation of all descriptive fields
     * (id is deliberately excluded).
     * NOTE(review): string concatenation throws NullPointerException only for
     * the receiver's own null fields is not an issue here (Java concatenation
     * renders null as "null"), but the resulting order interleaves fields —
     * confirm callers only rely on equality/grouping, not on a meaningful sort.
     */
    @Override
    public int compareTo(DalApi api) {
        String str1 = language + db_type + crud_type + method_declaration + method_description + sp_type;
        String str2 = api.getLanguage() + api.getDb_type() + api.getCrud_type() + api.getMethod_declaration()
                + api.getMethod_description() + api.getSp_type();
        return str1.compareTo(str2);
    }
}
| 1,262 |
1,249 | <gh_stars>1000+
#ifndef DEBUG_HPP
#define DEBUG_HPP
#ifndef WAYFIRE_PLUGIN
#include "config.h"
#endif
#define nonull(x) ((x) ? (x) : ("nil"))
#include <wayfire/util/log.hpp>
#include <bitset>
namespace wf
{
/**
* Print the current stacktrace at runtime.
*
* @param fast_mode If fast_mode is true, the stacktrace will be generated
* using the fastest possible method. However, this means that not all
* information will be printed (for ex., line numbers may be missing).
*/
void print_trace(bool fast_mode);
}
/* ------------------------ Logging categories -------------------------------*/
namespace wf
{
namespace log
{
/**
 * A list of available logging categories.
 * Logging categories need to be manually enabled.
 */
enum class logging_category : size_t
{
    // Transactions - general
    TXN = 0,
    // Transactions - view instructions
    TXNV = 1,
    // Transactions - instructions lifetime (pending, ready, timeout, etc.)
    TXNI = 2,
    // Wlroots messages
    WLR = 3,
    TOTAL,
};

// One enable flag per category, indexed by the enum value above.
extern std::bitset<(size_t)logging_category::TOTAL> enabled_categories;
}
}

// Emit a debug-level log message, but only when category CAT is enabled.
#define LOGC(CAT, ...) \
    if (wf::log::enabled_categories[(size_t)wf::log::logging_category::CAT]) \
    { \
        LOGD("[", #CAT, "] ", __VA_ARGS__); \
    }
/* ------------------- Miscallaneous helpers for debugging ------------------ */
#include <ostream>
#include <glm/glm.hpp>
#include <wayfire/geometry.hpp>
#include <wayfire/view.hpp>
std::ostream& operator <<(std::ostream& out, const glm::mat4& mat);
wf::pointf_t operator *(const glm::mat4& m, const wf::pointf_t& p);
wf::pointf_t operator *(const glm::mat4& m, const wf::point_t& p);
namespace wf
{
std::ostream& operator <<(std::ostream& out, wayfire_view view);
}
#endif
| 636 |
310 | <reponame>dreeves/usesthis
{
"name": "Jazz Bass",
"description": "An electric bass.",
"url": "https://en.wikipedia.org/wiki/Fender_Jazz_Bass"
} | 61 |
743 | <filename>hermes-management/src/main/java/pl/allegro/tech/hermes/management/infrastructure/query/parser/ParseException.java
package pl.allegro.tech.hermes.management.infrastructure.query.parser;
/**
 * Unchecked exception thrown when a management query string cannot be parsed.
 */
public class ParseException extends RuntimeException {

    public ParseException(String message) {
        super(message);
    }

    /** Preserves the underlying parser failure as the cause. */
    public ParseException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 141 |
777 | <reponame>google-ar/chromium<filename>components/cryptauth/ble/bluetooth_low_energy_weave_packet_generator.cc
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/cryptauth/ble/bluetooth_low_energy_weave_packet_generator.h"
#ifdef OS_WIN
#include <winsock2.h>
#else
#include <netinet/in.h>
#endif
#include <string.h>
#include <algorithm>
#include "base/logging.h"
namespace cryptauth {
namespace weave {
// static.
// Shared factory; tests may override it via SetInstanceForTesting().
// nullptr means "lazily create the default factory on first use".
std::shared_ptr<BluetoothLowEnergyWeavePacketGenerator::Factory>
    BluetoothLowEnergyWeavePacketGenerator::Factory::factory_instance_ =
        nullptr;
// static.
// Returns a generator built by the current factory, lazily creating the
// default factory if none was installed.
std::unique_ptr<BluetoothLowEnergyWeavePacketGenerator>
BluetoothLowEnergyWeavePacketGenerator::Factory::NewInstance() {
  if (!factory_instance_) {
    factory_instance_.reset(new Factory());
  }
  return factory_instance_->BuildInstance();
}
// static.
// Installs a factory override; used by tests to inject fakes.
void BluetoothLowEnergyWeavePacketGenerator::Factory::SetInstanceForTesting(
    std::shared_ptr<Factory> factory) {
  factory_instance_ = factory;
}
// Creates a fresh generator instance.
std::unique_ptr<BluetoothLowEnergyWeavePacketGenerator>
BluetoothLowEnergyWeavePacketGenerator::Factory::BuildInstance() {
  return std::unique_ptr<BluetoothLowEnergyWeavePacketGenerator>(
      new BluetoothLowEnergyWeavePacketGenerator());
}
// Counter value 0 is used by the connection request/response packets, which
// reset next_packet_counter_ to 1 when created.
BluetoothLowEnergyWeavePacketGenerator::BluetoothLowEnergyWeavePacketGenerator()
    : max_packet_size_(kDefaultMaxPacketSize), next_packet_counter_(0) {}
// Builds the control packet that initiates a uWeave connection.
Packet BluetoothLowEnergyWeavePacketGenerator::CreateConnectionRequest() {
  Packet packet(kMinConnectionRequestSize, 0);
  SetPacketTypeBit(PacketType::CONTROL, &packet);
  // Since it only make sense for connection request to be the 0th packet,
  // resets the packet counter.
  next_packet_counter_ = 1;
  SetControlCommand(ControlCommand::CONNECTION_REQUEST, &packet);
  // Offsets 1 and 3 both carry the weave version; offset 5 carries the
  // packet-size selection constant.
  SetShortField(1, kWeaveVersion, &packet);
  SetShortField(3, kWeaveVersion, &packet);
  SetShortField(5, kSelectMaxPacketSize, &packet);
  return packet;
}
// Builds the control packet that answers a connection request; carries the
// weave version and this side's maximum packet size.
Packet BluetoothLowEnergyWeavePacketGenerator::CreateConnectionResponse() {
  Packet packet(kMinConnectionResponseSize, 0);
  SetPacketTypeBit(PacketType::CONTROL, &packet);
  // Since it only make sense for connection response to be the 0th packet,
  // resets the next packet counter.
  next_packet_counter_ = 1;
  SetControlCommand(ControlCommand::CONNECTION_RESPONSE, &packet);
  SetShortField(1, kWeaveVersion, &packet);
  SetShortField(3, max_packet_size_, &packet);
  return packet;
}
// Builds the control packet that terminates the connection; the reason code
// is written at byte offset 1.
Packet BluetoothLowEnergyWeavePacketGenerator::CreateConnectionClose(
    ReasonForClose reason_for_close) {
  Packet packet(kMaxConnectionCloseSize, 0);
  SetPacketTypeBit(PacketType::CONTROL, &packet);
  SetPacketCounter(&packet);
  SetControlCommand(ControlCommand::CONNECTION_CLOSE, &packet);
  SetShortField(1, reason_for_close, &packet);
  return packet;
}
// Updates the packet size used by EncodeDataMessage() to split payloads.
// Must be at least the protocol default.
void BluetoothLowEnergyWeavePacketGenerator::SetMaxPacketSize(uint16_t size) {
  DCHECK(size >= kDefaultMaxPacketSize);
  max_packet_size_ = size;
}
// Splits `message` into data packets; the FIRST bit on packet 0 and the LAST
// bit on the final packet delimit the message for reassembly.
std::vector<Packet> BluetoothLowEnergyWeavePacketGenerator::EncodeDataMessage(
    std::string message) {
  DCHECK(!message.empty());
  // The first byte of a packet is used by the uWeave protocol,
  // hence the payload is 1 byte smaller than the packet size.
  uint32_t packet_payload_size = max_packet_size_ - 1;
  uint32_t message_length = message.length();
  // (packet_payload_size - 1) is used to enforce rounding up.
  uint32_t num_packets =
      (message_length + (packet_payload_size - 1)) / packet_payload_size;
  std::vector<Packet> weave_message(num_packets);
  const char* byte_message = message.c_str();
  for (uint32_t i = 0; i < num_packets; ++i) {
    Packet* packet = &weave_message[i];
    uint32_t begin = packet_payload_size * i;
    uint32_t end = std::min(begin + packet_payload_size, message_length);
    // Byte 0 holds the type bit + counter; payload bytes follow.
    packet->push_back(0);
    SetPacketTypeBit(PacketType::DATA, packet);
    SetPacketCounter(packet);
    for (uint32_t j = begin; j < end; ++j) {
      packet->push_back(byte_message[j]);
    }
  }
  // Guaranteed to have at least one packet since message is not empty.
  SetDataFirstBit(&weave_message[0]);
  SetDataLastBit(&weave_message[num_packets - 1]);
  return weave_message;
}
// Writes `val` in big-endian (network byte order, via htons) at
// byte_offset / byte_offset + 1.
void BluetoothLowEnergyWeavePacketGenerator::SetShortField(uint32_t byte_offset,
                                                           uint16_t val,
                                                           Packet* packet) {
  DCHECK(packet);
  DCHECK_LT(byte_offset, packet->size());
  DCHECK_LT(byte_offset + 1, packet->size());
  uint16_t network_val = htons(val);
  uint8_t* network_val_ptr = reinterpret_cast<uint8_t*>(&network_val);
  packet->at(byte_offset) = network_val_ptr[0];
  packet->at(byte_offset + 1) = network_val_ptr[1];
}
// Marks the packet as DATA or CONTROL in bit 7 of byte 0.
void BluetoothLowEnergyWeavePacketGenerator::SetPacketTypeBit(PacketType type,
                                                              Packet* packet) {
  DCHECK(packet);
  DCHECK(!packet->empty());
  // Type bit is the highest bit of the first byte of the packet.
  // So clear the highest bit and set it according to val.
  packet->at(0) = (packet->at(0) & 0x7F) | (type << 7);
}
// Stores the control command in the low nibble of byte 0.
void BluetoothLowEnergyWeavePacketGenerator::SetControlCommand(
    ControlCommand command,
    Packet* packet) {
  DCHECK(packet);
  DCHECK(!packet->empty());
  // Control Command is the lower 4 bits of the packet's first byte.
  // So clear the lower 4 bites and set it according to val.
  packet->at(0) = (packet->at(0) & 0xF0) | command;
}
// Stamps the next sequence number (mod kMaxPacketCounter) into bits 4-6 of
// byte 0 and advances the counter.
void BluetoothLowEnergyWeavePacketGenerator::SetPacketCounter(Packet* packet) {
  DCHECK(packet);
  DCHECK(!packet->empty());
  uint8_t counter = next_packet_counter_ % kMaxPacketCounter;
  // Packet counter is the bits 4, 5, and 6 of the packet's first byte.
  // So clear those bits and set them according to current packet counter
  // modular max packet counter.
  packet->at(0) = (packet->at(0) & 0x8F) | (counter << 4);
  next_packet_counter_++;
}
// Marks this packet as the first packet of a data message.
void BluetoothLowEnergyWeavePacketGenerator::SetDataFirstBit(Packet* packet) {
  DCHECK(packet);
  DCHECK(!packet->empty());
  // First bit is bit 3 of the packet's first byte and set it to 1.
  packet->at(0) = packet->at(0) | (1 << 3);
}
// Marks this packet as the last packet of a data message.
void BluetoothLowEnergyWeavePacketGenerator::SetDataLastBit(Packet* packet) {
  DCHECK(packet);
  DCHECK(!packet->empty());
  // Last bit is the bit 2 of the packet's first byte and set it to 1.
  packet->at(0) = packet->at(0) | (1 << 2);
}
} // namespace weave
} // namespace cryptauth
| 2,367 |
385 | <filename>Curves, Markov and Bayes/UniversalityGainsLosses.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# http://github.com/timestocome
# references
#
# http://www2.math.uu.se/~takis/
# https://www.quantamagazine.org/in-mysterious-pattern-math-and-nature-converge-20130205/
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
###############################################################################
# read in saved file
###############################################################################
# Daily index data; the Date column becomes the (parsed) index.
data = pd.read_csv('StockDataWithVolume.csv', index_col='Date', parse_dates=True)
n_rows = len(data)
# compute and save volatility
# Day-over-day close ratio: > 1 means the series rose versus the prior day.
djia_v = data['DJIA'] / data['DJIA'].shift(1)
nasdaq_v = data['NASDAQ'] / data['NASDAQ'].shift(1)
sp_v = data['S&P'] / data['S&P'].shift(1)
russell_v = data['Russell'] / data['Russell'].shift(1)
btc_v = data['BTC'] / data['BTC'].shift(1)
# 1 if the series went up that day, 0 if it went down or was flat
djia = np.where(djia_v > 1, 1, 0)
nasdaq = np.where(nasdaq_v > 1, 1, 0)
sp = np.where(sp_v > 1, 1, 0)
russell = np.where(russell_v > 1, 1, 0)
btc = np.where(btc_v > 1, 1, 0)
# random 1, 0 — baseline series for comparison
r = np.random.randint(2, size=len(djia))
# periodic — strictly alternating 0/1 baseline
p = np.zeros(len(djia))
for i in range(len(p)):
    if i % 2 == 1:
        p[i] = 1
###############################################################################
# print bar code type plots
###############################################################################
def bar_plot(s):
    """Render a binary series as a barcode-style strip (opens a figure window).

    Adapted from the matplotlib barcode demo (link below).
    """
    # https://matplotlib.org/examples/pylab_examples/barcode_demo.html
    axprops = dict(xticks=[], yticks=[])
    barprops = dict(aspect='auto', cmap=plt.cm.binary, interpolation='nearest')
    fig = plt.figure(figsize=(16, 4))
    x = s.copy()
    # imshow needs a 2-D array: reshape the copied series to one row in place.
    x.shape = 1, len(x)
    ax = fig.add_axes([0.3, 0.1, 0.6, 0.1], **axprops)
    ax.imshow(x, **barprops)
    plt.show()
###############################################################################
# random, structured, universal ?
###############################################################################
def law_of_large_numbers(s):
    """Print the empirical probability of ones and zeros in binary series s."""
    total = len(s)
    ones = np.sum(s)
    print('Probability %.2f, %.2f ' % (ones / total, (total - ones) / total))
def central_limit_thm(s):
    """Print mean/variance/std plus skewness and kurtosis estimates of s."""
    n = len(s)
    mean = np.mean(s)
    var = np.var(s)
    std = np.std(s)
    centered = s - mean
    # Biased sample skewness, then the adjusted (Fisher-Pearson) variant.
    skew = np.sum(centered ** 3 / n) / (std ** 3)
    adj_skew = (np.sqrt(n * (n - 1))) / (n - 2) * skew
    # Pearson kurtosis; a normal distribution scores 3 (excess kurtosis 0).
    kurt = np.sum(centered ** 4 / n) / (std ** 4)
    excess_kurt = kurt - 3
    print('Mean %.2f Var %.2f Std %.2f ' % (mean, var, std))
    print('Skew %.2f, %.2f Kurt %.2f, %.2f' % (skew, adj_skew, kurt, excess_kurt))
##############################################################################
# print info to screen
##############################################################################
# Per-series report: empirical up/down probabilities, moment statistics, and a
# barcode plot (each bar_plot call opens a figure window).
print('----------------------------------------------------------------------')
print('Up days vs down days July 16, 2010 - March 7, 2018')
print('----------------------------------------------------------------------')
print('DJIA')
law_of_large_numbers(djia)
central_limit_thm(djia)
bar_plot(djia)
print('------------------------------')
print('NASDAQ')
law_of_large_numbers(nasdaq)
central_limit_thm(nasdaq)
bar_plot(nasdaq)
print('------------------------------')
print('S&P')
law_of_large_numbers(sp)
central_limit_thm(sp)
bar_plot(sp)
print('------------------------------')
print('Russell')
law_of_large_numbers(russell)
central_limit_thm(russell)
bar_plot(russell)
print('------------------------------')
print('BTC')
law_of_large_numbers(btc)
central_limit_thm(btc)
bar_plot(btc)
print('------------------------------')
# Baselines: pure noise and a strict alternation, for visual comparison.
print('Random')
law_of_large_numbers(r)
central_limit_thm(r)
bar_plot(r)
print('------------------------------')
print('Periodic')
law_of_large_numbers(p)
central_limit_thm(p)
bar_plot(p)
print('------------------------------')
| 1,503 |
1,144 | package org.adempiere.ad.trx.api.impl;
import com.google.common.collect.ImmutableList;
import lombok.NonNull;
import org.adempiere.ad.trx.api.ITrx;
import org.adempiere.ad.trx.api.ITrxRunConfig;
import org.adempiere.ad.trx.api.ITrxRunConfig.OnRunnableFail;
import org.adempiere.ad.trx.api.ITrxRunConfig.OnRunnableSuccess;
import org.adempiere.ad.trx.api.ITrxRunConfig.TrxPropagation;
import org.adempiere.ad.trx.api.OnTrxMissingPolicy;
import org.adempiere.test.AdempiereTestHelper;
import org.adempiere.util.lang.Mutable;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.slf4j.MDC;
import java.util.ArrayList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class AbstractTrxManagerTest
{
    // Reset the Adempiere test context so every test starts from a clean state.
    @BeforeEach
    void init()
    {
        AdempiereTestHelper.get().init();
    }
    // A failing rollback must be swallowed: trx.rollback() shall never
    // propagate the underlying exception to the caller.
    @Test
    public void rollback_NeverThrowsException()
    {
        final MockedTrxManager trxManager = new MockedTrxManager();
        final MockedTrx trx = trxManager.createTrx("TestTrx", false);
        trx.setOnRollbackException(new RuntimeException("test - fail on commit"));
        trx.start();
        trx.rollback();
    }
    // A throwing runnable must cause rollback + close of the new trx,
    // with no savepoints involved.
    @Test
    void runInNewTrx_withException()
    {
        final MockedTrxManager trxManager = new MockedTrxManager();
        assertThatThrownBy(
                () -> trxManager.runInNewTrx(() -> {
                    throw new RuntimeException("something went wrong");
                }))
                        .hasMessage("RuntimeException: something went wrong");
        final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
        assertThat(removedTransactions).hasSize(1);
        final MockedTrx trx = (MockedTrx)removedTransactions.get(0);
        assertThat(trx.isRollbackCalled()).isTrue();
        assertThat(trx.isCloseCalled()).isTrue();
        assertThat(trx.getCreatedSavepoints()).isEmpty();
        assertThat(trx.getReleasedSavepoints()).isEmpty();
    }
    // The happy path: the new trx is committed and closed, never rolled back.
    @Test
    void runInNewTrx()
    {
        final MockedTrxManager trxManager = new MockedTrxManager();
        trxManager.runInNewTrx(() -> {
            /* nothing to do */ });
        final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
        assertThat(removedTransactions).hasSize(1);
        final MockedTrx trx = (MockedTrx)removedTransactions.get(0);
        assertThat(trx.isRollbackCalled()).isFalse();
        assertThat(trx.isCommitCalled()).isTrue();
        assertThat(trx.isCloseCalled()).isTrue();
        assertThat(trx.getCreatedSavepoints()).isEmpty();
        assertThat(trx.getReleasedSavepoints()).isEmpty();
    }
    // A successful nested run creates and releases exactly one savepoint on
    // the (single) outer transaction.
    @Test
    void runInNewTrx_nested()
    {
        final Runnable innerRunnable = () -> {
            /* nothing to do */ };
        final MockedTrxManager trxManager = new MockedTrxManager();
        final MockedTrx trx = invokedTrxManagerNested(trxManager, innerRunnable);
        final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
        assertThat(removedTransactions).hasSize(1);
        final MockedTrx removedTrx = (MockedTrx)removedTransactions.get(0);
        assertThat(removedTrx.getTrxName()).isEqualTo(trx.getTrxName());
        assertThat(removedTrx.isRollbackCalled()).isFalse();
        assertThat(removedTrx.isCloseCalled()).isTrue();
        assertThat(removedTrx.getCreatedSavepoints()).hasSize(1);
        assertThat(removedTrx.getReleasedSavepoints()).hasSize(1);
    }
    // A failing nested run rolls back the outer trx; the savepoint was
    // created but is no longer active afterwards.
    @Test
    void runInNewTrx_nested_exception()
    {
        final Runnable innerRunnable = () -> {
            throw new RuntimeException("something went wrong");
        };
        final MockedTrxManager trxManager = new MockedTrxManager();
        assertThatThrownBy(() -> invokedTrxManagerNested(trxManager, innerRunnable))
                .hasMessage("RuntimeException: something went wrong");
        final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
        assertThat(removedTransactions).hasSize(1);
        final MockedTrx removedTrx = (MockedTrx)removedTransactions.get(0);
        assertThat(removedTrx.isCommitCalled()).isFalse();
        assertThat(removedTrx.isRollbackCalled()).isTrue();
        assertThat(removedTrx.isCloseCalled()).isTrue();
        assertThat(removedTrx.getCreatedSavepoints()).hasSize(1);
        assertThat(removedTrx.getActiveSavepoints()).isEmpty();
    }
    // Helper: runs innerRunnable in a thread-inherited trx nested inside a
    // freshly started outer trx; returns the outer MockedTrx for inspection.
    private MockedTrx invokedTrxManagerNested(
            @NonNull final MockedTrxManager trxManager,
            @NonNull final Runnable innerRunnable)
    {
        final ITrxRunConfig outerTrxRunCfg = trxManager.newTrxRunConfigBuilder().build();
        final Mutable<MockedTrx> trx = new Mutable<>();
        trxManager.run(
                ITrx.TRXNAME_None,
                outerTrxRunCfg,
                localTrxName -> {
                    trx.setValue((MockedTrx)trxManager.getThreadInheritedTrx(OnTrxMissingPolicy.Fail));
                    trxManager.run(
                            ITrx.TRXNAME_ThreadInherited,
                            innerRunnable);
                });
        // After the outer run completes, no transaction may remain active.
        assertThat(trxManager.getActiveTransactionsList()).isEmpty();
        return trx.getValue();
    }
    // Same double-nested failure scenario, once with a null trx-name prefix...
    @Test
    void runInNewTrx_nested_twice_2ndWithException_trxNamePrefixNull()
    {
        final String trxNamePrefix = ITrx.TRXNAME_None;
        perform_run_nested_twice_2ndWithException(trxNamePrefix);
    }

    // ...and once with a non-null prefix.
    @Test
    void runInNewTrx_nested_twice_2ndWithException_trxNamePrefixNotNull()
    {
        final String trxNamePrefix = this.getClass().getSimpleName();
        perform_run_nested_twice_2ndWithException(trxNamePrefix);
    }
    // Two nested runs inside one outer trx: the first succeeds (savepoint
    // released), the second fails, so the outer trx is rolled back and only
    // one of the two created savepoints was released.
    private void perform_run_nested_twice_2ndWithException(final String trxNamePrefix)
    {
        // @formatter:off
        final Runnable successfulInnerRunnable = () -> { /* nothing to do */ };
        final Runnable failingInnerRunnable = () -> { throw new RuntimeException("something went wrong"); };
        // @formatter:on
        final MockedTrxManager trxManager = new MockedTrxManager();
        final ITrxRunConfig outerTrxRunCfg = trxManager.newTrxRunConfigBuilder().setOnRunnableFail(OnRunnableFail.ROLLBACK).build();
        assertThat(outerTrxRunCfg.getTrxPropagation()).isEqualTo(TrxPropagation.REQUIRES_NEW); // guard
        final Mutable<MockedTrx> trx = new Mutable<>();
        assertThatThrownBy(
                () -> trxManager.run(
                        trxNamePrefix,
                        outerTrxRunCfg,
                        localTrxName -> {
                            trx.setValue((MockedTrx)trxManager.getThreadInheritedTrx(OnTrxMissingPolicy.Fail));
                            trxManager.run(ITrx.TRXNAME_ThreadInherited, successfulInnerRunnable);
                            trxManager.run(ITrx.TRXNAME_ThreadInherited, failingInnerRunnable);
                        }))
                .hasMessage("RuntimeException: something went wrong");
        final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
        assertThat(removedTransactions).hasSize(1);
        final MockedTrx removedTrx = (MockedTrx)removedTransactions.get(0);
        assertThat(removedTrx.isCommitCalled()).isFalse();
        assertThat(removedTrx.isRollbackCalled()).isTrue();
        assertThat(removedTrx.isCloseCalled()).isTrue();
        assertThat(removedTrx.getCreatedSavepoints()).hasSize(2);
        assertThat(removedTrx.getReleasedSavepoints()).hasSize(1);
        assertThat(removedTrx.getActiveSavepoints()).isEmpty();
    }
@Test
void run_withTrxNamePrefix_OnRunnableFailRollback_exception()
{
    final MockedTrxManager trxManager = new MockedTrxManager();
    final String trxNamePrefix = this.getClass().getSimpleName();

    // explicit config: new trx, commit on success, rollback on failure
    final ITrxRunConfig trxRunConfig = trxManager.newTrxRunConfigBuilder()
            .setTrxPropagation(TrxPropagation.REQUIRES_NEW)
            .setOnRunnableSuccess(OnRunnableSuccess.COMMIT)
            .setOnRunnableFail(OnRunnableFail.ROLLBACK)
            .build();

    // the runnable fails immediately; the exception shall reach the caller unchanged
    assertThatThrownBy(() -> trxManager.run(
            trxNamePrefix,
            trxRunConfig,
            trxName_IGNORED -> {
                throw new RuntimeException("something went wrong");
            }))
            .hasMessage("RuntimeException: something went wrong");

    final List<ITrx> removedTransactions = trxManager.getRemovedTransactions();
    assertThat(removedTransactions).hasSize(1);

    final MockedTrx failedTrx = (MockedTrx)removedTransactions.get(0);
    assertThat(failedTrx.isCloseCalled()).isTrue();
    assertThat(failedTrx.isCommitCalled()).isFalse();
    assertThat(failedTrx.isRollbackCalled()).isTrue();
    // no nested runs happened, so no savepoints at all
    assertThat(failedTrx.getCreatedSavepoints()).isEmpty();
    assertThat(failedTrx.getReleasedSavepoints()).isEmpty();
    assertThat(failedTrx.getActiveSavepoints()).isEmpty();
}
@Test
void getPropertyAndProcessAfterCommit()
{
    final PlainTrxManager trxManager = new PlainTrxManager();
    final PlainTrx trx = trxManager.createTrx("TestTrx", false);

    // Collect each value via getPropertyAndProcessAfterCommit; all three calls shall
    // end up on the very same ValuesCollector instance stored under "propertyName".
    final List<String> expectedValues = ImmutableList.of("value1", "value2", "value3");
    expectedValues.forEach(value -> {
        final ValuesCollector collector = trx.getPropertyAndProcessAfterCommit(
                "propertyName",
                ValuesCollector::new,
                ValuesCollector::markProcessed);
        collector.collect(value);
    });

    //
    // Before the commit: everything collected, but the processor did not run yet
    final ValuesCollector collector = trx.getProperty("propertyName");
    {
        assertThat(collector.getCollectedValues()).isEqualTo(expectedValues);
        assertThat(collector.isProcessed()).isFalse();
    }

    //
    // Simulate after commit
    trx.getTrxListenerManager().fireAfterCommit(trx);

    //
    // After the commit: same values, and the processor was invoked exactly on our collector
    {
        assertThat(collector.getCollectedValues()).isEqualTo(expectedValues);
        assertThat(collector.isProcessed()).isTrue();
    }
}
/**
 * Small test helper: accumulates string values and remembers whether
 * {@link #markProcessed()} was invoked (i.e. whether the after-commit processor ran).
 */
private static class ValuesCollector
{
    private final List<String> collectedValues = new ArrayList<>();
    private boolean processed = false;

    public void collect(final String value)
    {
        collectedValues.add(value);
    }

    public List<String> getCollectedValues()
    {
        // defensive copy so callers cannot mutate our internal state
        return new ArrayList<>(collectedValues);
    }

    public void markProcessed()
    {
        this.processed = true;
    }

    public boolean isProcessed()
    {
        return processed;
    }
}
/**
 * Verifies that the trx manager mirrors the thread-inherited trxName into the
 * SLF4J {@code MDC} under the key {@code "TrxName"}.
 */
@Nested
public class MDC_TrxName
{
    @Test
    public void setThreadInheritedTrxName()
    {
        final MockedTrxManager trxManager = new MockedTrxManager();
        // initially: neither a thread-inherited trxName nor an MDC entry
        assertThat(trxManager.getThreadInheritedTrxName()).isNull();
        assertThat(MDC.get("TrxName")).isNull();

        // setting a trxName shall also set the MDC entry
        trxManager.setThreadInheritedTrxName("trx1");
        assertThat(trxManager.getThreadInheritedTrxName()).isEqualTo("trx1");
        assertThat(MDC.get("TrxName")).isEqualTo("trx1");

        // replacing the trxName shall replace the MDC entry
        trxManager.setThreadInheritedTrxName("trx2");
        assertThat(trxManager.getThreadInheritedTrxName()).isEqualTo("trx2");
        assertThat(MDC.get("TrxName")).isEqualTo("trx2");

        // clearing the trxName shall clear the MDC entry too
        trxManager.setThreadInheritedTrxName(null);
        assertThat(trxManager.getThreadInheritedTrxName()).isNull();
        assertThat(MDC.get("TrxName")).isNull();
    }

    @Test
    public void runInNewTrx()
    {
        final MockedTrxManager trxManager = new MockedTrxManager();
        assertThat(MDC.get("TrxName")).isNull();
        // while the runnable executes, the MDC entry shall equal the local trxName...
        trxManager.runInNewTrx(localTrxName -> assertThat(MDC.get("TrxName")).isEqualTo(localTrxName));
        // ...and be cleared again afterwards
        assertThat(MDC.get("TrxName")).isNull();
    }
}
}
| 3,960 |
576 | # Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix in our codegen framework. We'll not be able to accept
pull request for this class.
"""
class ProductItem(
AbstractCrudObject,
):
def __init__(self, fbid=None, parent_id=None, api=None):
    # Marker attribute used by the SDK to identify instances of this node type.
    self._isProductItem = True
    super(ProductItem, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
additional_image_cdn_urls = 'additional_image_cdn_urls'
additional_image_urls = 'additional_image_urls'
additional_variant_attributes = 'additional_variant_attributes'
age_group = 'age_group'
applinks = 'applinks'
ar_data = 'ar_data'
availability = 'availability'
brand = 'brand'
capability_to_review_status = 'capability_to_review_status'
category = 'category'
category_specific_fields = 'category_specific_fields'
color = 'color'
commerce_insights = 'commerce_insights'
condition = 'condition'
currency = 'currency'
custom_data = 'custom_data'
custom_label_0 = 'custom_label_0'
custom_label_1 = 'custom_label_1'
custom_label_2 = 'custom_label_2'
custom_label_3 = 'custom_label_3'
custom_label_4 = 'custom_label_4'
description = 'description'
expiration_date = 'expiration_date'
fb_product_category = 'fb_product_category'
gender = 'gender'
gtin = 'gtin'
id = 'id'
image_cdn_urls = 'image_cdn_urls'
image_fetch_status = 'image_fetch_status'
image_url = 'image_url'
images = 'images'
importer_address = 'importer_address'
importer_name = 'importer_name'
invalidation_errors = 'invalidation_errors'
inventory = 'inventory'
manufacturer_info = 'manufacturer_info'
manufacturer_part_number = 'manufacturer_part_number'
marked_for_product_launch = 'marked_for_product_launch'
material = 'material'
mobile_link = 'mobile_link'
name = 'name'
ordering_index = 'ordering_index'
origin_country = 'origin_country'
parent_product_id = 'parent_product_id'
pattern = 'pattern'
price = 'price'
product_catalog = 'product_catalog'
product_feed = 'product_feed'
product_group = 'product_group'
product_type = 'product_type'
quantity_to_sell_on_facebook = 'quantity_to_sell_on_facebook'
retailer_id = 'retailer_id'
retailer_product_group_id = 'retailer_product_group_id'
review_rejection_reasons = 'review_rejection_reasons'
review_status = 'review_status'
sale_price = 'sale_price'
sale_price_end_date = 'sale_price_end_date'
sale_price_start_date = 'sale_price_start_date'
shipping_weight_unit = 'shipping_weight_unit'
shipping_weight_value = 'shipping_weight_value'
short_description = 'short_description'
size = 'size'
start_date = 'start_date'
url = 'url'
visibility = 'visibility'
additional_uploaded_image_ids = 'additional_uploaded_image_ids'
android_app_name = 'android_app_name'
android_class = 'android_class'
android_package = 'android_package'
android_url = 'android_url'
checkout_url = 'checkout_url'
commerce_tax_category = 'commerce_tax_category'
ios_app_name = 'ios_app_name'
ios_app_store_id = 'ios_app_store_id'
ios_url = 'ios_url'
ipad_app_name = 'ipad_app_name'
ipad_app_store_id = 'ipad_app_store_id'
ipad_url = 'ipad_url'
iphone_app_name = 'iphone_app_name'
iphone_app_store_id = 'iphone_app_store_id'
iphone_url = 'iphone_url'
launch_date = 'launch_date'
offer_price_amount = 'offer_price_amount'
offer_price_end_date = 'offer_price_end_date'
offer_price_start_date = 'offer_price_start_date'
return_policy_days = 'return_policy_days'
windows_phone_app_id = 'windows_phone_app_id'
windows_phone_app_name = 'windows_phone_app_name'
windows_phone_url = 'windows_phone_url'
# The following nested classes enumerate the allowed values for enum-typed
# fields/parameters of ProductItem. Attribute names prefixed with 'value_'
# are a codegen convention used where the raw value cannot serve as a
# Python identifier (e.g. keywords) — TODO confirm against the generator.
class AgeGroup:
    adult = 'adult'
    all_ages = 'all ages'
    infant = 'infant'
    kids = 'kids'
    newborn = 'newborn'
    teen = 'teen'
    toddler = 'toddler'

class Availability:
    available_for_order = 'available for order'
    discontinued = 'discontinued'
    in_stock = 'in stock'
    out_of_stock = 'out of stock'
    pending = 'pending'
    preorder = 'preorder'

class Condition:
    cpo = 'cpo'
    new = 'new'
    open_box_new = 'open_box_new'
    refurbished = 'refurbished'
    used = 'used'
    used_fair = 'used_fair'
    used_good = 'used_good'
    used_like_new = 'used_like_new'

class Gender:
    female = 'female'
    male = 'male'
    unisex = 'unisex'

class ImageFetchStatus:
    direct_upload = 'DIRECT_UPLOAD'
    fetched = 'FETCHED'
    fetch_failed = 'FETCH_FAILED'
    no_status = 'NO_STATUS'
    outdated = 'OUTDATED'
    partial_fetch = 'PARTIAL_FETCH'

class ReviewStatus:
    approved = 'approved'
    outdated = 'outdated'
    pending = 'pending'
    rejected = 'rejected'

class ShippingWeightUnit:
    value_g = 'g'
    kg = 'kg'
    lb = 'lb'
    oz = 'oz'

class Visibility:
    published = 'published'
    staging = 'staging'
class CommerceTaxCategory:
fb_animal = 'FB_ANIMAL'
fb_animal_supp = 'FB_ANIMAL_SUPP'
fb_aprl = 'FB_APRL'
fb_aprl_accessories = 'FB_APRL_ACCESSORIES'
fb_aprl_athl_unif = 'FB_APRL_ATHL_UNIF'
fb_aprl_cases = 'FB_APRL_CASES'
fb_aprl_clothing = 'FB_APRL_CLOTHING'
fb_aprl_costume = 'FB_APRL_COSTUME'
fb_aprl_cstm = 'FB_APRL_CSTM'
fb_aprl_formal = 'FB_APRL_FORMAL'
fb_aprl_handbag = 'FB_APRL_HANDBAG'
fb_aprl_jewelry = 'FB_APRL_JEWELRY'
fb_aprl_shoe = 'FB_APRL_SHOE'
fb_aprl_shoe_acc = 'FB_APRL_SHOE_ACC'
fb_aprl_swim = 'FB_APRL_SWIM'
fb_aprl_swim_chil = 'FB_APRL_SWIM_CHIL'
fb_aprl_swim_cvr = 'FB_APRL_SWIM_CVR'
fb_arts = 'FB_ARTS'
fb_arts_hobby = 'FB_ARTS_HOBBY'
fb_arts_party = 'FB_ARTS_PARTY'
fb_arts_party_gift_card = 'FB_ARTS_PARTY_GIFT_CARD'
fb_arts_ticket = 'FB_ARTS_TICKET'
fb_baby = 'FB_BABY'
fb_baby_bath = 'FB_BABY_BATH'
fb_baby_blanket = 'FB_BABY_BLANKET'
fb_baby_diaper = 'FB_BABY_DIAPER'
fb_baby_gift_set = 'FB_BABY_GIFT_SET'
fb_baby_health = 'FB_BABY_HEALTH'
fb_baby_nursing = 'FB_BABY_NURSING'
fb_baby_potty_trn = 'FB_BABY_POTTY_TRN'
fb_baby_safe = 'FB_BABY_SAFE'
fb_baby_toys = 'FB_BABY_TOYS'
fb_baby_transport = 'FB_BABY_TRANSPORT'
fb_baby_transport_acc = 'FB_BABY_TRANSPORT_ACC'
fb_bags = 'FB_BAGS'
fb_bags_bkpk = 'FB_BAGS_BKPK'
fb_bags_boxes = 'FB_BAGS_BOXES'
fb_bags_brfcs = 'FB_BAGS_BRFCS'
fb_bags_csmt_bag = 'FB_BAGS_CSMT_BAG'
fb_bags_dffl = 'FB_BAGS_DFFL'
fb_bags_dipr = 'FB_BAGS_DIPR'
fb_bags_fnny = 'FB_BAGS_FNNY'
fb_bags_grmt = 'FB_BAGS_GRMT'
fb_bags_lugg = 'FB_BAGS_LUGG'
fb_bags_lug_acc = 'FB_BAGS_LUG_ACC'
fb_bags_msgr = 'FB_BAGS_MSGR'
fb_bags_tote = 'FB_BAGS_TOTE'
fb_bags_trn_cas = 'FB_BAGS_TRN_CAS'
fb_bldg = 'FB_BLDG'
fb_bldg_acc = 'FB_BLDG_ACC'
fb_bldg_cnsmb = 'FB_BLDG_CNSMB'
fb_bldg_fence = 'FB_BLDG_FENCE'
fb_bldg_fuel_tnk = 'FB_BLDG_FUEL_TNK'
fb_bldg_ht_vnt = 'FB_BLDG_HT_VNT'
fb_bldg_lock = 'FB_BLDG_LOCK'
fb_bldg_matrl = 'FB_BLDG_MATRL'
fb_bldg_plmb = 'FB_BLDG_PLMB'
fb_bldg_pump = 'FB_BLDG_PUMP'
fb_bldg_pwrs = 'FB_BLDG_PWRS'
fb_bldg_str_tank = 'FB_BLDG_STR_TANK'
fb_bldg_s_eng = 'FB_BLDG_S_ENG'
fb_bldg_tl_acc = 'FB_BLDG_TL_ACC'
fb_bldg_tool = 'FB_BLDG_TOOL'
fb_busind = 'FB_BUSIND'
fb_busind_advertising = 'FB_BUSIND_ADVERTISING'
fb_busind_agriculture = 'FB_BUSIND_AGRICULTURE'
fb_busind_automation = 'FB_BUSIND_AUTOMATION'
fb_busind_heavy_mach = 'FB_BUSIND_HEAVY_MACH'
fb_busind_lab = 'FB_BUSIND_LAB'
fb_busind_medical = 'FB_BUSIND_MEDICAL'
fb_busind_retail = 'FB_BUSIND_RETAIL'
fb_busind_sanitary_ct = 'FB_BUSIND_SANITARY_CT'
fb_busind_sign = 'FB_BUSIND_SIGN'
fb_busind_storage = 'FB_BUSIND_STORAGE'
fb_busind_storage_acc = 'FB_BUSIND_STORAGE_ACC'
fb_busind_work_gear = 'FB_BUSIND_WORK_GEAR'
fb_camera_acc = 'FB_CAMERA_ACC'
fb_camera_camera = 'FB_CAMERA_CAMERA'
fb_camera_optic = 'FB_CAMERA_OPTIC'
fb_camera_optics = 'FB_CAMERA_OPTICS'
fb_camera_photo = 'FB_CAMERA_PHOTO'
fb_elec = 'FB_ELEC'
fb_elec_acc = 'FB_ELEC_ACC'
fb_elec_arcdade = 'FB_ELEC_ARCDADE'
fb_elec_audio = 'FB_ELEC_AUDIO'
fb_elec_circuit = 'FB_ELEC_CIRCUIT'
fb_elec_comm = 'FB_ELEC_COMM'
fb_elec_computer = 'FB_ELEC_COMPUTER'
fb_elec_gps_acc = 'FB_ELEC_GPS_ACC'
fb_elec_gps_nav = 'FB_ELEC_GPS_NAV'
fb_elec_gps_trk = 'FB_ELEC_GPS_TRK'
fb_elec_marine = 'FB_ELEC_MARINE'
fb_elec_network = 'FB_ELEC_NETWORK'
fb_elec_part = 'FB_ELEC_PART'
fb_elec_print = 'FB_ELEC_PRINT'
fb_elec_radar = 'FB_ELEC_RADAR'
fb_elec_speed_rdr = 'FB_ELEC_SPEED_RDR'
fb_elec_toll = 'FB_ELEC_TOLL'
fb_elec_video = 'FB_ELEC_VIDEO'
fb_elec_vid_gm_acc = 'FB_ELEC_VID_GM_ACC'
fb_elec_vid_gm_cnsl = 'FB_ELEC_VID_GM_CNSL'
fb_food = 'FB_FOOD'
fb_furn = 'FB_FURN'
fb_furn_baby = 'FB_FURN_BABY'
fb_furn_bench = 'FB_FURN_BENCH'
fb_furn_cart = 'FB_FURN_CART'
fb_furn_chair = 'FB_FURN_CHAIR'
fb_furn_chair_acc = 'FB_FURN_CHAIR_ACC'
fb_furn_divide = 'FB_FURN_DIVIDE'
fb_furn_divide_acc = 'FB_FURN_DIVIDE_ACC'
fb_furn_ent_ctr = 'FB_FURN_ENT_CTR'
fb_furn_futn = 'FB_FURN_FUTN'
fb_furn_futn_pad = 'FB_FURN_FUTN_PAD'
fb_furn_office = 'FB_FURN_OFFICE'
fb_furn_office_acc = 'FB_FURN_OFFICE_ACC'
fb_furn_otto = 'FB_FURN_OTTO'
fb_furn_outdoor = 'FB_FURN_OUTDOOR'
fb_furn_outdoor_acc = 'FB_FURN_OUTDOOR_ACC'
fb_furn_sets = 'FB_FURN_SETS'
fb_furn_shelve_acc = 'FB_FURN_SHELVE_ACC'
fb_furn_shlf = 'FB_FURN_SHLF'
fb_furn_sofa = 'FB_FURN_SOFA'
fb_furn_sofa_acc = 'FB_FURN_SOFA_ACC'
fb_furn_storage = 'FB_FURN_STORAGE'
fb_furn_tabl = 'FB_FURN_TABL'
fb_furn_tabl_acc = 'FB_FURN_TABL_ACC'
fb_generic_taxable = 'FB_GENERIC_TAXABLE'
fb_hlth = 'FB_HLTH'
fb_hlth_hlth = 'FB_HLTH_HLTH'
fb_hlth_jwl_cr = 'FB_HLTH_JWL_CR'
fb_hlth_lilp_blm = 'FB_HLTH_LILP_BLM'
fb_hlth_ltn_spf = 'FB_HLTH_LTN_SPF'
fb_hlth_prsl_cr = 'FB_HLTH_PRSL_CR'
fb_hlth_skn_cr = 'FB_HLTH_SKN_CR'
fb_hmgn = 'FB_HMGN'
fb_hmgn_bath = 'FB_HMGN_BATH'
fb_hmgn_dcor = 'FB_HMGN_DCOR'
fb_hmgn_emgy = 'FB_HMGN_EMGY'
fb_hmgn_fplc = 'FB_HMGN_FPLC'
fb_hmgn_fplc_acc = 'FB_HMGN_FPLC_ACC'
fb_hmgn_gs_sft = 'FB_HMGN_GS_SFT'
fb_hmgn_hs_acc = 'FB_HMGN_HS_ACC'
fb_hmgn_hs_app = 'FB_HMGN_HS_APP'
fb_hmgn_hs_spl = 'FB_HMGN_HS_SPL'
fb_hmgn_ktcn = 'FB_HMGN_KTCN'
fb_hmgn_lawn = 'FB_HMGN_LAWN'
fb_hmgn_lght = 'FB_HMGN_LGHT'
fb_hmgn_linn = 'FB_HMGN_LINN'
fb_hmgn_lt_acc = 'FB_HMGN_LT_ACC'
fb_hmgn_otdr = 'FB_HMGN_OTDR'
fb_hmgn_pool = 'FB_HMGN_POOL'
fb_hmgn_scty = 'FB_HMGN_SCTY'
fb_hmgn_smk_acc = 'FB_HMGN_SMK_ACC'
fb_hmgn_umbr = 'FB_HMGN_UMBR'
fb_hmgn_umbr_acc = 'FB_HMGN_UMBR_ACC'
fb_mdia = 'FB_MDIA'
fb_mdia_book = 'FB_MDIA_BOOK'
fb_mdia_dvds = 'FB_MDIA_DVDS'
fb_mdia_mag = 'FB_MDIA_MAG'
fb_mdia_manl = 'FB_MDIA_MANL'
fb_mdia_musc = 'FB_MDIA_MUSC'
fb_mdia_prj_pln = 'FB_MDIA_PRJ_PLN'
fb_mdia_sht_mus = 'FB_MDIA_SHT_MUS'
fb_offc = 'FB_OFFC'
fb_offc_bkac = 'FB_OFFC_BKAC'
fb_offc_crts = 'FB_OFFC_CRTS'
fb_offc_dskp = 'FB_OFFC_DSKP'
fb_offc_eqip = 'FB_OFFC_EQIP'
fb_offc_flng = 'FB_OFFC_FLNG'
fb_offc_gnrl = 'FB_OFFC_GNRL'
fb_offc_instm = 'FB_OFFC_INSTM'
fb_offc_lp_dsk = 'FB_OFFC_LP_DSK'
fb_offc_mats = 'FB_OFFC_MATS'
fb_offc_nm_plt = 'FB_OFFC_NM_PLT'
fb_offc_ppr_hndl = 'FB_OFFC_PPR_HNDL'
fb_offc_prsnt_spl = 'FB_OFFC_PRSNT_SPL'
fb_offc_sealr = 'FB_OFFC_SEALR'
fb_offc_ship_spl = 'FB_OFFC_SHIP_SPL'
fb_rlgn = 'FB_RLGN'
fb_rlgn_cmny = 'FB_RLGN_CMNY'
fb_rlgn_item = 'FB_RLGN_ITEM'
fb_rlgn_wedd = 'FB_RLGN_WEDD'
fb_sftwr = 'FB_SFTWR'
fb_sfwr_cmptr = 'FB_SFWR_CMPTR'
fb_sfwr_dgtl_gd = 'FB_SFWR_DGTL_GD'
fb_sfwr_game = 'FB_SFWR_GAME'
fb_shipping = 'FB_SHIPPING'
fb_spor = 'FB_SPOR'
fb_sport_athl = 'FB_SPORT_ATHL'
fb_sport_athl_clth = 'FB_SPORT_ATHL_CLTH'
fb_sport_athl_shoe = 'FB_SPORT_ATHL_SHOE'
fb_sport_athl_sprt = 'FB_SPORT_ATHL_SPRT'
fb_sport_exrcs = 'FB_SPORT_EXRCS'
fb_sport_indr_gm = 'FB_SPORT_INDR_GM'
fb_sport_otdr_gm = 'FB_SPORT_OTDR_GM'
fb_toys = 'FB_TOYS'
fb_toys_eqip = 'FB_TOYS_EQIP'
fb_toys_game = 'FB_TOYS_GAME'
fb_toys_pzzl = 'FB_TOYS_PZZL'
fb_toys_tmrs = 'FB_TOYS_TMRS'
fb_toys_toys = 'FB_TOYS_TOYS'
fb_vehi = 'FB_VEHI'
fb_vehi_part = 'FB_VEHI_PART'
# Allowed values for the 'marked_for_product_launch' parameter.
class MarkedForProductLaunch:
    value_default = 'default'
    marked = 'marked'
    not_marked = 'not_marked'
class OriginCountry:
ad = 'AD'
ae = 'AE'
af = 'AF'
ag = 'AG'
ai = 'AI'
al = 'AL'
am = 'AM'
an = 'AN'
ao = 'AO'
aq = 'AQ'
ar = 'AR'
value_as = 'AS'
at = 'AT'
au = 'AU'
aw = 'AW'
ax = 'AX'
az = 'AZ'
ba = 'BA'
bb = 'BB'
bd = 'BD'
be = 'BE'
bf = 'BF'
bg = 'BG'
bh = 'BH'
bi = 'BI'
bj = 'BJ'
bl = 'BL'
bm = 'BM'
bn = 'BN'
bo = 'BO'
bq = 'BQ'
br = 'BR'
bs = 'BS'
bt = 'BT'
bv = 'BV'
bw = 'BW'
by = 'BY'
bz = 'BZ'
ca = 'CA'
cc = 'CC'
cd = 'CD'
cf = 'CF'
cg = 'CG'
ch = 'CH'
ci = 'CI'
ck = 'CK'
cl = 'CL'
cm = 'CM'
cn = 'CN'
co = 'CO'
cr = 'CR'
cu = 'CU'
cv = 'CV'
cw = 'CW'
cx = 'CX'
cy = 'CY'
cz = 'CZ'
de = 'DE'
dj = 'DJ'
dk = 'DK'
dm = 'DM'
do = 'DO'
dz = 'DZ'
ec = 'EC'
ee = 'EE'
eg = 'EG'
eh = 'EH'
er = 'ER'
es = 'ES'
et = 'ET'
fi = 'FI'
fj = 'FJ'
fk = 'FK'
fm = 'FM'
fo = 'FO'
fr = 'FR'
ga = 'GA'
gb = 'GB'
gd = 'GD'
ge = 'GE'
gf = 'GF'
gg = 'GG'
gh = 'GH'
gi = 'GI'
gl = 'GL'
gm = 'GM'
gn = 'GN'
gp = 'GP'
gq = 'GQ'
gr = 'GR'
gs = 'GS'
gt = 'GT'
gu = 'GU'
gw = 'GW'
gy = 'GY'
hk = 'HK'
hm = 'HM'
hn = 'HN'
hr = 'HR'
ht = 'HT'
hu = 'HU'
id = 'ID'
ie = 'IE'
il = 'IL'
im = 'IM'
value_in = 'IN'
io = 'IO'
iq = 'IQ'
ir = 'IR'
value_is = 'IS'
it = 'IT'
je = 'JE'
jm = 'JM'
jo = 'JO'
jp = 'JP'
ke = 'KE'
kg = 'KG'
kh = 'KH'
ki = 'KI'
km = 'KM'
kn = 'KN'
kp = 'KP'
kr = 'KR'
kw = 'KW'
ky = 'KY'
kz = 'KZ'
la = 'LA'
lb = 'LB'
lc = 'LC'
li = 'LI'
lk = 'LK'
lr = 'LR'
ls = 'LS'
lt = 'LT'
lu = 'LU'
lv = 'LV'
ly = 'LY'
ma = 'MA'
mc = 'MC'
md = 'MD'
me = 'ME'
mf = 'MF'
mg = 'MG'
mh = 'MH'
mk = 'MK'
ml = 'ML'
mm = 'MM'
mn = 'MN'
mo = 'MO'
mp = 'MP'
mq = 'MQ'
mr = 'MR'
ms = 'MS'
mt = 'MT'
mu = 'MU'
mv = 'MV'
mw = 'MW'
mx = 'MX'
my = 'MY'
mz = 'MZ'
na = 'NA'
nc = 'NC'
ne = 'NE'
nf = 'NF'
ng = 'NG'
ni = 'NI'
nl = 'NL'
no = 'NO'
np = 'NP'
nr = 'NR'
nu = 'NU'
nz = 'NZ'
om = 'OM'
pa = 'PA'
pe = 'PE'
pf = 'PF'
pg = 'PG'
ph = 'PH'
pk = 'PK'
pl = 'PL'
pm = 'PM'
pn = 'PN'
pr = 'PR'
ps = 'PS'
pt = 'PT'
pw = 'PW'
py = 'PY'
qa = 'QA'
re = 'RE'
ro = 'RO'
rs = 'RS'
ru = 'RU'
rw = 'RW'
sa = 'SA'
sb = 'SB'
sc = 'SC'
sd = 'SD'
se = 'SE'
sg = 'SG'
sh = 'SH'
si = 'SI'
sj = 'SJ'
sk = 'SK'
sl = 'SL'
sm = 'SM'
sn = 'SN'
so = 'SO'
sr = 'SR'
ss = 'SS'
st = 'ST'
sv = 'SV'
sx = 'SX'
sy = 'SY'
sz = 'SZ'
tc = 'TC'
td = 'TD'
tf = 'TF'
tg = 'TG'
th = 'TH'
tj = 'TJ'
tk = 'TK'
tl = 'TL'
tm = 'TM'
tn = 'TN'
to = 'TO'
tr = 'TR'
tt = 'TT'
tv = 'TV'
tw = 'TW'
tz = 'TZ'
ua = 'UA'
ug = 'UG'
um = 'UM'
us = 'US'
uy = 'UY'
uz = 'UZ'
va = 'VA'
vc = 'VC'
ve = 'VE'
vg = 'VG'
vi = 'VI'
vn = 'VN'
vu = 'VU'
wf = 'WF'
ws = 'WS'
xk = 'XK'
ye = 'YE'
yt = 'YT'
za = 'ZA'
zm = 'ZM'
zw = 'ZW'
# @deprecated get_endpoint function is deprecated
@classmethod
def get_endpoint(cls):
    # Graph API edge name under which product items live (e.g. /{catalog_id}/products).
    return 'products'
# @deprecated api_create is being deprecated
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create this product item under the given product catalog.

    Delegates to ProductCatalog.create_product(); `parent_id` is the catalog id.
    """
    from facebook_business.adobjects.productcatalog import ProductCatalog
    return ProductCatalog(api=self._api, fbid=parent_id).create_product(fields, params, batch, success, failure, pending)
def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a DELETE request against this product item's Graph API node.

    Returns the executed response, or the unexecuted FacebookRequest when
    `batch` is given (request is added to the batch) or `pending` is True.
    `success`/`failure` callbacks are honored for batch calls only.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Node deletion accepts no parameters or enums.
    param_types = {
    }
    enums = {
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='DELETE',
        endpoint='/',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=AbstractCrudObject,
        api_type='NODE',
        response_parser=ObjectParser(reuse_object=self),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read this product item's node, re-populating this object with the response.

    Returns the executed response, or the unexecuted FacebookRequest when
    `batch` is given or `pending` is True. `success`/`failure` callbacks are
    honored for batch calls only.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'catalog_id': 'string',
        'image_height': 'unsigned int',
        'image_width': 'unsigned int',
        'override_country': 'string',
        'override_language': 'string',
    }
    enums = {
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProductItem,
        api_type='NODE',
        response_parser=ObjectParser(reuse_object=self),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """POST an update to this product item's node.

    `params` are validated against `param_types`/`enums` below by TypeChecker.
    Returns the executed response, or the unexecuted FacebookRequest when
    `batch` is given or `pending` is True. `success`/`failure` callbacks are
    honored for batch calls only.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {
        'additional_image_urls': 'list<string>',
        'additional_uploaded_image_ids': 'list<string>',
        'additional_variant_attributes': 'map',
        'android_app_name': 'string',
        'android_class': 'string',
        'android_package': 'string',
        'android_url': 'string',
        'availability': 'availability_enum',
        'brand': 'string',
        'category': 'string',
        'category_specific_fields': 'map',
        'checkout_url': 'string',
        'color': 'string',
        'commerce_tax_category': 'commerce_tax_category_enum',
        'condition': 'condition_enum',
        'currency': 'string',
        'custom_data': 'map',
        'custom_label_0': 'string',
        'custom_label_1': 'string',
        'custom_label_2': 'string',
        'custom_label_3': 'string',
        'custom_label_4': 'string',
        'description': 'string',
        'expiration_date': 'string',
        'fb_product_category': 'string',
        'gender': 'gender_enum',
        'gtin': 'string',
        'image_url': 'string',
        'importer_address': 'map',
        'importer_name': 'string',
        'inventory': 'unsigned int',
        'ios_app_name': 'string',
        'ios_app_store_id': 'unsigned int',
        'ios_url': 'string',
        'ipad_app_name': 'string',
        'ipad_app_store_id': 'unsigned int',
        'ipad_url': 'string',
        'iphone_app_name': 'string',
        'iphone_app_store_id': 'unsigned int',
        'iphone_url': 'string',
        'launch_date': 'string',
        'manufacturer_info': 'string',
        'manufacturer_part_number': 'string',
        'marked_for_product_launch': 'marked_for_product_launch_enum',
        'material': 'string',
        'mobile_link': 'string',
        'name': 'string',
        'offer_price_amount': 'unsigned int',
        'offer_price_end_date': 'datetime',
        'offer_price_start_date': 'datetime',
        'ordering_index': 'unsigned int',
        'origin_country': 'origin_country_enum',
        'pattern': 'string',
        'price': 'unsigned int',
        'product_type': 'string',
        'quantity_to_sell_on_facebook': 'unsigned int',
        'retailer_id': 'string',
        'return_policy_days': 'unsigned int',
        'sale_price': 'unsigned int',
        'sale_price_end_date': 'datetime',
        'sale_price_start_date': 'datetime',
        'short_description': 'string',
        'size': 'string',
        'start_date': 'string',
        'url': 'string',
        'visibility': 'visibility_enum',
        'windows_phone_app_id': 'string',
        'windows_phone_app_name': 'string',
        'windows_phone_url': 'string',
    }
    # Enum params are checked against the value sets of the nested constant classes.
    enums = {
        'availability_enum': ProductItem.Availability.__dict__.values(),
        'commerce_tax_category_enum': ProductItem.CommerceTaxCategory.__dict__.values(),
        'condition_enum': ProductItem.Condition.__dict__.values(),
        'gender_enum': ProductItem.Gender.__dict__.values(),
        'marked_for_product_launch_enum': ProductItem.MarkedForProductLaunch.__dict__.values(),
        'origin_country_enum': ProductItem.OriginCountry.__dict__.values(),
        'visibility_enum': ProductItem.Visibility.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProductItem,
        api_type='NODE',
        response_parser=ObjectParser(reuse_object=self),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_channels_to_integrity_status(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET this item's /channels_to_integrity_status edge.

    Results are parsed as CatalogItemChannelsToIntegrityStatus objects.
    Returns the executed response, or the unexecuted FacebookRequest when
    `batch` is given or `pending` is True.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.catalogitemchannelstointegritystatus import CatalogItemChannelsToIntegrityStatus
    param_types = {
    }
    enums = {
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/channels_to_integrity_status',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=CatalogItemChannelsToIntegrityStatus,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CatalogItemChannelsToIntegrityStatus, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_product_sets(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET this item's /product_sets edge (the product sets containing this item).

    Results are parsed as ProductSet objects. Returns the executed response,
    or the unexecuted FacebookRequest when `batch` is given or `pending` is True.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.productset import ProductSet
    param_types = {
    }
    enums = {
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/product_sets',
        api=self._api,
        param_checker=TypeChecker(param_types, enums),
        target_class=ProductSet,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=ProductSet, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
_field_types = {
'additional_image_cdn_urls': 'list<map<string, string>>',
'additional_image_urls': 'list<string>',
'additional_variant_attributes': 'map<string, string>',
'age_group': 'AgeGroup',
'applinks': 'CatalogItemAppLinks',
'ar_data': 'ProductItemARData',
'availability': 'Availability',
'brand': 'string',
'capability_to_review_status': 'map<Object, Object>',
'category': 'string',
'category_specific_fields': 'CatalogSubVerticalList',
'color': 'string',
'commerce_insights': 'ProductItemCommerceInsights',
'condition': 'Condition',
'currency': 'string',
'custom_data': 'map<string, string>',
'custom_label_0': 'string',
'custom_label_1': 'string',
'custom_label_2': 'string',
'custom_label_3': 'string',
'custom_label_4': 'string',
'description': 'string',
'expiration_date': 'string',
'fb_product_category': 'string',
'gender': 'Gender',
'gtin': 'string',
'id': 'string',
'image_cdn_urls': 'map<string, string>',
'image_fetch_status': 'ImageFetchStatus',
'image_url': 'string',
'images': 'list<string>',
'importer_address': 'ProductItemImporterAddress',
'importer_name': 'string',
'invalidation_errors': 'list<Object>',
'inventory': 'int',
'manufacturer_info': 'string',
'manufacturer_part_number': 'string',
'marked_for_product_launch': 'string',
'material': 'string',
'mobile_link': 'string',
'name': 'string',
'ordering_index': 'int',
'origin_country': 'string',
'parent_product_id': 'string',
'pattern': 'string',
'price': 'string',
'product_catalog': 'ProductCatalog',
'product_feed': 'ProductFeed',
'product_group': 'ProductGroup',
'product_type': 'string',
'quantity_to_sell_on_facebook': 'int',
'retailer_id': 'string',
'retailer_product_group_id': 'string',
'review_rejection_reasons': 'list<string>',
'review_status': 'ReviewStatus',
'sale_price': 'string',
'sale_price_end_date': 'string',
'sale_price_start_date': 'string',
'shipping_weight_unit': 'ShippingWeightUnit',
'shipping_weight_value': 'float',
'short_description': 'string',
'size': 'string',
'start_date': 'string',
'url': 'string',
'visibility': 'Visibility',
'additional_uploaded_image_ids': 'list<string>',
'android_app_name': 'string',
'android_class': 'string',
'android_package': 'string',
'android_url': 'string',
'checkout_url': 'string',
'commerce_tax_category': 'CommerceTaxCategory',
'ios_app_name': 'string',
'ios_app_store_id': 'unsigned int',
'ios_url': 'string',
'ipad_app_name': 'string',
'ipad_app_store_id': 'unsigned int',
'ipad_url': 'string',
'iphone_app_name': 'string',
'iphone_app_store_id': 'unsigned int',
'iphone_url': 'string',
'launch_date': 'string',
'offer_price_amount': 'unsigned int',
'offer_price_end_date': 'datetime',
'offer_price_start_date': 'datetime',
'return_policy_days': 'unsigned int',
'windows_phone_app_id': 'string',
'windows_phone_app_name': 'string',
'windows_phone_url': 'string',
}
@classmethod
def _get_field_enum_info(cls):
    """Return a mapping from enum type name to the allowed values of that enum.

    Values are taken from the __dict__ of the corresponding nested constant
    class; insertion order matches the original generated code.
    """
    enum_classes = {
        'AgeGroup': ProductItem.AgeGroup,
        'Availability': ProductItem.Availability,
        'Condition': ProductItem.Condition,
        'Gender': ProductItem.Gender,
        'ImageFetchStatus': ProductItem.ImageFetchStatus,
        'ReviewStatus': ProductItem.ReviewStatus,
        'ShippingWeightUnit': ProductItem.ShippingWeightUnit,
        'Visibility': ProductItem.Visibility,
        'CommerceTaxCategory': ProductItem.CommerceTaxCategory,
        'MarkedForProductLaunch': ProductItem.MarkedForProductLaunch,
        'OriginCountry': ProductItem.OriginCountry,
    }
    return {name: enum_class.__dict__.values() for name, enum_class in enum_classes.items()}
| 18,513 |
977 | <filename>.vscode/cSpell.json
// cSpell Settings
{
// Version of the setting file. Always 0.1
"version": "0.1",
// language - current active spelling language
"language": "en",
// words - list of words to be always considered correct
"words": [
"neopixel",
"microbit",
"Toolkit",
"Versioning",
"multithreading",
"simshim",
"semver",
"user's",
"initializers",
"superset",
"microcontroller",
"microcontrollers",
"bitwise",
"pxtarget"
],
// flagWords - list of words to be always considered incorrect
// This is useful for offensive words and common spelling errors.
// For example "hte" should be "the"
"flagWords": [
"hte"
],
"enabledLanguageIds": [
"markdown",
"plaintext",
"text"
]
} | 396 |
14,668 | <gh_stars>1000+
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MOJO_PUBLIC_CPP_BINDINGS_SEQUENCE_LOCAL_SYNC_EVENT_WATCHER_H_
#define MOJO_PUBLIC_CPP_BINDINGS_SEQUENCE_LOCAL_SYNC_EVENT_WATCHER_H_
#include "base/callback.h"
#include "base/component_export.h"
#include "base/memory/weak_ptr.h"
namespace mojo {
// This encapsulates a SyncEventWatcher watching an event shared by all
// |SequenceLocalSyncEventWatcher| on the same sequence. This class is NOT
// sequence-safe in general, but |SignalEvent()| is safe to call from any
// sequence.
//
// Interfaces which support sync messages use a WaitableEvent to block and
// be signaled when messages are available, but having a WaitableEvent for every
// such interface endpoint would cause the number of WaitableEvents to grow
// arbitrarily large.
//
// Some platform constraints may limit the number of WaitableEvents the bindings
// layer can wait upon concurrently, so this type is used to keep the number
// of such events fixed at a small constant value per sequence regardless of the
// number of active interface endpoints supporting sync messages on that
// sequence.
class COMPONENT_EXPORT(MOJO_CPP_BINDINGS) SequenceLocalSyncEventWatcher {
 public:
  // |callback| is run whenever the shared event is signaled on behalf of this
  // watcher (see SignalEvent()).
  explicit SequenceLocalSyncEventWatcher(
      const base::RepeatingClosure& callback);
  // Not copyable: each watcher owns a unique registration in the
  // sequence-local shared state.
  SequenceLocalSyncEventWatcher(const SequenceLocalSyncEventWatcher&) = delete;
  SequenceLocalSyncEventWatcher& operator=(
      const SequenceLocalSyncEventWatcher&) = delete;
  ~SequenceLocalSyncEventWatcher();
  // Signals the shared event on behalf of this specific watcher. Safe to call
  // from any sequence.
  void SignalEvent();
  // Resets the shared event on behalf of this specific watcher.
  void ResetEvent();
  // Allows this watcher to be notified during sync wait operations invoked by
  // other watchers (for example, other SequenceLocalSyncEventWatchers calling
  // |SyncWatch()|) on the same sequence.
  void AllowWokenUpBySyncWatchOnSameSequence();
  // Blocks the calling sequence until the shared event is signaled on behalf of
  // this specific watcher (i.e. until someone calls |SignalEvent()| on |this|).
  // Behaves similarly to SyncEventWatcher and SyncHandleWatcher, returning
  // |true| when |*should_stop| is set to |true|, or |false| if some other
  // (e.g. error) event interrupts the wait.
  bool SyncWatch(const bool* should_stop);
 private:
  class Registration;
  class SequenceLocalState;
  friend class SequenceLocalState;
  // Ties this watcher into the sequence-local shared state for its lifetime.
  const std::unique_ptr<Registration> registration_;
  // Invoked when the shared event is signaled on behalf of this watcher.
  const base::RepeatingClosure callback_;
  // Set by AllowWokenUpBySyncWatchOnSameSequence().
  bool can_wake_up_during_any_watch_ = false;
};
} // namespace mojo
#endif // MOJO_PUBLIC_CPP_BINDINGS_SEQUENCE_LOCAL_SYNC_EVENT_WATCHER_H_
| 809 |
782 | /*
* Copyright (c) 2021, <NAME>. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.feature.associate;
import boofcv.struct.feature.AssociatedIndex;
import org.ddogleg.struct.DogArray_B;
import org.ddogleg.struct.DogArray_I32;
import org.ddogleg.struct.FastAccess;
/**
* Given a list of associated features, find all the unassociated features.
*
* @author <NAME>
*/
public class FindUnassociated {
	// list of indexes in source which are unassociated
	DogArray_I32 unassociatedSrc = new DogArray_I32();
	// list of indexes in destination which are unassociated
	DogArray_I32 unassociatedDst = new DogArray_I32();
	// marks whether the feature at each index has been associated
	DogArray_B matched = new DogArray_B();

	/**
	 * Finds unassociated features in source
	 *
	 * @param matches List of matched features
	 * @param featureCount Number of source features
	 * @return indexes of unassociated features from source
	 */
	public DogArray_I32 checkSource( FastAccess<AssociatedIndex> matches, int featureCount ) {
		markMatches(featureCount);
		for (int i = 0; i < matches.size; i++) {
			matched.data[matches.get(i).src] = true;
		}
		return collectUnmatched(unassociatedSrc, featureCount);
	}

	/**
	 * Finds unassociated features in destination
	 *
	 * @param matches List of matched features
	 * @param featureCount Number of destination features
	 * @return indexes of unassociated features from destination
	 */
	public DogArray_I32 checkDestination( FastAccess<AssociatedIndex> matches, final int featureCount ) {
		markMatches(featureCount);
		for (int i = 0; i < matches.size; i++) {
			matched.data[matches.get(i).dst] = true;
		}
		return collectUnmatched(unassociatedDst, featureCount);
	}

	/** Resets {@link #matched} so that all {@code featureCount} entries are unmarked. */
	private void markMatches( int featureCount ) {
		matched.resize(featureCount);
		matched.fill(false);
	}

	/** Collects every index not marked in {@link #matched} into {@code output} and returns it. */
	private DogArray_I32 collectUnmatched( DogArray_I32 output, int featureCount ) {
		output.reset();
		for (int i = 0; i < featureCount; i++) {
			if (!matched.data[i]) {
				output.add(i);
			}
		}
		return output;
	}
}
| 801 |
411 | <filename>intTests/test0027_crucible_llvm/test.c<gh_stars>100-1000
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
/* Sum of two 32-bit unsigned values. Unsigned arithmetic is defined to wrap
 * modulo 2^32, so overflow is well-defined here. */
uint32_t add_nums32( uint32_t x, uint32_t y ) {
  return x + y;
}
/* Sum of two 64-bit unsigned values. Unsigned arithmetic is defined to wrap
 * modulo 2^64, so overflow is well-defined here. */
uint64_t add_nums64( uint64_t x, uint64_t y ) {
  return x + y;
}
int main() {
  /* Use the <inttypes.h> PRIu32/PRIu64 macros: "%u" and "%llu" are only
   * correct when uint32_t/uint64_t happen to be unsigned int / unsigned long
   * long on the target platform. The printed text is unchanged. */
  printf("%" PRIu32 " + %" PRIu32 " = %" PRIu32 "\n",
         (uint32_t)12, (uint32_t)30, add_nums32(12U, 30U));
  printf("%" PRIu64 " + %" PRIu64 " = %" PRIu64 "\n",
         (uint64_t)12, (uint64_t)30, add_nums64(12ULL, 30ULL));
  return 0;
}
| 201 |
868 | /*
* Copyright 2016 <NAME>
* <p>
* All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.protocol.amqp.sasl.scram;
import java.nio.charset.Charset;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Base64;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.crypto.Mac;
import org.apache.activemq.artemis.spi.core.security.scram.ScramException;
import org.apache.activemq.artemis.spi.core.security.scram.ScramUtils;
import org.apache.activemq.artemis.spi.core.security.scram.StringPrep;
/**
* Provides building blocks for creating SCRAM authentication client
*/
@SuppressWarnings("unused")
public class ScramClientFunctionalityImpl implements ScramClientFunctionality {
private static final Pattern SERVER_FIRST_MESSAGE = Pattern.compile("r=([^,]*),s=([^,]*),i=(.*)$");
private static final Pattern SERVER_FINAL_MESSAGE = Pattern.compile("v=([^,]*)$");
private static final String GS2_HEADER = "n,,";
private static final Charset ASCII = Charset.forName("ASCII");
private final String mDigestName;
private final String mHmacName;
private final String mClientNonce;
private String mClientFirstMessageBare;
private final boolean mIsSuccessful = false;
private byte[] mSaltedPassword;
private String mAuthMessage;
private State mState = State.INITIAL;
/**
* Create new ScramClientFunctionalityImpl
* @param digestName Digest to be used
* @param hmacName HMAC to be used
*/
public ScramClientFunctionalityImpl(String digestName, String hmacName) {
this(digestName, hmacName, UUID.randomUUID().toString());
}
/**
* Create new ScramClientFunctionalityImpl
* @param digestName Digest to be used
* @param hmacName HMAC to be used
* @param clientNonce Client nonce to be used
*/
public ScramClientFunctionalityImpl(String digestName, String hmacName, String clientNonce) {
if (ScramUtils.isNullOrEmpty(digestName)) {
throw new NullPointerException("digestName cannot be null or empty");
}
if (ScramUtils.isNullOrEmpty(hmacName)) {
throw new NullPointerException("hmacName cannot be null or empty");
}
if (ScramUtils.isNullOrEmpty(clientNonce)) {
throw new NullPointerException("clientNonce cannot be null or empty");
}
mDigestName = digestName;
mHmacName = hmacName;
mClientNonce = clientNonce;
}
/**
* Prepares first client message You may want to use
* {@link StringPrep#isContainingProhibitedCharacters(String)} in order to check if the username
* contains only valid characters
* @param username Username
* @return prepared first message
* @throws ScramException if <code>username</code> contains prohibited characters
*/
@Override
public String prepareFirstMessage(String username) throws ScramException {
if (mState != State.INITIAL) {
throw new IllegalStateException("You can call this method only once");
}
try {
mClientFirstMessageBare = "n=" + StringPrep.prepAsQueryString(username) + ",r=" + mClientNonce;
mState = State.FIRST_PREPARED;
return GS2_HEADER + mClientFirstMessageBare;
} catch (StringPrep.StringPrepError e) {
mState = State.ENDED;
throw new ScramException("Username contains prohibited character");
}
}
@Override
public String prepareFinalMessage(String password, String serverFirstMessage) throws ScramException {
if (mState != State.FIRST_PREPARED) {
throw new IllegalStateException("You can call this method once only after " + "calling prepareFirstMessage()");
}
Matcher m = SERVER_FIRST_MESSAGE.matcher(serverFirstMessage);
if (!m.matches()) {
mState = State.ENDED;
return null;
}
String nonce = m.group(1);
if (!nonce.startsWith(mClientNonce)) {
mState = State.ENDED;
return null;
}
String salt = m.group(2);
String iterationCountString = m.group(3);
int iterations = Integer.parseInt(iterationCountString);
if (iterations <= 0) {
mState = State.ENDED;
return null;
}
try {
mSaltedPassword = ScramUtils.generateSaltedPassword(password, Base64.getDecoder().decode(salt), iterations,
Mac.getInstance(mHmacName));
String clientFinalMessageWithoutProof =
"c=" + Base64.getEncoder().encodeToString(GS2_HEADER.getBytes(ASCII)) + ",r=" + nonce;
mAuthMessage = mClientFirstMessageBare + "," + serverFirstMessage + "," + clientFinalMessageWithoutProof;
byte[] clientKey = ScramUtils.computeHmac(mSaltedPassword, mHmacName, "Client Key");
byte[] storedKey = MessageDigest.getInstance(mDigestName).digest(clientKey);
byte[] clientSignature = ScramUtils.computeHmac(storedKey, mHmacName, mAuthMessage);
byte[] clientProof = clientKey.clone();
for (int i = 0; i < clientProof.length; i++) {
clientProof[i] ^= clientSignature[i];
}
mState = State.FINAL_PREPARED;
return clientFinalMessageWithoutProof + ",p=" + Base64.getEncoder().encodeToString(clientProof);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
mState = State.ENDED;
throw new ScramException(e);
}
}
@Override
public void checkServerFinalMessage(String serverFinalMessage) throws ScramException {
if (mState != State.FINAL_PREPARED) {
throw new IllegalStateException("You can call this method only once after " + "calling prepareFinalMessage()");
}
Matcher m = SERVER_FINAL_MESSAGE.matcher(serverFinalMessage);
if (!m.matches()) {
mState = State.ENDED;
throw new ScramException("invalid message format");
}
byte[] serverSignature = Base64.getDecoder().decode(m.group(1));
mState = State.ENDED;
if (!Arrays.equals(serverSignature, getExpectedServerSignature())) {
throw new ScramException("Server signature missmatch");
}
}
@Override
public boolean isSuccessful() {
if (mState == State.ENDED) {
return mIsSuccessful;
} else {
throw new IllegalStateException("You cannot call this method before authentication is ended. " +
"Use isEnded() to check that");
}
}
@Override
public boolean isEnded() {
return mState == State.ENDED;
}
@Override
public State getState() {
return mState;
}
private byte[] getExpectedServerSignature() throws ScramException {
try {
byte[] serverKey = ScramUtils.computeHmac(mSaltedPassword, mHmacName, "Server Key");
return ScramUtils.computeHmac(serverKey, mHmacName, mAuthMessage);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
mState = State.ENDED;
throw new ScramException(e);
}
}
}
| 2,854 |
368 | <filename>server/src/push_server/push_app.h
//
// push_app.h
// my_push_server
//
// Created by luoning on 14-11-4.
// Copyright (c) 2014年 luoning. All rights reserved.
//
#ifndef __my_push_server__push_app__
#define __my_push_server__push_app__
#include <stdio.h>
#include "type/base_type.h"
#include "socket/epoll_io_loop.h"
// Application object that owns the server's epoll I/O loop.
class CPushApp
{
public:
    CPushApp();
    virtual ~CPushApp();
    // Process-wide accessor; looks like a singleton -- confirm ownership and
    // lifetime in push_app.cpp.
    static CPushApp* GetInstance();
    // Init/UnInit pair guarded by m_bInit; Start/Stop control the run state.
    BOOL Init();
    BOOL UnInit();
    BOOL Start();
    BOOL Stop();
    // Exposes the underlying epoll loop for registering I/O handlers.
    CEpollIOLoop& GetIOLoop() { return m_io; }
private:
    BOOL m_bInit;       // whether Init() has completed
    CEpollIOLoop m_io;  // the epoll-based I/O event loop
};
#endif /* defined(__my_push_server__push_app__) */
| 305 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#pragma once
#include "delegatedstatusrequest.h"
#include "statusdelegator.h"
#include <vespa/storageframework/generic/component/component.h>
namespace storage {
namespace distributor {
// StatusReporter that forwards status requests for |_target| through a
// StatusDelegator, registering itself as a component status page.
class StatusReporterDelegate
    : public framework::StatusReporter
{
    const StatusDelegator& _delegator;        // routes status requests
    const framework::StatusReporter& _target; // reporter being wrapped
    framework::Component _component;          // registration handle
public:
    StatusReporterDelegate(framework::ComponentRegister& compReg,
                           const StatusDelegator& delegator,
                           const framework::StatusReporter& target);
    // Registers this delegate's status page with the component register.
    void registerStatusPage();
    vespalib::string getReportContentType(const framework::HttpUrlPath&) const override;
    bool reportStatus(std::ostream&, const framework::HttpUrlPath&) const override;
};
} // distributor
} // storage
| 308 |
432 | <filename>sys/dev/pccard/exca/excareg.h
/* $NetBSD: i82365reg.h,v 1.3 1998/12/20 17:53:28 nathanw Exp $ */
/* $FreeBSD: src/sys/dev/exca/excareg.h,v 1.5 2005/01/06 01:42:40 imp Exp $ */
/*-
* Copyright (c) 2002 <NAME>. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software may be derived from NetBSD i82365.c and other files with
* the following copyright:
*
* Copyright (c) 1997 <NAME>. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by <NAME>.
* 4. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _SYS_DEV_EXCA_EXCAREG_H
#define _SYS_DEV_EXCA_EXCAREG_H
/*
* All information is from the intel 82365sl PC Card Interface Controller
* (PCIC) data sheet, marked "preliminary". Order number 290423-002, January
* 1993.
*/
#define EXCA_IOSIZE 2
#define EXCA_REG_INDEX 0
#define EXCA_REG_DATA 1
#define EXCA_NSLOTS 4 /* 2 in 2 chips */
/*
* I/o ports
*/
#define EXCA_INDEX0 0x3e0
/*
* The PCIC allows two chips to share the same address. In order not to run
* afoul of the bsd device model, this driver will treat those chips as
* the same device.
*/
#define EXCA_CHIP0_BASE 0x00
#define EXCA_CHIP1_BASE 0x80
/* Each PCIC chip can drive two sockets */
#define EXCA_SOCKET_SIZE 0x40
#define EXCA_SOCKETA_INDEX 0x00
#define EXCA_SOCKETB_INDEX EXCA_SOCKET_SIZE
/* general setup registers */
#define EXCA_IDENT 0x00 /* RO */
#define EXCA_IDENT_IFTYPE_MASK 0xC0
#define EXCA_IDENT_IFTYPE_IO_ONLY 0x00
#define EXCA_IDENT_IFTYPE_MEM_ONLY 0x40
#define EXCA_IDENT_IFTYPE_MEM_AND_IO 0x80
#define EXCA_IDENT_IFTYPE_RESERVED 0xC0
#define EXCA_IDENT_ZERO 0x30
#define EXCA_IDENT_REV_MASK 0x0F
#define EXCA_IDENT_REV_I82365SLR0 0x02 /* step a/b */
#define EXCA_IDENT_REV_I82365SLR1 0x03 /* step c */
#define EXCA_IDENT_REV_I82365SLDF 0x04 /* step df */
#define EXCA_IDENT_REV_IBM1 0x08 /* ibm clone */
#define EXCA_IDENT_REV_IBM2 0x09 /* ibm clone */
#define EXCA_IDENT_REV_IBM_KING 0x0a /* ibm king */
#define EXCA_IF_STATUS 0x01 /* RO */
#define EXCA_IF_STATUS_GPI 0x80 /* General Purpose Input */
#define EXCA_IF_STATUS_POWERACTIVE 0x40
#define EXCA_IF_STATUS_READY 0x20 /* really READY/!BUSY */
#define EXCA_IF_STATUS_MEM_WP 0x10
#define EXCA_IF_STATUS_CARDDETECT_MASK 0x0C
#define EXCA_IF_STATUS_CARDDETECT_PRESENT 0x0C
#define EXCA_IF_STATUS_BATTERY_MASK 0x03
#define EXCA_IF_STATUS_BATTERY_DEAD1 0x00
#define EXCA_IF_STATUS_BATTERY_DEAD2 0x01
#define EXCA_IF_STATUS_BATTERY_WARNING 0x02
#define EXCA_IF_STATUS_BATTERY_GOOD 0x03
#define EXCA_PWRCTL 0x02 /* RW */
#define EXCA_PWRCTL_OE 0x80 /* output enable */
#define EXCA_PWRCTL_DISABLE_RESETDRV 0x40
#define EXCA_PWRCTL_AUTOSWITCH_ENABLE 0x20
#define EXCA_PWRCTL_PWR_ENABLE 0x10
#define EXCA_PWRCTL_VPP2_MASK 0x0C
/* XXX these are a little unclear from the data sheet */
#define EXCA_PWRCTL_VPP2_RESERVED 0x0C
#define EXCA_PWRCTL_VPP2_EN1 0x08
#define EXCA_PWRCTL_VPP2_EN0 0x04
#define EXCA_PWRCTL_VPP2_ENX 0x00
#define EXCA_PWRCTL_VPP1_MASK 0x03
/* XXX these are a little unclear from the data sheet */
#define EXCA_PWRCTL_VPP1_RESERVED 0x03
#define EXCA_PWRCTL_VPP1_EN1 0x02
#define EXCA_PWRCTL_VPP1_EN0 0x01
#define EXCA_PWRCTL_VPP1_ENX 0x00
#define EXCA_CSC 0x04 /* RW */
#define EXCA_CSC_ZERO 0xE0
#define EXCA_CSC_GPI 0x10
#define EXCA_CSC_CD 0x08 /* Card Detect Change */
#define EXCA_CSC_READY 0x04
#define EXCA_CSC_BATTWARN 0x02
#define EXCA_CSC_BATTDEAD 0x01 /* for memory cards */
#define EXCA_CSC_RI 0x01 /* for i/o cards */
#define EXCA_ADDRWIN_ENABLE 0x06 /* RW */
#define EXCA_ADDRWIN_ENABLE_IO1 0x80
#define EXCA_ADDRWIN_ENABLE_IO0 0x40
#define EXCA_ADDRWIN_ENABLE_MEMCS16 0x20 /* rtfds if you care */
#define EXCA_ADDRWIN_ENABLE_MEM4 0x10
#define EXCA_ADDRWIN_ENABLE_MEM3 0x08
#define EXCA_ADDRWIN_ENABLE_MEM2 0x04
#define EXCA_ADDRWIN_ENABLE_MEM1 0x02
#define EXCA_ADDRWIN_ENABLE_MEM0 0x01
#define EXCA_CARD_DETECT 0x16 /* RW */
#define EXCA_CARD_DETECT_RESERVED 0xC0
#define EXCA_CARD_DETECT_SW_INTR 0x20
#define EXCA_CARD_DETECT_RESUME_ENABLE 0x10
#define EXCA_CARD_DETECT_GPI_TRANSCTL 0x08
#define EXCA_CARD_DETECT_GPI_ENABLE 0x04
#define EXCA_CARD_DETECT_CFGRST_ENABLE 0x02
#define EXCA_CARD_DETECT_MEMDLY_INHIBIT 0x01
/* interrupt registers */
#define EXCA_INTR 0x03 /* RW */
#define EXCA_INTR_RI_ENABLE 0x80
#define EXCA_INTR_RESET 0x40 /* active low (zero) */
#define EXCA_INTR_CARDTYPE_MASK 0x20
#define EXCA_INTR_CARDTYPE_IO 0x20
#define EXCA_INTR_CARDTYPE_MEM 0x00
#define EXCA_INTR_ENABLE 0x10
#define EXCA_INTR_IRQ_MASK 0x0F
#define EXCA_INTR_IRQ_SHIFT 0
#define EXCA_INTR_IRQ_NONE 0x00
#define EXCA_INTR_IRQ_RESERVED1 0x01
#define EXCA_INTR_IRQ_RESERVED2 0x02
#define EXCA_INTR_IRQ3 0x03
#define EXCA_INTR_IRQ4 0x04
#define EXCA_INTR_IRQ5 0x05
#define EXCA_INTR_IRQ_RESERVED6 0x06
#define EXCA_INTR_IRQ7 0x07
#define EXCA_INTR_IRQ_RESERVED8 0x08
#define EXCA_INTR_IRQ9 0x09
#define EXCA_INTR_IRQ10 0x0A
#define EXCA_INTR_IRQ11 0x0B
#define EXCA_INTR_IRQ12 0x0C
#define EXCA_INTR_IRQ_RESERVED13 0x0D
#define EXCA_INTR_IRQ14 0x0E
#define EXCA_INTR_IRQ15 0x0F
#define EXCA_INTR_IRQ_VALIDMASK 0xDEB8 /* 1101 1110 1011 1000 */
#define EXCA_CSC_INTR 0x05 /* RW */
#define EXCA_CSC_INTR_IRQ_MASK 0xF0
#define EXCA_CSC_INTR_IRQ_SHIFT 4
#define EXCA_CSC_INTR_IRQ_NONE 0x00
#define EXCA_CSC_INTR_IRQ_RESERVED1 0x10
#define EXCA_CSC_INTR_IRQ_RESERVED2 0x20
#define EXCA_CSC_INTR_IRQ3 0x30
#define EXCA_CSC_INTR_IRQ4 0x40
#define EXCA_CSC_INTR_IRQ5 0x50
#define EXCA_CSC_INTR_IRQ_RESERVED6 0x60
#define EXCA_CSC_INTR_IRQ7 0x70
#define EXCA_CSC_INTR_IRQ_RESERVED8 0x80
#define EXCA_CSC_INTR_IRQ9 0x90
#define EXCA_CSC_INTR_IRQ10 0xA0
#define EXCA_CSC_INTR_IRQ11 0xB0
#define EXCA_CSC_INTR_IRQ12 0xC0
#define EXCA_CSC_INTR_IRQ_RESERVED13 0xD0
#define EXCA_CSC_INTR_IRQ14 0xE0
#define EXCA_CSC_INTR_IRQ15 0xF0
#define EXCA_CSC_INTR_CD_ENABLE 0x08
#define EXCA_CSC_INTR_READY_ENABLE 0x04
#define EXCA_CSC_INTR_BATTWARN_ENABLE 0x02
#define EXCA_CSC_INTR_BATTDEAD_ENABLE 0x01 /* for memory cards */
#define EXCA_CSC_INTR_RI_ENABLE 0x01 /* for I/O cards */
#define EXCA_CSC_INTR_IRQ_VALIDMASK 0xDEB8 /* 1101 1110 1011 1000 */
/* I/O registers */
#define EXCA_IO_WINS 2
#define EXCA_IOCTL 0x07 /* RW */
#define EXCA_IOCTL_IO1_WAITSTATE 0x80
#define EXCA_IOCTL_IO1_ZEROWAIT 0x40
#define EXCA_IOCTL_IO1_IOCS16SRC_MASK 0x20
#define EXCA_IOCTL_IO1_IOCS16SRC_CARD 0x20
#define EXCA_IOCTL_IO1_IOCS16SRC_DATASIZE 0x00
#define EXCA_IOCTL_IO1_DATASIZE_MASK 0x10
#define EXCA_IOCTL_IO1_DATASIZE_16BIT 0x10
#define EXCA_IOCTL_IO1_DATASIZE_8BIT 0x00
#define EXCA_IOCTL_IO0_WAITSTATE 0x08
#define EXCA_IOCTL_IO0_ZEROWAIT 0x04
#define EXCA_IOCTL_IO0_IOCS16SRC_MASK 0x02
#define EXCA_IOCTL_IO0_IOCS16SRC_CARD 0x02
#define EXCA_IOCTL_IO0_IOCS16SRC_DATASIZE 0x00
#define EXCA_IOCTL_IO0_DATASIZE_MASK 0x01
#define EXCA_IOCTL_IO0_DATASIZE_16BIT 0x01
#define EXCA_IOCTL_IO0_DATASIZE_8BIT 0x00
#define EXCA_IOADDR0_START_LSB 0x08
#define EXCA_IOADDR0_START_MSB 0x09
#define EXCA_IOADDR0_STOP_LSB 0x0A
#define EXCA_IOADDR0_STOP_MSB 0x0B
#define EXCA_IOADDR1_START_LSB 0x0C
#define EXCA_IOADDR1_START_MSB 0x0D
#define EXCA_IOADDR1_STOP_LSB 0x0E
#define EXCA_IOADDR1_STOP_MSB 0x0F
/* memory registers */
/*
* memory window addresses refer to bits A23-A12 of the ISA system memory
* address. This is a shift of 12 bits. The LSB contains A19-A12, and the
* MSB contains A23-A20, plus some other bits.
*/
#define EXCA_MEM_WINS 5
#define EXCA_MEM_SHIFT 12
#define EXCA_MEM_PAGESIZE (1<<EXCA_MEM_SHIFT)
#define EXCA_SYSMEM_ADDRX_SHIFT EXCA_MEM_SHIFT
#define EXCA_SYSMEM_ADDRX_START_MSB_DATASIZE_MASK 0x80
#define EXCA_SYSMEM_ADDRX_START_MSB_DATASIZE_16BIT 0x80
#define EXCA_SYSMEM_ADDRX_START_MSB_DATASIZE_8BIT 0x00
#define EXCA_SYSMEM_ADDRX_START_MSB_ZEROWAIT 0x40
#define EXCA_SYSMEM_ADDRX_START_MSB_SCRATCH_MASK 0x30
#define EXCA_SYSMEM_ADDRX_START_MSB_ADDR_MASK 0x0F
#define EXCA_SYSMEM_ADDRX_STOP_MSB_WAIT_MASK 0xC0
#define EXCA_SYSMEM_ADDRX_STOP_MSB_WAIT0 0x00
#define EXCA_SYSMEM_ADDRX_STOP_MSB_WAIT1 0x40
#define EXCA_SYSMEM_ADDRX_STOP_MSB_WAIT2 0x80
#define EXCA_SYSMEM_ADDRX_STOP_MSB_WAIT3 0xC0
#define EXCA_SYSMEM_ADDRX_STOP_MSB_ADDR_MASK 0x0F
/*
* The card side of a memory mapping consists of bits A19-A12 of the card
* memory address in the LSB, and A25-A20 plus some other bits in the MSB.
* Again, the shift is 12 bits.
*/
#define EXCA_CARDMEM_ADDRX_SHIFT EXCA_MEM_SHIFT
#define EXCA_CARDMEM_ADDRX_MSB_WP 0x80
#define EXCA_CARDMEM_ADDRX_MSB_REGACTIVE_MASK 0x40
#define EXCA_CARDMEM_ADDRX_MSB_REGACTIVE_ATTR 0x40
#define EXCA_CARDMEM_ADDRX_MSB_REGACTIVE_COMMON 0x00
#define EXCA_CARDMEM_ADDRX_MSB_ADDR_MASK 0x3F
#define EXCA_SYSMEM_ADDR0_START_LSB 0x10
#define EXCA_SYSMEM_ADDR0_START_MSB 0x11
#define EXCA_SYSMEM_ADDR0_STOP_LSB 0x12
#define EXCA_SYSMEM_ADDR0_STOP_MSB 0x13
#define EXCA_CARDMEM_ADDR0_LSB 0x14
#define EXCA_CARDMEM_ADDR0_MSB 0x15
/* #define EXCA_RESERVED 0x17 */
#define EXCA_SYSMEM_ADDR1_START_LSB 0x18
#define EXCA_SYSMEM_ADDR1_START_MSB 0x19
#define EXCA_SYSMEM_ADDR1_STOP_LSB 0x1A
#define EXCA_SYSMEM_ADDR1_STOP_MSB 0x1B
#define EXCA_CARDMEM_ADDR1_LSB 0x1C
#define EXCA_CARDMEM_ADDR1_MSB 0x1D
#define EXCA_SYSMEM_ADDR2_START_LSB 0x20
#define EXCA_SYSMEM_ADDR2_START_MSB 0x21
#define EXCA_SYSMEM_ADDR2_STOP_LSB 0x22
#define EXCA_SYSMEM_ADDR2_STOP_MSB 0x23
#define EXCA_CARDMEM_ADDR2_LSB 0x24
#define EXCA_CARDMEM_ADDR2_MSB 0x25
/* #define EXCA_RESERVED 0x26 */
/* #define EXCA_RESERVED 0x27 */
#define EXCA_SYSMEM_ADDR3_START_LSB 0x28
#define EXCA_SYSMEM_ADDR3_START_MSB 0x29
#define EXCA_SYSMEM_ADDR3_STOP_LSB 0x2A
#define EXCA_SYSMEM_ADDR3_STOP_MSB 0x2B
#define EXCA_CARDMEM_ADDR3_LSB 0x2C
#define EXCA_CARDMEM_ADDR3_MSB 0x2D
/* #define EXCA_RESERVED 0x2E */
/* #define EXCA_RESERVED 0x2F */
#define EXCA_SYSMEM_ADDR4_START_LSB 0x30
#define EXCA_SYSMEM_ADDR4_START_MSB 0x31
#define EXCA_SYSMEM_ADDR4_STOP_LSB 0x32
#define EXCA_SYSMEM_ADDR4_STOP_MSB 0x33
#define EXCA_CARDMEM_ADDR4_LSB 0x34
#define EXCA_CARDMEM_ADDR4_MSB 0x35
/* #define EXCA_RESERVED 0x36 */
/* #define EXCA_RESERVED 0x37 */
/* #define EXCA_RESERVED 0x38 */
/* #define EXCA_RESERVED 0x39 */
/* #define EXCA_RESERVED 0x3A */
/* #define EXCA_RESERVED 0x3B */
/* #define EXCA_RESERVED 0x3C */
/* #define EXCA_RESERVED 0x3D */
/* #define EXCA_RESERVED 0x3E */
/* #define EXCA_RESERVED 0x3F */
/* cardbus extensions - memory window page registers */
#define EXCA_MEMREG_WIN_SHIFT 24
#define EXCA_SYSMEM_ADDR0_WIN 0x40
#define EXCA_SYSMEM_ADDR1_WIN 0x41
#define EXCA_SYSMEM_ADDR2_WIN 0x42
#define EXCA_SYSMEM_ADDR3_WIN 0x43
#define EXCA_SYSMEM_ADDR4_WIN 0x44
/* vendor-specific registers */
#define EXCA_INTEL_GLOBAL_CTL 0x1E /* RW */
#define EXCA_INTEL_GLOBAL_CTL_RESERVED 0xF0
#define EXCA_INTEL_GLOBAL_CTL_IRQ14PULSE_ENABLE 0x08
#define EXCA_INTEL_GLOBAL_CTL_EXPLICIT_CSC_ACK 0x04
#define EXCA_INTEL_GLOBAL_CTL_IRQLEVEL_ENABLE 0x02
#define EXCA_INTEL_GLOBAL_CTL_POWERDOWN 0x01
#define EXCA_CIRRUS_MISC_CTL_2 0x1E
#define EXCA_CIRRUS_MISC_CTL_2_SUSPEND 0x04
#define EXCA_CIRRUS_CHIP_INFO 0x1F
#define EXCA_CIRRUS_CHIP_INFO_CHIP_ID 0xC0
#define EXCA_CIRRUS_CHIP_INFO_SLOTS 0x20
#define EXCA_CIRRUS_CHIP_INFO_REV 0x1F
#define EXCA_CIRRUS_EXTENDED_INDEX 0x2E
#define EXCA_CIRRUS_EXTENDED_DATA 0x2F
#define EXCA_CIRRUS_EXT_CONTROL_1 0x03
#define EXCA_CIRRUS_EXT_CONTROL_1_PCI_INTR_MASK 0x18
#define EXCA_VADEM_VMISC 0x3a
#define EXCA_VADEM_REV 0x40
#define EXCA_VADEM_COOKIE1 0x0E
#define EXCA_VADEM_COOKIE2 0x37
#define EXCA_RICOH_ID 0x3a
#define EXCA_RID_296 0x32
#define EXCA_RID_396 0xb2
/*
* o2 micro specific registers
*/
#define EXCA_O2MICRO_CTRL_C 0x3a
#define EXCA_O2CC_IREQ_INTC 0x80
#define EXCA_O2CC_STSCHG_INTC 0x20
/* Plug and play */
#define EXCA_PNP_ACTIONTEC 0x1802A904 /* AEI0218 */
#define EXCA_PNP_IBM3765 0x65374d24 /* IBM3765 */
#define EXCA_PNP_82365 0x000ED041 /* PNP0E00 */
#define EXCA_PNP_CL_PD6720 0x010ED041 /* PNP0E01 */
#define EXCA_PNP_VLSI_82C146 0x020ED041 /* PNP0E02 */
#define EXCA_PNP_82365_CARDBUS 0x030ED041 /* PNP0E03 */
#define EXCA_PNP_SCM_SWAPBOX 0x69046d4c /* SMC0469 */
/*
* Mask of allowable interrupts.
*
* For IBM-AT machines, irqs 3, 4, 5, 7, 9, 10, 11, 12, 14, 15 are
* allowed. Nearly all IBM-AT machines with pcic cards or bridges
* wire these interrupts (or a subset thereof) to the corresponding
* pins on the ISA bus. Some older laptops are reported to not route
* all the interrupt pins to the bus because the designers knew that
* some would conflict with builtin devices. Older versions of Windows
* NT had a special device that would probe for conflicts early in the
* boot process and formulate a mapping table. Maybe we should do
* something similar.
*
* For NEC PC-98 machines, irq 3, 5, 6, 9, 10, 11, 12, 13 are allowed.
* These correspond to the C-BUS signals INT 0, 1, 2, 3, 41, 42, 5, 6
* respectively.
*
* <NAME>SUKADA-san writes in FreeBSD98-testers that CBUS INT 2
* (mapped to IRQ 6) is routed to the IRQ 7 pin of the pcic in pc98
* cbus add-in cards. He has confirmed this routing with a visual
* inspection of his card or a VOM.
*/
#define EXCA_INT_MASK_ALLOWED 0xDEB8 /* AT */
#endif /* !_SYS_DEV_EXCA_EXCAREG_H */
| 7,885 |
1,444 | <reponame>bTest2018/AnimeTaste<filename>src/main/java/com/zhan_dui/utils/m3u8/ElementBuilder.java
package com.zhan_dui.utils.m3u8;
import java.net.URI;
/**
* @author dkuffner
*/
class ElementBuilder {

    private double duration;
    private URI uri;
    private PlaylistInfo playlistInfo;
    private EncryptionInfo encryptionInfo;
    private String title;
    private long programDate = -1;
    private boolean discontinuity = false;

    public ElementBuilder() {
    }

    /** @return the configured program date, or -1 when none has been set */
    public long programDate() {
        return this.programDate;
    }

    /** Sets the program date. @return this builder, for chaining */
    public ElementBuilder programDate(long programDate) {
        this.programDate = programDate;
        return this;
    }

    /** @return the element title (may be null) */
    public String getTitle() {
        return this.title;
    }

    /** Sets the element title. @return this builder, for chaining */
    public ElementBuilder title(String title) {
        this.title = title;
        return this;
    }

    /** @return the element duration in seconds */
    public double getDuration() {
        return this.duration;
    }

    /** Sets the element duration. @return this builder, for chaining */
    public ElementBuilder duration(double duration) {
        this.duration = duration;
        return this;
    }

    /** Sets the discontinuity flag. @return this builder, for chaining */
    public ElementBuilder discontinuity(boolean d) {
        this.discontinuity = d;
        return this;
    }

    /** @return the element URI (may be null) */
    public URI getUri() {
        return this.uri;
    }

    /** Sets the element URI. @return this builder, for chaining */
    public ElementBuilder uri(URI uri) {
        this.uri = uri;
        return this;
    }

    /** Attaches playlist info built from the given values. @return this builder */
    public ElementBuilder playList(final int programId, final int bandWidth, final String codec) {
        this.playlistInfo = new ElementImpl.PlaylistInfoImpl(programId, bandWidth, codec);
        return this;
    }

    /** Clears any playlist info. (Method name kept as-is for source compatibility.) */
    public ElementBuilder resetPlatListInfo() {
        this.playlistInfo = null;
        return this;
    }

    /** Clears any encryption info. */
    public ElementBuilder resetEncryptedInfo() {
        this.encryptionInfo = null;
        return this;
    }

    /** Restores every field to its initial state. @return this builder */
    public ElementBuilder reset() {
        this.duration = 0;
        this.uri = null;
        this.title = null;
        this.programDate = -1;
        this.discontinuity = false;
        resetEncryptedInfo();
        resetPlatListInfo();
        return this;
    }

    /** Attaches the given encryption info. @return this builder */
    public ElementBuilder encrypted(EncryptionInfo info) {
        this.encryptionInfo = info;
        return this;
    }

    /** Attaches encryption info built from the given key URI and method. */
    public ElementBuilder encrypted(final URI uri, final String method) {
        this.encryptionInfo = new ElementImpl.EncryptionInfoImpl(uri, method);
        return this;
    }

    /** Builds an Element from the builder's current state. */
    public Element create() {
        return new ElementImpl(playlistInfo, encryptionInfo, duration, uri, title, programDate, discontinuity);
    }
}
| 938 |
372 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.docs.v1.model;
/**
* A named style. Paragraphs in the document can inherit their TextStyle and ParagraphStyle from
* this named style when they have the same named style type.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Docs API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class NamedStyle extends com.google.api.client.json.GenericJson {
/**
* The type of this named style.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String namedStyleType;
/**
* The paragraph style of this named style.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private ParagraphStyle paragraphStyle;
/**
* The text style of this named style.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TextStyle textStyle;
/**
* The type of this named style.
* @return value or {@code null} for none
*/
public java.lang.String getNamedStyleType() {
return namedStyleType;
}
/**
* The type of this named style.
* @param namedStyleType namedStyleType or {@code null} for none
*/
public NamedStyle setNamedStyleType(java.lang.String namedStyleType) {
this.namedStyleType = namedStyleType;
return this;
}
/**
* The paragraph style of this named style.
* @return value or {@code null} for none
*/
public ParagraphStyle getParagraphStyle() {
return paragraphStyle;
}
/**
* The paragraph style of this named style.
* @param paragraphStyle paragraphStyle or {@code null} for none
*/
public NamedStyle setParagraphStyle(ParagraphStyle paragraphStyle) {
this.paragraphStyle = paragraphStyle;
return this;
}
/**
* The text style of this named style.
* @return value or {@code null} for none
*/
public TextStyle getTextStyle() {
return textStyle;
}
/**
* The text style of this named style.
* @param textStyle textStyle or {@code null} for none
*/
public NamedStyle setTextStyle(TextStyle textStyle) {
this.textStyle = textStyle;
return this;
}
@Override
public NamedStyle set(String fieldName, Object value) {
return (NamedStyle) super.set(fieldName, value);
}
@Override
public NamedStyle clone() {
return (NamedStyle) super.clone();
}
}
| 1,037 |
2,633 |
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.ServletRequestEvent;
import javax.servlet.ServletRequestListener;
import javax.servlet.ServletRequestAttributeEvent;
import javax.servlet.ServletRequestAttributeListener;
import javax.servlet.annotation.WebServlet;
import javax.servlet.annotation.WebListener;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Test servlet that doubles as a request / request-attribute listener and
 * reports what the listener callbacks observed back to the client through
 * response headers, so an external harness can inspect listener behaviour.
 *
 * NOTE(review): listener observations are kept in static fields mutated
 * without synchronisation; this is only valid when requests are issued one
 * at a time -- confirm the test harness guarantees that.
 */
@WebListener
@WebServlet(urlPatterns = "/")
public class app extends HttpServlet implements
    ServletRequestListener,
    ServletRequestAttributeListener
{
    // Latest values seen by the listener callbacks, reported via headers in doGet().
    private static String request_initialized = "";
    private static String request_destroyed = "";
    private static String attribute_added = "";
    private static String attribute_removed = "";
    private static String attribute_replaced = "";

    /** Records the URI of the request that was just initialised. */
    @Override
    public void requestInitialized(ServletRequestEvent sre)
    {
        HttpServletRequest r = (HttpServletRequest) sre.getServletRequest();
        request_initialized = r.getRequestURI();
    }

    /** Records the destroyed request's URI and clears the attribute logs. */
    @Override
    public void requestDestroyed(ServletRequestEvent sre)
    {
        HttpServletRequest r = (HttpServletRequest) sre.getServletRequest();
        request_destroyed = r.getRequestURI();
        attribute_added = "";
        attribute_removed = "";
        attribute_replaced = "";
    }

    /** Appends "name=value;" for every attribute added to a request. */
    @Override
    public void attributeAdded(ServletRequestAttributeEvent event)
    {
        attribute_added += event.getName() + "=" + event.getValue() + ";";
    }

    /** Appends "name=value;" for every attribute removed from a request. */
    @Override
    public void attributeRemoved(ServletRequestAttributeEvent event)
    {
        attribute_removed += event.getName() + "=" + event.getValue() + ";";
    }

    /** Appends "name=value;"; per the servlet spec getValue() is the OLD value here. */
    @Override
    public void attributeReplaced(ServletRequestAttributeEvent event)
    {
        attribute_replaced += event.getName() + "=" + event.getValue() + ";";
    }

    /**
     * Sets the same request attribute three times from the var1..var3 query
     * parameters, then echoes every listener observation back as headers.
     * (Note: setAttribute with a null value acts as removeAttribute, so the
     * event mix depends on which parameters the caller supplies.)
     */
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException
    {
        request.setAttribute("var", request.getParameter("var1"));
        request.setAttribute("var", request.getParameter("var2"));
        request.setAttribute("var", request.getParameter("var3"));
        response.addHeader("X-Request-Initialized", request_initialized);
        response.addHeader("X-Request-Destroyed", request_destroyed);
        response.addHeader("X-Attr-Added", attribute_added);
        response.addHeader("X-Attr-Removed", attribute_removed);
        response.addHeader("X-Attr-Replaced", attribute_replaced);
    }
}
| 918 |
4,168 | from common import get_patch_version
# Entry point: print the tag of the previous release (current patch version
# minus one), e.g. "release-41" when get_patch_version() returns 42.
if __name__ == '__main__':
    patch_version = get_patch_version()
    # end="" suppresses the trailing newline -- presumably so a caller can
    # capture the bare tag via command substitution; confirm against CI usage.
    print(f"release-{patch_version - 1}", end="")
| 56 |
993 | package com.netflix.servo.publish.atlas;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.netflix.servo.Metric;
import com.netflix.servo.tag.BasicTagList;
import com.netflix.servo.tag.TagList;
import org.testng.annotations.Test;
import java.io.StringWriter;
import static org.testng.Assert.assertEquals;
/**
 * Verifies that {@code AtlasPrettyPrinter} renders an {@code UpdateRequest}
 * payload with the layout pinned in the expected string below: each metric on
 * its own line, while tag maps stay compact on a single line.
 */
public class AtlasPrettyPrinterTest {
  @Test
  public void testPayload() throws Exception {
    // Common tags shared by every metric in the update request.
    TagList commonTags = BasicTagList.of("nf.app", "example", "nf.cluster", "example-main", "nf.region", "us-west-3");
    // Three metrics covering one extra tag, two extra tags, and no extra tags.
    Metric m1 = new Metric("foo1", BasicTagList.of("id", "ab"), 1000L, 1.0);
    Metric m2 = new Metric("foo2", BasicTagList.of("id", "bc", "class", "klz"), 1000L, 2.0);
    Metric m3 = new Metric("foo3", BasicTagList.EMPTY, 1000L, 3.0);
    Metric[] metrics = new Metric[] {m1, m2, m3};
    JsonPayload update = new UpdateRequest(commonTags, metrics, metrics.length);
    // Serialize the payload through a generator using the printer under test.
    JsonFactory factory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator generator = factory.createGenerator(writer);
    generator.setPrettyPrinter(new AtlasPrettyPrinter());
    update.toJson(generator);
    generator.close();
    writer.close();
    String expected = "{\n\"tags\":{\"nf.app\":\"example\",\"nf.cluster\":\"example-main\",\"nf.region\":\"us-west-3\"},\n\"metrics\":[\n" +
        "{\"tags\":{\"name\":\"foo1\",\"id\":\"ab\"},\"start\":1000,\"value\":1.0},\n" +
        "{\"tags\":{\"name\":\"foo2\",\"class\":\"klz\",\"id\":\"bc\"},\"start\":1000,\"value\":2.0},\n" +
        "{\"tags\":{\"name\":\"foo3\"},\"start\":1000,\"value\":3.0}]\n" +
        "}";
    assertEquals(writer.toString(), expected);
  }
}
| 666 |
1,959 | <reponame>cherry003/xamarin-android
package com.xamarin.android.test.binding.resolveimport;
/**
 * Minimal fixture class; the package name suggests it exercises
 * binding-generator import resolution -- confirm against the test project.
 */
public class Lib3
{
    // Public field so its type must be resolved by the consumer.
    public String field1;

    /** No-op method exposed by the fixture. */
    public void foo () {}
}
| 57 |
#include <assert.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "../md4.h"
#include "../extern.h"
/*
 * Test driver: deserializes a file list from the file named by argv[1]
 * via flist_recv() and frees it again.  Preconditions are checked with
 * assert(), matching the style of the other drivers in this directory.
 */
int
main(int argc, char *argv[])
{
	int fd;
	struct opts opts;
	size_t sz;
	struct flist *fl;

	memset(&opts, 0, sizeof(struct opts));

	/* Exactly one argument: path to the serialized file list. */
	assert(2 == argc);

	fd = open(argv[1], O_NONBLOCK | O_RDONLY, 0);
	assert(fd != -1);

	fl = flist_recv(&opts, fd, &sz);
	flist_free(fl, sz);
	close(fd);	/* fix: descriptor was previously leaked until exit */
	return EXIT_SUCCESS;
}
| 215 |
1,056 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.bugzilla.issue;
import java.util.ArrayList;
import java.util.Collection;
import org.junit.Test;
import static org.junit.Assert.*;
import org.netbeans.modules.bugzilla.issue.AttachmentHyperlinkSupport.Attachement;
/**
*
* @author <NAME>
*/
public class AttachmentHyperlinkSupportTest {
/** No fixture state to initialise. */
public AttachmentHyperlinkSupportTest() {
}
@Test
public void test() {
checkBoundaries("", null, null);
checkBoundaries("C", null, null);
checkBoundaries("(id=123)", null, null);
checkBoundaries("Created an attachment", null, null);
checkBoundaries("Created an attachment (id=", null, null);
checkBoundaries("Created an attachment (id=1", null, null);
checkBoundaries("Created an attachment (id=12", null, null);
checkBoundaries("Created an attachment (id=123", null, null);
checkBoundaries("Created an attachment (id=)", null, null);
checkBoundaries("Created an attachment (id=1)", "attachment (id=1)", "1");
checkBoundaries("Created an attachment (id=12)", "attachment (id=12)", "12");
checkBoundaries("Created an attachment (id=123)", "attachment (id=123)", "123");
checkBoundaries("Created an atmachment (id=123)", null, null);
checkBoundaries("Created an attachment (id=1a5)", null, null);
checkBoundaries("Created an attachment (id=123) [details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)\t[details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)\t\t[details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)\t [details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) \t[details]", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] ", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] ", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\t", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t ", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] \t", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] \n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] \n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\t\n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t \n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] \t\n", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details] \n ", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\n ", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)\nfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\n\tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\n \tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\n\t foo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\t\nfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\t\t\n\t\tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123)\t\t\n\t\tfoo\tbar", "foo\tbar", "123");
checkBoundaries("Created an attachment (id=123)\t \n \tfoo\tbar", "foo\tbar", "123");
checkBoundaries("Created an attachment (id=123)\t \n \tfoo\tbar baz", "foo\tbar baz", "123");
checkBoundaries("Created an attachment (id=123)\t \n \tfoo\tbar baz", "foo\tbar baz", "123");
checkBoundaries("Created an attachment (id=123)\t \n \tfoo bar\nbaz", "foo bar", "123");
checkBoundaries("Created an attachment (id=123) [details]\nfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\n\tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\n \tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\n\t foo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\nfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\t\n\t\tfoo", "foo", "123");
checkBoundaries("Created an attachment (id=123) [details]\t\t\n\t\tfoo\tbar", "foo\tbar", "123");
checkBoundaries("Created an attachment (id=123) [details]\t \n \tfoo\tbar", "foo\tbar", "123");
checkBoundaries("Created an attachment (id=123) [details]\t \n \tfoo\tbar baz", "foo\tbar baz", "123");
checkBoundaries("Created an attachment (id=123) [details]\t \n \tfoo\tbar baz", "foo\tbar baz", "123");
checkBoundaries("Created an attachment (id=123) [details]\t \n \tfoo bar\nbaz", "foo bar", "123");
checkBoundaries("Created an attachment (id=123)\nScreenshot", "Screenshot", "123");
checkBoundaries("Created an attachment (id=123)\n\nScreenshot", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123) [details]\nScreenshot", "Screenshot", "123");
checkBoundaries("Created an attachment (id=123) [details]\n\nScreenshot", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=92562)\n"
+ "Screenshot\n"
+ '\n'
+ "I used NetBeans without connection to internet and when I tried to generate javadoc for openide.util project, strange dialog appeared. I suspect it is warning from Kenai about inability to connect to network.\n"
+ '\n'
+ "The dialog is shown when I right-click a node. This is not the right time to display dialogs (from UI point of view) nor to check internet connectivity (from performance point of view).\n"
+ '\n'
+ "Please eliminate such checks at this time.",
"Screenshot",
"92562");
checkBoundaries("Created an attachment (id=123)", "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)", new int[] {}, null, null);
checkBoundaries("Created an attachment (id=123)", new int[] {123}, "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=123)", new int[] {123, 789}, "attachment (id=123)", "123");
checkBoundaries("Created an attachment (id=789)", new int[] {123, 789}, "attachment (id=789)", "789");
checkBoundaries("Created an attachment (id=456)", new int[] {123, 456, 789}, "attachment (id=456)", "456");
checkBoundaries("Created an attachment (id=456)", new int[] {123, 473, 789}, null, null);
checkBoundaries("Created attachment", null, null);
checkBoundaries("Created attachment (id=", null, null);
checkBoundaries("Created attachment 1", null, null);
checkBoundaries("Created attachment 12", null, null);
checkBoundaries("Created attachment 123", null, null);
checkBoundaries("Created attachment )", null, null);
checkBoundaries("Created attachment 1", new int[] {1}, "attachment 1", "1");
checkBoundaries("Created attachment 12", new int[] {12}, "attachment 12", "12");
checkBoundaries("Created attachment 123", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created atmachment 123", null, null);
checkBoundaries("Created attachment 1a5", null, null);
checkBoundaries("Created attachment 123 [details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123\t[details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123\t\t[details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123\t [details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 \t[details]", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] ", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] ", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t\t", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t ", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] \t", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] \n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] \n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t\n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t\t\n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t \n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] \t\n", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details] \n ", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\t\n ", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123\nfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\n\tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\n \tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\n\t foo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\t\nfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\t\t\n\t\tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123\t\t\n\t\tfoo\tbar", new int[] {123}, "foo\tbar", "123");
checkBoundaries("Created attachment 123\t \n \tfoo\tbar", new int[] {123}, "foo\tbar", "123");
checkBoundaries("Created attachment 123\t \n \tfoo\tbar baz", new int[] {123}, "foo\tbar baz", "123");
checkBoundaries("Created attachment 123\t \n \tfoo\tbar baz", new int[] {123}, "foo\tbar baz", "123");
checkBoundaries("Created attachment 123\t \n \tfoo bar\nbaz", new int[] {123}, "foo bar", "123");
checkBoundaries("Created attachment 123 [details]\nfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\n\tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\n \tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\n\t foo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\t\nfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\t\t\n\t\tfoo", new int[] {123}, "foo", "123");
checkBoundaries("Created attachment 123 [details]\t\t\n\t\tfoo\tbar", new int[] {123}, "foo\tbar", "123");
checkBoundaries("Created attachment 123 [details]\t \n \tfoo\tbar", new int[] {123}, "foo\tbar", "123");
checkBoundaries("Created attachment 123 [details]\t \n \tfoo\tbar baz", new int[] {123}, "foo\tbar baz", "123");
checkBoundaries("Created attachment 123 [details]\t \n \tfoo\tbar baz", new int[] {123}, "foo\tbar baz", "123");
checkBoundaries("Created attachment 123 [details]\t \n \tfoo bar\nbaz", new int[] {123}, "foo bar", "123");
checkBoundaries("Created attachment 123\nScreenshot", new int[] {123}, "Screenshot", "123");
checkBoundaries("Created attachment 123\n\nattachment", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123 [details]\nScreenshot", new int[] {123}, "Screenshot", "123");
checkBoundaries("Created attachment 123 [details]\n\nattachment", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 92562\n"
+ "Screenshot\n"
+ '\n'
+ "I used NetBeans without connection to internet and when I tried to generate javadoc for openide.util project, strange dialog appeared. I suspect it is warning from Kenai about inability to connect to network.\n"
+ '\n'
+ "The dialog is shown when I right-click a node. This is not the right time to display dialogs (from UI point of view) nor to check internet connectivity (from performance point of view).\n"
+ '\n'
+ "Please eliminate such checks at this time.",
new int[] {92562},
"Screenshot",
"92562");
checkBoundaries("Created attachment 123", new int[] {}, null, null);
checkBoundaries("Created attachment 123", new int[] {123}, "attachment 123", "123");
checkBoundaries("Created attachment 123", new int[] {123, 789}, "attachment 123", "123");
checkBoundaries("Created attachment 789", new int[] {123, 789}, "attachment 789", "789");
checkBoundaries("Created attachment 456", new int[] {123, 456, 789}, "attachment 456", "456");
checkBoundaries("Created attachment 456", new int[] {123, 473, 789}, null, null);
}
/** Convenience overload: checks boundaries without restricting the set of known attachment ids. */
private void checkBoundaries(String stringToParse,
        String expectedHyperlinkText,
        String expectedId) {
    checkBoundaries(stringToParse, null, expectedHyperlinkText, expectedId);
}
/**
 * Parses {@code stringToParse} with {@code AttachmentHyperlinkSupport} and
 * verifies the detected attachment hyperlink: its character boundaries must
 * enclose {@code expectedHyperlinkText} and its id must equal
 * {@code expectedId}.  Pass {@code expectedHyperlinkText == null} to assert
 * that no attachment is found at all.
 *
 * @param knownIds attachment ids considered valid, or {@code null} for no restriction
 */
private void checkBoundaries(String stringToParse,
        int[] knownIds,
        String expectedHyperlinkText,
        String expectedId) {
    int[] expected;
    if (expectedHyperlinkText == null) {
        expected = null;
    } else {
        int index = stringToParse.indexOf(expectedHyperlinkText);
        // Fix: this test-data sanity check used the Java 'assert' keyword,
        // which is silently skipped unless the JVM runs with -ea; throw
        // explicitly so a broken test case can never pass unnoticed.
        if (index == -1) {
            throw new AssertionError("expected hyperlink text \"" + expectedHyperlinkText
                    + "\" not found in: " + stringToParse);
        }
        expected = new int[] {index, index + expectedHyperlinkText.length()};
    }
    // Translate the numeric ids into the string form the parser expects.
    Collection<String> knownIdsColl;
    if (knownIds != null) {
        knownIdsColl = new ArrayList<String>(knownIds.length);
        for (int knownId : knownIds) {
            knownIdsColl.add(Integer.toString(knownId));
        }
    } else {
        knownIdsColl = null;
    }
    Attachement attachment = AttachmentHyperlinkSupport.findAttachment(stringToParse, knownIdsColl);
    if (expected != null) {
        assertNotNull(attachment);
        assertEquals(expected[0], attachment.idx1);
        assertEquals(expected[1], attachment.idx2);
        assertEquals(expectedId, attachment.id);
    } else {
        assertNull(attachment);
    }
}
} | 6,556 |
3,651 | package com.orientechnologies.orient.core.sql.executor;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.sql.parser.OMatchPathItem;
import com.orientechnologies.orient.core.sql.parser.ORid;
import com.orientechnologies.orient.core.sql.parser.OWhereClause;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/** Created by luigidellaquila on 15/10/16. */
public class MatchReverseEdgeTraverser extends MatchEdgeTraverser {

  // For a reverse traversal the pattern endpoints swap roles: we start from
  // the edge's "in" alias and finish on its "out" alias.
  private final String startingPointAlias;
  private final String endPointAlias;

  public MatchReverseEdgeTraverser(OResult lastUpstreamRecord, EdgeTraversal edge) {
    super(lastUpstreamRecord, edge);
    this.startingPointAlias = edge.edge.in.alias;
    this.endPointAlias = edge.edge.out.alias;
  }

  /** Target metadata comes from the LEFT side of the edge when traversing backwards. */
  protected String targetClassName(OMatchPathItem item, OCommandContext iCommandContext) {
    return edge.getLeftClass();
  }

  protected String targetClusterName(OMatchPathItem item, OCommandContext iCommandContext) {
    return edge.getLeftCluster();
  }

  protected ORid targetRid(OMatchPathItem item, OCommandContext iCommandContext) {
    return edge.getLeftRid();
  }

  protected OWhereClause getTargetFilter(OMatchPathItem item) {
    return edge.getLeftFilter();
  }

  /**
   * Executes the path-item method in reverse from {@code startingPoint} and
   * normalizes the outcome into an iterable of {@code OResultInternal}: a
   * single result, a single identifiable, or an iterable mixing both are all
   * accepted; {@code null} entries inside an iterable are skipped.
   *
   * @throws UnsupportedOperationException if an iterable element has an unexpected type
   */
  @Override
  protected Iterable<OResultInternal> traversePatternEdge(
      OIdentifiable startingPoint, OCommandContext iCommandContext) {
    Object qR = this.item.getMethod().executeReverse(startingPoint, iCommandContext);
    if (qR == null) {
      return Collections.emptyList();
    }
    if (qR instanceof OResultInternal) {
      return Collections.singleton((OResultInternal) qR);
    }
    if (qR instanceof OIdentifiable) {
      return Collections.singleton(new OResultInternal((OIdentifiable) qR));
    }
    if (qR instanceof Iterable) {
      // Fix: use a wildcard instead of a raw Iterable so element handling
      // stays type-checked and no unchecked warnings are emitted.
      Iterable<?> iterable = (Iterable<?>) qR;
      List<OResultInternal> result = new ArrayList<>();
      for (Object o : iterable) {
        if (o instanceof OIdentifiable) {
          result.add(new OResultInternal((OIdentifiable) o));
        } else if (o instanceof OResultInternal) {
          result.add((OResultInternal) o);
        } else if (o == null) {
          continue; // silently drop null entries
        } else {
          throw new UnsupportedOperationException();
        }
      }
      return result;
    }
    // Unknown result shape: no results.  Fix: typed emptyList() instead of
    // the raw Collections.EMPTY_LIST constant.
    return Collections.emptyList();
  }

  @Override
  protected String getStartingPointAlias() {
    return this.startingPointAlias;
  }

  @Override
  protected String getEndpointAlias() {
    return endPointAlias;
  }
}
| 917 |
885 | <filename>interactive/tsconfig.json
{
"extends": "./.nuxt/tsconfig.json",
"compilerOptions": {
"strict": true,
"strictNullChecks": true,
"types": [
"vite/client"
]
}
}
| 90 |
585 | #include "Colour.h"
#include "../Interop/Interop.hpp"
#include "Gfx.h"
#include <cassert>
using namespace OpenLoco::Interop;
namespace OpenLoco::Colour
{
    // Shade lookup tables mapped onto fixed addresses in the original game's
    // memory via loco_global: 32 colour rows, 8 byte entries per row per table.
    loco_global<uint8_t[32][8], 0x01136BA0> _colour_map_a;
    loco_global<uint8_t[32][8], 0x01136C98> _colour_map_b;

    // Builds the shade tables by sampling fixed byte offsets out of the
    // palette-map G1 images 2170..2200.
    // NOTE(review): only rows 0..30 are written here, yet getShade() accepts
    // a colour value of 31 -- confirm row 31 is initialised elsewhere.
    void initColourMap()
    {
        // TODO: create a list of tuples with colour and image id
        for (uint32_t i = 0; i < 31; i++)
        {
            assert(i + 2170 < 2201);
            auto image = Gfx::getG1Element(2170 + i);
            // Shades 0-7 go into table A, read from fixed offsets in the image data.
            _colour_map_a[i][0] = image->offset[9];
            _colour_map_a[i][1] = image->offset[246];
            _colour_map_a[i][2] = image->offset[247];
            _colour_map_a[i][3] = image->offset[248];
            _colour_map_a[i][4] = image->offset[249];
            _colour_map_a[i][5] = image->offset[250];
            _colour_map_a[i][6] = image->offset[251];
            _colour_map_a[i][7] = image->offset[252];
            // Shades 8-11 go into table B, stored at indices 0-3 of each row.
            _colour_map_b[i][8 - 8] = image->offset[253];
            _colour_map_b[i][9 - 8] = image->offset[254];
            _colour_map_b[i][10 - 8] = image->offset[255];
            _colour_map_b[i][11 - 8] = image->offset[256];
        }
    }

    // Returns the palette byte for the given colour/shade pair: shades 0-7
    // come from table A, 8 and above from table B (offset by 8, unchecked).
    uint8_t getShade(Colour_t colour, uint8_t shade)
    {
        colour &= ~Colour::inset_flag; // strip the inset flag bit before indexing
        assert(colour <= 31);
        if (shade < 8)
        {
            return _colour_map_a[colour][shade];
        }
        return _colour_map_b[colour][shade - 8];
    }
}
| 780 |
9,136 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vector.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='vector.proto',
package='robotics.messages',
syntax='proto3',
serialized_pb=_b(
'\n\x0cvector.proto\x12\x11robotics.messages\"6\n\x08Vector4d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\x12\t\n\x01w\x18\x04 \x01(\x01\"6\n\x08Vector4f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\x12\t\n\x01w\x18\x04 \x01(\x02\"+\n\x08Vector3d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\"+\n\x08Vector3f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\" \n\x08Vector2d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\" \n\x08Vector2f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\"\x1b\n\x07Vectord\x12\x10\n\x04\x64\x61ta\x18\x01 \x03(\x01\x42\x02\x10\x01\"\x1b\n\x07Vectorf\x12\x10\n\x04\x64\x61ta\x18\x01 \x03(\x02\x42\x02\x10\x01\x62\x06proto3'
))
_VECTOR4D = _descriptor.Descriptor(
name='Vector4d',
full_name='robotics.messages.Vector4d',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(name='x',
full_name='robotics.messages.Vector4d.x',
index=0,
number=1,
type=1,
cpp_type=5,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='y',
full_name='robotics.messages.Vector4d.y',
index=1,
number=2,
type=1,
cpp_type=5,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='z',
full_name='robotics.messages.Vector4d.z',
index=2,
number=3,
type=1,
cpp_type=5,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='w',
full_name='robotics.messages.Vector4d.w',
index=3,
number=4,
type=1,
cpp_type=5,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[],
serialized_start=35,
serialized_end=89,
)
_VECTOR4F = _descriptor.Descriptor(
name='Vector4f',
full_name='robotics.messages.Vector4f',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(name='x',
full_name='robotics.messages.Vector4f.x',
index=0,
number=1,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='y',
full_name='robotics.messages.Vector4f.y',
index=1,
number=2,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='z',
full_name='robotics.messages.Vector4f.z',
index=2,
number=3,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
_descriptor.FieldDescriptor(name='w',
full_name='robotics.messages.Vector4f.w',
index=3,
number=4,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[],
serialized_start=91,
serialized_end=145,
)
# ---------------------------------------------------------------------------
# NOTE(review): everything below is protoc-generated protobuf descriptor code
# for the `robotics.messages` vector messages (vector.proto). Generated
# modules should be regenerated from the .proto rather than edited by hand;
# the comments added here are for reader orientation only.
# type=1/cpp_type=5 is TYPE_DOUBLE/CPPTYPE_DOUBLE;
# type=2/cpp_type=6 is TYPE_FLOAT/CPPTYPE_FLOAT; label=3 marks repeated fields.
# ---------------------------------------------------------------------------

# Descriptor for Vector3d: double fields x, y, z (field numbers 1-3).
_VECTOR3D = _descriptor.Descriptor(
    name='Vector3d',
    full_name='robotics.messages.Vector3d',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(name='x',
                                    full_name='robotics.messages.Vector3d.x',
                                    index=0,
                                    number=1,
                                    type=1,
                                    cpp_type=5,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='y',
                                    full_name='robotics.messages.Vector3d.y',
                                    index=1,
                                    number=2,
                                    type=1,
                                    cpp_type=5,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='z',
                                    full_name='robotics.messages.Vector3d.z',
                                    index=2,
                                    number=3,
                                    type=1,
                                    cpp_type=5,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=147,
    serialized_end=190,
)

# Descriptor for Vector3f: float fields x, y, z (field numbers 1-3).
_VECTOR3F = _descriptor.Descriptor(
    name='Vector3f',
    full_name='robotics.messages.Vector3f',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(name='x',
                                    full_name='robotics.messages.Vector3f.x',
                                    index=0,
                                    number=1,
                                    type=2,
                                    cpp_type=6,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='y',
                                    full_name='robotics.messages.Vector3f.y',
                                    index=1,
                                    number=2,
                                    type=2,
                                    cpp_type=6,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='z',
                                    full_name='robotics.messages.Vector3f.z',
                                    index=2,
                                    number=3,
                                    type=2,
                                    cpp_type=6,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=192,
    serialized_end=235,
)

# Descriptor for Vector2d: double fields x, y (field numbers 1-2).
_VECTOR2D = _descriptor.Descriptor(
    name='Vector2d',
    full_name='robotics.messages.Vector2d',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(name='x',
                                    full_name='robotics.messages.Vector2d.x',
                                    index=0,
                                    number=1,
                                    type=1,
                                    cpp_type=5,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='y',
                                    full_name='robotics.messages.Vector2d.y',
                                    index=1,
                                    number=2,
                                    type=1,
                                    cpp_type=5,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=237,
    serialized_end=269,
)

# Descriptor for Vector2f: float fields x, y (field numbers 1-2).
_VECTOR2F = _descriptor.Descriptor(
    name='Vector2f',
    full_name='robotics.messages.Vector2f',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(name='x',
                                    full_name='robotics.messages.Vector2f.x',
                                    index=0,
                                    number=1,
                                    type=2,
                                    cpp_type=6,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
        _descriptor.FieldDescriptor(name='y',
                                    full_name='robotics.messages.Vector2f.y',
                                    index=1,
                                    number=2,
                                    type=2,
                                    cpp_type=6,
                                    label=1,
                                    has_default_value=False,
                                    default_value=float(0),
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=None,
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=271,
    serialized_end=303,
)

# Descriptor for Vectord: a repeated (packed) double field `data`.
_VECTORD = _descriptor.Descriptor(
    name='Vectord',
    full_name='robotics.messages.Vectord',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # options b'\020\001' encodes [packed = true].
        _descriptor.FieldDescriptor(name='data',
                                    full_name='robotics.messages.Vectord.data',
                                    index=0,
                                    number=1,
                                    type=1,
                                    cpp_type=5,
                                    label=3,
                                    has_default_value=False,
                                    default_value=[],
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=_descriptor._ParseOptions(
                                        descriptor_pb2.FieldOptions(), _b('\020\001')),
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=305,
    serialized_end=332,
)

# Descriptor for Vectorf: a repeated (packed) float field `data`.
_VECTORF = _descriptor.Descriptor(
    name='Vectorf',
    full_name='robotics.messages.Vectorf',
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # options b'\020\001' encodes [packed = true].
        _descriptor.FieldDescriptor(name='data',
                                    full_name='robotics.messages.Vectorf.data',
                                    index=0,
                                    number=1,
                                    type=2,
                                    cpp_type=6,
                                    label=3,
                                    has_default_value=False,
                                    default_value=[],
                                    message_type=None,
                                    enum_type=None,
                                    containing_type=None,
                                    is_extension=False,
                                    extension_scope=None,
                                    options=_descriptor._ParseOptions(
                                        descriptor_pb2.FieldOptions(), _b('\020\001')),
                                    file=DESCRIPTOR),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    options=None,
    is_extendable=False,
    syntax='proto3',
    extension_ranges=[],
    oneofs=[],
    serialized_start=334,
    serialized_end=361,
)

# Register each message descriptor with the file descriptor, then register
# the file itself with the default symbol database.
DESCRIPTOR.message_types_by_name['Vector4d'] = _VECTOR4D
DESCRIPTOR.message_types_by_name['Vector4f'] = _VECTOR4F
DESCRIPTOR.message_types_by_name['Vector3d'] = _VECTOR3D
DESCRIPTOR.message_types_by_name['Vector3f'] = _VECTOR3F
DESCRIPTOR.message_types_by_name['Vector2d'] = _VECTOR2D
DESCRIPTOR.message_types_by_name['Vector2f'] = _VECTOR2F
DESCRIPTOR.message_types_by_name['Vectord'] = _VECTORD
DESCRIPTOR.message_types_by_name['Vectorf'] = _VECTORF
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes are synthesized at import time from the
# descriptors above via the reflection metaclass.
Vector4d = _reflection.GeneratedProtocolMessageType(
    'Vector4d',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR4D,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector4d)
         ))
_sym_db.RegisterMessage(Vector4d)

Vector4f = _reflection.GeneratedProtocolMessageType(
    'Vector4f',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR4F,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector4f)
         ))
_sym_db.RegisterMessage(Vector4f)

Vector3d = _reflection.GeneratedProtocolMessageType(
    'Vector3d',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR3D,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector3d)
         ))
_sym_db.RegisterMessage(Vector3d)

Vector3f = _reflection.GeneratedProtocolMessageType(
    'Vector3f',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR3F,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector3f)
         ))
_sym_db.RegisterMessage(Vector3f)

Vector2d = _reflection.GeneratedProtocolMessageType(
    'Vector2d',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR2D,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector2d)
         ))
_sym_db.RegisterMessage(Vector2d)

Vector2f = _reflection.GeneratedProtocolMessageType(
    'Vector2f',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTOR2F,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vector2f)
         ))
_sym_db.RegisterMessage(Vector2f)

Vectord = _reflection.GeneratedProtocolMessageType(
    'Vectord',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTORD,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vectord)
         ))
_sym_db.RegisterMessage(Vectord)

Vectorf = _reflection.GeneratedProtocolMessageType(
    'Vectorf',
    (_message.Message,),
    dict(DESCRIPTOR=_VECTORF,
         __module__='vector_pb2'
         # @@protoc_insertion_point(class_scope:robotics.messages.Vectorf)
         ))
_sym_db.RegisterMessage(Vectorf)

# Re-apply the [packed = true] option to the repeated fields.
_VECTORD.fields_by_name['data'].has_options = True
_VECTORD.fields_by_name['data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(),
                                                                     _b('\020\001'))
_VECTORF.fields_by_name['data'].has_options = True
_VECTORF.fields_by_name['data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(),
                                                                     _b('\020\001'))
# @@protoc_insertion_point(module_scope)
| 17,119 |
#include "mlfq.h"
#include <std/array_l.h>
#include <std/array_m.h>
#include <std/math.h>
#include <kernel/util/spinlock/spinlock.h>
#include <kernel/drivers/pit/pit.h>
// Number of priority levels in the multi-level feedback queue.
#define MLFQ_QUEUE_COUNT 4
// Time quantum (ms) granted by the highest-priority queue (index 0).
#define MLFQ_PRIO_HIGH_QUANTUM 10
// Time quantum (ms) granted by the lowest-priority queue.
#define MLFQ_PRIO_LOW_QUANTUM 200
// Cadence (ms) at which all tasks are boosted back to the top queue.
#define MLFQ_BOOST_INTERVAL 1000

// One schedulable entry: a task plus its per-queue scheduling bookkeeping.
typedef struct mlfq_ent {
    task_small_t* task;
    // ms_since_boot() timestamp of the last time this task was scheduled.
    uint32_t last_schedule_start;
    // Milliseconds of quantum left before the task is demoted a queue.
    uint32_t ttl_remaining;
} mlfq_ent_t;

// One priority level: a round-robin list of entries sharing a time quantum.
typedef struct mlfq_queue {
    uint32_t quantum;
    array_l* round_robin_tasks;
    spinlock_t spinlock;
} mlfq_queue_t;
// All MLFQ priority levels, ordered highest priority (index 0) to lowest.
static array_m* _queues = 0;

// Allocates every priority queue and assigns each one a time quantum that
// grows linearly from MLFQ_PRIO_HIGH_QUANTUM (queue 0) up to
// MLFQ_PRIO_LOW_QUANTUM (the deepest queue).
void mlfq_init(void) {
    _queues = array_m_create(MLFQ_QUEUE_COUNT);
    for (uint32_t queue_idx = 0; queue_idx < MLFQ_QUEUE_COUNT; queue_idx++) {
        mlfq_queue_t* queue = kcalloc(1, sizeof(mlfq_queue_t));
        queue->round_robin_tasks = array_l_create();
        // Interpolate the quantum by how deep this queue sits in the hierarchy.
        float depth_fraction = queue_idx / (float)(MLFQ_QUEUE_COUNT - 1);
        queue->quantum = (int)lerp(MLFQ_PRIO_HIGH_QUANTUM, MLFQ_PRIO_LOW_QUANTUM, depth_fraction);
        queue->spinlock.name = "MLFQ queue spinlock";
        printf("MLFQ queue %d quantum = %dms\n", queue_idx, queue->quantum);
        array_m_insert(_queues, queue);
    }
}
// Enqueues `task` onto MLFQ level `queue_idx`, granting it that queue's full
// time quantum. The entry is heap-allocated here and freed by
// mlfq_delete_task.
void mlfq_add_task_to_queue(task_small_t* task, uint32_t queue_idx) {
    assert(queue_idx < MLFQ_QUEUE_COUNT, "Invalid queue provided");
    mlfq_queue_t* queue = array_m_lookup(_queues, queue_idx);
    mlfq_ent_t* ent = kcalloc(1, sizeof(mlfq_ent_t));
    ent->task = task;
    ent->ttl_remaining = queue->quantum;
    // Guard the list mutation with the queue's spinlock, matching
    // mlfq_delete_task and mlfq_priority_boost_if_necessary, which lock this
    // same list before mutating it; previously the insert was unguarded.
    spinlock_acquire(&queue->spinlock);
    array_l_insert(queue->round_robin_tasks, ent);
    spinlock_release(&queue->spinlock);
}
// Picks the next task to run: the first RUNNABLE entry found, scanning from
// the highest-priority queue downward (round-robin order within each queue).
//
// On success, writes the chosen task and its remaining quantum through the
// out params, stamps the entry's schedule-start time, and returns true.
// Returns false when no queue holds a runnable task.
// NOTE(review): iterates the queues without taking their spinlocks --
// presumably only invoked with preemption/interrupts disabled; confirm.
bool mlfq_choose_task(task_small_t** out_task, uint32_t* out_quantum) {
    // Start at the high-priority queues and make our way down
    for (int i = 0; i < MLFQ_QUEUE_COUNT; i++) {
        mlfq_queue_t* q = array_m_lookup(_queues, i);
        for (int j = 0; j < q->round_robin_tasks->size; j++) {
            mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, j);
            if (ent->task->blocked_info.status == RUNNABLE) {
                *out_task = ent->task;
                *out_quantum = ent->ttl_remaining;
                ent->last_schedule_start = ms_since_boot();
                //printf("MLFQ %d: [%d %s] Schedule, ttl = %d @ %dms\n", ms_since_boot(), ent->task->id, ent->task->name, ent->ttl_remaining, ent->last_schedule_start);
                return true;
            }
        }
    }
    // Didn't find any runnable task
    return false;
}
// Locates `task` across all queues. On success, writes the queue index and
// the entry's position within that queue through the out params and returns
// true; returns false if the task is not tracked by the scheduler.
static bool _find_task(task_small_t* task, uint32_t* out_queue_idx, uint32_t* out_ent_idx) {
    for (int queue_idx = 0; queue_idx < MLFQ_QUEUE_COUNT; queue_idx++) {
        mlfq_queue_t* queue = array_m_lookup(_queues, queue_idx);
        for (int ent_idx = 0; ent_idx < queue->round_robin_tasks->size; ent_idx++) {
            mlfq_ent_t* ent = array_l_lookup(queue->round_robin_tasks, ent_idx);
            if (ent->task != task) {
                continue;
            }
            *out_queue_idx = queue_idx;
            *out_ent_idx = ent_idx;
            return true;
        }
    }
    return false;
}
// Reports the remaining quantum for `task` and stamps its schedule-start
// time (mirroring what mlfq_choose_task does when it selects a task).
// Returns false if the task is not present in any queue.
bool mlfq_next_quantum_for_task(task_small_t* task, uint32_t* out_quantum) {
    // Reuse _find_task rather than duplicating its queue-scan logic, keeping
    // this consistent with mlfq_delete_task and
    // mlfq_prepare_for_switch_from_task.
    uint32_t queue_idx = 0;
    uint32_t ent_idx = 0;
    if (!_find_task(task, &queue_idx, &ent_idx)) {
        return false;
    }
    mlfq_queue_t* q = array_m_lookup(_queues, queue_idx);
    mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, ent_idx);
    *out_quantum = ent->ttl_remaining;
    ent->last_schedule_start = ms_since_boot();
    return true;
}
// Removes `task` from whichever queue holds it and frees its entry.
// Logs and returns with no side effects if the task isn't tracked.
void mlfq_delete_task(task_small_t* task) {
    uint32_t queue_idx = 0;
    uint32_t entry_idx = 0;
    if (!_find_task(task, &queue_idx, &entry_idx)) {
        printf("mlfq_delete_task failed: didn't find provided task in any queue\n");
        return;
    }
    mlfq_queue_t* q = array_m_lookup(_queues, queue_idx);
    // NOTE(review): entry_idx was computed by _find_task before the lock was
    // taken; if another core can mutate this queue in between, the index may
    // be stale by the time we remove it. Confirm this path only runs with
    // preemption disabled.
    spinlock_acquire(&q->spinlock);
    printf("Removing task [%d %s] from MLFQ scheduler pool. Found in Q%d idx %d\n", task->id, task->name, queue_idx, entry_idx);
    mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, entry_idx);
    array_l_remove(q->round_robin_tasks, entry_idx);
    kfree(ent);
    spinlock_release(&q->spinlock);
}
bool mlfq_priority_boost_if_necessary(void) {
if (ms_since_boot() % 1000 == 0) {
mlfq_queue_t* high_prio = array_m_lookup(_queues, 0);
spinlock_acquire(&high_prio->spinlock);
int runnable_count = 0;
int orig_high_prio_size = high_prio->round_robin_tasks->size;
for (int i = 1; i < MLFQ_QUEUE_COUNT; i++) {
mlfq_queue_t* q = array_m_lookup(_queues, i);
spinlock_acquire(&q->spinlock);
while (q->round_robin_tasks->size > 0) {
//printf("remove from %d (size %d)\n", i, q->round_robin_tasks->size);
mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, 0);
//printf("\tMLFQ Q%d boost [%d %s]\n", i, ent->task->id, ent->task->name);
array_l_remove(q->round_robin_tasks, 0);
ent->ttl_remaining = high_prio->quantum;
if (ent->task->blocked_info.status == RUNNABLE) runnable_count++;
array_l_insert(high_prio->round_robin_tasks, ent);
}
spinlock_release(&q->spinlock);
}
//printf("MLFQ %d: Did priority-boost (high prio %d -> %d, runnable count: %d)\n", ms_since_boot(), orig_high_prio_size, high_prio->round_robin_tasks->size, runnable_count);
if (ms_since_boot() % 30000 == 0) {
mlfq_print();
}
spinlock_release(&high_prio->spinlock);
return true;
}
return false;
}
// Book-keeping run when `task` is descheduled: charges the elapsed runtime
// against the task's remaining quantum and, if the quantum is exhausted,
// demotes the task one queue (or replenishes it in place if it's already on
// the lowest queue). Returns false if the task isn't tracked.
bool mlfq_prepare_for_switch_from_task(task_small_t* task) {
    // Find the task within our queues
    // NOTE(review): like mlfq_delete_task, the index from _find_task is
    // computed before the queue lock is acquired -- confirm this only runs
    // with preemption disabled.
    uint32_t queue_idx = 0;
    uint32_t ent_idx = 0;
    if (!_find_task(task, &queue_idx, &ent_idx)) {
        return false;
    }

    // Should the task remain in its current queue?
    // Move the task to the back of its queue
    // TODO(PT): Drop to a lower queue if we've exceeded our life
    mlfq_queue_t* q = array_m_lookup(_queues, queue_idx);
    spinlock_acquire(&q->spinlock);
    mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, ent_idx);
    // Signed math: the task may have overrun its remaining quantum.
    uint32_t runtime = ms_since_boot() - ent->last_schedule_start;
    int32_t ttl_remaining = (int32_t)ent->ttl_remaining - runtime;
    //printf("MLFQ %d (int %d): [%d %s] prepare_for_switch_from (last start %d, ttl %d, queue %d, runtime %d)\n", ms_since_boot(), interrupts_enabled(), ent->task->id, ent->task->name, ent->last_schedule_start, ent->ttl_remaining, queue_idx, runtime);
    if (ttl_remaining <= 0) {
        // Quantum exhausted: re-insert at the back of a queue.
        array_l_remove(q->round_robin_tasks, ent_idx);
        // If we're already on the lowest queue, replenish TTL and do nothing
        if (queue_idx == MLFQ_QUEUE_COUNT - 1) {
            //printf("MLFQ: [%d %s] Already on lowest queue\n", ent->task->id, ent->task->name);
            ent->ttl_remaining = q->quantum;
            array_l_insert(q->round_robin_tasks, ent);
        }
        else {
            // Lifetime has expired - demote to lower queue
            //printf("MLFQ: [%d %s] Demoting to lower queue %d, TTL expired %d last_starat %d now %d\n", ent->task->id, ent->task->name, queue_idx + 1, ttl_remaining, ent->last_schedule_start, ms_since_boot());
            mlfq_queue_t* new_queue = array_m_lookup(_queues, queue_idx + 1);
            ent->ttl_remaining = new_queue->quantum;
            array_l_insert(new_queue->round_robin_tasks, ent);
        }
    }
    else {
        // Keep on the same queue and decrement TTL
        ent->ttl_remaining = ttl_remaining;
        //printf("MLFQ: [%d %s] Decrementing TTL to %d\n", ent->task->id, ent->task->name, ttl_remaining);
    }
    spinlock_release(&q->spinlock);
    return true;
}
void mlfq_print(void) {
printf("MLFQ %d\n", ms_since_boot());
for (int i = 0; i < MLFQ_QUEUE_COUNT; i++) {
mlfq_queue_t* q = array_m_lookup(_queues, i);
if (!q->round_robin_tasks->size) continue;
printf("\tQ%d: ", i);
for (int j = 0; j < q->round_robin_tasks->size; j++) {
mlfq_ent_t* ent = array_l_lookup(q->round_robin_tasks, j);
const char* blocked_reason = "unknown";
switch (ent->task->blocked_info.status) {
case RUNNABLE:
blocked_reason = "run";
break;
case IRQ_WAIT:
blocked_reason = "irq";
break;
case AMC_AWAIT_MESSAGE:
blocked_reason = "amc";
break;
case (IRQ_WAIT | AMC_AWAIT_MESSAGE):
blocked_reason = "adi";
break;
default:
blocked_reason = "unknown";
break;
}
printf("[%d %s %s] ", ent->task->id, ent->task->name, blocked_reason);
}
printf("\n");
}
}
| 4,630 |
777 | <filename>content/public/test/mock_special_storage_policy.h
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_PUBLIC_TEST_MOCK_SPECIAL_STORAGE_POLICY_H_
#define CONTENT_PUBLIC_TEST_MOCK_SPECIAL_STORAGE_POLICY_H_
#include <set>
#include <string>
#include "storage/browser/quota/special_storage_policy.h"
#include "url/gurl.h"
using storage::SpecialStoragePolicy;
namespace content {
// Test double for storage::SpecialStoragePolicy. Each policy query consults
// an origin set that tests populate through the Add*/Grant* mutators below.
class MockSpecialStoragePolicy : public storage::SpecialStoragePolicy {
 public:
  MockSpecialStoragePolicy();

  // storage::SpecialStoragePolicy:
  bool IsStorageProtected(const GURL& origin) override;
  bool IsStorageUnlimited(const GURL& origin) override;
  bool IsStorageSessionOnly(const GURL& origin) override;
  bool CanQueryDiskSize(const GURL& origin) override;
  bool HasIsolatedStorage(const GURL& origin) override;
  bool HasSessionOnlyOrigins() override;
  bool IsStorageDurable(const GURL& origin) override;

  void AddProtected(const GURL& origin) {
    protected_.insert(origin);
  }

  void AddUnlimited(const GURL& origin) {
    unlimited_.insert(origin);
  }

  void RemoveUnlimited(const GURL& origin) {
    unlimited_.erase(origin);
  }

  void AddSessionOnly(const GURL& origin) {
    session_only_.insert(origin);
  }

  void GrantQueryDiskSize(const GURL& origin) {
    can_query_disk_size_.insert(origin);
  }

  void AddIsolated(const GURL& origin) {
    isolated_.insert(origin);
  }

  void RemoveIsolated(const GURL& origin) {
    isolated_.erase(origin);
  }

  void SetAllUnlimited(bool all_unlimited) {
    all_unlimited_ = all_unlimited;
  }

  void AddDurable(const GURL& origin) {
    durable_.insert(origin);
  }

  // Restores the policy to its default (empty) state.
  void Reset() {
    protected_.clear();
    unlimited_.clear();
    session_only_.clear();
    can_query_disk_size_.clear();
    file_handlers_.clear();
    isolated_.clear();
    // Previously |durable_| was not cleared here, so durable grants leaked
    // from one test into the next.
    durable_.clear();
    all_unlimited_ = false;
  }

  void NotifyGranted(const GURL& origin, int change_flags) {
    SpecialStoragePolicy::NotifyGranted(origin, change_flags);
  }

  void NotifyRevoked(const GURL& origin, int change_flags) {
    SpecialStoragePolicy::NotifyRevoked(origin, change_flags);
  }

  void NotifyCleared() {
    SpecialStoragePolicy::NotifyCleared();
  }

 protected:
  ~MockSpecialStoragePolicy() override;

 private:
  std::set<GURL> protected_;
  std::set<GURL> unlimited_;
  std::set<GURL> session_only_;
  std::set<GURL> can_query_disk_size_;
  std::set<GURL> isolated_;
  std::set<GURL> durable_;
  std::set<std::string> file_handlers_;

  bool all_unlimited_;
};
} // namespace content
#endif // CONTENT_PUBLIC_TEST_MOCK_SPECIAL_STORAGE_POLICY_H_
| 921 |
1,706 | <reponame>LiuZhiYong0718/Notes-master
package com.lguipeng.notes.adpater;
import android.animation.Animator;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Filter;
import android.widget.Filterable;
import com.lguipeng.notes.R;
import com.lguipeng.notes.adpater.base.BaseRecyclerViewAdapter;
import com.lguipeng.notes.model.SNote;
import com.lguipeng.notes.utils.TimeUtils;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
* Created by lgp on 2015/4/6.
*/
public class NotesAdapter extends BaseRecyclerViewAdapter<SNote> implements Filterable {
    /**
     * Snapshot of the unfiltered data set. The filter always works from this
     * list so that clearing the query can restore every note.
     */
    private final List<SNote> originalList;
    private Context mContext;

    public NotesAdapter(List<SNote> list) {
        super(list);
        originalList = new ArrayList<>(list);
    }

    public NotesAdapter(List<SNote> list, Context context) {
        super(list, context);
        originalList = new ArrayList<>(list);
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        mContext = parent.getContext();
        final View view = LayoutInflater.from(mContext).inflate(R.layout.notes_item_layout, parent, false);
        return new NotesItemViewHolder(view);
    }

    @Override
    public void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position) {
        super.onBindViewHolder(viewHolder, position);
        NotesItemViewHolder holder = (NotesItemViewHolder) viewHolder;
        SNote note = list.get(position);
        if (note == null) {
            return;
        }
        // Hide the label when it is just the default placeholder.
        String label = "";
        if (mContext != null) {
            boolean isDefaultLabel = TextUtils.equals(mContext.getString(R.string.default_label), note.getLabel());
            label = isDefaultLabel ? "" : note.getLabel();
        }
        holder.setLabelText(label);
        holder.setContentText(note.getContent());
        holder.setTimeText(TimeUtils.getConciseTime(note.getLastOprTime(), mContext));
        animate(viewHolder, position);
    }

    @Override
    public Filter getFilter() {
        return new NoteFilter(this, originalList);
    }

    @Override
    protected Animator[] getAnimators(View view) {
        // Both branches of the original returned an identical pair of
        // scale-in animators, so the measuredHeight check was dead code;
        // collapsed into a single return.
        return new Animator[]{
                ObjectAnimator.ofFloat(view, "scaleX", 1.05f, 1.0f),
                ObjectAnimator.ofFloat(view, "scaleY", 1.05f, 1.0f),
        };
    }

    @Override
    public void setList(List<SNote> list) {
        super.setList(list);
        this.originalList.clear();
        originalList.addAll(list);
    }

    /** Filters notes whose content or label contains the query string. */
    private static class NoteFilter extends Filter {
        private final NotesAdapter adapter;
        private final List<SNote> originalList;
        private final List<SNote> filteredList;

        private NoteFilter(NotesAdapter adapter, List<SNote> originalList) {
            super();
            this.adapter = adapter;
            this.originalList = new LinkedList<>(originalList);
            this.filteredList = new ArrayList<>();
        }

        @Override
        protected FilterResults performFiltering(CharSequence constraint) {
            filteredList.clear();
            final FilterResults results = new FilterResults();
            // Guard against a null constraint (the framework may pass one);
            // previously this called constraint.length() and could NPE.
            // An empty query restores the full list.
            if (TextUtils.isEmpty(constraint)) {
                filteredList.addAll(originalList);
            } else {
                for (SNote note : originalList) {
                    if (note.getContent().contains(constraint) || note.getLabel().contains(constraint)) {
                        filteredList.add(note);
                    }
                }
            }
            results.values = filteredList;
            results.count = filteredList.size();
            return results;
        }

        @Override
        protected void publishResults(CharSequence constraint, FilterResults results) {
            adapter.list.clear();
            adapter.list.addAll((ArrayList<SNote>) results.values);
            adapter.notifyDataSetChanged();
        }
    }
}
| 1,872 |
1,557 | <filename>examples/symbolic/division-by-zero-3-35.c
// Symbolic-execution test fixture (written in the C* subset used by selfie:
// no includes, uint64_t-only types). It reads one byte from stdin and
// exercises three distinct paths an engine must discover:
//   input '0' -> division by zero in the unconditional expression,
//   input '2' -> division by zero in the guarded branch,
//   input '1' -> non-zero exit code.
// The divisions by zero are intentional; do not "fix" them.
uint64_t main() {
  uint64_t a;
  uint64_t* x;

  x = malloc(8);
  *x = 0; // touch memory

  read(0, x, 1);

  // Convert the ASCII digit to its numeric value.
  *x = *x - 48;

  // division by zero if the input is '0' (== 48 == b00110000)
  a = 41 + (1 / *x);

  // division by zero if the input is '2' (== 50 == b00110010)
  if (*x == 2)
    a = 41 + (1 / 0);

  if (a == 42)
    // non-zero exit code if the input is '1' (== 49 == b00110001)
    return 1;
  else
    return 0;
}
| 217 |
/*
* Copyright 2013 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "SkOSFile.h"
#include "SkString.h"
#include "Test.h"
/**
* Test SkOSPath::Join, SkOSPath::Basename, and SkOSPath::Dirname.
* Will use SkOSPath::Join to append filename to dir, test that it works correctly,
* and tests using SkOSPath::Basename on the result.
* @param reporter Reporter for test conditions.
* @param dir String representing the path to a folder. May or may not
* end with SkPATH_SEPARATOR.
* @param filename String representing the basename of a file. Must NOT
* contain SkPATH_SEPARATOR.
*/
static void test_dir_with_file(skiatest::Reporter* reporter, SkString dir,
                               SkString filename) {
    // If filename contains SkPATH_SEPARATOR, the tests will fail.
    SkASSERT(!filename.contains(SkPATH_SEPARATOR));

    // Tests for SkOSPath::Join and SkOSPath::Basename
    // fullName should be "dir<SkPATH_SEPARATOR>file"
    SkString fullName = SkOSPath::Join(dir.c_str(), filename.c_str());

    // fullName should be the combined size of dir and file, plus one if
    // dir did not include the final path separator.
    size_t expectedSize = dir.size() + filename.size();
    if (!dir.endsWith(SkPATH_SEPARATOR) && !dir.isEmpty()) {
        expectedSize++;
    }
    REPORTER_ASSERT(reporter, fullName.size() == expectedSize);

    SkString basename = SkOSPath::Basename(fullName.c_str());
    SkString dirname = SkOSPath::Dirname(fullName.c_str());

    // dirname should be the same as dir with any trailing separators removed.
    // Except when the string is just "/".
    // NOTE(review): the `size() > 2` guard means a two-character dir such as
    // "a/" keeps its trailing separator -- no caller below hits that case,
    // but confirm it is intended before reusing this helper.
    SkString strippedDir = dir;
    while (strippedDir.size() > 2 && strippedDir[strippedDir.size() - 1] == SkPATH_SEPARATOR) {
        strippedDir.remove(strippedDir.size() - 1, 1);
    }
    // Diagnostic dump before the assert below fires.
    if (!dirname.equals(strippedDir)) {
        SkDebugf("OOUCH %s %s %s\n", dir.c_str(), strippedDir.c_str(), dirname.c_str());
    }
    REPORTER_ASSERT(reporter, dirname.equals(strippedDir));

    // basename should be the same as filename
    REPORTER_ASSERT(reporter, basename.equals(filename));

    // basename will not contain a path separator
    REPORTER_ASSERT(reporter, !basename.contains(SkPATH_SEPARATOR));

    // Now take the basename of filename, which should be the same as filename.
    basename = SkOSPath::Basename(filename.c_str());
    REPORTER_ASSERT(reporter, basename.equals(filename));
}
// Exercises test_dir_with_file across the interesting path shapes: plain
// dir, trailing separator, empty dir, empty filename, subdirectory, root.
DEF_TEST(OSPath, reporter) {
    SkString dir("dir");
    SkString filename("file");
    test_dir_with_file(reporter, dir, filename);

    // Now make sure this works with a path separator at the end of dir.
    dir.appendUnichar(SkPATH_SEPARATOR);
    test_dir_with_file(reporter, dir, filename);

    // Test using no filename.
    test_dir_with_file(reporter, dir, SkString());

    // Testing using no directory.
    test_dir_with_file(reporter, SkString(), filename);

    // Test with a sub directory.
    dir.append("subDir");
    test_dir_with_file(reporter, dir, filename);

    // Basename of a directory with a path separator at the end is empty.
    dir.appendUnichar(SkPATH_SEPARATOR);
    SkString baseOfDir = SkOSPath::Basename(dir.c_str());
    REPORTER_ASSERT(reporter, baseOfDir.size() == 0);

    // Basename of NULL is an empty string.
    SkString empty = SkOSPath::Basename(NULL);
    REPORTER_ASSERT(reporter, empty.size() == 0);

    // File in root dir
    dir.printf("%c", SkPATH_SEPARATOR);
    filename.set("file");
    test_dir_with_file(reporter, dir, filename);

    // Just the root dir
    filename.reset();
    test_dir_with_file(reporter, dir, filename);

    // Test that NULL can be used for the directory and filename.
    SkString emptyPath = SkOSPath::Join(NULL, NULL);
    REPORTER_ASSERT(reporter, emptyPath.isEmpty());
}
| 1,388 |
8,629 | <reponame>pdv-ru/ClickHouse
#include <Parsers/ASTIdentifier_fwd.h>
#include <Parsers/ASTRenameQuery.h>
#include <Parsers/CommonParsers.h>
#include <Parsers/ParserRenameQuery.h>
namespace DB
{
/// Parse database.table or table.
/// Parses either `table` or `database.table` into db_and_table.
/// Returns false without filling the output if no identifier is present.
static bool parseDatabaseAndTable(
    ASTRenameQuery::Table & db_and_table, IParser::Pos & pos, Expected & expected)
{
    ParserIdentifier identifier_parser;
    ParserToken dot_token(TokenType::Dot);

    ASTPtr first_identifier;
    if (!identifier_parser.parse(pos, first_identifier, expected))
        return false;

    ASTPtr database_ast;
    ASTPtr table_ast = first_identifier;

    /// A following dot means the first identifier was the database name.
    if (dot_token.ignore(pos, expected))
    {
        database_ast = first_identifier;
        if (!identifier_parser.parse(pos, table_ast, expected))
            return false;
    }

    db_and_table.database.clear();
    tryGetIdentifierNameInto(database_ast, db_and_table.database);
    tryGetIdentifierNameInto(table_ast, db_and_table.table);

    return true;
}
/// Parses RENAME/EXCHANGE TABLE|DICTIONARY and RENAME DATABASE statements
/// into an ASTRenameQuery. Returns false if the input doesn't start with one
/// of the recognized keywords or is malformed after one of them.
bool ParserRenameQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
{
    ParserKeyword s_rename_table("RENAME TABLE");
    ParserKeyword s_exchange_tables("EXCHANGE TABLES");
    ParserKeyword s_rename_dictionary("RENAME DICTIONARY");
    ParserKeyword s_exchange_dictionaries("EXCHANGE DICTIONARIES");
    ParserKeyword s_rename_database("RENAME DATABASE");
    ParserKeyword s_if_exists("IF EXISTS");
    ParserKeyword s_to("TO");
    ParserKeyword s_and("AND");
    ParserToken s_comma(TokenType::Comma);

    bool exchange = false;
    bool dictionary = false;

    if (s_rename_table.ignore(pos, expected))
        ;
    else if (s_exchange_tables.ignore(pos, expected))
        exchange = true;
    else if (s_rename_dictionary.ignore(pos, expected))
        dictionary = true;
    else if (s_exchange_dictionaries.ignore(pos, expected))
    {
        exchange = true;
        dictionary = true;
    }
    else if (s_rename_database.ignore(pos, expected))
    {
        /// RENAME DATABASE [IF EXISTS] <from> TO <to> [ON CLUSTER ...]
        ASTPtr from_db;
        ASTPtr to_db;
        ParserIdentifier db_name_p;
        bool if_exists = s_if_exists.ignore(pos, expected);
        if (!db_name_p.parse(pos, from_db, expected))
            return false;

        if (!s_to.ignore(pos, expected))
            return false;

        if (!db_name_p.parse(pos, to_db, expected))
            return false;

        String cluster_str;
        if (ParserKeyword{"ON"}.ignore(pos, expected))
        {
            if (!ASTQueryWithOnCluster::parse(pos, cluster_str, expected))
                return false;
        }

        auto query = std::make_shared<ASTRenameQuery>();
        query->database = true;
        /// Append one default-constructed element. The previous code called
        /// elements.emplace({}), which passes a value-initialized iterator as
        /// the position argument of vector::emplace -- undefined behavior;
        /// emplace_back is the intended operation.
        query->elements.emplace_back();
        query->elements.front().if_exists = if_exists;
        tryGetIdentifierNameInto(from_db, query->elements.front().from.database);
        tryGetIdentifierNameInto(to_db, query->elements.front().to.database);
        query->cluster = cluster_str;
        node = query;
        return true;
    }
    else
        return false;

    ASTRenameQuery::Elements elements;

    /// Rename pairs are separated by TO; exchange pairs by AND.
    const auto ignore_delim = [&] { return exchange ? s_and.ignore(pos) : s_to.ignore(pos); };

    while (true)
    {
        if (!elements.empty() && !s_comma.ignore(pos))
            break;

        ASTRenameQuery::Element & ref = elements.emplace_back();

        if (!exchange)
            ref.if_exists = s_if_exists.ignore(pos, expected);

        if (!parseDatabaseAndTable(ref.from, pos, expected)
            || !ignore_delim()
            || !parseDatabaseAndTable(ref.to, pos, expected))
            return false;
    }

    String cluster_str;
    if (ParserKeyword{"ON"}.ignore(pos, expected))
    {
        if (!ASTQueryWithOnCluster::parse(pos, cluster_str, expected))
            return false;
    }

    auto query = std::make_shared<ASTRenameQuery>();
    query->cluster = cluster_str;
    node = query;
    query->elements = elements;
    query->exchange = exchange;
    query->dictionary = dictionary;
    return true;
}
}
| 1,672 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.