max_stars_count: int64 (301 .. 224k)
text: stringlengths (6 .. 1.05M)
token_count: int64 (3 .. 727k)
887
<reponame>belak/Textual
// See the contents of RCMSecureTransport.m for license information.

#import <Security/Security.h>

NS_ASSUME_NONNULL_BEGIN

typedef NS_ENUM(NSUInteger, RCMCipherSuiteCollection) {
	RCMCipherSuiteCollectionDefault = 0,
	RCMCipherSuiteCollectionMozilla2015 = 1,
	RCMCipherSuiteCollectionMozilla2017 = 2,
	RCMCipherSuiteCollectionNone = 100
};

@interface RCMSecureTransport : NSObject
+ (nullable NSString *)descriptionForProtocolVersion:(SSLProtocol)protocolVersion;

+ (nullable NSString *)descriptionForCipherSuite:(SSLCipherSuite)cipherSuite;
+ (nullable NSString *)descriptionForCipherSuite:(SSLCipherSuite)cipherSuite withProtocol:(BOOL)appendProtocol;

+ (BOOL)isCipherSuiteDeprecated:(SSLCipherSuite)cipherSuite;

+ (NSArray<NSString *> *)descriptionsForCipherListCollection:(RCMCipherSuiteCollection)collection;
+ (NSArray<NSString *> *)descriptionsForCipherListCollection:(RCMCipherSuiteCollection)collection withProtocol:(BOOL)appendProtocol;

+ (NSArray<NSString *> *)descriptionsForCipherSuites:(NSArray<NSNumber *> *)cipherSuites;
+ (NSArray<NSString *> *)descriptionsForCipherSuites:(NSArray<NSNumber *> *)cipherSuites withProtocol:(BOOL)appendProtocol;

+ (NSArray<NSNumber *> *)cipherSuitesInCollection:(RCMCipherSuiteCollection)collection;
+ (NSArray<NSNumber *> *)cipherSuitesInCollection:(RCMCipherSuiteCollection)collection includeDeprecated:(BOOL)includeDeprecated;

+ (BOOL)isTLSError:(NSError *)error;

+ (nullable NSString *)descriptionForError:(NSError *)error;

/* -descriptionForErrorCode: returns "Unknown" for out of range error codes */
+ (NSString *)descriptionForErrorCode:(NSInteger)errorCode;

+ (nullable NSString *)descriptionForBadCertificateError:(NSError *)error;
+ (nullable NSString *)descriptionForBadCertificateErrorCode:(NSInteger)errorCode;

+ (BOOL)isBadCertificateError:(NSError *)error;
+ (BOOL)isBadCertificateErrorCode:(NSInteger)errorCode;

+ (SecTrustRef)trustFromCertificateChain:(NSArray<NSData *> *)certificateChain withPolicyName:(NSString *)policyName CF_RETURNS_RETAINED;

+ (nullable NSArray<NSData *> *)certificatesInTrust:(SecTrustRef)trustRef;

+ (nullable NSString *)policyNameInTrust:(SecTrustRef)trustRef;
@end

NS_ASSUME_NONNULL_END
753
1,320
# type: ignore
# ^ that's necessary to prevent a false linting error of some kind
import asyncio
import urllib.parse
from time import perf_counter

import typer

from mcsniperpy.util.logs_manager import Color as color
from mcsniperpy.util.logs_manager import Logger as log


async def check(url: str, iterations: int):
    async def ping():
        try:
            uri = urllib.parse.urlparse(url)
            reader, writer = await asyncio.open_connection(uri.hostname, 443, ssl=False)
            writer.write(
                f"GET {uri.path or '/'} HTTP/1.1\r\nHost:{uri.hostname}\r\n\r\n".encode()
            )
            start = perf_counter()
            await writer.drain()
            _ = await reader.read(100)
            end = perf_counter()
            return round((end - start) * 1000)
        # pylint: disable=invalid-name, broad-except
        except Exception as e:
            log.error("Failed to connect to URL. error code: " + str(e))
            return None

    pings = []
    with typer.progressbar(
        range(iterations),
        fill_char="█",
        empty_char=" ",
        color=10,
        show_eta=False,
        bar_template="%(label)s %(bar)s %(info)s",
    ) as progress:
        for _ in progress:
            # Only keep successful measurements so a failed attempt cannot break the average.
            result = await ping()
            if result is not None:
                pings.append(result)
            await asyncio.sleep(0.01)

    print()
    log.info(f"Host {color.l_cyan}» {color.blue}{urllib.parse.urlparse(url).hostname}")
    # Average over the pings actually collected rather than a hard-coded count of 5.
    if pings:
        log.info(f"Ping {color.l_cyan}» {color.blue}{sum(pings) / len(pings)}ms")


async def ping_test(iterations):
    print()
    await check("https://api.minecraftservices.com/minecraft", iterations)
778
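A minimal way to drive the checker above from a synchronous entry point might look like the sketch below. The module name is an illustration only (it assumes the file has been saved locally as ping_tester.py), not the project's real layout.

import asyncio

# Assumes the checker above is importable as ping_tester (hypothetical module name).
import ping_tester

if __name__ == "__main__":
    # Run five timing iterations against the endpoint hard-coded in ping_test().
    asyncio.run(ping_tester.ping_test(5))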
764
<reponame>641589523/token-profile<filename>erc20/0xbBe319b73744dB9d54F5D29df7D8256B7e43995C.json
{
  "symbol": "ANC",
  "address": "0xbBe319b73744dB9d54F5D29df7D8256B7e43995C",
  "overview": {"en": ""},
  "email": "",
  "website": "https://aragonchina.xyz/",
  "state": "NORMAL",
  "links": {
    "blog": "",
    "twitter": "https://twitter.com/Typto_DAOSquare",
    "telegram": "",
    "github": ""
  }
}
153
310
<reponame>evgeniya-egupova/nncf """ Copyright (c) 2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from copy import deepcopy from enum import Enum from typing import Dict, List, Optional, Any from nncf.common.graph import NNCFNode from nncf.common.graph import NNCFNodeName class QuantizationMode: SYMMETRIC = 'symmetric' ASYMMETRIC = 'asymmetric' class QuantizerConfig: """ A generic, framework-agnostic information on a configuration of a quantizer for abstract reasoning and determination of a quantizer setup scheme for a given model. """ def __init__(self, num_bits: int = 8, mode: QuantizationMode = QuantizationMode.SYMMETRIC, signedness_to_force: Optional[bool] = None, per_channel: bool = False): """ :param num_bits: Bitwidth of the quantization. :param mode: The mode of quantization (symmetric or asymmetric). :param signedness_to_force: True if the quantizer *must* be signed, False if *must* be unsigned, None if the signed/unsigned attribute should be determined based on the incoming activation statistics during range initialization. :param per_channel: True for per-channel quantization, False for per-tensor. """ self.num_bits = num_bits self.mode = mode self.signedness_to_force = signedness_to_force self.per_channel = per_channel def __eq__(self, other): return self.__dict__ == other.__dict__ def __str__(self): return 'B:{bits} M:{mode} SGN:{signedness} PC:{per_channel}'.format( bits=self.num_bits, mode='S' if self.mode == QuantizationMode.SYMMETRIC else 'A', signedness='ANY' if self.signedness_to_force is None else ('S' if self.signedness_to_force else 'U'), per_channel='Y' if self.per_channel else 'N') def __hash__(self): return hash(str(self)) def is_valid_requantization_for(self, other: 'QuantizerConfig') -> bool: """ Quantizer config A is a valid requantization for quantizer config B if A is more strict - specifically, it might be reasonable to put quantizer A after quantizer B in tensor data control flow, so that the requantization will further constrain the input tensor data w.r.t. values it can take, but putting quantizer A after quantizer B would be unreasonable. :param other: The "primary" QuantizerConfig, i.e. the one that defines an already present quantization. :return: True if the current config is a valid requantization for `other`, False otherwise. """ fail_conditions = [ self.num_bits > other.num_bits, self.mode is QuantizationMode.ASYMMETRIC and other.mode is QuantizationMode.SYMMETRIC, self.signedness_to_force is None and other.signedness_to_force is not None, self.signedness_to_force is True and other.signedness_to_force is False, ] if any(fail_conditions): return False return True def compatible_with_a_unified_scale_linked_qconfig(self, linked_qconfig: 'QuantizerConfig'): """ For two configs to be compatible in a unified scale scenario, all of their fundamental parameters must be aligned. :param linked_qconfig: A QuantizerConfig that is compared against the current config. 
:return: A boolean value specifying whether `linked_qconfig` is compatible with the current config in terms of scale unification. """ return self.num_bits == linked_qconfig.num_bits and \ self.mode == linked_qconfig.mode and \ self.signedness_to_force == linked_qconfig.signedness_to_force and \ self.per_channel == linked_qconfig.per_channel def is_a_bitwidth_variant(self, other_qconfig: 'QuantizerConfig') -> bool: """ :param other_qconfig: A QuantizerConfig to be compared against the current config. :return: A boolean value specifying whether `other_config` is identical to the current config in everything except the bitwidth. """ return self.per_channel == other_qconfig.per_channel and \ self.signedness_to_force == other_qconfig.signedness_to_force and \ self.mode == other_qconfig.mode def get_state(self) -> Dict[str, Any]: """ Returns a dictionary with Python data structures (dict, list, tuple, str, int, float, True, False, None) that represents state of the object. :return: state of the object """ return {'num_bits': self.num_bits, 'mode': self.mode, 'signedness_to_force': self.signedness_to_force, 'per_channel': self.per_channel} @classmethod def from_state(cls, state: Dict[str, Any]) -> 'QuantizerConfig': """ Creates the object from its state. :param state: Output of `get_state()` method. """ return cls(**state) class QuantizerSpec: """ A specific (potentially framework-aware) parameter struct required to initialize a given object that performs quantization of an input tensor. """ def __init__(self, num_bits: int, mode: QuantizationMode, signedness_to_force: bool, narrow_range: bool, half_range: bool): """ :param num_bits: Bitwidth of the quantization. :param mode: The mode of quantization (symmetric or asymmetric). :param signedness_to_force: True if the quantizer *must* be signed, False if *must* be unsigned, None if the signed/unsigned attribute should be determined based on the incoming activation statistics during range initialization. :param narrow_range: True if the range of quantized values should be narrowed as compared to the naive case, False if all 2^`num_bits` quantizations should be used. :param half_range: If ``True`` effectively only a half of an quantizer range are used. False - the full range are used. """ self.num_bits = num_bits self.mode = mode self.signedness_to_force = signedness_to_force self.narrow_range = narrow_range self.half_range = half_range def __eq__(self, other: 'QuantizerSpec'): return self.__dict__ == other.__dict__ @classmethod def from_config(cls, qconfig: QuantizerConfig, narrow_range: bool, half_range: bool) -> 'QuantizerSpec': return cls(qconfig.num_bits, qconfig.mode, qconfig.signedness_to_force, narrow_range, half_range) class QuantizationConstraints: REF_QCONF_OBJ = QuantizerConfig() def __init__(self, **kwargs): """ Use attribute names of QuantizerConfig as arguments to set up constraints. E.g. QuantizationConstraint(bits=8, per_channel=True) will set up a constraint that corresponds to all 8-bit per-channel quantizers, either symmetric or asymmetric, either signed or unsigned. 
""" for attr_name in kwargs: if not hasattr(QuantizationConstraints.REF_QCONF_OBJ, attr_name): raise RuntimeError('Invalid constraint - QuantizerConfig has no attribute \'{}\''.format(attr_name)) self.qconf_attr_vs_constraint_dict = kwargs def apply_constraints_to(self, qconfig: QuantizerConfig) -> QuantizerConfig: for attr_name, constraint in self.qconf_attr_vs_constraint_dict.items(): if constraint is not None: setattr(qconfig, attr_name, constraint) return qconfig def is_config_compatible(self, qconfig: QuantizerConfig) -> bool: is_compatible = True for attr_name, constraint in self.qconf_attr_vs_constraint_dict.items(): if constraint is not None: qconf_attr_value = getattr(qconfig, attr_name) if qconf_attr_value != constraint: is_compatible = False return is_compatible def get_updated_constraints(self, overriding_constraints: 'QuantizationConstraints') -> 'QuantizationConstraints': new_dict = deepcopy(self.qconf_attr_vs_constraint_dict) new_dict.update(overriding_constraints.qconf_attr_vs_constraint_dict) return QuantizationConstraints(**new_dict) @classmethod def from_config_dict(cls, config_dict: Dict) -> 'QuantizationConstraints': return cls(num_bits=config_dict.get('bits'), mode=config_dict.get('mode'), per_channel=config_dict.get('per_channel'), signedness_to_force=config_dict.get('signed')) def constrain_qconfig_list(self, quantizer_config_list: List[QuantizerConfig]) -> List[QuantizerConfig]: assert quantizer_config_list is not None constrained_quantizer_config_list = list(filter( self.is_config_compatible, quantizer_config_list )) # TODO: Make the logic more flexible when the flag "warning as error" is implemented. # It means that the qconfig from overrides must be selected as final config # even if it is not valid in hw-config. if not constrained_quantizer_config_list: raise RuntimeError() return constrained_quantizer_config_list class QuantizerGroup(Enum): ACTIVATIONS = 'activations' WEIGHTS = 'weights' @staticmethod def from_str(str_: str) -> 'QuantizerGroup': if str_ == QuantizerGroup.ACTIVATIONS.value: return QuantizerGroup.ACTIVATIONS if str_ == QuantizerGroup.WEIGHTS.value: return QuantizerGroup.WEIGHTS raise RuntimeError('Unknown quantizer group string') class QuantizableWeightedLayerNode: def __init__(self, node: NNCFNode, qconfig_list: List[QuantizerConfig]): self.node = node self.qconfig_list = qconfig_list class QuantizerId: """ Unique identifier of a quantizer. It's used to store and search all quantizers in a single structure. 
""" def get_base(self): raise NotImplementedError def get_suffix(self) -> str: raise NotImplementedError def __str__(self): return str(self.get_base()) + self.get_suffix() def __hash__(self): return hash((self.get_base(), self.get_suffix())) def __eq__(self, other: 'QuantizerId'): return (self.get_base() == other.get_base()) and (self.get_suffix() == other.get_suffix()) class WeightQuantizerId(QuantizerId): """ Unique identifier of a quantizer for weights.""" def __init__(self, target_node_name: NNCFNodeName): self.target_node_name = target_node_name def get_base(self) -> str: return self.target_node_name def get_suffix(self) -> str: return '|WEIGHT' class NonWeightQuantizerId(QuantizerId): """ Unique identifier of a quantizer, which corresponds to non-weight operations, such as ordinary activation, function and input """ def __init__(self, target_node_name: NNCFNodeName, input_port_id=None): self.target_node_name = target_node_name self.input_port_id = input_port_id def get_base(self) -> str: return self.target_node_name def get_suffix(self) -> str: return '|OUTPUT' if self.input_port_id is None else '|INPUT{}'.format(self.input_port_id) class UnifiedScaleType(Enum): """ UNIFY_ONLY_PER_TENSOR - only results in scale unification if per-tensor quantization is ultimately applied. This is the target scenario for concat unified scales since the channel count between the concatenated tensors may be mismatching and, more importantly, the concatenation might occur on exactly the channel dimension which means that the concatenated tensor must reuse all quantization scales of the input per-channel quantized tensors. UNIFY_ALWAYS - results in scale unification for both per-channel and per-tensor quantization. This is the target scenario for eltwise unified scales, as it is assumed that the eltwise ops have matching input tensor shapes and therefore the quantization channel count is the same. """ UNIFY_ONLY_PER_TENSOR = 0 UNIFY_ALWAYS = 1 class QuantizationPreset(Enum): PERFORMANCE = 'performance' MIXED = 'mixed' @staticmethod def from_str(str_: str) -> 'QuantizationPreset': if str_ == QuantizationPreset.PERFORMANCE.value: return QuantizationPreset.PERFORMANCE if str_ == QuantizationPreset.MIXED.value: return QuantizationPreset.MIXED raise RuntimeError('Unknown preset string.') def get_params_configured_by_preset(self, quant_group: QuantizerGroup) -> Dict: if quant_group == QuantizerGroup.ACTIVATIONS and self == QuantizationPreset.MIXED: return {'mode': QuantizationMode.ASYMMETRIC} return {'mode' : QuantizationMode.SYMMETRIC}
5,264
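The quantizer-configuration structures in the NNCF file above are easiest to see in action with a short sketch. The snippet below is illustrative only: the import path is an assumption not stated in the excerpt, and the candidate configs are made up.

# Illustrative sketch; the import path is an assumption.
from nncf.common.quantization.structs import (
    QuantizerConfig,
    QuantizationConstraints,
    QuantizationMode,
)

# Default config: 8-bit, symmetric, per-tensor.
base = QuantizerConfig()

# Constrain candidate configs to 8-bit per-channel quantizers.
constraints = QuantizationConstraints(num_bits=8, per_channel=True)
candidates = [
    QuantizerConfig(num_bits=8, per_channel=True),
    QuantizerConfig(num_bits=4, per_channel=True),
]
compatible = constraints.constrain_qconfig_list(candidates)  # keeps only the 8-bit config

# A 4-bit symmetric config is a valid requantization for the 8-bit default,
# but not the other way around (it would widen, not constrain, the values).
narrow = QuantizerConfig(num_bits=4, mode=QuantizationMode.SYMMETRIC)
assert narrow.is_valid_requantization_for(base)
assert not base.is_valid_requantization_for(narrow)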
4,462
import numpy as np
import mxnet as mx
import mxnet.gluon.nn as nn
from autogluon.extra.contrib.enas import *
import autogluon.core as ag


class Identity(mx.gluon.HybridBlock):
    def hybrid_forward(self, F, x):
        return x


class ConvBNReLU(mx.gluon.HybridBlock):
    def __init__(self, in_channels, channels, kernel, stride):
        super().__init__()
        padding = (kernel - 1) // 2
        self.conv = nn.Conv2D(channels, kernel, stride, padding, in_channels=in_channels)
        self.bn = nn.BatchNorm(in_channels=channels)
        self.relu = nn.Activation('relu')

    def hybrid_forward(self, F, x):
        return self.relu(self.bn(self.conv(x)))


@enas_unit()
class ResUnit(mx.gluon.HybridBlock):
    def __init__(self, in_channels, channels, hidden_channels, kernel, stride):
        super().__init__()
        self.conv1 = ConvBNReLU(in_channels, hidden_channels, kernel, stride)
        self.conv2 = ConvBNReLU(hidden_channels, channels, kernel, 1)
        if in_channels == channels and stride == 1:
            self.shortcut = Identity()
        else:
            self.shortcut = nn.Conv2D(channels, 1, stride, in_channels=in_channels)

    def hybrid_forward(self, F, x):
        return self.conv2(self.conv1(x)) + self.shortcut(x)


def test_enas_net():
    mynet = ENAS_Sequential(
        ResUnit(1, 8, hidden_channels=ag.space.Categorical(4, 8), kernel=ag.space.Categorical(3, 5), stride=2),
        ResUnit(8, 8, hidden_channels=8, kernel=ag.space.Categorical(3, 5), stride=2),
        ResUnit(8, 16, hidden_channels=8, kernel=ag.space.Categorical(3, 5), stride=2),
        ResUnit(16, 16, hidden_channels=8, kernel=ag.space.Categorical(3, 5), stride=1, with_zero=True),
        ResUnit(16, 16, hidden_channels=8, kernel=ag.space.Categorical(3, 5), stride=1, with_zero=True),
        nn.GlobalAvgPool2D(),
        nn.Flatten(),
        nn.Activation('relu'),
        nn.Dense(10, in_units=16),
    )
    mynet.initialize()
    mynet.hybridize()
    x = mx.nd.random.uniform(shape=(1, 1, 28, 28))
    xx = mynet.evaluate_latency(x)
    y = mynet(x)
    assert mynet.nparams == 8714
    mynet.export('enas')
    mynet_static = mx.gluon.nn.SymbolBlock.imports("enas-symbol.json", ['data'], "enas.params")
    yy = mynet_static(x)
    np.testing.assert_almost_equal(y.asnumpy(), yy.asnumpy())


if __name__ == "__main__":
    test_enas_net()
1,077
640
/*
 * Reverse Polish Notation calculator - integer only!
 *
 * Nabbed from GBDK distribution, converted over to Small C+
 *
 * Small C+ changes:
 *
 * - include <ctype.h>
 * - #define for UBYTE WORD BYTE
 * - Correcting gets() statement so that we give a max size
 *
 * Added to Small C+ archive 14/3/99 djm
 *
 * I'm guessing that <NAME> originally wrote this, if
 * not, then I apologise.
 *
 * Enjoy it: enter expressions like 1000 2342 + then 2 *
 * or something like that, it's all a bit too much like Forth
 * for my liking! <grin>
 *
 */

#define ANSI_STDIO

#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>

#define MAXOP     40
#define NUMBER    '0'
#define STACKSIZE 40

#define UBYTE unsigned char
#define WORD  int
#define BYTE  char

UBYTE sp;
WORD stack[STACKSIZE];
UBYTE s[MAXOP];
UBYTE pos;
WORD n;

void push(WORD l)
{
    if (sp < STACKSIZE)
        stack[sp++] = l;
    else
        printf("Stack full\n");
}

WORD pop()
{
    if (sp > 0)
        return stack[--sp];
    else
        printf("Stack empty\n");
    return 0;
}

WORD top()
{
    if (sp > 0)
        return stack[sp-1];
    else
        printf("Stack empty\n");
    return 0;
}

BYTE read_op()
{
    if (pos == 0) {
        gets(s);
    }
    while (s[pos] == ' ' || s[pos] == '\t')
        pos++;
    if (s[pos] == '\0') {
        pos = 0;
        return('\n');
    }
    if (isdigit(s[pos]) == 0)
        return(s[pos++]);
    n = s[pos] - '0';
    while (isdigit(s[++pos]))
        n = 10 * n + s[pos] - '0';
    return NUMBER;
}

void main()
{
    BYTE type;
    WORD op2;

    printf("RPN Calculator\n");
    printf("Nabbed from GBDK archive\n");

    sp = 0;
    pos = 0;

    while ((type = read_op()) != 0) {
        switch (type) {
        case NUMBER:
            push(n);
            break;
        case '+':
            push(pop() + pop());
            break;
        case '*':
            push(pop() * pop());
            break;
        case '-':
            op2 = pop();
            push(pop() - op2);
            break;
        case '/':
            op2 = pop();
            if (op2 != 0)
                push(pop() / op2);
            else
                printf("Divide by 0\n");
            break;
        case '.':
            return;
        case '\n':
            printf("==> %d\n", top());
            break;
        }
    }
}
1,017
2,112
/**
 * Autogenerated by Thrift
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package test.fixtures.module1;

import com.facebook.swift.codec.*;
import com.google.common.collect.*;
import java.util.*;

@SwiftGenerated
public final class Constants {
    private Constants() {}

    public static final test.fixtures.module1.Struct C1 =
        new test.fixtures.module1.Struct.Builder().setFirst(201).setSecond("module1_str").build();

    public static final List<test.fixtures.module1.Enum> E1S = ImmutableList.<test.fixtures.module1.Enum>builder()
        .add(test.fixtures.module1.Enum.ONE)
        .add(test.fixtures.module1.Enum.THREE)
        .build();
}
255
4,821
/* * Copyright 2017 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _VMSTRUCTS_H #define _VMSTRUCTS_H #include <jvmti.h> #include <stdint.h> #include "codeCache.h" class VMStructs { protected: static NativeCodeCache* _libjvm; static bool _has_class_names; static bool _has_class_loader_data; static bool _has_thread_bridge; static bool _has_perm_gen; static int _klass_name_offset; static int _symbol_length_offset; static int _symbol_length_and_refcount_offset; static int _symbol_body_offset; static int _class_loader_data_offset; static int _class_loader_data_next_offset; static int _methods_offset; static int _thread_osthread_offset; static int _thread_anchor_offset; static int _thread_state_offset; static int _osthread_id_offset; static int _anchor_sp_offset; static int _anchor_pc_offset; static int _frame_size_offset; static int _is_gc_active_offset; static char* _collected_heap_addr; static const void* _code_heap_low; static const void* _code_heap_high; static jfieldID _eetop; static jfieldID _tid; static jfieldID _klass; static int _tls_index; static intptr_t _env_offset; typedef void* (*FindBlobFunc)(const void*); static FindBlobFunc _find_blob; typedef void (*LockFunc)(void*); static LockFunc _lock_func; static LockFunc _unlock_func; static char* _method_flushing; static int* _sweep_started; static uintptr_t readSymbol(const char* symbol_name); static void initOffsets(); static void initJvmFunctions(); static void initThreadBridge(JNIEnv* env); static void initLogging(JNIEnv* env); const char* at(int offset) { return (const char*)this + offset; } public: static void init(NativeCodeCache* libjvm); static NativeCodeCache* libjvm() { return _libjvm; } static bool hasClassNames() { return _has_class_names; } static bool hasClassLoaderData() { return _has_class_loader_data; } static bool hasThreadBridge() { return _has_thread_bridge; } typedef jvmtiError (*GetStackTraceFunc)(void* self, void* thread, jint start_depth, jint max_frame_count, jvmtiFrameInfo* frame_buffer, jint* count_ptr); static GetStackTraceFunc _get_stack_trace; static bool hasDebugSymbols() { return _get_stack_trace != NULL; } }; class MethodList { public: enum { SIZE = 8 }; private: intptr_t _method[SIZE]; int _ptr; MethodList* _next; int _padding; public: MethodList(MethodList* next) : _ptr(0), _next(next), _padding(0) { for (int i = 0; i < SIZE; i++) { _method[i] = 0x37; } } }; class VMSymbol : VMStructs { public: unsigned short length() { if (_symbol_length_offset >= 0) { return *(unsigned short*) at(_symbol_length_offset); } else { return *(unsigned int*) at(_symbol_length_and_refcount_offset) >> 16; } } const char* body() { return at(_symbol_body_offset); } }; class ClassLoaderData : VMStructs { private: void* mutex() { return *(void**) at(sizeof(uintptr_t) * 3); } public: void lock() { _lock_func(mutex()); } void unlock() { _unlock_func(mutex()); } MethodList** methodList() { return (MethodList**) at(sizeof(uintptr_t) * 6 + 8); } }; class VMKlass : VMStructs { public: static VMKlass* fromJavaClass(JNIEnv* env, 
jclass cls) { if (_has_perm_gen) { jobject klassOop = env->GetObjectField(cls, _klass); return (VMKlass*)(*(uintptr_t**)klassOop + 2); } else if (sizeof(VMKlass*) == 8) { return (VMKlass*)(uintptr_t)env->GetLongField(cls, _klass); } else { return (VMKlass*)(uintptr_t)env->GetIntField(cls, _klass); } } static VMKlass* fromHandle(uintptr_t handle) { if (_has_perm_gen) { // On JDK 7 KlassHandle is a pointer to klassOop, hence one more indirection return (VMKlass*)(*(uintptr_t**)handle + 2); } else { return (VMKlass*)handle; } } VMSymbol* name() { return *(VMSymbol**) at(_klass_name_offset); } ClassLoaderData* classLoaderData() { return *(ClassLoaderData**) at(_class_loader_data_offset); } int methodCount() { int* methods = *(int**) at(_methods_offset); return methods == NULL ? 0 : *methods & 0xffff; } }; class VMThread : VMStructs { public: static VMThread* current(); static VMThread* fromJavaThread(JNIEnv* env, jthread thread) { return (VMThread*)(uintptr_t)env->GetLongField(thread, _eetop); } static VMThread* fromEnv(JNIEnv* env) { return (VMThread*)((intptr_t)env - _env_offset); } static jlong javaThreadId(JNIEnv* env, jthread thread) { return env->GetLongField(thread, _tid); } static bool hasNativeId() { return _thread_osthread_offset >= 0 && _osthread_id_offset >= 0; } int osThreadId() { const char* osthread = *(const char**) at(_thread_osthread_offset); return *(int*)(osthread + _osthread_id_offset); } int state() { return _thread_state_offset >= 0 ? *(int*) at(_thread_state_offset) : 0; } uintptr_t& lastJavaSP() { return *(uintptr_t*) (at(_thread_anchor_offset) + _anchor_sp_offset); } uintptr_t& lastJavaPC() { return *(uintptr_t*) (at(_thread_anchor_offset) + _anchor_pc_offset); } }; class RuntimeStub : VMStructs { public: static RuntimeStub* findBlob(const void* pc) { return _find_blob != NULL ? (RuntimeStub*)_find_blob(pc) : NULL; } int frameSize() { return *(int*) at(_frame_size_offset); } }; class CodeHeap : VMStructs { public: static bool contains(const void* pc) { return _code_heap_low <= pc && pc < _code_heap_high; } static void updateBounds(const void* start, const void* end) { if (start < _code_heap_low) _code_heap_low = start; if (end > _code_heap_high) _code_heap_high = end; } }; class CollectedHeap : VMStructs { public: static bool isGCActive() { return _collected_heap_addr != NULL && _is_gc_active_offset >= 0 && _collected_heap_addr[_is_gc_active_offset] != 0; } }; class DisableSweeper : VMStructs { private: bool _enabled; public: DisableSweeper(); ~DisableSweeper(); }; #endif // _VMSTRUCTS_H
3,087
372
<filename>clients/google-api-services-mybusinessbusinessinformation/v1/1.31.0/com/google/api/services/mybusinessbusinessinformation/v1/model/SearchGoogleLocationsRequest.java /* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.mybusinessbusinessinformation.v1.model; /** * Request message for GoogleLocations.SearchGoogleLocations. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the My Business Business Information API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class SearchGoogleLocationsRequest extends com.google.api.client.json.GenericJson { /** * Location to search for. If provided, will find locations which match the provided location * details. * The value may be {@code null}. */ @com.google.api.client.util.Key private Location location; /** * The number of matches to return. The default value is 3, with a maximum of 10. Note that * latency may increase if more are requested. There is no pagination. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** * Text query to search for. The search results from a query string will be less accurate than if * providing an exact location, but can provide more inexact matches. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String query; /** * Location to search for. If provided, will find locations which match the provided location * details. * @return value or {@code null} for none */ public Location getLocation() { return location; } /** * Location to search for. If provided, will find locations which match the provided location * details. * @param location location or {@code null} for none */ public SearchGoogleLocationsRequest setLocation(Location location) { this.location = location; return this; } /** * The number of matches to return. The default value is 3, with a maximum of 10. Note that * latency may increase if more are requested. There is no pagination. * @return value or {@code null} for none */ public java.lang.Integer getPageSize() { return pageSize; } /** * The number of matches to return. The default value is 3, with a maximum of 10. Note that * latency may increase if more are requested. There is no pagination. * @param pageSize pageSize or {@code null} for none */ public SearchGoogleLocationsRequest setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** * Text query to search for. The search results from a query string will be less accurate than if * providing an exact location, but can provide more inexact matches. 
* @return value or {@code null} for none */ public java.lang.String getQuery() { return query; } /** * Text query to search for. The search results from a query string will be less accurate than if * providing an exact location, but can provide more inexact matches. * @param query query or {@code null} for none */ public SearchGoogleLocationsRequest setQuery(java.lang.String query) { this.query = query; return this; } @Override public SearchGoogleLocationsRequest set(String fieldName, Object value) { return (SearchGoogleLocationsRequest) super.set(fieldName, value); } @Override public SearchGoogleLocationsRequest clone() { return (SearchGoogleLocationsRequest) super.clone(); } }
1,289
17,143
<reponame>nickmelnikov82/dash
import json

from setuptools import setup

with open("package.json") as fp:
    package = json.load(fp)

setup(
    name="dash_renderer",
    version=package["version"],
    author="<NAME>",
    author_email="<EMAIL>",
    packages=["dash_renderer"],
    include_package_data=True,
    license="MIT",
    description="Front-end component renderer for Dash",
    install_requires=[],
)
153
11,356
<reponame>Bpowers4/turicreate<gh_stars>1000+ /* @file is_noexcept @Copyright <NAME> 2015-2017 Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt) */ #ifndef BOOST_CLBL_TRTS_IS_NOEXCEPT_HPP #define BOOST_CLBL_TRTS_IS_NOEXCEPT_HPP #include <boost/callable_traits/detail/core.hpp> namespace boost { namespace callable_traits { //[ is_noexcept_hpp /*`[section:ref_is_noexcept is_noexcept] [heading Header] ``#include <boost/callable_traits/is_noexcept.hpp>`` [heading Definition] */ // inherits from either std::true_type or std::false_type template<typename T> struct is_noexcept; //<- template<typename T> struct is_noexcept : detail::traits<detail::shallow_decay<T>>::is_noexcept { using type = typename detail::traits< detail::shallow_decay<T>>::is_noexcept; }; #ifdef BOOST_CLBL_TRTS_DISABLE_VARIABLE_TEMPLATES template<typename T> struct is_noexcept_v { static_assert(std::is_same<T, detail::dummy>::value, "Variable templates not supported on this compiler."); }; #else //-> // only available when variable templates are supported template<typename T> //<- BOOST_CLBL_TRAITS_INLINE_VAR //-> constexpr bool is_noexcept_v = //see below //<- detail::traits<detail::shallow_decay<T>>::is_noexcept::value; #endif }} // namespace boost::callable_traits //-> /*` [heading Constraints] * none * [heading Behavior] * `is_noexcept<T>::value` is `true` when either: * `T` is a function type, function pointer type, function reference type, or member function pointer type where the function has a `noexcept` specifier * `T` is a function object with a non-overloaded `operator()`, where the `operator()` has a `noexcept` specifier * On compilers that support variable templates, `is_noexcept_v<T>` is equivalent to `is_noexcept<T>::value`. [heading Input/Output Examples] [table [[`T`] [`is_noexcept_v<T>`]] [[`int() const noexcept`] [`true`]] [[`int(* const &)() noexcept`] [`true`]] [[`int(&)() noexcept`] [`true`]] [[`int(foo::*)() noexcept`] [`true`]] [[`int() const`] [`false`]] [[`int() volatile`] [`false`]] [[`int(foo::*)() const`] [`false`]] [[`int() const`] [`false`]] [[`int() volatile`] [`false`]] [[`int() &`] [`false`]] [[`int(*)()`] [`false`]] [[`int`] [`false`]] [[`int foo::*`] [`false`]] [[`const int foo::*`] [`false`]] ] [heading Example Program] [import ../example/is_noexcept.cpp] [is_noexcept] [endsect] */ //] #endif // #ifndef BOOST_CLBL_TRTS_IS_NOEXCEPT_HPP
1,283
335
<gh_stars>100-1000
{
  "word": "Zinjanthropus",
  "definitions": [
    "A genus name sometimes applied to the early hominid known as Nutcracker man."
  ],
  "parts-of-speech": "Noun"
}
82
1,292
<gh_stars>1000+ from .rest import RESTObject, RESTProperty from datetime import datetime def from_epoch(value): if isinstance(value, datetime): return value else: return datetime.utcfromtimestamp(value) def to_epoch(value): return (value - datetime(1970, 1, 1)).total_seconds() class Endpoint(RESTObject): """Represents an endpoint. Note that not every attribute is returned as part of the GET. Attributes ---------- name : str The name of the endpoint. Valid names include ``[a-zA-Z0-9_\\- ]+`` type : str The type of endpoint. The types include "alias", "model". version : int The version of this endpoint. Initial versions have version on 1. New versions increment this by 1. description : str A human-readable description of the endpoint. dependencies: list A list of endpoints that this endpoint depends on. methods : list ??? """ name = RESTProperty(str) type = RESTProperty(str) version = RESTProperty(int) description = RESTProperty(str) dependencies = RESTProperty(list) methods = RESTProperty(list) creation_time = RESTProperty(datetime, from_epoch, to_epoch) last_modified_time = RESTProperty(datetime, from_epoch, to_epoch) evaluator = RESTProperty(str) schema_version = RESTProperty(int) schema = RESTProperty(str) def __new__(cls, **kwargs): """Dispatch to the appropriate class.""" cls2 = {"alias": AliasEndpoint, "model": ModelEndpoint}[kwargs["type"]] """return object.__new__(cls, **kwargs)""" """ modified for Python 3""" return object.__new__(cls2) def __eq__(self, other): return ( self.name == other.name and self.type == other.type and self.version == other.version and self.description == other.description and self.dependencies == other.dependencies and self.methods == other.methods and self.evaluator == other.evaluator and self.schema_version == other.schema_version and self.schema == other.schema ) class ModelEndpoint(Endpoint): """Represents a model endpoint. src_path : str The local file path to the source of this object. required_files : str The local file path to the directory containing the required files. required_packages : str The local file path to the directory containing the required packages. """ src_path = RESTProperty(str) required_files = RESTProperty(list) required_packages = RESTProperty(list) required_packages_dst_path = RESTProperty(str) def __init__(self, **kwargs): super().__init__(**kwargs) self.type = "model" def __eq__(self, other): return ( super().__eq__(other) and self.required_files == other.required_files and self.required_packages == other.required_packages ) class AliasEndpoint(Endpoint): """Represents an alias Endpoint. target : str The endpoint that this is an alias for. """ target = RESTProperty(str) def __init__(self, **kwargs): super().__init__(**kwargs) self.type = "alias" class RESTServiceClient: """A thin client for the REST Service.""" def __init__(self, service_client): self.service_client = service_client self.query_timeout = None def get_info(self): """Returns the /info""" return self.service_client.GET("info") def query(self, name, *args, **kwargs): """Performs a query. Either specify *args or **kwargs, not both. Respects query_timeout.""" if args and kwargs: raise ValueError( "Mixing of keyword arguments and positional arguments when " "querying an endpoint is not supported." ) return self.service_client.POST( "query/" + name, data={"data": args or kwargs}, timeout=self.query_timeout ) def get_endpoint_upload_destination(self): """Returns a dict representing where endpoint data should be uploaded. 
Returns ------- dict Keys include: * path: a local file path. Note: In the future, other paths and parameters may be supported. Note: At this time, the response should not change over time. """ return self.service_client.GET("configurations/endpoint_upload_destination") def get_endpoints(self, type=None): """Returns endpoints from the management API. Parameters ---------- type : str The type of endpoint to return. None will include all endpoints. Other options are 'model' and 'alias'. """ result = {} for name, attrs in self.service_client.GET("endpoints", {"type": type}).items(): endpoint = Endpoint.from_json(attrs) endpoint.name = name result[name] = endpoint return result def get_endpoint(self, endpoint_name): """Returns an endpoints from the management API given its name. Parameters ---------- endpoint_name : str The name of the endpoint. """ ((name, attrs),) = self.service_client.GET("endpoints/" + endpoint_name).items() endpoint = Endpoint.from_json(attrs) endpoint.name = name return endpoint def add_endpoint(self, endpoint): """Adds an endpoint through the management API. Parameters ---------- endpoint : Endpoint """ return self.service_client.POST("endpoints", endpoint.to_json()) def set_endpoint(self, endpoint): """Updates an endpoint through the management API. Parameters ---------- endpoint : Endpoint The endpoint to update. """ return self.service_client.PUT("endpoints/" + endpoint.name, endpoint.to_json()) def remove_endpoint(self, endpoint_name): """Deletes an endpoint through the management API. Parameters ---------- endpoint_name : str The endpoint to delete. """ self.service_client.DELETE("endpoints/" + endpoint_name) def get_status(self): """Returns the status of the server. Returns ------- dict """ return self.service_client.GET("status") def set_credentials(self, username, password): """ Set credentials for all the TabPy client-server communication where client is tabpy-tools and server is tabpy-server. Parameters ---------- username : str User name (login). Username is case insensitive. password : str Password in plain text. """ self.service_client.set_credentials(username, password)
2,809
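A brief, hedged sketch of how the REST client above might be used. The endpoint name and the lower-level service client are placeholders (their concrete types and values are not part of the excerpt), and the import line is an assumption about module layout.

from rest_client import RESTServiceClient  # illustrative import; the real module path may differ

def describe_model_endpoints(service_client):
    # service_client is whatever low-level HTTP wrapper the constructor expects.
    client = RESTServiceClient(service_client)

    # List every deployed model endpoint and print its version and description.
    for name, endpoint in client.get_endpoints(type="model").items():
        print(name, endpoint.version, endpoint.description)

    # Query an endpoint by name with positional arguments ("add" is a made-up endpoint).
    return client.query("add", 1, 2)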
418
from twisted.internet.task import react
from twisted.internet.defer import inlineCallbacks

import treq


@inlineCallbacks
def main(reactor):
    print('List of tuples')
    resp = yield treq.get('https://httpbin.org/get',
                          params=[('foo', 'bar'), ('baz', 'bax')])
    content = yield resp.text()
    print(content)

    print('Single value dictionary')
    resp = yield treq.get('https://httpbin.org/get',
                          params={'foo': 'bar', 'baz': 'bax'})
    content = yield resp.text()
    print(content)

    print('Multi value dictionary')
    resp = yield treq.get('https://httpbin.org/get',
                          params={b'foo': [b'bar', b'baz', b'bax']})
    content = yield resp.text()
    print(content)

    print('Mixed value dictionary')
    resp = yield treq.get('https://httpbin.org/get',
                          params={'foo': [1, 2, 3], 'bax': b'quux', b'bar': 'foo'})
    content = yield resp.text()
    print(content)

    print('Preserved query parameters')
    resp = yield treq.get('https://httpbin.org/get?foo=bar',
                          params={'baz': 'bax'})
    content = yield resp.text()
    print(content)


react(main, [])
530
347
<reponame>hbraha/ovirt-engine<gh_stars>100-1000 package org.ovirt.engine.core.common.validation; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Random; import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; public class IPv4MaskValidatorTest { @ParameterizedTest @MethodSource public void netmaskFormatValidation(String mask, boolean isNetmaskValidFormat) { assertEquals(isNetmaskValidFormat, IPv4MaskValidator.getInstance().isValidNetmaskFormat(mask), "Failed to validate mask's Format: " + mask); } public static Stream<Arguments> netmaskFormatValidation() { return namesParams().map(o -> Arguments.of(o[0], o[1])); } @ParameterizedTest @MethodSource public void prefixFormatValidation(String mask, boolean isPrefixValid) { assertEquals(isPrefixValid, IPv4MaskValidator.getInstance().isPrefixValid(mask), "Failed to validate prefix's Format: " + mask); } public static Stream<Arguments> prefixFormatValidation() { return namesParams().map(o -> Arguments.of(o[0], o[3])); } @ParameterizedTest @MethodSource public void netmaskValidValue(String mask, boolean isNetmaskValidValue) { assertEquals(isNetmaskValidValue, IPv4MaskValidator.getInstance().isNetmaskValid(mask), "Failed to validate mask value" + mask); } public static Stream<Arguments> netmaskValidValue() { return namesParams() .filter(o -> IPv4MaskValidator.getInstance().isValidNetmaskFormat((String) o[0])) .map(o -> Arguments.of(o[0], o[2])); } public static Stream<Object[]> namesParams() { Random random = new Random(); return Stream.of( // Bad Format new Object[] { null, false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "a.a.a.a", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "255.255.0", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "255.255.0.0.0", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "255.255.0.0.", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "31 ", false, random.nextBoolean(), false }, //$NON-NLS-1$ /*note extra space*/ new Object[] { "/31 ", false, random.nextBoolean(), false }, //$NON-NLS-1$ /*note extra space*/ new Object[] { "31/", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "31*", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "//31 ", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "33", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "/33", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "01", false, random.nextBoolean(), false }, //$NON-NLS-1$ new Object[] { "01/", false, random.nextBoolean(), false }, //$NON-NLS-1$ // Not Valid new Object[] { "255.255.0.1", true, false, false }, //$NON-NLS-1$ new Object[] { "255.0.255.0", true, false, false }, //$NON-NLS-1$ new Object[] { "255.0.0.255", true, false, false }, //$NON-NLS-1$ new Object[] { "172.16.17.32", true, false, false }, //$NON-NLS-1$ // Valid new Object[] { "255.255.0.0", true, true, false }, //$NON-NLS-1$ new Object[] { "255.255.255.255", true, true, false }, //$NON-NLS-1$ new Object[] { "31", false, random.nextBoolean(), true }, //$NON-NLS-1$ new Object[] { "/31", false, random.nextBoolean(), true }, //$NON-NLS-1$ new Object[] { "7", false, random.nextBoolean(), true }, //$NON-NLS-1$ new Object[] { "/7", false, random.nextBoolean(), true 
} //$NON-NLS-1$ ); } @ParameterizedTest @MethodSource public void testOctetNetmask(String mask, String expected) { assertEquals(IPv4MaskValidator.getInstance().getOctetNetmask(mask), expected); } public static Stream<Object[]> testOctetNetmask() { return Stream.of( new Object[] { "255.255.0.0", "255.255.0.0" }, //$NON-NLS-1$ new Object[] { "255.255.255.0", "255.255.255.0" }, //$NON-NLS-1$ new Object[] { "255.255.255.255", "255.255.255.255" }, //$NON-NLS-1$ new Object[] { "255.255.124.0", "255.255.124.0" }, //$NON-NLS-1$ new Object[] { "32", "255.255.255.255" }, //$NON-NLS-1$ new Object[] { "31", "255.255.255.254" }, //$NON-NLS-1$ new Object[] { "24", "255.255.255.0" }, //$NON-NLS-1$ new Object[] { "21", "255.255.248.0" }, //$NON-NLS-1$ new Object[] { "16", "255.255.0.0" }, //$NON-NLS-1$ new Object[] { "13", "255.248.0.0" }, //$NON-NLS-1$ new Object[] { "8", "255.0.0.0" }, //$NON-NLS-1$ new Object[] { "5", "248.0.0.0" } //$NON-NLS-1$ ); } }
2,574
592
//
//  MMPolylineButton.h
//  Loose Leaf
//
//  Created by <NAME> on 6/21/12.
//  Copyright (c) 2012 Milestone Made, LLC. All rights reserved.
//

#import "MMSidebarButton.h"


@interface MMPolylineButton : MMSidebarButton

@end
87
382
<filename>relax-business-component/src/main/java/com/ustory/relax_business_component/plugin/ProxyActivity.java package com.ustory.relax_business_component.plugin; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.res.AssetManager; import android.content.res.Resources; import android.os.Bundle; import android.util.Log; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.WindowManager; import com.ustory.relax_business_component.plugin.inter.IPlugin; /** * 每个插件对应一个代理的ProxyActivity,添加第二个插件需要写一个ProxyActivity2 重写对应的proxyModel即可 */ public class ProxyActivity extends Activity implements IProxy{ private ProxyModel mProxyModel; protected IPlugin mPluginActivity; @Override protected void attachBaseContext(Context context) { Log.i("qiyue", "proxyActivity=" + context); mProxyModel = new ProxyModel(this); // mProxyModel.replaceContextResources(context); Log.i("old","attachBaseContext"); super.attachBaseContext(context); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); String className = getIntent().getStringExtra("Class"); String pluginName = getIntent().getStringExtra("pluginName"); mProxyModel.onCreate(this,savedInstanceState,pluginName,className); Log.i("old","onCreate"); } @Override public AssetManager getAssets() { Log.i("old","getAssets"); return mProxyModel.getAssets() == null ? super.getAssets() : mProxyModel.getAssets(); } @Override public Resources getResources() { Log.i("old","getResources"); return mProxyModel.getResources() == null ? super.getResources() : mProxyModel.getResources(); } @Override public Resources.Theme getTheme() { return mProxyModel.getTheme() == null ? 
super.getTheme() : mProxyModel.getTheme(); } @Override public ClassLoader getClassLoader() { return mProxyModel.getClassLoader(); } @Override public void attach(IPlugin pluginActivity) { mPluginActivity = pluginActivity; } @Override protected void onStart() { mPluginActivity.onStart(); super.onStart(); } @Override protected void onRestart() { mPluginActivity.onRestart(); super.onRestart(); } @Override protected void onResume() { mPluginActivity.onResume(); super.onResume(); } @Override protected void onPause() { mPluginActivity.onPause(); super.onPause(); } @Override protected void onStop() { mPluginActivity.onStop(); super.onStop(); } @Override protected void onDestroy() { mPluginActivity.onDestroy(); super.onDestroy(); } @Override protected void onSaveInstanceState(Bundle outState) { mPluginActivity.onSaveInstanceState(outState); super.onSaveInstanceState(outState); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { mPluginActivity.onRestoreInstanceState(savedInstanceState); super.onRestoreInstanceState(savedInstanceState); } @Override protected void onNewIntent(Intent intent) { mPluginActivity.onNewIntent(intent); super.onNewIntent(intent); } @Override public void onBackPressed() { mPluginActivity.onBackPressed(); super.onBackPressed(); } @Override public boolean onTouchEvent(MotionEvent event) { super.onTouchEvent(event); return mPluginActivity.onTouchEvent(event); } @Override public boolean onKeyUp(int keyCode, KeyEvent event) { super.onKeyUp(keyCode, event); return mPluginActivity.onKeyUp(keyCode, event); } @Override public void onWindowAttributesChanged(WindowManager.LayoutParams params) { mPluginActivity.onWindowAttributesChanged(params); super.onWindowAttributesChanged(params); } @Override public void onWindowFocusChanged(boolean hasFocus) { mPluginActivity.onWindowFocusChanged(hasFocus); super.onWindowFocusChanged(hasFocus); } @Override public boolean onCreateOptionsMenu(Menu menu) { mPluginActivity.onCreateOptionsMenu(menu); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { mPluginActivity.onOptionsItemSelected(item); return super.onOptionsItemSelected(item); } }
1,782
1,025
<filename>helper/github-ci-vars.py
import datetime
import json
import re
import subprocess

envs = subprocess.check_output(['tox', '-l']).decode().rstrip().split('\n')

matrix = []
for env in envs:
    version = re.search(r'^py(?P<major>\d)(?P<minor>\d+)-', env)
    # github "commit" checks will fail even though workflow passes overall.
    # temp remove the optional targets to make github CI work.
    if 'master' in env:
        continue
    matrix.append({
        'toxenv': env,
        'python-version': f'{version.group("major")}.{version.group("minor")}',
        'experimental': bool('master' in env)
    })

print(f"::set-output name=date::{datetime.date.today()}")
print(f"::set-output name=matrix::{json.dumps(matrix)}")
292
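To make the mapping in the helper above concrete, the small sketch below runs the same regex over a couple of made-up tox environment names; the env names are illustrations only.

import re

# Demonstrates how the version regex above maps tox env names (hypothetical names)
# to GitHub Actions python-version strings.
for env in ["py38-django32", "py310-django42"]:
    version = re.search(r'^py(?P<major>\d)(?P<minor>\d+)-', env)
    print(env, "->", f'{version.group("major")}.{version.group("minor")}')
# py38-django32 -> 3.8
# py310-django42 -> 3.10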
1,615
<filename>MLN-iOS/MLN/Classes/MUIKit/Component/UI/CollectionView/MLNUICollectionViewGridLayoutDelegate.h
//
//  MLNUICollectionViewGridLayoutDelegate.h
//  MLNUI
//
//  Created by MoMo on 2019/11/1.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

@protocol MLNUICollectionViewGridLayoutDelegate <NSObject>

- (CGSize)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout *)collectionViewLayout sizeForItemAtIndexPath:(NSIndexPath *)indexPath;

@end

NS_ASSUME_NONNULL_END
187
609
<reponame>stevenybw/thrill
#!/usr/bin/env python
##########################################################################
# run/ec2-setup/submit.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 <NAME> <<EMAIL>>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################

import boto3
import time
import json
import datetime
import sys

with open('config.json') as data_file:
    data = json.load(data_file)

client = boto3.client('ec2')
ec2 = boto3.resource('ec2')

job_id = int(time.time())

response = client.request_spot_instances(SpotPrice=data["SPOT_PRICE"],
                                          InstanceCount=data["COUNT"],
                                          Type=data["TYPE"],
                                          #ValidFrom=datetime.datetime(2015, 10, 11, 18, 10, 00),
                                          ValidUntil=datetime.datetime(2015, 10, 11, 19, 37, 00),
                                          #AvailabilityZoneGroup=data["ZONE"],
                                          LaunchSpecification={
                                              'ImageId': data["AMI_ID"],
                                              'KeyName': data["EC2_KEY_HANDLE"],
                                              'InstanceType': data["INSTANCE_TYPE"],
                                              'SecurityGroups': [data["SECGROUP_HANDLE"]],
                                              'Placement': {'AvailabilityZone': data["ZONE"]}
                                          })

request_ids = []
for request in response['SpotInstanceRequests']:
    request_ids.append(request['SpotInstanceRequestId'])

running_instances = []

loop = True
print "waiting for instances to get fulfilled..."
while loop:
    requests = client.describe_spot_instance_requests(SpotInstanceRequestIds=request_ids)
    for request in requests['SpotInstanceRequests']:
        if request['State'] in ['closed', 'cancelled', 'failed']:
            print request['SpotInstanceRequestId'] + " " + request['State']
            loop = False
            break
            # TODO(ms) ensure running instances are terminated
        if 'InstanceId' in request and request['InstanceId'] not in running_instances:
            running_instances.append(request['InstanceId'])
            print request['InstanceId'] + " running..."
    if len(running_instances) == int(data["COUNT"]):
        print 'all requested instances are fulfilled'
        break
    time.sleep(5)

if loop == False:
    print "unable to fulfill all requested instances... aborting..."
    sys.exit()

# ensure all instances are running
loop = True
while loop:
    loop = False
    response = client.describe_instance_status(InstanceIds=running_instances, IncludeAllInstances=True)
    for status in response['InstanceStatuses']:
        if status['InstanceState']['Name'] != 'running':
            loop = True

print "all instances are running..."

# optionally, attach snapshot as volume to each instance
if data["VOL_SNAPSHOT_ID"]:
    print "attaching volumes..."
    for instance_id in running_instances:
        volume = ec2.create_volume(SnapshotId=data["VOL_SNAPSHOT_ID"],
                                   AvailabilityZone=data["ZONE"],
                                   VolumeType=data["VOLUME_TYPE"])
        while ec2.Volume(volume.id).state != "available":
            time.sleep(1)
        ec2.Instance(instance_id).attach_volume(VolumeId=volume.id, Device=data["DEVICE"])
        print data["VOL_SNAPSHOT_ID"] + " attached to " + instance_id
    print "volumes attached..."

print job_id

##########################################################################
1,562
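The submit script above reads its parameters from a config.json file next to it. The sketch below writes a hypothetical example containing exactly the keys the script accesses; every value is a placeholder (not a real AMI, key pair, or security group).

import json

# Placeholder values only; each field below is read by submit.py above.
example_config = {
    "SPOT_PRICE": "0.10",
    "COUNT": "4",
    "TYPE": "one-time",
    "AMI_ID": "ami-00000000",
    "EC2_KEY_HANDLE": "my-keypair",
    "INSTANCE_TYPE": "m3.large",
    "SECGROUP_HANDLE": "default",
    "ZONE": "us-east-1a",
    "VOL_SNAPSHOT_ID": "",
    "VOLUME_TYPE": "gp2",
    "DEVICE": "/dev/sdf",
}

with open("config.json", "w") as fp:
    json.dump(example_config, fp, indent=4)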
482
package io.cattle.platform.process.common.handler;

import io.cattle.platform.engine.handler.ProcessHandler;

public abstract class AbstractObjectProcessHandler extends AbstractObjectProcessLogic implements ProcessHandler {

}
53
852
<filename>RecoHI/HiTracking/test/runGlobalAndRegit_cfg.py import FWCore.ParameterSet.VarParsing as VarParsing ivars = VarParsing.VarParsing('standard') ivars.register('initialEvent',mult=ivars.multiplicity.singleton,info="for testing") ivars.files = 'file:/mnt/hadoop/cms/store/user/yetkin/MC_Production/Pythia80_HydjetDrum_mix01/RECO/set2_random40000_HydjetDrum_642.root' ivars.output = 'test.root' ivars.maxEvents = -1 ivars.initialEvent = 1 ivars.parseArguments() import FWCore.ParameterSet.Config as cms process = cms.Process('TRACKATTACK') doRegit=True rawORreco=True isEmbedded=True process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) ) ##################################################################################### # Input source ##################################################################################### process.source = cms.Source("PoolSource", duplicateCheckMode = cms.untracked.string("noDuplicateCheck"), fileNames = cms.untracked.vstring( ivars.files )) process.Timing = cms.Service("Timing") # Number of events we want to process, -1 = all events process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(ivars.maxEvents)) ##################################################################################### # Load some general stuff ##################################################################################### process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') process.load('Configuration.StandardSequences.Services_cff') process.load('Configuration.StandardSequences.GeometryExtended_cff') process.load('Configuration.StandardSequences.MagneticField_38T_cff') process.load('Configuration.StandardSequences.RawToDigi_cff') process.load('Configuration.StandardSequences.ReconstructionHeavyIons_cff') process.load('FWCore.MessageService.MessageLogger_cfi') process.load('RecoLocalTracker.SiPixelRecHits.PixelCPEESProducers_cff') # Data Global Tag 44x #process.GlobalTag.globaltag = 'GR_P_V27::All' # MC Global Tag 44x process.GlobalTag.globaltag = 'STARTHI44_V7::All' # load centrality from CmsHi.Analysis2010.CommonFunctions_cff import * overrideCentrality(process) process.HeavyIonGlobalParameters = cms.PSet( centralityVariable = cms.string("HFhits"), nonDefaultGlauberModel = cms.string("Hydjet_2760GeV"), centralitySrc = cms.InputTag("hiCentrality") ) process.hiCentrality.pixelBarrelOnly = False #process.load("RecoHI.HiCentralityAlgos.CentralityFilter_cfi") #process.centralityFilter.selectedBins = [0,1] # EcalSeverityLevel ES Producer process.load("RecoLocalCalo.EcalRecAlgos.EcalSeverityLevelESProducer_cfi") process.load("RecoEcal.EgammaCoreTools.EcalNextToDeadChannelESProducer_cff") ##################################################################################### # Define tree output ##################################################################################### process.TFileService = cms.Service("TFileService", fileName=cms.string(ivars.output)) ##################################################################################### # Additional Reconstruction ##################################################################################### # redo reco or just tracking if rawORreco: process.rechits = cms.Sequence(process.siPixelRecHits * process.siStripMatchedRecHits) process.hiTrackReco = cms.Sequence(process.rechits * process.heavyIonTracking) process.trackRecoAndSelection = cms.Path( #process.centralityFilter* process.hiTrackReco ) else: process.reco_extra = cms.Path( #process.centralityFilter * 
process.RawToDigi * process.reconstructionHeavyIons) # tack on iteative tracking, particle flow and calo-matching #iteerative tracking process.load("RecoHI.HiTracking.hiIterTracking_cff") process.heavyIonTracking *= process.hiIterTracking # Now do more tracking around the jets if doRegit: process.load("RecoHI.HiTracking.hiRegitTracking_cff") process.hiRegitInitialStepSeeds.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") process.hiRegitLowPtTripletStepSeeds.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") process.hiRegitPixelPairStepSeeds.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") process.hiRegitDetachedTripletStepSeeds.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") process.hiRegitMixedTripletStepSeedsA.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") process.hiRegitMixedTripletStepSeedsB.RegionFactoryPSet.RegionPSet.JetSrc = cms.InputTag("iterativeConePu5CaloJets") # merged with the global, iterative tracking process.load("RecoHI.HiTracking.MergeRegit_cff") # now re-run the muons process.regGlobalMuons = process.globalMuons.clone( TrackerCollectionLabel = "hiGeneralAndRegitTracks" ) process.regGlbTrackQual = process.glbTrackQual.clone( InputCollection = "regGlobalMuons", InputLinksCollection = "regGlobalMuons" ) process.regMuons = process.muons.clone() process.regMuons.TrackExtractorPSet.inputTrackCollection = "hiGeneralAndRegitTracks" process.regMuons.globalTrackQualityInputTag = "regGlbTrackQual" process.regMuons.inputCollectionLabels = cms.VInputTag("hiGeneralAndRegitTracks", "regGlobalMuons", "standAloneMuons:UpdatedAtVtx", "tevMuons:firstHit", "tevMuons:picky", "tevMuons:dyt") process.regMuonReco = cms.Sequence( process.regGlobalMuons* process.regGlbTrackQual* process.regMuons ) process.regionalTracking = cms.Path( process.hiRegitTracking * process.hiGeneralAndRegitTracks* process.regMuonReco ) process.load("edwenger.HiTrkEffAnalyzer.HiTPCuts_cff") process.load("SimTracker.TrackAssociatorProducers.trackAssociatorByHits_cfi") process.load("SimTracker.TrackAssociation.trackingParticleRecoTrackAsssociation_cfi") process.load("MitHig.PixelTrackletAnalyzer.trackAnalyzer_cff") process.cutsTPForEff.primaryOnly = False process.cutsTPForFak.ptMin = 0.2 process.cutsTPForEff.ptMin = 0.2 if doRegit: process.anaTrack.trackSrc = 'hiGeneralAndRegitTracks' process.anaTrack.qualityString = "highPurity" else: process.anaTrack.trackSrc = 'hiGeneralTracks' process.anaTrack.qualityString = "highPurity" process.anaTrack.trackPtMin = 0 process.anaTrack.useQuality = False process.anaTrack.doPFMatching = False process.anaTrack.doSimTrack = True process.trackAnalysis = cms.Path( process.cutsTPForEff* process.cutsTPForFak* process.anaTrack ) ##################################################################################### # Edm Output ##################################################################################### #process.out = cms.OutputModule("PoolOutputModule", # fileName = cms.untracked.string("/tmp/mnguyen/output.root") # ) #process.save = cms.EndPath(process.out)
2,679
372
/* * * (c) Copyright 1989 OPEN SOFTWARE FOUNDATION, INC. * (c) Copyright 1989 HEWLETT-PACKARD COMPANY * (c) Copyright 1989 DIGITAL EQUIPMENT CORPORATION * To anyone who acknowledges that this file is provided "AS IS" * without any express or implied warranty: * permission to use, copy, modify, and distribute this * file for any purpose is hereby granted without fee, provided that * the above copyright notices and this notice appears in all source * code copies, and that none of the names of Open Software * Foundation, Inc., Hewlett-Packard Company, or Digital Equipment * Corporation be used in advertising or publicity pertaining to * distribution of the software without specific, written prior * permission. Neither Open Software Foundation, Inc., Hewlett- * Packard Company, nor Digital Equipment Corporation makes any * representations about the suitability of this software for any * purpose. * */ /* */ /* ** ** NAME ** ** perfb.c ** ** FACILITY: ** ** Remote Procedure Call (RPC) ** ** ABSTRACT: ** ** Server manager routines for performance and system execiser auxiliary ** interface. This interface is dynamically registered by the server when ** request by the client through a call to an operation in the "perf" ** interface. ** ** */ #include <perf_c.h> #include <perf_p.h> #include <unistd.h> void print_binding_info(char *text, handle_t h); perfb_v1_0_epv_t perfb_mgr_epv = { perfb_init, perfb_in, perfb_brd, perfb_null, perfb_null_idem }; /***************************************************************************/ void perfb_init ( handle_t h, idl_char *name ) { print_binding_info ("perfb_init", h); gethostname(name, 256); } /***************************************************************************/ void perfb_in ( handle_t h, perf_data_t d, unsigned32 l, idl_boolean verify, unsigned32 *sum ) { print_binding_info ("perfb_in", h); perf_in(h, d, l, verify, sum); } /***************************************************************************/ void perfb_brd ( handle_t h, idl_char *name ) { print_binding_info ("perfb_brd", h); gethostname(name, 256); } /***************************************************************************/ void perfb_null ( handle_t h __attribute__((unused)) ) { } /***************************************************************************/ void perfb_null_idem ( handle_t h __attribute__((unused)) ) { }
966
6,647
/* * Copyright 1999-2019 Seata.io Group. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.seata.spring.boot.autoconfigure; import io.seata.common.loader.EnhancedServiceLoader; import io.seata.config.Configuration; import io.seata.config.ExtConfigurationProvider; import io.seata.config.FileConfiguration; import io.seata.config.springcloud.SpringApplicationContextProvider; import io.seata.spring.boot.autoconfigure.properties.registry.RegistryRedisProperties; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; import static io.seata.spring.boot.autoconfigure.StarterConstants.PROPERTY_BEAN_MAP; import static io.seata.spring.boot.autoconfigure.StarterConstants.REGISTRY_REDIS_PREFIX; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; /** * @author zhangheng **/ @Import(SpringApplicationContextProvider.class) @org.springframework.context.annotation.Configuration public class RedisAutoInjectionTypeConvertTest { private static AnnotationConfigApplicationContext applicationContext; @BeforeAll public static void initContext() { applicationContext = new AnnotationConfigApplicationContext(RedisAutoInjectionTypeConvertTest.class); } @Bean RegistryRedisProperties registryRedisProperties() { RegistryRedisProperties registryRedisProperties = new RegistryRedisProperties().setPassword("<PASSWORD>").setDb(1).setServerAddr("localhost:123456"); PROPERTY_BEAN_MAP.put(REGISTRY_REDIS_PREFIX, RegistryRedisProperties.class); return registryRedisProperties; } @Test public void testReadConfigurationItems() { FileConfiguration configuration = mock(FileConfiguration.class); Configuration currentConfiguration = EnhancedServiceLoader.load(ExtConfigurationProvider.class).provide(configuration); System.setProperty("seata.registry.redis.db","1"); assertEquals(1, currentConfiguration.getInt("registry.redis.db")); System.setProperty("seata.registry.redis.password","<PASSWORD>"); assertEquals("123456", currentConfiguration.getConfig("registry.redis.password")); System.setProperty("seata.registry.redis.serverAddr","localhost:123456"); assertEquals("localhost:123456", currentConfiguration.getConfig("registry.redis.serverAddr")); } @AfterAll public static void closeContext() { applicationContext.close(); } }
1,021
1,018
<reponame>rajdavies/glowroot<gh_stars>1000+ /* * Copyright 2011-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.glowroot.agent.plugin.servlet; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.ning.http.client.AsyncHttpClient; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.glowroot.agent.it.harness.Container; import org.glowroot.agent.it.harness.Containers; import org.glowroot.wire.api.model.TraceOuterClass.Trace; import static org.assertj.core.api.Assertions.assertThat; public class AnnotatedServletIT { private static Container container; @BeforeAll public static void setUp() throws Exception { container = Containers.create(); } @AfterAll public static void tearDown() throws Exception { container.close(); } @AfterEach public void afterEachTest() throws Exception { container.checkAndReset(); } @Test public void testServlet() throws Exception { // when Trace trace = container.execute(InvokeServlet.class, "Web"); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("/hello/5"); // TODO the transaction name should ideally be /hello/*, but taking safe route for now // because servlet could be mapped to another path via web.xml, in future would be good to // get use actual servlet mapping, probably need to instrument tomcat/other web containers // to capture this assertThat(header.getTransactionName()).isEqualTo("/hello/5"); } @Test public void testServletWithContextPath() throws Exception { // when Trace trace = container.execute(InvokeServletWithContextPath.class, "Web"); // then Trace.Header header = trace.getHeader(); assertThat(header.getHeadline()).isEqualTo("/zzz/hello/5"); // TODO the transaction name should ideally be /hello/*, but taking safe route for now // because servlet could be mapped to another path via web.xml, in future would be good to // get use actual servlet mapping, probably need to instrument tomcat/other web containers // to capture this assertThat(header.getTransactionName()).isEqualTo("/zzz/hello/5"); } public static class InvokeServlet extends InvokeServletInTomcat { public InvokeServlet() { super(""); } @Override protected void doTest(int port) throws Exception { AsyncHttpClient asyncHttpClient = new AsyncHttpClient(); int statusCode = asyncHttpClient.prepareGet("http://localhost:" + port + "/hello/5") .execute().get().getStatusCode(); asyncHttpClient.close(); if (statusCode != 200) { throw new IllegalStateException("Unexpected status code: " + statusCode); } } } public static class InvokeServletWithContextPath extends InvokeServletInTomcat { public InvokeServletWithContextPath() { super("/zzz"); } @Override protected void doTest(int port) throws Exception { AsyncHttpClient 
asyncHttpClient = new AsyncHttpClient(); int statusCode = asyncHttpClient.prepareGet("http://localhost:" + port + "/zzz/hello/5") .execute().get().getStatusCode(); asyncHttpClient.close(); if (statusCode != 200) { throw new IllegalStateException("Unexpected status code: " + statusCode); } } } @WebServlet(value = "/hello/*", loadOnStartup = 0) @SuppressWarnings("serial") public static class AnnotatedServlet extends HttpServlet { @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.getWriter().print("hello"); } } }
1,768
3,442
<reponame>wcicola/jitsi /* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.plugin.keybindingchooser.globalchooser; import java.util.*; import javax.swing.table.*; /** * Table model for global shortcuts. * * @author <NAME> */ public class GlobalShortcutTableModel extends AbstractTableModel { /** * Serial version UID. */ private static final long serialVersionUID = 0L; /** * List of shortcuts. */ private List<GlobalShortcutEntry> shortcuts = new ArrayList<GlobalShortcutEntry>(); /** * Returns the title for this column * * @param column the column * * @return the title for this column * * @see javax.swing.table.AbstractTableModel#getColumnName */ @Override public String getColumnName(int column) { switch(column) { case 0: return Resources.getString( "plugin.keybindings.globalchooser.SHORTCUT_NAME"); case 1: return Resources.getString( "plugin.keybindings.globalchooser.SHORTCUT_PRIMARY"); case 2: return Resources.getString( "plugin.keybindings.globalchooser.SHORTCUT_SECOND"); default: throw new IllegalArgumentException("column not found"); } } /** * Returns the number of rows in the table * * @return the number of rows in the table * @see javax.swing.table.AbstractTableModel#getRowCount */ public int getRowCount() { return shortcuts.size(); } /** * Returns the number of column in the table * * @return the number of columns in the table * * @see javax.swing.table.AbstractTableModel#getColumnCount */ public int getColumnCount() { // 3 columns: "name", "primary shortcut", "second shortcut" return 3; } /** * Returns the text for the given cell of the table * * @param row cell row * @param column cell column * @return object at the row/column * @see javax.swing.table.AbstractTableModel#getValueAt */ public Object getValueAt(int row, int column) { switch(column) { case 0: return getEntryAt(row).getAction(); case 1: return getEntryAt(row).getEditShortcut1() ? "Press key" : GlobalShortcutEntry.getShortcutText( getEntryAt(row).getShortcut()); case 2: return getEntryAt(row).getEditShortcut2() ? "Press key" : GlobalShortcutEntry.getShortcutText( getEntryAt(row).getShortcut2()); default: throw new IllegalArgumentException("column not found"); } } /** * Returns the LdapDirectory at the row 'row' * * @param row the row on which to find the LdapDirectory * * @return the LdapDirectory found */ public GlobalShortcutEntry getEntryAt(int row) { int i = 0; for(GlobalShortcutEntry entry : shortcuts) { if(i == row) return entry; i++; } throw new IllegalArgumentException("row not found"); } /** * Returns whether a cell is editable. * @param row row of the cell * @param col column of the cell * * @return whether the cell is editable */ @Override public boolean isCellEditable(int row, int col) { return false; } /** * Overrides a method that always returned Object.class * Now it will return Boolean.class for the first method, * letting the DefaultTableCellRenderer create checkboxes. 
* * @param columnIndex index of the column * @return Column class */ @Override public Class<?> getColumnClass(int columnIndex) { Object o = getValueAt(0, columnIndex); if(o == null) return String.class; return o.getClass(); } /** * Sets a value in an editable cell. * * @param aValue value to set * @param rowIndex row index * @param columnIndex column index */ @Override public void setValueAt(Object aValue, int rowIndex, int columnIndex) { if(columnIndex != 0) throw new IllegalArgumentException("non editable column!"); } /** * Adds an entry. * * @param entry entry to add */ public void addEntry(GlobalShortcutEntry entry) { shortcuts.add(entry); } /** * Removes an entry. * * @param entry entry to remove */ public void removeEntry(GlobalShortcutEntry entry) { shortcuts.remove(entry); } /** * Returns all shortcuts. * * @return all shortcuts. */ public List<GlobalShortcutEntry> getEntries() { return shortcuts; } }
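For illustration only, a minimal Swing sketch of how a table model like the one above is typically put on screen. It assumes it runs inside the Jitsi plugin environment (the column titles above are resolved through the plugin's Resources service), and it leaves the model empty because the GlobalShortcutEntry constructor is not shown here; entries would normally be added with addEntry(...) before display.

import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.WindowConstants;

import net.java.sip.communicator.plugin.keybindingchooser.globalchooser.GlobalShortcutTableModel;

public class ShortcutTableDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            // The model drives the three columns (action name, primary and secondary shortcut).
            // Entries are omitted here; see the note above.
            GlobalShortcutTableModel model = new GlobalShortcutTableModel();

            JTable table = new JTable(model);
            table.setFillsViewportHeight(true);

            JFrame frame = new JFrame("Global shortcuts");
            frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
            frame.add(new JScrollPane(table));
            frame.pack();
            frame.setVisible(true);
        });
    }
}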
2,361
521
<gh_stars>100-1000 /* getx entrypoint, default to console mode */ #include "fb.h" /*:::::*/ FBCALL int fb_GetX( void ) { int res; FB_LOCK(); if( __fb_ctx.hooks.getxproc ) res = __fb_ctx.hooks.getxproc( ); else res = fb_ConsoleGetX( ); FB_UNLOCK(); return res; }
126
11,719
// Copyright (C) 2004 <NAME> (<EMAIL>) // License: Boost Software License See LICENSE.txt for the full license. #ifndef DLIB_ENTROPY_DECODER_KERNEl_2_ #define DLIB_ENTROPY_DECODER_KERNEl_2_ #include "../algs.h" #include "entropy_decoder_kernel_abstract.h" #include <iosfwd> #include "../uintn.h" namespace dlib { class entropy_decoder_kernel_2 { /*! GENERAL NOTES this decoder is implemented using "range" coding INITIAL VALUE in == 0 initial_low == 0x00000001 (slightly more than zero) initial_high == 0xffffffff (slightly less than one, 0.99999999976717) target == 0x00000000 (zero) low == initial_low high == initial_high r == 0 CONVENTION if (in != 0) *in == get_stream() true == stream_is_set() streambuf == in->rdbuf() else false == stream_is_set() low == the low end of the range used for arithmetic encoding. this number is used as a 32bit fixed point real number. the point is fixed just before the first bit, so it is always in the range [0,1) low is also never allowed to be zero to avoid overflow in the calculation (high-low+1)/total. high == the high end of the range - 1 used for arithmetic encoding. this number is used as a 32bit fixed point real number. the point is fixed just before the first bit, so when we interpret high as a real number then it is always in the range [0,1) the range for arithmetic encoding is always [low,high + 0.9999999...) the 0.9999999... is why high == real upper range - 1 target == 32 bits of the fraction produced from an arithmetic encoder. this number is used as a 32bit fixed point real number. the point is fixed just before the first bit, so it is always in the range [0,1) r == the value (high-low+1)/total from the last call to get_target() or 0 if get_target_called() should be false get_target_called() == (r != 0) !*/ public: entropy_decoder_kernel_2 ( ); virtual ~entropy_decoder_kernel_2 ( ); void clear( ); void set_stream ( std::istream& in ); bool stream_is_set ( ) const; std::istream& get_stream ( ) const; void decode ( uint32 low_count, uint32 high_count ); bool get_target_called ( ) const; uint32 get_target ( uint32 total ); private: // restricted functions entropy_decoder_kernel_2(entropy_decoder_kernel_2&); // copy constructor entropy_decoder_kernel_2& operator=(entropy_decoder_kernel_2&); // assignment operator // data members const uint32 initial_low; const uint32 initial_high; std::istream* in; uint32 low; uint32 high; uint32 target; uint32 r; std::streambuf* streambuf; }; } #ifdef NO_MAKEFILE #include "entropy_decoder_kernel_2.cpp" #endif #endif // DLIB_ENTROPY_DECODER_KERNEl_2_
2,005
3,897
/* mbed Microcontroller Library * Copyright (c) 2020 ARM Limited * SPDX-License-Identifier: Apache-2.0 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef SECURITYMANAGERMOCK_H #define SECURITYMANAGERMOCK_H #include "gmock/gmock.h" #include "source/generic/SecurityManagerImpl.h" namespace ble { class SecurityManagerMock : public ble::impl::SecurityManager { public: SecurityManagerMock() {}; SecurityManagerMock(const GattServerMock&) = delete; SecurityManagerMock& operator=(const GattServerMock&) = delete; virtual ~SecurityManagerMock() {}; MOCK_METHOD(ble_error_t, reset, (), (override)); MOCK_METHOD(ble_error_t, init, (bool enableBonding, bool requireMITM, SecurityIOCapabilities_t iocaps, const Passkey_t passkey, bool signing, const char *dbFilepath), (override)); MOCK_METHOD(ble_error_t, setDatabaseFilepath, (const char *dbFilepath), (override)); MOCK_METHOD(ble_error_t, preserveBondingStateOnReset, (bool enable), (override)); MOCK_METHOD(ble_error_t, writeBondingStateToPersistentStorage, (), (override)); MOCK_METHOD(ble_error_t, purgeAllBondingState, (), (override)); MOCK_METHOD(ble_error_t, generateWhitelistFromBondTable, (::ble::whitelist_t *whitelist), (const, override)); MOCK_METHOD(ble_error_t, requestPairing, (ble::connection_handle_t connectionHandle), (override)); MOCK_METHOD(ble_error_t, acceptPairingRequest, (ble::connection_handle_t connectionHandle), (override)); MOCK_METHOD(ble_error_t, cancelPairingRequest, (ble::connection_handle_t connectionHandle), (override)); MOCK_METHOD(ble_error_t, setPairingRequestAuthorisation, (bool required), (override)); MOCK_METHOD(ble_error_t, getPeerIdentity, (ble::connection_handle_t connectionHandle), (override)); MOCK_METHOD(ble_error_t, allowLegacyPairing, (bool allow), (override)); MOCK_METHOD(ble_error_t, getSecureConnectionsSupport, (bool *enabled), (override)); MOCK_METHOD(ble_error_t, setIoCapability, (SecurityIOCapabilities_t iocaps), (override)); MOCK_METHOD(ble_error_t, setDisplayPasskey, (const Passkey_t passkey), (override)); MOCK_METHOD(ble_error_t, setLinkSecurity, (ble::connection_handle_t connectionHandle, SecurityMode_t securityMode), (override)); MOCK_METHOD(ble_error_t, setKeypressNotification, (bool enabled), (override)); MOCK_METHOD(ble_error_t, enableSigning, (ble::connection_handle_t connectionHandle, bool enabled), (override)); MOCK_METHOD(ble_error_t, setHintFutureRoleReversal, (bool enable), (override)); MOCK_METHOD(ble_error_t, getLinkEncryption, (ble::connection_handle_t connectionHandle, ble::link_encryption_t *encryption), (override)); MOCK_METHOD(ble_error_t, setLinkEncryption, (ble::connection_handle_t connectionHandle, ble::link_encryption_t encryption), (override)); MOCK_METHOD(ble_error_t, setEncryptionKeyRequirements, (uint8_t minimumByteSize, uint8_t maximumByteSize), (override)); MOCK_METHOD(ble_error_t, getEncryptionKeySize, (connection_handle_t connectionHandle, uint8_t *size), (override)); MOCK_METHOD(ble_error_t, requestAuthentication, (ble::connection_handle_t connectionHandle), (override)); 
MOCK_METHOD(ble_error_t, generateOOB, (const ble::address_t *address), (override)); MOCK_METHOD(ble_error_t, setOOBDataUsage, (ble::connection_handle_t connectionHandle, bool useOOB, bool OOBProvidesMITM), (override)); MOCK_METHOD(ble_error_t, passkeyEntered, (ble::connection_handle_t connectionHandle, Passkey_t passkey), (override)); MOCK_METHOD(ble_error_t, legacyPairingOobReceived, (const ble::address_t *address, const ble::oob_tk_t *tk), (override)); MOCK_METHOD(ble_error_t, confirmationEntered, (ble::connection_handle_t connectionHandle, bool confirmation), (override)); MOCK_METHOD(ble_error_t, sendKeypressNotification, (ble::connection_handle_t connectionHandle, ble::Keypress_t keypress), (override)); MOCK_METHOD(ble_error_t, oobReceived, (const ble::address_t *address, const ble::oob_lesc_value_t *random, const ble::oob_confirm_t *confirm), (override)); MOCK_METHOD(ble_error_t, getSigningKey, (ble::connection_handle_t connectionHandle, bool authenticated), (override)); MOCK_METHOD(ble_error_t, setPrivateAddressTimeout, (uint16_t timeout_in_seconds), (override)); MOCK_METHOD(void, onShutdown, (const SecurityManagerShutdownCallback_t &callback), (override)); MOCK_METHOD(SecurityManagerShutdownCallbackChain_t&, onShutdown, (), (override)); MOCK_METHOD(void, setSecurityManagerEventHandler, (EventHandler *handler), (override)); }; } #endif //SECURITYMANAGERMOCK_H
1,715
2,151
<gh_stars>1000+ //===----------------------- catch_function_01.cpp ------------------------===// // // The LLVM Compiler Infrastructure // // This file is dual licensed under the MIT and the University of Illinois Open // Source Licenses. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // Can you have a catch clause of function type that catches anything? #include <cassert> void f() {} int main() { typedef void Function(); try { throw f; // converts to void (*)() assert(false); } catch (Function& b) // can't catch void (*)() { assert(false); } catch (...) { } }
251
3,372
<reponame>rbalamohan/aws-sdk-java /* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.apigatewayv2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Creates a new Api resource to represent an API. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateApiRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. * </p> */ private String apiKeySelectionExpression; /** * <p> * A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring CORS</a> for * more information. * </p> */ private Cors corsConfiguration; /** * <p> * This property is part of quick create. It specifies the credentials required for the integration, if any. For a * Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, use the role's * Amazon Resource Name (ARN). To require that the caller's identity be passed through from the request, specify * arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, specify null. Currently, this * property is not used for HTTP integrations. Supported only for HTTP APIs. * </p> */ private String credentialsArn; /** * <p> * The description of the API. * </p> */ private String description; /** * <p> * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * </p> */ private Boolean disableSchemaValidation; /** * <p> * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, clients can * invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that * clients use a custom domain name to invoke your API, disable the default endpoint. * </p> */ private Boolean disableExecuteApiEndpoint; /** * <p> * The name of the API. * </p> */ private String name; /** * <p> * The API protocol. * </p> */ private String protocolType; /** * <p> * This property is part of quick create. If you don't specify a routeKey, a default route of $default is created. * The $default route acts as a catch-all for any request made to your API, for a particular stage. The $default * route key can't be modified. You can add routes after creating the API, and you can update the route keys of * additional routes. Supported only for HTTP APIs. * </p> */ private String routeKey; /** * <p> * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be ${request.method} * ${request.path}. 
If not provided, this will be the default for HTTP APIs. This property is required for WebSocket * APIs. * </p> */ private String routeSelectionExpression; /** * <p> * The collection of tags. Each tag element is associated with a given resource. * </p> */ private java.util.Map<String, String> tags; /** * <p> * This property is part of quick create. Quick create produces an API with an integration, a default catch-all * route, and a default stage which is configured to automatically deploy changes. For HTTP integrations, specify a * fully qualified URL. For Lambda integrations, specify a function ARN. The type of the integration will be * HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. * </p> */ private String target; /** * <p> * A version identifier for the API. * </p> */ private String version; /** * <p> * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. * </p> * * @param apiKeySelectionExpression * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. */ public void setApiKeySelectionExpression(String apiKeySelectionExpression) { this.apiKeySelectionExpression = apiKeySelectionExpression; } /** * <p> * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. * </p> * * @return An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. */ public String getApiKeySelectionExpression() { return this.apiKeySelectionExpression; } /** * <p> * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. * </p> * * @param apiKeySelectionExpression * An API key selection expression. Supported only for WebSocket APIs. See <a href= * "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions" * >API Key Selection Expressions</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withApiKeySelectionExpression(String apiKeySelectionExpression) { setApiKeySelectionExpression(apiKeySelectionExpression); return this; } /** * <p> * A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring CORS</a> for * more information. * </p> * * @param corsConfiguration * A CORS configuration. Supported only for HTTP APIs. 
See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring * CORS</a> for more information. */ public void setCorsConfiguration(Cors corsConfiguration) { this.corsConfiguration = corsConfiguration; } /** * <p> * A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring CORS</a> for * more information. * </p> * * @return A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring * CORS</a> for more information. */ public Cors getCorsConfiguration() { return this.corsConfiguration; } /** * <p> * A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring CORS</a> for * more information. * </p> * * @param corsConfiguration * A CORS configuration. Supported only for HTTP APIs. See <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html">Configuring * CORS</a> for more information. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withCorsConfiguration(Cors corsConfiguration) { setCorsConfiguration(corsConfiguration); return this; } /** * <p> * This property is part of quick create. It specifies the credentials required for the integration, if any. For a * Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, use the role's * Amazon Resource Name (ARN). To require that the caller's identity be passed through from the request, specify * arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, specify null. Currently, this * property is not used for HTTP integrations. Supported only for HTTP APIs. * </p> * * @param credentialsArn * This property is part of quick create. It specifies the credentials required for the integration, if any. * For a Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, * use the role's Amazon Resource Name (ARN). To require that the caller's identity be passed through from * the request, specify arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, * specify null. Currently, this property is not used for HTTP integrations. Supported only for HTTP APIs. */ public void setCredentialsArn(String credentialsArn) { this.credentialsArn = credentialsArn; } /** * <p> * This property is part of quick create. It specifies the credentials required for the integration, if any. For a * Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, use the role's * Amazon Resource Name (ARN). To require that the caller's identity be passed through from the request, specify * arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, specify null. Currently, this * property is not used for HTTP integrations. Supported only for HTTP APIs. * </p> * * @return This property is part of quick create. It specifies the credentials required for the integration, if any. * For a Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, * use the role's Amazon Resource Name (ARN). To require that the caller's identity be passed through from * the request, specify arn:aws:iam::*:user/*. 
To use resource-based permissions on supported AWS services, * specify null. Currently, this property is not used for HTTP integrations. Supported only for HTTP APIs. */ public String getCredentialsArn() { return this.credentialsArn; } /** * <p> * This property is part of quick create. It specifies the credentials required for the integration, if any. For a * Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, use the role's * Amazon Resource Name (ARN). To require that the caller's identity be passed through from the request, specify * arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, specify null. Currently, this * property is not used for HTTP integrations. Supported only for HTTP APIs. * </p> * * @param credentialsArn * This property is part of quick create. It specifies the credentials required for the integration, if any. * For a Lambda integration, three options are available. To specify an IAM Role for API Gateway to assume, * use the role's Amazon Resource Name (ARN). To require that the caller's identity be passed through from * the request, specify arn:aws:iam::*:user/*. To use resource-based permissions on supported AWS services, * specify null. Currently, this property is not used for HTTP integrations. Supported only for HTTP APIs. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withCredentialsArn(String credentialsArn) { setCredentialsArn(credentialsArn); return this; } /** * <p> * The description of the API. * </p> * * @param description * The description of the API. */ public void setDescription(String description) { this.description = description; } /** * <p> * The description of the API. * </p> * * @return The description of the API. */ public String getDescription() { return this.description; } /** * <p> * The description of the API. * </p> * * @param description * The description of the API. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withDescription(String description) { setDescription(description); return this; } /** * <p> * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * </p> * * @param disableSchemaValidation * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. */ public void setDisableSchemaValidation(Boolean disableSchemaValidation) { this.disableSchemaValidation = disableSchemaValidation; } /** * <p> * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * </p> * * @return Avoid validating models when creating a deployment. Supported only for WebSocket APIs. */ public Boolean getDisableSchemaValidation() { return this.disableSchemaValidation; } /** * <p> * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * </p> * * @param disableSchemaValidation * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withDisableSchemaValidation(Boolean disableSchemaValidation) { setDisableSchemaValidation(disableSchemaValidation); return this; } /** * <p> * Avoid validating models when creating a deployment. Supported only for WebSocket APIs. * </p> * * @return Avoid validating models when creating a deployment. Supported only for WebSocket APIs. 
*/ public Boolean isDisableSchemaValidation() { return this.disableSchemaValidation; } /** * <p> * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, clients can * invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that * clients use a custom domain name to invoke your API, disable the default endpoint. * </p> * * @param disableExecuteApiEndpoint * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, * clients can invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. * To require that clients use a custom domain name to invoke your API, disable the default endpoint. */ public void setDisableExecuteApiEndpoint(Boolean disableExecuteApiEndpoint) { this.disableExecuteApiEndpoint = disableExecuteApiEndpoint; } /** * <p> * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, clients can * invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that * clients use a custom domain name to invoke your API, disable the default endpoint. * </p> * * @return Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, * clients can invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com * endpoint. To require that clients use a custom domain name to invoke your API, disable the default * endpoint. */ public Boolean getDisableExecuteApiEndpoint() { return this.disableExecuteApiEndpoint; } /** * <p> * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, clients can * invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that * clients use a custom domain name to invoke your API, disable the default endpoint. * </p> * * @param disableExecuteApiEndpoint * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, * clients can invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. * To require that clients use a custom domain name to invoke your API, disable the default endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withDisableExecuteApiEndpoint(Boolean disableExecuteApiEndpoint) { setDisableExecuteApiEndpoint(disableExecuteApiEndpoint); return this; } /** * <p> * Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, clients can * invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that * clients use a custom domain name to invoke your API, disable the default endpoint. * </p> * * @return Specifies whether clients can invoke your API by using the default execute-api endpoint. By default, * clients can invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com * endpoint. To require that clients use a custom domain name to invoke your API, disable the default * endpoint. */ public Boolean isDisableExecuteApiEndpoint() { return this.disableExecuteApiEndpoint; } /** * <p> * The name of the API. * </p> * * @param name * The name of the API. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the API. * </p> * * @return The name of the API. 
*/ public String getName() { return this.name; } /** * <p> * The name of the API. * </p> * * @param name * The name of the API. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withName(String name) { setName(name); return this; } /** * <p> * The API protocol. * </p> * * @param protocolType * The API protocol. * @see ProtocolType */ public void setProtocolType(String protocolType) { this.protocolType = protocolType; } /** * <p> * The API protocol. * </p> * * @return The API protocol. * @see ProtocolType */ public String getProtocolType() { return this.protocolType; } /** * <p> * The API protocol. * </p> * * @param protocolType * The API protocol. * @return Returns a reference to this object so that method calls can be chained together. * @see ProtocolType */ public CreateApiRequest withProtocolType(String protocolType) { setProtocolType(protocolType); return this; } /** * <p> * The API protocol. * </p> * * @param protocolType * The API protocol. * @return Returns a reference to this object so that method calls can be chained together. * @see ProtocolType */ public CreateApiRequest withProtocolType(ProtocolType protocolType) { this.protocolType = protocolType.toString(); return this; } /** * <p> * This property is part of quick create. If you don't specify a routeKey, a default route of $default is created. * The $default route acts as a catch-all for any request made to your API, for a particular stage. The $default * route key can't be modified. You can add routes after creating the API, and you can update the route keys of * additional routes. Supported only for HTTP APIs. * </p> * * @param routeKey * This property is part of quick create. If you don't specify a routeKey, a default route of $default is * created. The $default route acts as a catch-all for any request made to your API, for a particular stage. * The $default route key can't be modified. You can add routes after creating the API, and you can update * the route keys of additional routes. Supported only for HTTP APIs. */ public void setRouteKey(String routeKey) { this.routeKey = routeKey; } /** * <p> * This property is part of quick create. If you don't specify a routeKey, a default route of $default is created. * The $default route acts as a catch-all for any request made to your API, for a particular stage. The $default * route key can't be modified. You can add routes after creating the API, and you can update the route keys of * additional routes. Supported only for HTTP APIs. * </p> * * @return This property is part of quick create. If you don't specify a routeKey, a default route of $default is * created. The $default route acts as a catch-all for any request made to your API, for a particular stage. * The $default route key can't be modified. You can add routes after creating the API, and you can update * the route keys of additional routes. Supported only for HTTP APIs. */ public String getRouteKey() { return this.routeKey; } /** * <p> * This property is part of quick create. If you don't specify a routeKey, a default route of $default is created. * The $default route acts as a catch-all for any request made to your API, for a particular stage. The $default * route key can't be modified. You can add routes after creating the API, and you can update the route keys of * additional routes. Supported only for HTTP APIs. * </p> * * @param routeKey * This property is part of quick create. 
If you don't specify a routeKey, a default route of $default is * created. The $default route acts as a catch-all for any request made to your API, for a particular stage. * The $default route key can't be modified. You can add routes after creating the API, and you can update * the route keys of additional routes. Supported only for HTTP APIs. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withRouteKey(String routeKey) { setRouteKey(routeKey); return this; } /** * <p> * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be ${request.method} * ${request.path}. If not provided, this will be the default for HTTP APIs. This property is required for WebSocket * APIs. * </p> * * @param routeSelectionExpression * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be * ${request.method} ${request.path}. If not provided, this will be the default for HTTP APIs. This property * is required for WebSocket APIs. */ public void setRouteSelectionExpression(String routeSelectionExpression) { this.routeSelectionExpression = routeSelectionExpression; } /** * <p> * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be ${request.method} * ${request.path}. If not provided, this will be the default for HTTP APIs. This property is required for WebSocket * APIs. * </p> * * @return The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be * ${request.method} ${request.path}. If not provided, this will be the default for HTTP APIs. This property * is required for WebSocket APIs. */ public String getRouteSelectionExpression() { return this.routeSelectionExpression; } /** * <p> * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be ${request.method} * ${request.path}. If not provided, this will be the default for HTTP APIs. This property is required for WebSocket * APIs. * </p> * * @param routeSelectionExpression * The route selection expression for the API. For HTTP APIs, the routeSelectionExpression must be * ${request.method} ${request.path}. If not provided, this will be the default for HTTP APIs. This property * is required for WebSocket APIs. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withRouteSelectionExpression(String routeSelectionExpression) { setRouteSelectionExpression(routeSelectionExpression); return this; } /** * <p> * The collection of tags. Each tag element is associated with a given resource. * </p> * * @return The collection of tags. Each tag element is associated with a given resource. */ public java.util.Map<String, String> getTags() { return tags; } /** * <p> * The collection of tags. Each tag element is associated with a given resource. * </p> * * @param tags * The collection of tags. Each tag element is associated with a given resource. */ public void setTags(java.util.Map<String, String> tags) { this.tags = tags; } /** * <p> * The collection of tags. Each tag element is associated with a given resource. * </p> * * @param tags * The collection of tags. Each tag element is associated with a given resource. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CreateApiRequest withTags(java.util.Map<String, String> tags) { setTags(tags); return this; } /** * Add a single Tags entry * * @see CreateApiRequest#withTags * @returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest addTagsEntry(String key, String value) { if (null == this.tags) { this.tags = new java.util.HashMap<String, String>(); } if (this.tags.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.tags.put(key, value); return this; } /** * Removes all the entries added into Tags. * * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest clearTagsEntries() { this.tags = null; return this; } /** * <p> * This property is part of quick create. Quick create produces an API with an integration, a default catch-all * route, and a default stage which is configured to automatically deploy changes. For HTTP integrations, specify a * fully qualified URL. For Lambda integrations, specify a function ARN. The type of the integration will be * HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. * </p> * * @param target * This property is part of quick create. Quick create produces an API with an integration, a default * catch-all route, and a default stage which is configured to automatically deploy changes. For HTTP * integrations, specify a fully qualified URL. For Lambda integrations, specify a function ARN. The type of * the integration will be HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. */ public void setTarget(String target) { this.target = target; } /** * <p> * This property is part of quick create. Quick create produces an API with an integration, a default catch-all * route, and a default stage which is configured to automatically deploy changes. For HTTP integrations, specify a * fully qualified URL. For Lambda integrations, specify a function ARN. The type of the integration will be * HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. * </p> * * @return This property is part of quick create. Quick create produces an API with an integration, a default * catch-all route, and a default stage which is configured to automatically deploy changes. For HTTP * integrations, specify a fully qualified URL. For Lambda integrations, specify a function ARN. The type of * the integration will be HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. */ public String getTarget() { return this.target; } /** * <p> * This property is part of quick create. Quick create produces an API with an integration, a default catch-all * route, and a default stage which is configured to automatically deploy changes. For HTTP integrations, specify a * fully qualified URL. For Lambda integrations, specify a function ARN. The type of the integration will be * HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. * </p> * * @param target * This property is part of quick create. Quick create produces an API with an integration, a default * catch-all route, and a default stage which is configured to automatically deploy changes. For HTTP * integrations, specify a fully qualified URL. For Lambda integrations, specify a function ARN. The type of * the integration will be HTTP_PROXY or AWS_PROXY, respectively. Supported only for HTTP APIs. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CreateApiRequest withTarget(String target) { setTarget(target); return this; } /** * <p> * A version identifier for the API. * </p> * * @param version * A version identifier for the API. */ public void setVersion(String version) { this.version = version; } /** * <p> * A version identifier for the API. * </p> * * @return A version identifier for the API. */ public String getVersion() { return this.version; } /** * <p> * A version identifier for the API. * </p> * * @param version * A version identifier for the API. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateApiRequest withVersion(String version) { setVersion(version); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getApiKeySelectionExpression() != null) sb.append("ApiKeySelectionExpression: ").append(getApiKeySelectionExpression()).append(","); if (getCorsConfiguration() != null) sb.append("CorsConfiguration: ").append(getCorsConfiguration()).append(","); if (getCredentialsArn() != null) sb.append("CredentialsArn: ").append(getCredentialsArn()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getDisableSchemaValidation() != null) sb.append("DisableSchemaValidation: ").append(getDisableSchemaValidation()).append(","); if (getDisableExecuteApiEndpoint() != null) sb.append("DisableExecuteApiEndpoint: ").append(getDisableExecuteApiEndpoint()).append(","); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getProtocolType() != null) sb.append("ProtocolType: ").append(getProtocolType()).append(","); if (getRouteKey() != null) sb.append("RouteKey: ").append(getRouteKey()).append(","); if (getRouteSelectionExpression() != null) sb.append("RouteSelectionExpression: ").append(getRouteSelectionExpression()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()).append(","); if (getTarget() != null) sb.append("Target: ").append(getTarget()).append(","); if (getVersion() != null) sb.append("Version: ").append(getVersion()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateApiRequest == false) return false; CreateApiRequest other = (CreateApiRequest) obj; if (other.getApiKeySelectionExpression() == null ^ this.getApiKeySelectionExpression() == null) return false; if (other.getApiKeySelectionExpression() != null && other.getApiKeySelectionExpression().equals(this.getApiKeySelectionExpression()) == false) return false; if (other.getCorsConfiguration() == null ^ this.getCorsConfiguration() == null) return false; if (other.getCorsConfiguration() != null && other.getCorsConfiguration().equals(this.getCorsConfiguration()) == false) return false; if (other.getCredentialsArn() == null ^ this.getCredentialsArn() == null) return false; if (other.getCredentialsArn() != null && other.getCredentialsArn().equals(this.getCredentialsArn()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) 
return false; if (other.getDisableSchemaValidation() == null ^ this.getDisableSchemaValidation() == null) return false; if (other.getDisableSchemaValidation() != null && other.getDisableSchemaValidation().equals(this.getDisableSchemaValidation()) == false) return false; if (other.getDisableExecuteApiEndpoint() == null ^ this.getDisableExecuteApiEndpoint() == null) return false; if (other.getDisableExecuteApiEndpoint() != null && other.getDisableExecuteApiEndpoint().equals(this.getDisableExecuteApiEndpoint()) == false) return false; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getProtocolType() == null ^ this.getProtocolType() == null) return false; if (other.getProtocolType() != null && other.getProtocolType().equals(this.getProtocolType()) == false) return false; if (other.getRouteKey() == null ^ this.getRouteKey() == null) return false; if (other.getRouteKey() != null && other.getRouteKey().equals(this.getRouteKey()) == false) return false; if (other.getRouteSelectionExpression() == null ^ this.getRouteSelectionExpression() == null) return false; if (other.getRouteSelectionExpression() != null && other.getRouteSelectionExpression().equals(this.getRouteSelectionExpression()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; if (other.getTarget() == null ^ this.getTarget() == null) return false; if (other.getTarget() != null && other.getTarget().equals(this.getTarget()) == false) return false; if (other.getVersion() == null ^ this.getVersion() == null) return false; if (other.getVersion() != null && other.getVersion().equals(this.getVersion()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getApiKeySelectionExpression() == null) ? 0 : getApiKeySelectionExpression().hashCode()); hashCode = prime * hashCode + ((getCorsConfiguration() == null) ? 0 : getCorsConfiguration().hashCode()); hashCode = prime * hashCode + ((getCredentialsArn() == null) ? 0 : getCredentialsArn().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getDisableSchemaValidation() == null) ? 0 : getDisableSchemaValidation().hashCode()); hashCode = prime * hashCode + ((getDisableExecuteApiEndpoint() == null) ? 0 : getDisableExecuteApiEndpoint().hashCode()); hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getProtocolType() == null) ? 0 : getProtocolType().hashCode()); hashCode = prime * hashCode + ((getRouteKey() == null) ? 0 : getRouteKey().hashCode()); hashCode = prime * hashCode + ((getRouteSelectionExpression() == null) ? 0 : getRouteSelectionExpression().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); hashCode = prime * hashCode + ((getTarget() == null) ? 0 : getTarget().hashCode()); hashCode = prime * hashCode + ((getVersion() == null) ? 0 : getVersion().hashCode()); return hashCode; } @Override public CreateApiRequest clone() { return (CreateApiRequest) super.clone(); } }
14,383
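A minimal Python sketch of the chaining pattern the generated request class above relies on: the with*/add*Entry methods mutate the object and return it so calls can be chained, and addTagsEntry rejects duplicate keys. Class and method names here are illustrative only, not part of the AWS SDK.

class CreateApiRequestSketch:
    """Illustrative only; mirrors the withTags/addTagsEntry/clearTagsEntries pattern."""

    def __init__(self):
        self.tags = None

    def with_tags(self, tags):
        self.tags = dict(tags)
        return self                      # returning self allows method chaining

    def add_tags_entry(self, key, value):
        if self.tags is None:
            self.tags = {}
        if key in self.tags:
            raise ValueError("Duplicated keys (%s) are provided." % key)
        self.tags[key] = value
        return self

    def clear_tags_entries(self):
        self.tags = None
        return self

# chained usage, as the Java withers allow:
req = CreateApiRequestSketch().add_tags_entry("env", "prod").add_tags_entry("team", "api")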
2,268
//========= Copyright Valve Corporation, All rights reserved. ============// // // Purpose: // //===========================================================================// #ifndef ICVAR_H #define ICVAR_H #ifdef _WIN32 #pragma once #endif #include "appframework/IAppSystem.h" #include "tier1/iconvar.h" class ConCommandBase; class ConCommand; class ConVar; class Color; //----------------------------------------------------------------------------- // ConVars/ComCommands are marked as having a particular DLL identifier //----------------------------------------------------------------------------- typedef int CVarDLLIdentifier_t; //----------------------------------------------------------------------------- // Used to display console messages //----------------------------------------------------------------------------- abstract_class IConsoleDisplayFunc { public: virtual void ColorPrint( const Color& clr, const char *pMessage ) = 0; virtual void Print( const char *pMessage ) = 0; virtual void DPrint( const char *pMessage ) = 0; }; //----------------------------------------------------------------------------- // Purpose: Applications can implement this to modify behavior in ICvar //----------------------------------------------------------------------------- #define CVAR_QUERY_INTERFACE_VERSION "VCvarQuery001" abstract_class ICvarQuery : public IAppSystem { public: // Can these two convars be aliased? virtual bool AreConVarsLinkable( const ConVar *child, const ConVar *parent ) = 0; }; //----------------------------------------------------------------------------- // Purpose: DLL interface to ConVars/ConCommands //----------------------------------------------------------------------------- abstract_class ICvar : public IAppSystem { public: // Allocate a unique DLL identifier virtual CVarDLLIdentifier_t AllocateDLLIdentifier() = 0; // Register, unregister commands virtual void RegisterConCommand( ConCommandBase *pCommandBase ) = 0; virtual void UnregisterConCommand( ConCommandBase *pCommandBase ) = 0; virtual void UnregisterConCommands( CVarDLLIdentifier_t id ) = 0; // If there is a +<varname> <value> on the command line, this returns the value. // Otherwise, it returns NULL. virtual const char* GetCommandLineValue( const char *pVariableName ) = 0; // Try to find the cvar pointer by name virtual ConCommandBase *FindCommandBase( const char *name ) = 0; virtual const ConCommandBase *FindCommandBase( const char *name ) const = 0; virtual ConVar *FindVar ( const char *var_name ) = 0; virtual const ConVar *FindVar ( const char *var_name ) const = 0; virtual ConCommand *FindCommand( const char *name ) = 0; virtual const ConCommand *FindCommand( const char *name ) const = 0; // Get first ConCommandBase to allow iteration virtual ConCommandBase *GetCommands( void ) = 0; virtual const ConCommandBase *GetCommands( void ) const = 0; // Install a global change callback (to be called when any convar changes) virtual void InstallGlobalChangeCallback( FnChangeCallback_t callback ) = 0; virtual void RemoveGlobalChangeCallback( FnChangeCallback_t callback ) = 0; virtual void CallGlobalChangeCallbacks( ConVar *var, const char *pOldString, float flOldValue ) = 0; // Install a console printer virtual void InstallConsoleDisplayFunc( IConsoleDisplayFunc* pDisplayFunc ) = 0; virtual void RemoveConsoleDisplayFunc( IConsoleDisplayFunc* pDisplayFunc ) = 0; virtual void ConsoleColorPrintf( const Color& clr, PRINTF_FORMAT_STRING const char *pFormat, ... 
) const = 0; virtual void ConsolePrintf( PRINTF_FORMAT_STRING const char *pFormat, ... ) const = 0; virtual void ConsoleDPrintf( PRINTF_FORMAT_STRING const char *pFormat, ... ) const = 0; // Reverts cvars which contain a specific flag virtual void RevertFlaggedConVars( int nFlag ) = 0; // Method allowing the engine ICvarQuery interface to take over // A little hacky, owing to the fact the engine is loaded // well after ICVar, so we can't use the standard connect pattern virtual void InstallCVarQuery( ICvarQuery *pQuery ) = 0; #if defined( _X360 ) virtual void PublishToVXConsole( ) = 0; #endif virtual bool IsMaterialThreadSetAllowed( ) const = 0; virtual void QueueMaterialThreadSetValue( ConVar *pConVar, const char *pValue ) = 0; virtual void QueueMaterialThreadSetValue( ConVar *pConVar, int nValue ) = 0; virtual void QueueMaterialThreadSetValue( ConVar *pConVar, float flValue ) = 0; virtual bool HasQueuedMaterialThreadConVarSets() const = 0; virtual int ProcessQueuedMaterialThreadConVarSets() = 0; protected: class ICVarIteratorInternal; public: /// Iteration over all cvars. /// (THIS IS A SLOW OPERATION AND YOU SHOULD AVOID IT.) /// usage: /// { ICVar::Iterator iter(g_pCVar); /// for ( iter.SetFirst() ; iter.IsValid() ; iter.Next() ) /// { /// ConCommandBase *cmd = iter.Get(); /// } /// } /// The Iterator class actually wraps the internal factory methods /// so you don't need to worry about new/delete -- scope takes care // of it. /// We need an iterator like this because we can't simply return a /// pointer to the internal data type that contains the cvars -- /// it's a custom, protected class with unusual semantics and is /// prone to change. class Iterator { public: inline Iterator(ICvar *icvar); inline ~Iterator(void); inline void SetFirst( void ); inline void Next( void ); inline bool IsValid( void ); inline ConCommandBase *Get( void ); private: ICVarIteratorInternal *m_pIter; }; protected: // internals for ICVarIterator class ICVarIteratorInternal { public: // warning: delete called on 'ICvar::ICVarIteratorInternal' that is abstract but has non-virtual destructor [-Wdelete-non-virtual-dtor] virtual ~ICVarIteratorInternal() {} virtual void SetFirst( void ) = 0; virtual void Next( void ) = 0; virtual bool IsValid( void ) = 0; virtual ConCommandBase *Get( void ) = 0; }; virtual ICVarIteratorInternal *FactoryInternalIterator( void ) = 0; friend class Iterator; }; inline ICvar::Iterator::Iterator(ICvar *icvar) { m_pIter = icvar->FactoryInternalIterator(); } inline ICvar::Iterator::~Iterator( void ) { delete m_pIter; } inline void ICvar::Iterator::SetFirst( void ) { m_pIter->SetFirst(); } inline void ICvar::Iterator::Next( void ) { m_pIter->Next(); } inline bool ICvar::Iterator::IsValid( void ) { return m_pIter->IsValid(); } inline ConCommandBase * ICvar::Iterator::Get( void ) { return m_pIter->Get(); } #define CVAR_INTERFACE_VERSION "VEngineCvar004" //----------------------------------------------------------------------------- // These global names are defined by tier1.h, duplicated here so you // don't have to include tier1.h //----------------------------------------------------------------------------- // These are marked DLL_EXPORT for Linux. DLL_EXPORT ICvar *cvar; extern ICvar *g_pCVar; #endif // ICVAR_H
2,040
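A small Python sketch of the registry behaviour the ICvar interface above declares: commands are registered under a per-DLL identifier, looked up by name, and a global change callback fans out to every listener. All names here are hypothetical; this is not the Source SDK implementation.

class CvarRegistrySketch:
    def __init__(self):
        self._commands = {}              # name -> (dll_id, command object)
        self._change_callbacks = []
        self._next_dll_id = 0

    def allocate_dll_identifier(self):
        self._next_dll_id += 1
        return self._next_dll_id

    def register(self, name, command, dll_id):
        self._commands[name] = (dll_id, command)

    def unregister_all(self, dll_id):
        # mirrors UnregisterConCommands(CVarDLLIdentifier_t id)
        self._commands = {n: v for n, v in self._commands.items() if v[0] != dll_id}

    def find_var(self, name):
        entry = self._commands.get(name)
        return entry[1] if entry else None

    def install_global_change_callback(self, callback):
        self._change_callbacks.append(callback)

    def call_global_change_callbacks(self, var, old_string, old_value):
        for callback in self._change_callbacks:
            callback(var, old_string, old_value)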
404
<gh_stars>100-1000 // // SampleSource.c - MrsWatson // Copyright (c) 2016 <NAME>. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // #include "SampleSource.h" #include "base/File.h" #include "logging/EventLogger.h" #include <stdio.h> #include <stdlib.h> #include <string.h> void sampleSourcePrintSupportedTypes(void) { logInfo("Supported audio file types:"); // We can theoretically support more formats, pretty much anything audiofile // supports // would work here. However, most of those file types are rather uncommon, and // require // special setup when writing, so we only choose the most common ones. #if USE_AUDIOFILE logInfo("- AIFF (via libaudiofile)"); #endif #if USE_FLAC logInfo("- FLAC (via libaudiofile)"); #endif // Always supported logInfo("- PCM"); #if USE_AUDIOFILE logInfo("- WAV (via libaudiofile)"); #else logInfo("- WAV (internal)"); #endif } static SampleSourceType _sampleSourceGuess(const CharString sampleSourceName) { File sourceFile = NULL; CharString sourceFileExtension = NULL; SampleSourceType result = SAMPLE_SOURCE_TYPE_PCM; if (sampleSourceName == NULL || charStringIsEmpty(sampleSourceName)) { result = SAMPLE_SOURCE_TYPE_SILENCE; } else { // Look for stdin/stdout if (strlen(sampleSourceName->data) == 1 && sampleSourceName->data[0] == '-') { result = SAMPLE_SOURCE_TYPE_PCM; } else { sourceFile = newFileWithPath(sampleSourceName); sourceFileExtension = fileGetExtension(sourceFile); freeFile(sourceFile); // If there is no file extension, then automatically assume raw PCM data. // Deal with it! 
if (charStringIsEmpty(sourceFileExtension)) { result = SAMPLE_SOURCE_TYPE_PCM; } // Possible file extensions for raw PCM data else if (charStringIsEqualToCString(sourceFileExtension, "pcm", true) || charStringIsEqualToCString(sourceFileExtension, "raw", true) || charStringIsEqualToCString(sourceFileExtension, "dat", true)) { result = SAMPLE_SOURCE_TYPE_PCM; } #if USE_AUDIOFILE else if (charStringIsEqualToCString(sourceFileExtension, "aif", true) || charStringIsEqualToCString(sourceFileExtension, "aiff", true)) { result = SAMPLE_SOURCE_TYPE_AIFF; } #endif #if USE_FLAC else if (charStringIsEqualToCString(sourceFileExtension, "flac", true)) { result = SAMPLE_SOURCE_TYPE_FLAC; } #endif else if (charStringIsEqualToCString(sourceFileExtension, "wav", true) || charStringIsEqualToCString(sourceFileExtension, "wave", true)) { result = SAMPLE_SOURCE_TYPE_WAVE; } else { logCritical("Sample source '%s' does not match any supported type", sampleSourceName->data); result = SAMPLE_SOURCE_TYPE_INVALID; } } } freeCharString(sourceFileExtension); return result; } extern SampleSource _newSampleSourceAudiofile(const CharString sampleSourceName, const SampleSourceType sampleSourceType); extern SampleSource _newSampleSourcePcm(const CharString sampleSourceName); extern SampleSource _newSampleSourceSilence(); extern SampleSource _newSampleSourceWave(const CharString sampleSourceName); SampleSource sampleSourceFactory(const CharString sampleSourceName) { SampleSourceType sampleSourceType = _sampleSourceGuess(sampleSourceName); switch (sampleSourceType) { case SAMPLE_SOURCE_TYPE_SILENCE: return _newSampleSourceSilence(); case SAMPLE_SOURCE_TYPE_PCM: return _newSampleSourcePcm(sampleSourceName); #if USE_AUDIOFILE case SAMPLE_SOURCE_TYPE_AIFF: return _newSampleSourceAudiofile(sampleSourceName, sampleSourceType); #endif #if USE_FLAC case SAMPLE_SOURCE_TYPE_FLAC: return _newSampleSourceAudiofile(sampleSourceName, sampleSourceType); #endif #if USE_AUDIOFILE case SAMPLE_SOURCE_TYPE_WAVE: return _newSampleSourceAudiofile(sampleSourceName, sampleSourceType); #else case SAMPLE_SOURCE_TYPE_WAVE: return _newSampleSourceWave(sampleSourceName); #endif default: return NULL; } } void freeSampleSource(SampleSource self) { if (self != NULL) { self->freeSampleSourceData(self->extraData); freeCharString(self->sourceName); free(self); } }
1,910
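The guess logic above keys off the file extension, treats "-" as stdin/stdout PCM, and falls back to raw PCM when there is no extension. A rough Python equivalent for illustration only; the real build also gates AIFF/FLAC support behind compile-time flags.

import os

def guess_sample_source_type(name):
    # Illustrative re-statement of _sampleSourceGuess(); not the MrsWatson code path.
    if not name:
        return "silence"
    if name == "-":                      # stdin/stdout
        return "pcm"
    ext = os.path.splitext(name)[1].lstrip(".").lower()
    if ext in ("", "pcm", "raw", "dat"):
        return "pcm"
    if ext in ("aif", "aiff"):
        return "aiff"
    if ext == "flac":
        return "flac"
    if ext in ("wav", "wave"):
        return "wave"
    return "invalid"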
370
{ "displayName": "Compute Engine default service account", "email": "<EMAIL>", "name": "projects/k8s-infra-e2e-boskos-013/serviceAccounts/<EMAIL>", "oauth2ClientId": "105277301157853398368", "projectId": "k8s-infra-e2e-boskos-013", "uniqueId": "105277301157853398368" }
121
2,151
<gh_stars>1000+ import proguard.annotation.KeepApplication; /** * This application illustrates the use of annotations for configuring ProGuard. * * You can compile it with: * javac -classpath ../lib/annotations.jar Application.java * You can then process it with: * java -jar ../../../lib/proguard.jar @ ../examples.pro * * The annotation will preserve the class and its main method. */ @KeepApplication public class Application { public static void main(String[] args) { System.out.println("The answer is 42"); } }
177
310
<filename>gear/software/s/snapndrag.json<gh_stars>100-1000 { "name": "SnapNDrag", "description": "A screenshot tool for Mac OS X.", "url": "http://www.yellowmug.com/snapndrag/" }
70
1,765
<reponame>meandthemachine/deconz-rest-plugin #include <QComboBox> #include <QCheckBox> #include <QFormLayout> #include <QHBoxLayout> #include <QLabel> #include <QMessageBox> #include <QMimeData> #include <QPushButton> #include <QPlainTextEdit> #include <QScrollArea> #include <QSpinBox> #include <QVBoxLayout> #include <QHBoxLayout> #include <QUrlQuery> #include <vector> #include "deconz/zcl.h" #include "ddf_itemeditor.h" ItemLineEdit::ItemLineEdit(const QVariantMap &ddfParam, const DDF_FunctionDescriptor::Parameter &param, QWidget *parent) : QLineEdit(parent) { setAcceptDrops(false); paramDescription = param; if (ddfParam.contains(param.key)) { QVariant val = ddfParam.value(param.key); // ["0x0001","0x0002"] --> "0x0001,0x0002" if (val.type() == QVariant::List) { val = val.toStringList().join(QLatin1Char(',')); } if (param.dataType == DataTypeUInt8 && param.key == QLatin1String("ep") && val.toUInt() == 0) { setText(QLatin1String("auto")); } else { setText(val.toString()); } origValue = text(); } switch (param.dataType) { case DataTypeUInt8: { if (param.isHexString) { setPlaceholderText(QString("0x%1").arg(param.defaultValue.toUInt(), 2, 16, QLatin1Char('0'))); } else { setPlaceholderText(QString::number(param.defaultValue.toUInt())); } } break; case DataTypeUInt16: { if (param.isHexString) { setPlaceholderText(QString("0x%1").arg(param.defaultValue.toUInt(), 4, 16, QLatin1Char('0'))); } else { setPlaceholderText(QString::number(param.defaultValue.toUInt())); } } break; case DataTypeString: { if (!param.defaultValue.isNull() && text().isEmpty()) { setPlaceholderText(text()); } } break; default: break; } verifyInputText(text()); connect(this, &QLineEdit::textChanged, this, &ItemLineEdit::inputTextChanged); } void ItemLineEdit::inputTextChanged(const QString &text) { if (verifyInputText(text)) { emit valueChanged(); } } bool ItemLineEdit::verifyInputText(const QString &text) { bool isValid = true; const QStringList ls = text.split(QLatin1Char(','), SKIP_EMPTY_PARTS); for (const QString &i : ls) { if (paramDescription.dataType == DataTypeUInt8 && paramDescription.key == QLatin1String("ep")) { if (i == QLatin1String("auto")) { isValid = true; continue; } } if (i.isEmpty() && !paramDescription.isOptional) { isValid = false; } else if (!i.isEmpty()) { switch (paramDescription.dataType) { case DataTypeUInt8: case DataTypeUInt16: case DataTypeUInt32: case DataTypeUInt64: { bool ok; qint64 num = i.toULongLong(&ok, 0); if (!ok) { isValid = false; } else if (paramDescription.dataType == DataTypeUInt8 && num > UINT8_MAX) { isValid = false; } else if (paramDescription.dataType == DataTypeUInt16 && num > UINT16_MAX) { isValid = false; } else if (paramDescription.dataType == DataTypeUInt32 && num > UINT32_MAX) { isValid = false; } } break; default: break; } } } if (!isValid) { setStyleSheet(QLatin1String("color:red")); } else if (text != origValue) { setStyleSheet(QLatin1String("color:blue")); } else { setStyleSheet(QString()); } return isValid; } void ItemLineEdit::updateValueInMap(QVariantMap &map) const { QVariantList vls; const QStringList sls = text().split(QLatin1Char(','), SKIP_EMPTY_PARTS); int fieldWidth = 0; for (const QString &i : sls) { if (paramDescription.dataType == DataTypeUInt8 && paramDescription.key == QLatin1String("ep")) { if (i == QLatin1String("auto")) { vls.push_back(0); continue; } } switch (paramDescription.dataType) { case DataTypeUInt8: fieldWidth = 2; break; case DataTypeUInt16: fieldWidth = 4; break; case DataTypeUInt32: fieldWidth = 8; break; case DataTypeUInt64: fieldWidth = 
16; break; case DataTypeString: { vls.push_back(i); } break; default: break; } if (fieldWidth > 0) { bool ok; qint64 num = i.toULongLong(&ok, 0); if (!ok) { } else if (paramDescription.isHexString) { vls.push_back(QString("0x%1").arg(num, fieldWidth, 16, QLatin1Char('0'))); } else { vls.push_back(num); } } } if (vls.size() == 1) { map[paramDescription.key] = vls.front(); } else if (vls.size() > 1) { map[paramDescription.key] = vls; } else { map[paramDescription.key] = QVariant(); } } struct DDF_Function { FunctionWidget *widget = nullptr; QComboBox *functionComboBox = nullptr; QWidget *paramWidget = nullptr; QVariantMap paramMap; QLabel *clusterName = nullptr; QLabel *attrName = nullptr; std::vector<QWidget*> itemWidgets; // dynamic widgets void (DDF_ItemEditor::*paramChanged)() = nullptr; // called when the line edit content changes }; enum EditorState { StateInit, StateLoad, StateEdit }; class DDF_ItemEditorPrivate { public: EditorState state = StateInit; DeviceDescriptions *dd = nullptr; QLabel *itemHeader = nullptr; QPlainTextEdit *itemDescription = nullptr; QScrollArea *scrollArea = nullptr; QCheckBox *publicCheckBox = nullptr; QCheckBox *staticCheckBox = nullptr; QCheckBox *awakeCheckBox = nullptr; QLineEdit *defaultValue = nullptr; // also static value QSpinBox *readInterval = nullptr; DDF_Function readFunction; DDF_Function parseFunction; DeviceDescription::Item editItem; }; DDF_ItemEditor::DDF_ItemEditor(QWidget *parent) : QWidget(parent) { d = new DDF_ItemEditorPrivate; QVBoxLayout *mainLay = new QVBoxLayout; setLayout(mainLay); mainLay->setMargin(0); setAcceptDrops(true); d->itemHeader = new QLabel(tr("Item"), this); mainLay->addWidget(d->itemHeader); d->scrollArea = new QScrollArea(this); QWidget *scrollWidget = new QWidget; d->scrollArea->setWidget(scrollWidget); d->scrollArea->setWidgetResizable(true); QVBoxLayout *scrollLay = new QVBoxLayout; scrollWidget->setLayout(scrollLay); scrollWidget->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::MinimumExpanding); mainLay->addWidget(d->scrollArea); QLabel *description = new QLabel(tr("Description"), scrollWidget); scrollLay->addWidget(description); d->itemDescription = new QPlainTextEdit(scrollWidget); d->itemDescription->setAcceptDrops(false); d->itemDescription->setMinimumHeight(32); d->itemDescription->setMaximumHeight(72); d->itemDescription->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::Minimum); connect(d->itemDescription, &QPlainTextEdit::textChanged, this, &DDF_ItemEditor::attributeChanged); scrollLay->addWidget(d->itemDescription); d->publicCheckBox = new QCheckBox(tr("Public item")); d->publicCheckBox->setToolTip(tr("The item is visible in the REST-API")); scrollLay->addWidget(d->publicCheckBox); connect(d->publicCheckBox, &QCheckBox::stateChanged, this, &DDF_ItemEditor::attributeChanged); d->awakeCheckBox = new QCheckBox(tr("Awake on receive")); d->awakeCheckBox->setToolTip(tr("The device is considered awake when this item is set due a incoming command.")); scrollLay->addWidget(d->awakeCheckBox); connect(d->awakeCheckBox, &QCheckBox::stateChanged, this, &DDF_ItemEditor::attributeChanged); d->staticCheckBox = new QCheckBox(tr("Static default value")); d->staticCheckBox->setToolTip(tr("A static default value is fixed and can't be changed.")); scrollLay->addWidget(d->staticCheckBox); connect(d->staticCheckBox, &QCheckBox::stateChanged, this, &DDF_ItemEditor::attributeChanged); scrollLay->addWidget(new QLabel(tr("Default value"))); d->defaultValue = new QLineEdit; d->defaultValue->setAcceptDrops(false); 
connect(d->defaultValue, &QLineEdit::textChanged, this, &DDF_ItemEditor::attributeChanged); scrollLay->addWidget(d->defaultValue); QFont boldFont = font(); boldFont.setBold(true); // parse function { DDF_Function &fn = d->parseFunction; fn.paramChanged = &DDF_ItemEditor::parseParamChanged; fn.widget = new FunctionWidget(scrollWidget); scrollLay->addWidget(fn.widget); auto *fnLay = new QVBoxLayout; fn.widget->setLayout(fnLay); connect(fn.widget, &FunctionWidget::droppedUrl, this, &DDF_ItemEditor::droppedUrl); QLabel *parseLabel = new QLabel(tr("Parse"), this); parseLabel->setFont(boldFont); fnLay->addWidget(parseLabel); fn.functionComboBox = new QComboBox(fn.widget); fn.functionComboBox->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Minimum); fn.functionComboBox->setMinimumWidth(160); fnLay->addWidget(fn.functionComboBox); fn.paramWidget = new QWidget(fn.widget); fnLay->addWidget(fn.paramWidget); fn.paramWidget->setLayout(new QFormLayout); } // read function { DDF_Function &fn = d->readFunction; fn.paramChanged = &DDF_ItemEditor::readParamChanged; fn.widget = new FunctionWidget(scrollWidget); scrollLay->addWidget(fn.widget); auto *fnLay = new QVBoxLayout; fn.widget->setLayout(fnLay); connect(fn.widget, &FunctionWidget::droppedUrl, this, &DDF_ItemEditor::droppedUrl); QLabel *readLabel = new QLabel(tr("Read"), this); readLabel->setFont(boldFont); fnLay->addWidget(readLabel); fn.functionComboBox = new QComboBox(fn.widget); fn.functionComboBox->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Minimum); fn.functionComboBox->setMinimumWidth(160); fnLay->addWidget(fn.functionComboBox); fn.paramWidget = new QWidget(fn.widget); fnLay->addWidget(fn.paramWidget); QFormLayout *readLay = new QFormLayout; fn.paramWidget->setLayout(readLay); d->readInterval = new QSpinBox(this); d->readInterval->setSuffix(" s"); d->readInterval->setRange(0, 84000 * 2); connect(d->readInterval, SIGNAL(valueChanged(int)), this, SLOT(attributeChanged())); readLay->addRow(new QLabel(tr("Interval")), d->readInterval); } scrollLay->addStretch(); } DDF_ItemEditor::~DDF_ItemEditor() { delete d; } void DDF_ItemEditor::updateZclLabels(DDF_Function &fn) { bool ok; quint16 clusterId = UINT16_MAX; quint16 attrId = UINT16_MAX; if (fn.paramMap.contains(QLatin1String("cl"))) { clusterId = fn.paramMap.value(QLatin1String("cl")).toString().toUInt(&ok, 0); } if (clusterId == UINT16_MAX) { return; } const auto cl = deCONZ::ZCL_InCluster(HA_PROFILE_ID, clusterId, 0x0000); if (!cl.isValid()) { return; } if (fn.clusterName && clusterId != UINT16_MAX) { fn.clusterName->setText(cl.name()); } if (fn.paramMap.contains(QLatin1String("at"))) { attrId = fn.paramMap.value(QLatin1String("at")).toString().toUInt(&ok, 0); } if (fn.attrName && attrId != UINT16_MAX) { const auto i = std::find_if(cl.attributes().cbegin(), cl.attributes().cend(), [attrId](const auto &i) { return i.id() == attrId; }); if (i != cl.attributes().cend()) { fn.attrName->setText(i->name()); } } } void DDF_ItemEditor::setupFunction(DDF_Function &fn, const DeviceDescription::Item &item, const QVariantMap &ddfParam, const std::vector<DDF_FunctionDescriptor> &fnDescriptors) { fn.paramWidget->hide(); fn.attrName = nullptr; fn.clusterName = nullptr; for (auto *w : fn.itemWidgets) { w->hide(); w->deleteLater(); } disconnect(fn.functionComboBox, &QComboBox::currentTextChanged, this, &DDF_ItemEditor::functionChanged); fn.itemWidgets.clear(); if (item.isStatic) { fn.widget->hide(); } else { fn.widget->show(); } fn.functionComboBox->clear(); fn.functionComboBox->setToolTip(QString()); 
fn.functionComboBox->addItem(QObject::tr("None")); fn.paramMap = ddfParam; for (auto &descr : fnDescriptors) { fn.functionComboBox->addItem(descr.name); } QString fnName; if (!ddfParam.isEmpty()) { if (ddfParam.contains(QLatin1String("fn"))) { fnName = ddfParam.value(QLatin1String("fn")).toString(); } else { fnName = QLatin1String("zcl"); // default parse function } fn.functionComboBox->setCurrentText(fnName); } for (auto &descr : fnDescriptors) { if (descr.name == fn.functionComboBox->currentText()) { fn.functionComboBox->setToolTip(descr.description); QFormLayout *lay = static_cast<QFormLayout*>(fn.paramWidget->layout()); for (const auto &param : descr.parameters) { if (fnName == QLatin1String("zcl")) { if (param.key == QLatin1String("cl")) { auto *label = new QLabel("Cluster"); fn.clusterName = new QLabel; fn.clusterName->setWordWrap(true); fn.itemWidgets.push_back(label); fn.itemWidgets.push_back(fn.clusterName); lay->insertRow(0, label, fn.clusterName); } else if (param.key == QLatin1String("at")) { auto *label = new QLabel("Attribute"); fn.attrName = new QLabel; fn.attrName->setWordWrap(true); fn.itemWidgets.push_back(label); fn.itemWidgets.push_back(fn.attrName); lay->insertRow(1, label, fn.attrName); } } QLabel *name = new QLabel(param.name, fn.paramWidget); fn.itemWidgets.push_back(name); ItemLineEdit *edit = new ItemLineEdit(ddfParam, param, fn.paramWidget); edit->setToolTip(param.description); fn.itemWidgets.push_back(edit); connect(edit, &ItemLineEdit::valueChanged, this, fn.paramChanged); if (param.dataType == DataTypeString) { lay->addRow(name); lay->addRow(edit); } else { lay->addRow(name, edit); } } break; } } if (fn.functionComboBox->currentIndex() != 0) // none { fn.paramWidget->show(); } connect(fn.functionComboBox, &QComboBox::currentTextChanged, this, &DDF_ItemEditor::functionChanged); updateZclLabels(fn); } void DDF_ItemEditor::setItem(const DeviceDescription::Item &item, DeviceDescriptions *dd) { d->state = StateInit; d->editItem = item; d->dd = dd; d->itemHeader->setText(QString("%1 (%2)") .arg(QLatin1String(item.name.c_str())) .arg(R_DataTypeToString(item.descriptor.type))); d->publicCheckBox->setChecked(item.isPublic); d->awakeCheckBox->setChecked(item.awake); d->staticCheckBox->setChecked(item.isStatic); d->defaultValue->setText(item.defaultValue.toString()); if (item.refreshInterval >= 0) { d->readInterval->setValue(item.refreshInterval); } else { d->readInterval->setValue(0); } const auto &genItem = dd->getGenericItem(item.descriptor.suffix); d->itemDescription->setPlaceholderText(genItem.description); if (!genItem.description.isEmpty() && genItem.description == item.description) { d->itemDescription->setPlainText(QLatin1String("")); d->editItem.description.clear(); } else { d->itemDescription->setPlainText(item.description); } setupFunction(d->parseFunction, item, item.parseParameters.toMap(), dd->getParseFunctions()); setupFunction(d->readFunction, item, item.readParameters.toMap(), dd->getReadFunctions()); d->state = StateEdit; if (item != d->editItem) { emit itemChanged(); } } const DeviceDescription::Item DDF_ItemEditor::item() const { return d->editItem; } void DDF_ItemEditor::parseParamChanged() { Q_ASSERT(d->dd); DDF_Function &fn = d->parseFunction; ItemLineEdit *edit = qobject_cast<ItemLineEdit*>(sender()); if (edit) { edit->updateValueInMap(fn.paramMap); } if (d->editItem.parseParameters != fn.paramMap) { d->editItem.parseParameters = fn.paramMap; updateZclLabels(fn); } const auto &genItem = d->dd->getGenericItem(d->editItem.descriptor.suffix); if 
(genItem.parseParameters == d->editItem.parseParameters) { d->editItem.isGenericParse = 1; d->editItem.isImplicit = genItem.isImplicit; // todo is implicit shouldn't be used here } else { d->editItem.isGenericParse = 0; d->editItem.isImplicit = 0; } emit itemChanged(); } void DDF_ItemEditor::readParamChanged() { Q_ASSERT(d->dd); DDF_Function &fn = d->readFunction; ItemLineEdit *edit = qobject_cast<ItemLineEdit*>(sender()); if (edit) { edit->updateValueInMap(fn.paramMap); } if (d->editItem.readParameters != fn.paramMap) { d->editItem.readParameters = fn.paramMap; updateZclLabels(fn); } const auto &genItem = d->dd->getGenericItem(d->editItem.descriptor.suffix); if (genItem.readParameters == d->editItem.readParameters) { d->editItem.isGenericRead = 1; d->editItem.isImplicit = genItem.isImplicit; // todo is implicit shouldn't be used here } else { d->editItem.isGenericRead = 0; d->editItem.isImplicit = 0; } emit itemChanged(); } void DDF_ItemEditor::attributeChanged() { if (d->state != StateEdit) { return; } if (d->editItem.awake != d->awakeCheckBox->isChecked() || d->editItem.isPublic != d->publicCheckBox->isChecked() || d->editItem.isStatic != d->staticCheckBox->isChecked() || d->editItem.refreshInterval != d->readInterval->value() || d->editItem.description != d->itemDescription->toPlainText() || d->editItem.defaultValue.toString() != d->defaultValue->text()) { d->editItem.awake = d->awakeCheckBox->isChecked(); d->editItem.isPublic = d->publicCheckBox->isChecked(); d->editItem.isStatic = d->staticCheckBox->isChecked(); d->editItem.description = d->itemDescription->toPlainText(); d->editItem.refreshInterval = d->readInterval->value(); if (d->editItem.refreshInterval <= 0) { d->editItem.refreshInterval = DeviceDescription::Item::NoRefreshInterval; } if (!d->defaultValue->text().isEmpty()) { switch (d->editItem.descriptor.qVariantType) { case QVariant::Double: { bool ok; double val = d->defaultValue->text().toDouble(&ok); if (ok) { d->editItem.defaultValue = val; } } break; case QVariant::String: { d->editItem.defaultValue = d->defaultValue->text(); } break; case QVariant::Bool: { if (d->defaultValue->text() == QLatin1String("true") || d->defaultValue->text() == QLatin1String("1")) { d->editItem.defaultValue = true; } else if (d->defaultValue->text() == QLatin1String("false") || d->defaultValue->text() == QLatin1String("0")) { d->editItem.defaultValue = false; } else { d->editItem.defaultValue = {}; } } break; default: break; } } else { d->editItem.defaultValue = {}; } if (d->editItem.isStatic) { d->parseFunction.widget->hide(); d->readFunction.widget->hide(); } else { d->parseFunction.widget->show(); d->readFunction.widget->show(); } emit itemChanged(); } } void DDF_ItemEditor::functionChanged(const QString &text) { DDF_Function *fn = nullptr; QVariant *fnParam = nullptr; QString prevFunction; QComboBox *combo = qobject_cast<QComboBox*>(sender()); if (!combo) { return; } if (d->parseFunction.functionComboBox == combo) { fn = &d->parseFunction; fnParam = &d->editItem.parseParameters; } else if (d->readFunction.functionComboBox == combo) { fn = &d->readFunction; fnParam = &d->editItem.readParameters; } else { return; } if (fn->paramMap.contains("fn")) { prevFunction = fn->paramMap.value("fn").toString(); } if (prevFunction != text) { if (!prevFunction.isEmpty()) { auto btn = QMessageBox::question(parentWidget(), tr("Change function to %1").arg(text), tr("Proceed? 
Current function settings will be lost.")); if (btn == QMessageBox::No) { fn->functionComboBox->setCurrentText(prevFunction); return; } } fn->paramMap = {}; fn->paramMap["fn"] = text; *fnParam = fn->paramMap; auto &item = d->editItem; if (d->parseFunction.functionComboBox == combo) { setupFunction(d->parseFunction, item, item.parseParameters.toMap(), d->dd->getParseFunctions()); } else if (d->readFunction.functionComboBox == combo) { setupFunction(d->readFunction, item, item.readParameters.toMap(), d->dd->getReadFunctions()); } } } void DDF_ItemEditor::droppedUrl(const QUrl &url) { if (url.scheme() == QLatin1String("zclattr")) { // zclattr:attr?ep=1&cl=6&cs=s&mf=0&a=0&dt=16&rmin=1&rmax=300&t=D QUrlQuery urlQuery(url); bool ok; QVariantMap params; if (sender() == d->parseFunction.widget) { params = d->editItem.parseParameters.toMap(); } else if (sender() == d->readFunction.widget) { params = d->editItem.readParameters.toMap(); } if (urlQuery.hasQueryItem("ep")) { params["ep"] = urlQuery.queryItemValue("ep").toUInt(&ok, 16); } if (urlQuery.hasQueryItem("cid")) { quint16 cl = urlQuery.queryItemValue("cid").toUShort(&ok, 16); params["cl"] = QString("0x%1").arg(cl, 4, 16, QLatin1Char('0')); } if (urlQuery.hasQueryItem("a")) { quint16 attr = urlQuery.queryItemValue("a").toUShort(&ok, 16); params["at"] = QString("0x%1").arg(attr, 4, 16, QLatin1Char('0')); } if (urlQuery.hasQueryItem("mf")) { quint16 mf = urlQuery.queryItemValue("mf").toUShort(&ok, 16); if (mf != 0) { params["mf"] = QString("0x%1").arg(mf, 4, 16, QLatin1Char('0')); } else { params.remove("mf"); } } if (sender() == d->parseFunction.widget) { //d->editItem.parseParameters = params; setupFunction(d->parseFunction, d->editItem, params, d->dd->getParseFunctions()); parseParamChanged(); } else if (sender() == d->readFunction.widget) { if (urlQuery.hasQueryItem("rmax")) { int max = urlQuery.queryItemValue("rmax").toInt(); d->readInterval->setValue(max); } //d->editItem.readParameters = params; setupFunction(d->readFunction, d->editItem, params, d->dd->getReadFunctions()); readParamChanged(); } //emit itemChanged(); } } FunctionWidget::FunctionWidget(QWidget *parent) : QWidget(parent) { setAcceptDrops(true); } void FunctionWidget::dragEnterEvent(QDragEnterEvent *event) { if (!event->mimeData()->hasUrls()) { return; } window()->raise(); const auto urls = event->mimeData()->urls(); const auto url = urls.first(); if (url.scheme() == QLatin1String("zclattr")) { event->accept(); QPalette pal = parentWidget()->palette(); pal.setColor(QPalette::Window, pal.color(QPalette::AlternateBase)); setPalette(pal); setAutoFillBackground(true); } } void FunctionWidget::dragLeaveEvent(QDragLeaveEvent *event) { Q_UNUSED(event) setPalette(parentWidget()->palette()); } void FunctionWidget::dropEvent(QDropEvent *event) { setPalette(parentWidget()->palette()); if (!event->mimeData()->hasUrls()) { return; } const auto urls = event->mimeData()->urls(); const auto url = urls.first(); emit droppedUrl(url); }
12,544
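ItemLineEdit above accepts a comma-separated list of values, parses each with an automatic base (so 0x prefixes work), allows the literal "auto" for endpoint fields, and rejects values that overflow the declared integer width. A compact Python sketch of that check, with hypothetical names:

LIMITS = {"uint8": 0xFF, "uint16": 0xFFFF, "uint32": 0xFFFFFFFF, "uint64": 0xFFFFFFFFFFFFFFFF}

def validate_param(text, data_type, key, optional=False):
    parts = [p for p in text.split(",") if p != ""]
    if not parts and not optional:
        return False
    for part in parts:
        if data_type == "uint8" and key == "ep" and part == "auto":
            continue                     # endpoint may be resolved automatically
        try:
            value = int(part, 0)         # base 0 accepts "0x..." like toULongLong(&ok, 0)
        except ValueError:
            return False
        if value < 0 or value > LIMITS.get(data_type, LIMITS["uint64"]):
            return False
    return True

assert validate_param("0x0001,0x0002", "uint16", "cl")
assert not validate_param("0x1ff", "uint8", "at")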
4,533
from PyInstaller.utils.hooks import copy_metadata datas = copy_metadata('webrtcvad-wheels')
32
302
# -*- coding: utf-8 -*- # Copyright 2016 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import importlib import inspect import os import sys def get_module(module_full_name): if ':' in module_full_name: path, module_name = module_full_name.rsplit(':', 1) if not os.path.isdir(path): print("{0} is not a valid directory".format(path), file=sys.stderr) sys.exit(1) sys.path.append(path) return importlib.import_module(module_name) else: return importlib.import_module(module_full_name) def child_class(class_types, base_class): """ Find the child-most class of `base_class`. Examples: class A: pass class B(A): pass class C(B): pass child_class([A, B, C], A) = C """ subclasses = set() for class_type in class_types: if class_type is base_class: continue if issubclass(class_type, base_class): subclasses.add(class_type) if len(subclasses) == 0: return None elif len(subclasses) == 1: return subclasses.pop() else: # If more than one class is a subclass of `base_class` # It is possible that one or more classes are subclasses of another # class (see example above). # Recursively find the child-most class. Break ties by returning any # child-most class. for c in subclasses: child = child_class(subclasses, c) if child is not None: return child return subclasses.pop() def dynamic_import(module_full_name, base_class): module = get_module(module_full_name) class_types = [ class_type for _, class_type in inspect.getmembers(module, inspect.isclass) ] return child_class(class_types, base_class)
980
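Hypothetical usage of the loader above, assuming it is importable as dynamic_import_util and that ./plugins/reporters.py defines one or more subclasses of BaseReporter; the "path:module" form appends the directory to sys.path before importing. None of these names come from the file itself.

from dynamic_import_util import dynamic_import, child_class

class BaseReporter(object):
    def report(self, payload):
        raise NotImplementedError

# Resolves ./plugins/reporters.py, then returns the child-most BaseReporter subclass.
ReporterCls = dynamic_import('./plugins:reporters', BaseReporter)
reporter = ReporterCls()

# child_class can also be used directly on an in-memory hierarchy:
class A(BaseReporter): pass
class B(A): pass
assert child_class([BaseReporter, A, B], BaseReporter) is B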
8,649
package org.hswebframework.web.authorization.access; public interface DataAccessConfiguration { }
25
1,781
/* * Copyright 2013 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marshalchen.common.uimodule.listviewanimations; import android.widget.BaseAdapter; import com.marshalchen.common.uimodule.listviewanimations.itemmanipulation.AnimateAdditionAdapter; import com.marshalchen.common.uimodule.listviewanimations.widget.DynamicListView; import java.util.*; /** * A {@code true} {@link java.util.ArrayList} adapter providing access to all ArrayList methods. * Also implements {@link DynamicListView.Swappable} for easy object swapping, and {@link AnimateAdditionAdapter.Insertable} for inserting objects. */ @SuppressWarnings("UnusedDeclaration") public abstract class ArrayAdapter<T> extends BaseAdapter implements List<T>, DynamicListView.Swappable, AnimateAdditionAdapter.Insertable<T> { protected List<T> mItems; /** * Creates a new ArrayAdapter with an empty {@code List} . */ public ArrayAdapter() { this(null); } /** * Creates a new {@link com.marshalchen.common.uimodule.listviewanimations.ArrayAdapter} using given {@code List} , or an empty {@code List} if objects == null. */ public ArrayAdapter(final List<T> objects) { this(objects, false); } /** * Creates a new {@link com.marshalchen.common.uimodule.listviewanimations.ArrayAdapter}, using (a copy of) given {@code List} , or an empty {@code List} if objects = null. * @param copyList {@code true} to create a copy of the {@code List} , {@code false} to reuse the reference. */ public ArrayAdapter(final List<T> objects, final boolean copyList) { if (objects != null) { if (copyList) { mItems = new ArrayList<T>(objects); } else { mItems = objects; } } else { mItems = new ArrayList<T>(); } } @Override public int getCount() { return mItems.size(); } @Override public T getItem(final int location) { return mItems.get(location); } @Override public long getItemId(final int location) { return location; } /** * Appends the specified element to the end of the {@code List} . * @param object the object to add. * @return always true. */ @Override public boolean add(final T object) { boolean result = mItems.add(object); notifyDataSetChanged(); return result; } @Override public void add(final int location, final T object) { mItems.add(location, object); notifyDataSetChanged(); } /** * Adds the objects in the specified collection to the end of this List. The objects are added in the order in which they are returned from the collection's iterator. * @param collection the collection of objects. * @return {@code true} if this {@code List} is modified, {@code false} otherwise. */ @Override public boolean addAll(final Collection<? extends T> collection) { boolean result = mItems.addAll(collection); notifyDataSetChanged(); return result; } /** * Appends all of the elements in the specified collection to the end of the * {@code List} , in the order that they are specified. * @param objects the array of objects. * @return {@code true} if the collection changed during insertion. */ public boolean addAll(final T... 
objects) { boolean result = Collections.addAll(mItems, objects); notifyDataSetChanged(); return result; } @Override public boolean addAll(final int location, final Collection<? extends T> objects) { boolean result = mItems.addAll(location, objects); notifyDataSetChanged(); return result; } /** * Inserts the objects in the specified collection at the specified location in this List. The objects are added in the order that they specified. * @param location the index at which to insert. * @param objects the array of objects. */ public void addAll(final int location, final T... objects) { for (int i = location; i < objects.length + location; i++) { mItems.add(i, objects[i]); } notifyDataSetChanged(); } @Override public void clear() { mItems.clear(); notifyDataSetChanged(); } @Override public boolean contains(final Object object) { return mItems.contains(object); } @Override public boolean containsAll(final Collection<?> collection) { return mItems.containsAll(collection); } @Override public T get(final int location) { return mItems.get(location); } @Override public T set(final int location, final T object) { T result = mItems.set(location, object); notifyDataSetChanged(); return result; } @Override public int size() { return mItems.size(); } @Override public List<T> subList(final int start, final int end) { return mItems.subList(start, end); } @Override public Object[] toArray() { return mItems.toArray(); } @Override public <T1> T1[] toArray(final T1[] array) { //noinspection SuspiciousToArrayCall return mItems.toArray(array); } @Override public boolean remove(final Object object) { boolean result = mItems.remove(object); notifyDataSetChanged(); return result; } @Override public T remove(final int location) { T result = mItems.remove(location); notifyDataSetChanged(); return result; } /** * Removes all elements at the specified locations in the {@code List} . * @param locations the collection of indexes to remove. * @return a collection containing the removed objects. 
*/ public Collection<T> removePositions(final Collection<Integer> locations) { ArrayList<T> removedItems = new ArrayList<T>(); ArrayList<Integer> locationsList = new ArrayList<Integer>(locations); Collections.sort(locationsList); Collections.reverse(locationsList); for (int location : locationsList) { removedItems.add(mItems.remove(location)); } notifyDataSetChanged(); return removedItems; } @Override public boolean removeAll(final Collection<?> objects) { boolean result = mItems.removeAll(objects); notifyDataSetChanged(); return result; } @Override public boolean retainAll(final Collection<?> objects) { boolean result = mItems.retainAll(objects); notifyDataSetChanged(); return result; } @Override public int indexOf(final Object object) { return mItems.indexOf(object); } @Override public Iterator<T> iterator() { return mItems.iterator(); } @Override public int lastIndexOf(final Object object) { return mItems.lastIndexOf(object); } @Override public ListIterator<T> listIterator() { return mItems.listIterator(); } @Override public ListIterator<T> listIterator(final int location) { return mItems.listIterator(location); } @Override public void swapItems(final int locationOne, final int locationTwo) { T temp = getItem(locationOne); set(locationOne, getItem(locationTwo)); set(locationTwo, temp); } private BaseAdapter mDataSetChangedSlavedAdapter; public void propagateNotifyDataSetChanged(final BaseAdapter slavedAdapter) { mDataSetChangedSlavedAdapter = slavedAdapter; } @Override public void notifyDataSetChanged() { super.notifyDataSetChanged(); if (mDataSetChangedSlavedAdapter != null) { mDataSetChangedSlavedAdapter.notifyDataSetChanged(); } } }
3,086
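removePositions above sorts the requested indices and removes them in reverse so that earlier removals do not shift the positions still to be removed. The same idea in a few lines of Python, illustration only:

def remove_positions(items, positions):
    # Remove several indices safely by deleting from the highest index down.
    removed = []
    for location in sorted(positions, reverse=True):
        removed.append(items.pop(location))
    return removed

data = ["a", "b", "c", "d", "e"]
assert remove_positions(data, [1, 3]) == ["d", "b"]   # highest index removed first
assert data == ["a", "c", "e"]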
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.form; import org.openide.util.datatransfer.NewType; import java.awt.*; import java.util.*; import org.netbeans.modules.form.project.ClassSource; /** * * @author <NAME> */ public class RADMenuComponent extends RADMenuItemComponent implements ComponentContainer { /** Map of possible combinations of menus in menus. Menu types (as Integer) * are mapped to supported (sub)menu types (as Class[]). */ static Map<Integer,Class[]> supportedMenus; /** Initialization of supportedMenus map. */ static { supportedMenus = new HashMap<Integer,Class[]>(); supportedMenus.put(Integer.valueOf(T_MENUBAR), new Class[] { Menu.class }); supportedMenus.put(Integer.valueOf(T_MENU), new Class[] { MenuItem.class, CheckboxMenuItem.class, Menu.class, Separator.class }); supportedMenus.put(Integer.valueOf(T_POPUPMENU), new Class[] { MenuItem.class, CheckboxMenuItem.class, Menu.class, Separator.class }); // supportedMenus.put(new Integer(T_JMENUBAR), // new Class[] { JMenu.class }); // supportedMenus.put(new Integer(T_JMENU), // new Class[] { JMenuItem.class, // JCheckBoxMenuItem.class, // JRadioButtonMenuItem.class, // JMenu.class, // JSeparator.class }); // supportedMenus.put(new Integer(T_JPOPUPMENU), // new Class[] { JMenuItem.class, // JCheckBoxMenuItem.class, // JRadioButtonMenuItem.class, // JMenu.class, // JSeparator.class }); } // ----------------------------------------------------------------------------- // Private variables private ArrayList<RADComponent> subComponents; // ----------------------------------------------------------------------------- // Initialization /** Support for new types that can be created in this node. * @return array of new type operations that are allowed */ @Override public NewType[] getNewTypes() { if (isReadOnly()) return RADComponent.NO_NEW_TYPES; Class[] classes = supportedMenus.get(Integer.valueOf(getMenuItemType())); if (classes == null) return RADComponent.NO_NEW_TYPES; NewType[] types = new NewType[classes.length]; for (int i = 0; i < types.length; i++) types[i] = new NewMenuType(classes[i]); return types; } public boolean canAddItem(Class itemType) { Class[] classes = supportedMenus.get(Integer.valueOf(getMenuItemType())); if (classes != null) for (int i=0; i < classes.length; i++) if (classes[i] == itemType) // or more general isAssignableFrom ?? 
return true; return false; } // ----------------------------------------------------------------------------- // SubComponents Management @Override public RADComponent[] getSubBeans() { RADComponent[] components = new RADComponent [subComponents.size()]; subComponents.toArray(components); return components; } @Override public void initSubComponents(RADComponent[] initComponents) { if (subComponents == null) subComponents = new ArrayList<RADComponent>(initComponents.length); else { subComponents.clear(); subComponents.ensureCapacity(initComponents.length); } for (int i = 0; i < initComponents.length; i++) { RADComponent comp = initComponents[i]; if (comp instanceof RADMenuItemComponent) { subComponents.add(comp); comp.setParentComponent(this); } } } @Override public void reorderSubComponents(int[] perm) { RADComponent[] components = new RADComponent[subComponents.size()]; for (int i=0; i < perm.length; i++) components[perm[i]] = subComponents.get(i); subComponents.clear(); subComponents.addAll(Arrays.asList(components)); } @Override public void add(RADComponent comp) { if (comp instanceof RADMenuItemComponent) { subComponents.add(comp); comp.setParentComponent(this); // getNodeReference().updateChildren(); } } @Override public void remove(RADComponent comp) { if (subComponents.remove(comp)) comp.setParentComponent(null); // getNodeReference().updateChildren(); } @Override public int getIndexOf(RADComponent comp) { return subComponents.indexOf(comp); } // ------------- // Innerclasses /** NewType for creating sub-MenuItem. */ class NewMenuType extends NewType { /** Class which represents the menu class for this NewType */ Class item; /** Constructs new NewType for the given menu class */ public NewMenuType(Class item) { this.item = item; } /** Display name for the creation action. This should be * presented as an item in a menu. * * @return the name of the action */ @Override public String getName() { String s = item.getName(); int index = s.lastIndexOf('.'); if (index != -1) return s.substring(index + 1); else return s; } /** Create the object. * @exception IOException if something fails */ @Override public void create() throws java.io.IOException { getFormModel().getComponentCreator() .createComponent(new ClassSource(item.getName()), RADMenuComponent.this, null); } } }
3,224
1,174
// Copyright 2020 Espressif Systems (Shanghai) Co. Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include <stdio.h> #include <string.h> #include "freertos/FreeRTOS.h" #include "freertos/task.h" #include "esp_log.h" #include "driver/gpio.h" #include "screen_driver.h" #include "screen_utility.h" #include "ili9488.h" static const char *TAG = "ILI9488"; #define LCD_CHECK(a, str, ret) if(!(a)) { \ ESP_LOGE(TAG,"%s:%d (%s):%s", __FILE__, __LINE__, __FUNCTION__, str); \ return (ret); \ } #define ILI9488_NOP 0x00 #define ILI9488_SWRESET 0x01 #define ILI9488_RDDID 0x04 #define ILI9488_RDDST 0x09 #define ILI9488_SLPIN 0x10 #define ILI9488_SLPOUT 0x11 #define ILI9488_PTLON 0x12 #define ILI9488_NORON 0x13 #define ILI9488_RDMODE 0x0A #define ILI9488_RDMADCTL 0x0B #define ILI9488_RDPIXFMT 0x0C #define ILI9488_RDIMGFMT 0x0D #define ILI9488_RDSELFDIAG 0x0F #define ILI9488_INVOFF 0x20 #define ILI9488_INVON 0x21 #define ILI9488_GAMMASET 0x26 #define ILI9488_DISPOFF 0x28 #define ILI9488_DISPON 0x29 #define ILI9488_CASET 0x2A #define ILI9488_PASET 0x2B #define ILI9488_RAMWR 0x2C #define ILI9488_RAMRD 0x2E #define ILI9488_PTLAR 0x30 #define ILI9488_VSCRDEF 0x33 #define ILI9488_MADCTL 0x36 #define ILI9488_VSCRSADD 0x37 #define ILI9488_PIXFMT 0x3A #define ILI9488_RAMWRCONT 0x3C #define ILI9488_RAMRDCONT 0x3E #define ILI9488_IMCTR 0xB0 #define ILI9488_FRMCTR1 0xB1 #define ILI9488_FRMCTR2 0xB2 #define ILI9488_FRMCTR3 0xB3 #define ILI9488_INVCTR 0xB4 #define ILI9488_DFUNCTR 0xB6 #define ILI9488_PWCTR1 0xC0 #define ILI9488_PWCTR2 0xC1 #define ILI9488_PWCTR3 0xC2 #define ILI9488_PWCTR4 0xC3 #define ILI9488_PWCTR5 0xC4 #define ILI9488_VMCTR1 0xC5 #define ILI9488_VMCTR2 0xC7 #define ILI9488_RDID1 0xDA #define ILI9488_RDID2 0xDB #define ILI9488_RDID3 0xDC #define ILI9488_RDID4 0xDD #define ILI9488_GMCTRP1 0xE0 #define ILI9488_GMCTRN1 0xE1 #define ILI9488_IMGFUNCT 0xE9 #define ILI9488_ADJCTR3 0xF7 #define ILI9488_MAD_RGB 0x08 #define ILI9488_MAD_BGR 0x00 #define ILI9488_MAD_VERTICAL 0x20 #define ILI9488_MAD_X_LEFT 0x00 #define ILI9488_MAD_X_RIGHT 0x40 #define ILI9488_MAD_Y_UP 0x80 #define ILI9488_MAD_Y_DOWN 0x00 /* MADCTL Defines */ #define MADCTL_MY 0x80 #define MADCTL_MX 0x40 #define MADCTL_MV 0x20 #define MADCTL_ML 0x10 #define MADCTL_RGB 0x08 #define MADCTL_MH 0x04 #define LCD_NAME "ILI9488" #define LCD_BPP 16 #define ILI9488_RESOLUTION_HOR 320 #define ILI9488_RESOLUTION_VER 480 static scr_handle_t g_lcd_handle; /** * This header file is only used to redefine the function to facilitate the call. * It can only be placed in this position, not in the head of the file. 
*/ #include "interface_drv_def.h" scr_driver_t lcd_ili9488_default_driver = { .init = lcd_ili9488_init, .deinit = lcd_ili9488_deinit, .set_direction = lcd_ili9488_set_rotation, .set_window = lcd_ili9488_set_window, .write_ram_data = lcd_ili9488_write_ram_data, .draw_pixel = lcd_ili9488_draw_pixel, .draw_bitmap = lcd_ili9488_draw_bitmap, .get_info = lcd_ili9488_get_info, }; static void lcd_ili9488_init_reg(void); esp_err_t lcd_ili9488_init(const scr_controller_config_t *lcd_conf) { LCD_CHECK(lcd_conf->width <= ILI9488_RESOLUTION_HOR, "Width greater than maximum", ESP_ERR_INVALID_ARG); LCD_CHECK(lcd_conf->height <= ILI9488_RESOLUTION_VER, "Height greater than maximum", ESP_ERR_INVALID_ARG); LCD_CHECK(NULL != lcd_conf, "config pointer invalid", ESP_ERR_INVALID_ARG); LCD_CHECK((NULL != lcd_conf->interface_drv->write_cmd && \ NULL != lcd_conf->interface_drv->write_data && \ NULL != lcd_conf->interface_drv->write && \ NULL != lcd_conf->interface_drv->read && \ NULL != lcd_conf->interface_drv->bus_acquire && \ NULL != lcd_conf->interface_drv->bus_release), "Interface driver invalid", ESP_ERR_INVALID_ARG); esp_err_t ret; // Reset the display if (lcd_conf->pin_num_rst >= 0) { gpio_pad_select_gpio(lcd_conf->pin_num_rst); gpio_set_direction(lcd_conf->pin_num_rst, GPIO_MODE_OUTPUT); gpio_set_level(lcd_conf->pin_num_rst, (lcd_conf->rst_active_level) & 0x1); vTaskDelay(100 / portTICK_RATE_MS); gpio_set_level(lcd_conf->pin_num_rst, (~(lcd_conf->rst_active_level)) & 0x1); vTaskDelay(100 / portTICK_RATE_MS); } g_lcd_handle.interface_drv = lcd_conf->interface_drv; g_lcd_handle.original_width = lcd_conf->width; g_lcd_handle.original_height = lcd_conf->height; g_lcd_handle.offset_hor = lcd_conf->offset_hor; g_lcd_handle.offset_ver = lcd_conf->offset_ver; lcd_ili9488_init_reg(); // Enable backlight if (lcd_conf->pin_num_bckl >= 0) { gpio_pad_select_gpio(lcd_conf->pin_num_bckl); gpio_set_direction(lcd_conf->pin_num_bckl, GPIO_MODE_OUTPUT); gpio_set_level(lcd_conf->pin_num_bckl, (lcd_conf->bckl_active_level) & 0x1); } ret = lcd_ili9488_set_rotation(lcd_conf->rotate); LCD_CHECK(ESP_OK == ret, "set rotation failed", ESP_FAIL); ret = lcd_ili9488_set_invert(1); /**< ILI9488 setting the reverse color is the normal color */ LCD_CHECK(ESP_OK == ret, "Set color invert failed", ESP_FAIL); return ESP_OK; } esp_err_t lcd_ili9488_deinit(void) { memset(&g_lcd_handle, 0, sizeof(scr_handle_t)); return ESP_OK; } esp_err_t lcd_ili9488_set_rotation(scr_dir_t dir) { esp_err_t ret; uint8_t reg_data = MADCTL_RGB; if (SCR_DIR_MAX < dir) { dir >>= 5; } LCD_CHECK(dir < 8, "Unsupport rotate direction", ESP_ERR_INVALID_ARG); switch (dir) { case SCR_DIR_LRTB: g_lcd_handle.width = g_lcd_handle.original_width; g_lcd_handle.height = g_lcd_handle.original_height; break; case SCR_DIR_LRBT: reg_data |= MADCTL_MY; g_lcd_handle.width = g_lcd_handle.original_width; g_lcd_handle.height = g_lcd_handle.original_height; break; case SCR_DIR_RLTB: reg_data |= MADCTL_MX; g_lcd_handle.width = g_lcd_handle.original_width; g_lcd_handle.height = g_lcd_handle.original_height; break; case SCR_DIR_RLBT: reg_data |= MADCTL_MX | MADCTL_MY; g_lcd_handle.width = g_lcd_handle.original_width; g_lcd_handle.height = g_lcd_handle.original_height; break; case SCR_DIR_TBLR: reg_data |= MADCTL_MV; g_lcd_handle.width = g_lcd_handle.original_height; g_lcd_handle.height = g_lcd_handle.original_width; break; case SCR_DIR_BTLR: reg_data |= MADCTL_MY | MADCTL_MV; g_lcd_handle.width = g_lcd_handle.original_height; g_lcd_handle.height = g_lcd_handle.original_width; break; case 
SCR_DIR_TBRL: reg_data |= MADCTL_MX | MADCTL_MV; g_lcd_handle.width = g_lcd_handle.original_height; g_lcd_handle.height = g_lcd_handle.original_width; break; case SCR_DIR_BTRL: reg_data |= MADCTL_MX | MADCTL_MY | MADCTL_MV; g_lcd_handle.width = g_lcd_handle.original_height; g_lcd_handle.height = g_lcd_handle.original_width; break; default: break; } ESP_LOGI(TAG, "MADCTL=0x%x", reg_data); ret = LCD_WRITE_REG(ILI9488_MADCTL, reg_data); LCD_CHECK(ESP_OK == ret, "Set screen rotate failed", ESP_FAIL); g_lcd_handle.dir = dir; return ESP_OK; } esp_err_t lcd_ili9488_get_info(scr_info_t *info) { LCD_CHECK(NULL != info, "info pointer invalid", ESP_ERR_INVALID_ARG); info->width = g_lcd_handle.width; info->height = g_lcd_handle.height; info->dir = g_lcd_handle.dir; info->name = LCD_NAME; info->color_type = SCR_COLOR_TYPE_RGB565; info->bpp = LCD_BPP; return ESP_OK; } esp_err_t lcd_ili9488_set_invert(bool is_invert) { return LCD_WRITE_CMD(is_invert ? ILI9488_INVON : ILI9488_INVOFF); } esp_err_t lcd_ili9488_set_window(uint16_t x0, uint16_t y0, uint16_t x1, uint16_t y1) { LCD_CHECK((x1 < g_lcd_handle.width) && (y1 < g_lcd_handle.height), "The set coordinates exceed the screen size", ESP_ERR_INVALID_ARG); LCD_CHECK((x0 <= x1) && (y0 <= y1), "Window coordinates invalid", ESP_ERR_INVALID_ARG); esp_err_t ret = ESP_OK; scr_utility_apply_offset(&g_lcd_handle, ILI9488_RESOLUTION_HOR, ILI9488_RESOLUTION_VER, &x0, &y0, &x1, &y1); ret |= LCD_WRITE_CMD(ILI9488_CASET); ret |= LCD_WRITE_DATA(x0 >> 8); ret |= LCD_WRITE_DATA(x0 & 0xff); ret |= LCD_WRITE_DATA(x1 >> 8); ret |= LCD_WRITE_DATA(x1 & 0xff); ret |= LCD_WRITE_CMD(ILI9488_PASET); ret |= LCD_WRITE_DATA(y0 >> 8); ret |= LCD_WRITE_DATA(y0 & 0xff); ret |= LCD_WRITE_DATA(y1 >> 8); ret |= LCD_WRITE_DATA(y1 & 0xff); ret |= LCD_WRITE_CMD(ILI9488_RAMWR); LCD_CHECK(ESP_OK == ret, "Set window failed", ESP_FAIL); return ESP_OK; } esp_err_t lcd_ili9488_write_ram_data(uint16_t color) { static uint8_t data[2]; data[0] = (uint8_t)(color & 0xff); data[1] = (uint8_t)(color >> 8); return LCD_WRITE(data, 2); } esp_err_t lcd_ili9488_draw_pixel(uint16_t x, uint16_t y, uint16_t color) { esp_err_t ret; ret = lcd_ili9488_set_window(x, y, x, y); if (ESP_OK != ret) { return ESP_FAIL; } return lcd_ili9488_write_ram_data(color); } esp_err_t lcd_ili9488_draw_bitmap(uint16_t x, uint16_t y, uint16_t w, uint16_t h, uint16_t *bitmap) { esp_err_t ret; LCD_CHECK(NULL != bitmap, "bitmap pointer invalid", ESP_ERR_INVALID_ARG); LCD_IFACE_ACQUIRE(); ret = lcd_ili9488_set_window(x, y, x + w - 1, y + h - 1); if (ESP_OK != ret) { return ESP_FAIL; } uint32_t len = w * h; ret = LCD_WRITE((uint8_t *)bitmap, 2 * len); LCD_IFACE_RELEASE(); LCD_CHECK(ESP_OK == ret, "lcd write ram data failed", ESP_FAIL); return ESP_OK; } static void lcd_ili9488_init_reg(void) { LCD_WRITE_CMD(ILI9488_SWRESET); vTaskDelay(120 / portTICK_RATE_MS); // positive gamma control LCD_WRITE_CMD(ILI9488_GMCTRP1); LCD_WRITE_DATA(0x00); LCD_WRITE_DATA(0x03); LCD_WRITE_DATA(0x09); LCD_WRITE_DATA(0x08); LCD_WRITE_DATA(0x16); LCD_WRITE_DATA(0x0A); LCD_WRITE_DATA(0x3F); LCD_WRITE_DATA(0x78); LCD_WRITE_DATA(0x4C); LCD_WRITE_DATA(0x09); LCD_WRITE_DATA(0x0A); LCD_WRITE_DATA(0x08); LCD_WRITE_DATA(0x16); LCD_WRITE_DATA(0x1A); LCD_WRITE_DATA(0x0F); // negative gamma control LCD_WRITE_CMD(ILI9488_GMCTRN1); LCD_WRITE_DATA(0x00); LCD_WRITE_DATA(0x16); LCD_WRITE_DATA(0x19); LCD_WRITE_DATA(0x03); LCD_WRITE_DATA(0x0F); LCD_WRITE_DATA(0x05); LCD_WRITE_DATA(0x32); LCD_WRITE_DATA(0x45); LCD_WRITE_DATA(0x46); LCD_WRITE_DATA(0x04); LCD_WRITE_DATA(0x0E); 
LCD_WRITE_DATA(0x0D); LCD_WRITE_DATA(0x35); LCD_WRITE_DATA(0x37); LCD_WRITE_DATA(0x0F); // Power Control 1 (Vreg1out, Verg2out) LCD_WRITE_CMD(ILI9488_PWCTR1); LCD_WRITE_DATA(0x17); LCD_WRITE_DATA(0x15); // Power Control 2 (VGH,VGL) LCD_WRITE_CMD(ILI9488_PWCTR2); LCD_WRITE_DATA(0x41); // Power Control 3 (Vcom) LCD_WRITE_CMD(ILI9488_VMCTR1); LCD_WRITE_DATA(0x00); LCD_WRITE_DATA(0x12); LCD_WRITE_DATA(0x80); LCD_WRITE_CMD(ILI9488_IMCTR); LCD_WRITE_DATA(0x80); // Interface Mode Control (SDO NOT USE) LCD_WRITE_CMD(ILI9488_PIXFMT); LCD_WRITE_DATA(0x55); // Interface Pixel Format (16 bit) LCD_WRITE_CMD(ILI9488_FRMCTR1); LCD_WRITE_DATA(0xA0); // Frame rate (60Hz) LCD_WRITE_CMD(ILI9488_INVCTR); LCD_WRITE_DATA(0x02); // Display Inversion Control (2-dot) LCD_WRITE_CMD(ILI9488_DFUNCTR); // Display Function Control RGB/MCU Interface Control LCD_WRITE_DATA(0x02); LCD_WRITE_DATA(0x02); LCD_WRITE_CMD(ILI9488_IMGFUNCT); // Set Image Functio (Disable 24 bit data) LCD_WRITE_DATA(0x00); LCD_WRITE_CMD(ILI9488_ADJCTR3); // Adjust Control (D7 stream, loose) LCD_WRITE_DATA(0xa9); LCD_WRITE_DATA(0x51); LCD_WRITE_DATA(0x2c); LCD_WRITE_DATA(0x82); LCD_WRITE_CMD(ILI9488_SLPOUT); // Exit Sleep LCD_WRITE_CMD(ILI9488_DISPON); // Display on vTaskDelay(120 / portTICK_RATE_MS); }
6,665
2,858
<filename>flashlight/app/lm/plugins/LMAdae1024SinposL16H8Fc4096Dp01Ldp0Amp.cpp
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

#include "flashlight/fl/contrib/modules/modules.h"
#include "flashlight/fl/flashlight.h"
#include "flashlight/fl/nn/modules/modules.h"

/**
 * This is an example of a plugin for a language model architecture which
 * expects input of size Time x Batch x 1 x 1 and is used with the adaptive
 * softmax as criterion (so the last linear layer is absent here).
 * This architecture also uses adaptive embedding and sinusoidal positional
 * embedding.
 */
class LmModel : public fl::Container {
 public:
  LmModel(int64_t nLabel) {
    // Time x B x 1 x 1
    std::vector<int> cutoffs = {10000, 50000, (int)nLabel};
    frontend_ = std::make_shared<fl::Sequential>();
    frontend_->add(std::make_shared<fl::AdaptiveEmbedding>(1024, cutoffs));
    // nFeature x Time x B x 1
    frontend_->add(std::make_shared<fl::SinusoidalPositionEmbedding>(1024, 32));
    frontend_->add(std::make_shared<fl::Dropout>(0.1));
    // nFeature x Time x Batch x 1
    add(frontend_);
    for (int trIdx = 0; trIdx < 16; trIdx++) {
      auto layer = std::make_shared<fl::Transformer>(
          1024, 128, 4096, 8, 0, 0.1, 0., true, false);
      transformers_.push_back(layer);
      add(layer);
    }
  }

  std::vector<fl::Variable> forward(
      const std::vector<fl::Variable>& input) override {
    auto out = input[0];
    // Avoid fp16 usage in any embedding-related calls.
    out = frontend_->forward(out);
    // Run all transformer forward passes in fp16
    out = out.as(f16);
    for (int trIdx = 0; trIdx < transformers_.size(); trIdx++) {
      out = transformers_[trIdx]->forward({out, fl::Variable()}).front();
    }
    // Make sure to pass an fp32 tensor to the criterion.
    // Avoid fp16 usage in any embedding-related calls.
    return {out.as(f32)};
  }

  std::string prettyString() const override {
    std::ostringstream ss;
    ss << "LmModel: ";
    ss << frontend_->prettyString() << "\n";
    for (int trIdx = 0; trIdx < transformers_.size(); trIdx++) {
      ss << transformers_[trIdx]->prettyString() << "\n";
    }
    return ss.str();
  }

 private:
  LmModel() = default;

  std::shared_ptr<fl::Sequential> frontend_;
  std::vector<std::shared_ptr<fl::Transformer>> transformers_;

  FL_SAVE_LOAD_WITH_BASE(fl::Container, frontend_, transformers_)
};

extern "C" fl::Module* createModule(int64_t, int64_t nLabel) {
  auto m = std::make_unique<LmModel>(nLabel);
  return m.release();
}

CEREAL_REGISTER_TYPE(LmModel)
1,009
412
public class ContainerForFloat {
    public Float floatField;

    public ContainerForFloat(Float f) {
        floatField = f;
    }
}
33
428
/* * Copyright 2012 SURFnet bv, The Netherlands * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.surfnet.oaaas.auth.principal; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; /** * Holder and parser for the credential pair expected in a Basic Auth header. */ public class BasicAuthCredentials { private static final char SEMI_COLON = ':'; private static final int BASIC_AUTH_PREFIX_LENGTH = "Basic ".length(); private String username; private String password; private static class NullAuthCredentials extends BasicAuthCredentials { private NullAuthCredentials() { super(null, null); } @Override public boolean isValid() { return true; } @Override public boolean isNull() { return true; } } private static final BasicAuthCredentials NULL_CREDENTIALS = new NullAuthCredentials(); private static final BasicAuthCredentials INVALID_CREDENTIALS = new BasicAuthCredentials(null, null); public static BasicAuthCredentials createCredentialsFromHeader(final String authorizationHeader) { if (authorizationHeader == null) { return NULL_CREDENTIALS; } if (authorizationHeader.length() < BASIC_AUTH_PREFIX_LENGTH) { return INVALID_CREDENTIALS; } String authPart = authorizationHeader.substring(BASIC_AUTH_PREFIX_LENGTH); String userpass = new String(Base64.decodeBase64(authPart.getBytes())); int index = userpass.indexOf(SEMI_COLON); if (index < 1) { return INVALID_CREDENTIALS; } String name = userpass.substring(0, index); String pass = userpass.substring(index + 1); return new BasicAuthCredentials(name, pass); } /** * Create a credential with the given username and password. * * @param username * @param password */ public BasicAuthCredentials(String username, String password) { super(); this.username = username; this.password = password; } /** * @return {@code true} if this is a valid credential */ public boolean isValid() { return !StringUtils.isBlank(username) && !StringUtils.isBlank(password); } public boolean isNull() { return false; } /** * Get the username. * @return the username or null if the username was not found */ public String getUsername() { return username; } /** * Get the password. * @return the password or null if the password was not found */ public String getPassword() { return password; } @Override public String toString() { return "UserPassCredentials [username=" + username + "]"; } /** * Construct and return the Base64 encoded Basic Auth header value for this credential. * @return the header value suitable for insertion into an HTTP request */ public String getAuthorizationHeaderValue() { String result = null; if (!StringUtils.isBlank(username) && !StringUtils.isBlank(password)) { String value = username + ":" + password; result = "Basic " + new String(Base64.encodeBase64(value.getBytes())) ; } return result; } }
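To make the parsing contract above concrete, here is a minimal usage sketch. It assumes the class and commons-codec are on the classpath; the demo class name and the "john:secret" credentials are illustrative only and not taken from the project.

import org.apache.commons.codec.binary.Base64;
import org.surfnet.oaaas.auth.principal.BasicAuthCredentials;

public class BasicAuthCredentialsDemo {
  public static void main(String[] args) {
    // Build a Basic Auth header value for the illustrative pair "john:secret".
    String headerValue = "Basic " + new String(Base64.encodeBase64("john:secret".getBytes()));

    // Parse the header back into a credential pair.
    BasicAuthCredentials credentials = BasicAuthCredentials.createCredentialsFromHeader(headerValue);

    // A well-formed header yields a valid credential whose header round-trips.
    System.out.println(credentials.isValid());                                          // true
    System.out.println(credentials.getUsername());                                      // john
    System.out.println(credentials.getAuthorizationHeaderValue().equals(headerValue));  // true

    // A missing header is represented by the null-object credential.
    BasicAuthCredentials absent = BasicAuthCredentials.createCredentialsFromHeader(null);
    System.out.println(absent.isNull());                                                // true
  }
}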
1,199
1,444
<reponame>J-VOL/mage package mage.cards.l; import mage.MageInt; import mage.abilities.Ability; import mage.abilities.common.PutCardIntoGraveFromAnywhereAllTriggeredAbility; import mage.abilities.common.SimpleStaticAbility; import mage.abilities.costs.Cost; import mage.abilities.costs.common.PayLifeCost; import mage.abilities.effects.OneShotEffect; import mage.abilities.effects.ReplacementEffectImpl; import mage.abilities.effects.common.continuous.AddCardSubTypeTargetEffect; import mage.abilities.keyword.FlyingAbility; import mage.cards.Card; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.*; import mage.filter.StaticFilters; import mage.game.Game; import mage.game.events.GameEvent; import mage.game.events.ZoneChangeEvent; import mage.game.permanent.Permanent; import mage.players.Player; import mage.target.targetpointer.FixedTarget; import java.util.UUID; /** * @author TheElk801 */ public final class LorcanWarlockCollector extends CardImpl { public LorcanWarlockCollector(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{5}{B}{B}"); this.addSuperType(SuperType.LEGENDARY); this.subtype.add(SubType.DEVIL); this.power = new MageInt(6); this.toughness = new MageInt(6); // Flying this.addAbility(FlyingAbility.getInstance()); // Whenever a creature card is put into an opponent's graveyard from anywhere, you may pay life equal to its mana value. If you do, put it onto the battlefield under your control. It's a Warlock in addition to its other types. this.addAbility(new PutCardIntoGraveFromAnywhereAllTriggeredAbility( new LorcanWarlockCollectorReturnEffect(), true, StaticFilters.FILTER_CARD_CREATURE_A, TargetController.OPPONENT, SetTargetPointer.CARD )); // If a Warlock you control would die, exile it instead. this.addAbility(new SimpleStaticAbility(new LorcanWarlockCollectorReplacementEffect())); } private LorcanWarlockCollector(final LorcanWarlockCollector card) { super(card); } @Override public LorcanWarlockCollector copy() { return new LorcanWarlockCollector(this); } } class LorcanWarlockCollectorReturnEffect extends OneShotEffect { LorcanWarlockCollectorReturnEffect() { super(Outcome.PutCreatureInPlay); staticText = "pay life equal to its mana value. If you do, " + "put it onto the battlefield under your control. 
" + "It's a Warlock in addition to its other types"; } private LorcanWarlockCollectorReturnEffect(final LorcanWarlockCollectorReturnEffect effect) { super(effect); } @Override public LorcanWarlockCollectorReturnEffect copy() { return new LorcanWarlockCollectorReturnEffect(this); } @Override public boolean apply(Game game, Ability source) { Player player = game.getPlayer(source.getControllerId()); Card card = game.getCard(getTargetPointer().getFirst(game, source)); if (player == null || card == null) { return false; } Cost cost = new PayLifeCost(card.getManaValue()); if (!cost.canPay(source, source, source.getControllerId(), game) || !cost.pay(source, game, source, source.getControllerId(), true)) { return false; } game.addEffect(new AddCardSubTypeTargetEffect(SubType.WARLOCK, Duration.Custom).setTargetPointer(new FixedTarget(card.getId(), card.getZoneChangeCounter(game) + 1)), source); player.moveCards(card, Zone.BATTLEFIELD, source, game); return true; } } class LorcanWarlockCollectorReplacementEffect extends ReplacementEffectImpl { LorcanWarlockCollectorReplacementEffect() { super(Duration.WhileOnBattlefield, Outcome.Exile); staticText = "if a Warlock you control would die, exile it instead"; } private LorcanWarlockCollectorReplacementEffect(final LorcanWarlockCollectorReplacementEffect effect) { super(effect); } @Override public LorcanWarlockCollectorReplacementEffect copy() { return new LorcanWarlockCollectorReplacementEffect(this); } @Override public boolean replaceEvent(GameEvent event, Ability source, Game game) { Permanent permanent = ((ZoneChangeEvent) event).getTarget(); if (permanent == null) { return false; } Player player = game.getPlayer(permanent.getControllerId()); return player != null && player.moveCards(permanent, Zone.EXILED, source, game); } @Override public boolean checksEventType(GameEvent event, Game game) { return event.getType() == GameEvent.EventType.ZONE_CHANGE; } @Override public boolean applies(GameEvent event, Ability source, Game game) { ZoneChangeEvent zEvent = (ZoneChangeEvent) event; return zEvent.getTarget() != null && zEvent.getTarget().isControlledBy(source.getControllerId()) && zEvent.getTarget().hasSubtype(SubType.WARLOCK, game) && zEvent.isDiesEvent(); } }
1,895
517
/**
 * Copyright@2010 <NAME>
 */
package ro.isdc.wro.util.io;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

/**
 * An input stream which allows to be read multiple times. The only condition is to call reset or close method after
 * reading it.
 *
 * @author <NAME>
 * @created 18 Aug 2010
 */
public class UnclosableBufferedInputStream extends BufferedInputStream {
  public UnclosableBufferedInputStream(final InputStream in) {
    super(in);
    super.mark(Integer.MAX_VALUE);
  }

  public UnclosableBufferedInputStream(final byte[] bytes) {
    this(new ByteArrayInputStream(bytes));
  }

  @Override
  public void close() throws IOException {
    super.reset();
  }
}
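A minimal sketch of the re-read behaviour described in the javadoc above: because close() only rewinds to the mark set in the constructor, the same bytes can be consumed twice. The demo class, the readAll helper, and the "hello" payload are illustrative assumptions.

import java.io.IOException;
import java.io.InputStream;
import ro.isdc.wro.util.io.UnclosableBufferedInputStream;

public class UnclosableBufferedInputStreamDemo {
  public static void main(String[] args) throws IOException {
    UnclosableBufferedInputStream stream =
        new UnclosableBufferedInputStream("hello".getBytes());

    System.out.println(readAll(stream)); // hello
    stream.close();                      // rewinds to the mark instead of closing
    System.out.println(readAll(stream)); // hello, readable a second time
  }

  // Drains the stream into a String; illustrative helper only.
  private static String readAll(InputStream in) throws IOException {
    StringBuilder sb = new StringBuilder();
    int b;
    while ((b = in.read()) != -1) {
      sb.append((char) b);
    }
    return sb.toString();
  }
}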
246
852
#ifndef FWCore_Utilities_Span_h
#define FWCore_Utilities_Span_h

#include <cstddef>

namespace edm {
  /*
   * An edm::Span wraps begin() and end() iterators to a contiguous sequence of objects, with the first element of
   * the sequence at position zero. In other words, the iterators should refer to random-access containers.
   * To be replaced with std::span in C++20.
   */
  template <class T>
  class Span {
  public:
    Span(T begin, T end) : begin_(begin), end_(end) {}

    T begin() const { return begin_; }
    T end() const { return end_; }

    bool empty() const { return begin_ == end_; }
    auto size() const { return end_ - begin_; }

    auto const& operator[](std::size_t idx) const { return *(begin_ + idx); }

    auto const& front() const { return *begin_; }
    auto const& back() const { return *(end_ - 1); }

  private:
    const T begin_;
    const T end_;
  };
};  // namespace edm

#endif
341
1,083
<filename>sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.sdk.file; import junit.framework.TestCase; import org.apache.avro.generic.GenericData; import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.index.IndexStoreManager; import org.apache.carbondata.core.datastore.impl.FileFactory; import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.core.metadata.datatype.Field; import org.apache.carbondata.core.metadata.datatype.StructField; import org.apache.carbondata.core.scan.expression.ColumnExpression; import org.apache.carbondata.core.scan.expression.Expression; import org.apache.carbondata.core.scan.expression.LiteralExpression; import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression; import org.apache.carbondata.core.scan.expression.conditional.GreaterThanExpression; import org.apache.carbondata.core.scan.expression.conditional.InExpression; import org.apache.carbondata.core.scan.expression.conditional.LessThanExpression; import org.apache.carbondata.core.scan.expression.conditional.NotEqualsExpression; import org.apache.carbondata.core.scan.expression.conditional.NotInExpression; import org.apache.carbondata.core.scan.expression.logical.AndExpression; import org.apache.carbondata.core.scan.expression.logical.OrExpression; import org.apache.carbondata.core.util.CarbonProperties; import org.apache.commons.io.FileUtils; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import java.io.File; import java.io.FileFilter; import java.io.FilenameFilter; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.apache.carbondata.core.scan.filter.FilterUtil.prepareEqualToExpression; import static org.apache.carbondata.core.scan.filter.FilterUtil.prepareEqualToExpressionSet; import static org.apache.carbondata.core.scan.filter.FilterUtil.prepareOrExpression; public class CarbonReaderTest extends TestCase { @Test public void testWriteAndReadFiles() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", 
DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path, "_temp") .projection(new String[]{"name", "age"}).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 200); // Read again CarbonReader reader2 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .build(); i = 0; while (reader2.hasNext()) { Object[] row = (Object[]) reader2.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 200); reader2.close(); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testWriteAndReadJson() throws IOException, InterruptedException { int numRows = 100; String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); String json = "{\"name\":\"bob\", \"age\":10}"; Schema schema = new Schema( new Field[]{ new Field("name", "string"), new Field("age", "int")}); try { CarbonWriter writer = CarbonWriter.builder().outputPath(path) .withJsonInput(schema).writtenBy("AvroCarbonWriterTest").build(); for (int i = 0; i < numRows; i++) { writer.write(json); } writer.close(); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } File[] dataFiles = new File(path).listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT); } }); Assert.assertNotNull(dataFiles); Assert.assertEquals(1, dataFiles.length); // read it and verify CarbonReader reader = CarbonReader.builder(path, "_temp") .projection(new String[]{"name", "age"}).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals("bob", row[0]); Assert.assertEquals(10, row[1]); i++; } Assert.assertEquals(i, numRows); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithZeroBatchSize() throws Exception { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance().clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(10, new Schema(fields), path); CarbonReader reader; reader = CarbonReader.builder(path).withRowRecordReader().withBatch(0).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 10); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadBatchWithZeroBatchSize() throws Exception { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance().clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(10, new Schema(fields), path); CarbonReader reader; reader = CarbonReader.builder(path).withRowRecordReader().withBatch(0).build(); int i = 0; while (reader.hasNext()) { Object[] row = reader.readNextBatchRow(); Assert.assertEquals(row.length, 10); i++; } Assert.assertEquals(i, 1); 
FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalSimple() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); String path1 = path + "/0testdir"; String path2 = path + "/testdir"; FileUtils.deleteDirectory(new File(path)); FileFactory.getCarbonFile(path1); FileFactory.mkdirs(path1); FileFactory.getCarbonFile(path2); FileFactory.mkdirs(path2); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("name", DataTypes.STRING); EqualToExpression equalToExpression = new EqualToExpression(columnExpression, new LiteralExpression("robot1", DataTypes.STRING)); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .filter(equalToExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); // Default sort column is applied for dimensions. So, need to validate accordingly assert ("robot1".equals(row[0])); i++; } Assert.assertEquals(i, 20); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactional2() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("age", DataTypes.INT); EqualToExpression equalToExpression = new EqualToExpression(columnExpression, new LiteralExpression("1", DataTypes.INT)); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .filter(equalToExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); // Default sort column is applied for dimensions. 
So, need to validate accordingly assert (((String) row[0]).contains("robot")); assert (1 == (int) (row[1])); i++; } Assert.assertEquals(i, 1); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalAnd() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); EqualToExpression equalToExpression = new EqualToExpression(columnExpression, new LiteralExpression("3.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); EqualToExpression equalToExpression2 = new EqualToExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(equalToExpression, equalToExpression2); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (((String) row[0]).contains("robot7")); assert (7 == (int) (row[1])); assert (3.5 == (double) (row[2])); i++; } Assert.assertEquals(i, 1); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalOr() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); EqualToExpression equalToExpression = new EqualToExpression(columnExpression, new LiteralExpression("3.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); EqualToExpression equalToExpression2 = new EqualToExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); OrExpression orExpression = new OrExpression(equalToExpression, equalToExpression2); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(orExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (((String) row[0]).contains("robot7")); assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); i++; } Assert.assertEquals(i, 20); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalGreaterThan() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new 
Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); GreaterThanExpression greaterThanExpression = new GreaterThanExpression(columnExpression, new LiteralExpression("13.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); EqualToExpression equalToExpression2 = new EqualToExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(greaterThanExpression, equalToExpression2); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (((String) row[0]).contains("robot7")); assert (7 == ((int) (row[1]) % 10)); assert ((double) row[2] > 13.5); i++; } Assert.assertEquals(i, 17); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterEqualSet() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); List<Object> values = new ArrayList<>(); values.add("robot7"); values.add("robot1"); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(prepareEqualToExpressionSet("name", "String", values)) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); if (((String) row[0]).contains("robot7")) { assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else if (((String) row[0]).contains("robot1")) { assert (1 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else { Assert.assertTrue(false); } i++; } Assert.assertEquals(i, 40); reader.close(); List<Object> values2 = new ArrayList<>(); values2.add(1); values2.add(7); CarbonReader reader2 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(prepareEqualToExpressionSet("age", "int", values2)) .build(); i = 0; while (reader2.hasNext()) { Object[] row = (Object[]) reader2.readNextRow(); if (((String) row[0]).contains("robot7")) { assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else if (((String) row[0]).contains("robot1")) { assert (1 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else { Assert.assertTrue(false); } i++; } Assert.assertEquals(i, 2); reader2.close(); List<Object> values3 = new ArrayList<>(); values3.add(0.5); values3.add(3.5); CarbonReader reader3 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(prepareEqualToExpressionSet("doubleField", "double", values3)) .build(); i = 0; while (reader3.hasNext()) { Object[] row = (Object[]) reader3.readNextRow(); if (((String) row[0]).contains("robot7")) { assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else if (((String) row[0]).contains("robot1")) { assert (1 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else { 
Assert.assertTrue(false); } i++; } Assert.assertEquals(i, 2); reader3.close(); CarbonReader reader4 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(prepareEqualToExpression("name", "string", "robot7")) .build(); i = 0; while (reader4.hasNext()) { Object[] row = (Object[]) reader4.readNextRow(); if (((String) row[0]).contains("robot7")) { assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else { Assert.assertTrue(false); } i++; } Assert.assertEquals(i, 20); reader4.close(); List<Expression> expressions = new ArrayList<>(); expressions.add(prepareEqualToExpression("name", "String", "robot1")); expressions.add(prepareEqualToExpression("name", "String", "robot7")); expressions.add(prepareEqualToExpression("age", "int", "2")); CarbonReader reader5 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(prepareOrExpression(expressions)) .build(); i = 0; while (reader5.hasNext()) { Object[] row = (Object[]) reader5.readNextRow(); if (((String) row[0]).contains("robot7")) { assert (7 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else if (((String) row[0]).contains("robot1")) { assert (1 == ((int) (row[1]) % 10)); assert (0.5 == ((double) (row[2]) % 1)); } else if (((String) row[0]).contains("robot2")) { assert (2 == ((int) (row[1]) % 10)); assert (0 == ((double) (row[2]) % 1)); } else { Assert.assertTrue(false); } i++; } Assert.assertEquals(i, 41); reader5.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalLessThan() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); LessThanExpression lessThanExpression = new LessThanExpression(columnExpression, new LiteralExpression("13.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); EqualToExpression equalToExpression2 = new EqualToExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(lessThanExpression, equalToExpression2); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (((String) row[0]).contains("robot7")); assert (7 == ((int) (row[1]) % 10)); assert ((double) row[2] < 13.5); i++; } Assert.assertEquals(i, 2); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalNotEqual() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); 
TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); LessThanExpression lessThanExpression = new LessThanExpression(columnExpression, new LiteralExpression("13.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); NotEqualsExpression notEqualsExpression = new NotEqualsExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(lessThanExpression, notEqualsExpression); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (!((String) row[0]).contains("robot7")); assert (7 != ((int) (row[1]) % 10)); assert ((double) row[2] < 13.5); i++; } Assert.assertEquals(i, 25); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalIn() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); LessThanExpression lessThanExpression = new LessThanExpression(columnExpression, new LiteralExpression("13.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); InExpression inExpression = new InExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(lessThanExpression, inExpression); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (((String) row[0]).contains("robot7")); assert (7 == ((int) (row[1]) % 10)); assert ((double) row[2] < 13.5); i++; } Assert.assertEquals(i, 2); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterOfNonTransactionalNotIn() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("doubleField", DataTypes.DOUBLE); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("doubleField", DataTypes.DOUBLE); LessThanExpression lessThanExpression = new LessThanExpression(columnExpression, new LiteralExpression("13.5", DataTypes.DOUBLE)); ColumnExpression columnExpression2 = new ColumnExpression("name", DataTypes.STRING); NotInExpression notInExpression = new NotInExpression(columnExpression2, new LiteralExpression("robot7", DataTypes.STRING)); AndExpression andExpression = new AndExpression(lessThanExpression, notInExpression); 
CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "doubleField"}) .filter(andExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (!((String) row[0]).contains("robot7")); assert (7 != ((int) (row[1]) % 10)); assert ((double) row[2] < 13.5); i++; } Assert.assertEquals(i, 25); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testWriteAndReadFilesWithReaderBuildFail() throws IOException, InterruptedException { String path1 = "./testWriteFiles"; String path2 = "./testWriteFiles2"; FileUtils.deleteDirectory(new File(path1)); FileUtils.deleteDirectory(new File(path2)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path1), false); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path2), false); Field[] fields = new Field[] { new Field("c1", "string"), new Field("c2", "int") }; Schema schema = new Schema(fields); CarbonWriterBuilder builder = CarbonWriter.builder(); CarbonWriter carbonWriter = null; try { carbonWriter = builder.outputPath(path1).uniqueIdentifier(12345) .withCsvInput(schema).writtenBy("CarbonReaderTest").build(); } catch (InvalidLoadOptionException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } carbonWriter.write(new String[]{"MNO", "100"}); carbonWriter.close(); Field[] fields1 = new Field[]{new Field("p1", "string"), new Field("p2", "int")}; Schema schema1 = new Schema(fields1); CarbonWriterBuilder builder1 = CarbonWriter.builder(); CarbonWriter carbonWriter1 = null; try { carbonWriter1 = builder1.outputPath(path2).uniqueIdentifier(12345) .withCsvInput(schema1).writtenBy("CarbonReaderTest").build(); } catch (InvalidLoadOptionException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } carbonWriter1.write(new String[]{"PQR", "200"}); carbonWriter1.close(); try { CarbonReader reader = CarbonReader.builder(path1, "_temp") .projection(new String[]{"c1", "c3"}) .build(); Assert.fail(); } catch (Exception e) { System.out.println("Success"); Assert.assertTrue(true); } CarbonReader reader1 = CarbonReader.builder(path2, "_temp1") .projection(new String[]{"p1", "p2"}) .build(); while (reader1.hasNext()) { Object[] row1 = (Object[]) reader1.readNextRow(); System.out.println(row1[0]); System.out.println(row1[1]); } reader1.close(); FileUtils.deleteDirectory(new File(path1)); FileUtils.deleteDirectory(new File(path2)); } @Test public void testReadColumnTwice() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age", "age", "name"}) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); // Default sort column is applied for dimensions. So, need to validate accordingly Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); Assert.assertEquals(i, row[2]); Assert.assertEquals("robot" + (i % 10), row[3]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); } // Below test case was working with transactional table as schema file was present. 
// now we don't support transactional table from SDK. only flat folder is supported. // and currently flat folder will never check for schema files. @Ignore public void readFilesParallel() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .build(); // Reader 2 CarbonReader reader2 = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .build(); while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Object[] row2 = (Object[]) reader2.readNextRow(); // parallel compare Assert.assertEquals(row[0], row2[0]); Assert.assertEquals(row[1], row2[1]); } reader.close(); reader2.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadAfterClose() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path, "_temp") .projection(new String[]{"name", "age"}).build(); reader.close(); String msg = "CarbonReader not initialise, please create it first."; try { reader.hasNext(); assert (false); } catch (RuntimeException e) { assert (e.getMessage().equals(msg)); } try { reader.readNextRow(); assert (false); } catch (RuntimeException e) { assert (e.getMessage().equals(msg)); } try { reader.close(); assert (false); } catch (RuntimeException e) { assert (e.getMessage().equals(msg)); } FileUtils.deleteDirectory(new File(path)); } @Test public void testWriteAndReadFilesWithoutTableName() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader .builder(path) .projection(new String[]{"name", "age"}) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testWriteAndReadFilesWithoutTableName2() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); IndexStoreManager.getInstance() .clearIndexCache(AbsoluteTableIdentifier.from(path), false); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); 
Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadSchemaFromDataFile() throws IOException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith("carbondata"); } }); Assert.assertTrue(dataFiles != null); Assert.assertTrue(dataFiles.length > 0); Schema schema = CarbonSchemaReader.readSchema(dataFiles[0].getAbsolutePath()); Assert.assertTrue(schema.getFields().length == 2); Assert.assertEquals("name", (schema.getFields())[0].getFieldName()); Assert.assertEquals("age", (schema.getFields())[1].getFieldName()); Assert.assertEquals(DataTypes.STRING, (schema.getFields())[0].getDataType()); Assert.assertEquals(DataTypes.INT, (schema.getFields())[1].getDataType()); FileUtils.deleteDirectory(new File(path)); } @Test public void testWriteAndReadFilesNonTransactional() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); // Write to a Non Transactional Table TestUtil.writeFilesAndVerify(new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path, "_temp") .projection(new String[]{"name", "age"}) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); } CarbonProperties carbonProperties; @Override public void setUp() { carbonProperties = CarbonProperties.getInstance(); } private static final Logger LOGGER = LogServiceFactory.getLogService(CarbonReaderTest.class.getName()); @Test public void testTimeStampAndBadRecord() throws IOException, InterruptedException { String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT); String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT); String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL); String rootPath = new File(this.getClass().getResource("/").getPath() + "../../").getCanonicalPath(); String storeLocation = rootPath + "/target/"; carbonProperties .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation) .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss") .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT"); String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[9]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("intField", DataTypes.INT); fields[2] = new Field("shortField", DataTypes.SHORT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); 
fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); try { CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path); CarbonWriter writer = builder.withCsvInput(new Schema(fields)).writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 100; i++) { String[] row = new String[]{ "robot" + (i % 10), String.valueOf(i), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2018-05-12", "2018-05-12", "12.345" }; writer.write(row); String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345" }; writer.write(row2); } writer.close(); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } File folder = new File(path); Assert.assertTrue(folder.exists()); File[] dataFiles = folder.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT); } }); Assert.assertNotNull(dataFiles); Assert.assertTrue(dataFiles.length > 0); CarbonReader reader = CarbonReader.builder(path, "_temp") .projection(new String[]{ "stringField" , "shortField" , "intField" , "longField" , "doubleField" , "boolField" , "dateField" , "timeField" , "decimalField"}) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); int id = (int) row[2]; Assert.assertEquals("robot" + (id % 10), row[0]); Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]); Assert.assertEquals(Long.MAX_VALUE - id, row[3]); Assert.assertEquals((double) id / 2, row[4]); Assert.assertEquals(true, (boolean) row[5]); Assert.assertEquals("2019-03-02", row[6]); Assert.assertEquals("2019-02-12 03:03:34", row[7]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, badRecordLoc); } @Test public void testReadSchemaInDataFileAndSort() throws IOException, InterruptedException { String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT); String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT); String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL); String rootPath = new File(this.getClass().getResource("/").getPath() + "../../").getCanonicalPath(); String storeLocation = rootPath + "/target/"; carbonProperties .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation) .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss") .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT"); String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[9]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = 
new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); try { CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path); CarbonWriter writer = builder.withCsvInput(new Schema(fields)).writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 100; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345" }; writer.write(row2); } writer.close(); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } File[] dataFiles2 = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith("carbondata"); } }); Schema schema = CarbonSchemaReader.readSchema(dataFiles2[0].getAbsolutePath()); // sort the schema Arrays.sort(schema.getFields(), new Comparator<Field>() { @Override public int compare(Field o1, Field o2) { return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal()); } }); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } File folder = new File(path); Assert.assertTrue(folder.exists()); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(strings) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); int id = (int) row[2]; Assert.assertEquals("robot" + (id % 10), row[0]); Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]); Assert.assertEquals(Long.MAX_VALUE - id, row[3]); Assert.assertEquals((double) id / 2, row[4]); Assert.assertEquals(true, (boolean) row[5]); Assert.assertEquals("2019-03-02", row[6]); Assert.assertEquals("2019-02-12 03:03:34", row[7]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, badRecordLoc); } @Test public void testReadUserSchema() throws IOException, InterruptedException { String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT); String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT); String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL); String rootPath = new File(this.getClass().getResource("/").getPath() + "../../").getCanonicalPath(); String storeLocation = rootPath + "/target/"; carbonProperties .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation) .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss") 
.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT"); String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[9]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); try { CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path) .writtenBy("SDK_1.0.0"); CarbonWriter writer = builder.withCsvInput(new Schema(fields)).build(); for (int i = 0; i < 100; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345" }; writer.write(row2); } writer.close(); } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } File[] dataFiles1 = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith("carbondata"); } }); String versionDetails = CarbonSchemaReader.getVersionDetails(dataFiles1[0].getAbsolutePath()); assertTrue(versionDetails.contains("SDK_1.0.0 in version: ")); File[] dataFiles2 = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith("carbonindex"); } }); Schema schema = CarbonSchemaReader.readSchema(dataFiles2[0].getAbsolutePath()).asOriginOrder(); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } File folder = new File(path); Assert.assertTrue(folder.exists()); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(strings) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); int id = (int) row[2]; Assert.assertEquals("robot" + (id % 10), row[0]); Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]); Assert.assertEquals(Long.MAX_VALUE - id, row[3]); Assert.assertEquals((double) id / 2, row[4]); Assert.assertEquals(true, (boolean) row[5]); Assert.assertEquals("2019-03-02", row[6]); Assert.assertEquals("2019-02-12 03:03:34", row[7]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction); carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, badRecordLoc); } @Test public void testReadFilesWithProjectAllColumns() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path, "_temp").build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + 
(i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } Assert.assertEquals(i, 100); reader.close(); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadFilesWithDefaultProjection() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); CarbonReader reader = CarbonReader.builder(path, "_temp").build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(("robot" + (i % 10)), row[0]); Assert.assertEquals(i, row[1]); i++; } reader.close(); Assert.assertEquals(i, 100); } @Test public void testReadFilesWithNullProjection() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(100, new Schema(fields), path); try { CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{}) .build(); assert (false); } catch (RuntimeException e) { assert (e.getMessage().equalsIgnoreCase("Projection can't be empty")); } } private void WriteAvroComplexData(String mySchema, String json, String path) throws IOException, InvalidLoadOptionException { // conversion to GenericData.Record org.apache.avro.Schema nn = new org.apache.avro.Schema.Parser().parse(mySchema); GenericData.Record record = TestUtil.jsonToAvro(json, mySchema); try { CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withAvroInput(nn).writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 100; i++) { writer.write(record); } writer.close(); } catch (Exception e) { e.printStackTrace(); throw e; } } // TODO: support get schema of complex data type @Ignore public void testReadUserSchemaOfComplex() throws IOException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); String mySchema = "{" + " \"name\": \"address\", " + " \"type\": \"record\", " + " \"fields\": [ " + " { \"name\": \"name\", \"type\": \"string\"}, " + " { \"name\": \"age\", \"type\": \"int\"}, " + " { " + " \"name\": \"address\", " + " \"type\": { " + " \"type\" : \"record\", " + " \"name\" : \"my_address\", " + " \"fields\" : [ " + " {\"name\": \"street\", \"type\": \"string\"}, " + " {\"name\": \"city\", \"type\": \"string\"} " + " ]} " + " }, " + " {\"name\" :\"doorNum\", " + " \"type\" : { " + " \"type\" :\"array\", " + " \"items\":{ " + " \"name\" :\"EachdoorNums\", " + " \"type\" : \"int\", " + " \"default\":-1} " + " } " + " }] " + "}"; String json = "{\"name\":\"bob\", \"age\":10, \"address\" : {\"street\":\"abc\", \"city\":\"bang\"}, " + " \"doorNum\" : [1,2,3,4]}"; try { WriteAvroComplexData(mySchema, json, path); } catch (InvalidLoadOptionException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } File folder = new File(path); Assert.assertTrue(folder.exists()); File[] dataFiles = folder.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT); } }); Assert.assertNotNull(dataFiles); Assert.assertEquals(1, dataFiles.length); File[] dataFiles2 = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return 
name.endsWith("carbonindex"); } }); Schema schema = CarbonSchemaReader.readSchema(dataFiles2[0].getAbsolutePath()).asOriginOrder(); for (int i = 0; i < schema.getFields().length; i++) { System.out.println((schema.getFields())[i].getFieldName() + "\t" + schema.getFields()[i].getSchemaOrdinal()); } FileUtils.deleteDirectory(new File(path)); } @Test public void testReadMapType() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); String mySchema = "{ " + " \"name\": \"address\", " + " \"type\": \"record\", " + " \"fields\": [ " + " { " + " \"name\": \"name\", " + " \"type\": \"string\" " + " }, " + " { " + " \"name\": \"age\", " + " \"type\": \"int\" " + " }, " + " { " + " \"name\": \"mapRecord\", " + " \"type\": { " + " \"type\": \"map\", " + " \"values\": \"string\" " + " } " + " } " + " ] " + "} "; String json = "{\"name\":\"bob\", \"age\":10, \"mapRecord\": {\"street\": \"k-lane\", \"city\": \"bangalore\"}}"; try { WriteAvroComplexData(mySchema, json, path); } catch (InvalidLoadOptionException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); fields[2] = new Field("mapRecord", DataTypes.createMapType(DataTypes.STRING, DataTypes.STRING)); CarbonReader reader = CarbonReader.builder(path, "_temp").build(); // expected output String name = "bob"; int age = 10; Object[] mapKeValue = new Object[2]; mapKeValue[0] = new Object[]{"city", "street"}; mapKeValue[1] = new Object[]{"bangalore", "k-lane"}; int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); Assert.assertEquals(name, row[0]); Assert.assertArrayEquals(mapKeValue, (Object[]) row[1]); Assert.assertEquals(age, row[2]); i++; } reader.close(); Assert.assertEquals(i, 100); } @Test public void testReadWithFilterOfnonTransactionalwithsubfolders() throws IOException, InterruptedException { String path1 = "./testWriteFiles/1/" + System.nanoTime(); String path2 = "./testWriteFiles/2/" + System.nanoTime(); String path3 = "./testWriteFiles/3/" + System.nanoTime(); FileUtils.deleteDirectory(new File("./testWriteFiles")); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(200, new Schema(fields), path1); TestUtil.writeFilesAndVerify(200, new Schema(fields), path2); TestUtil.writeFilesAndVerify(200, new Schema(fields), path3); EqualToExpression equalToExpression = new EqualToExpression( new ColumnExpression("name", DataTypes.STRING), new LiteralExpression("robot1", DataTypes.STRING)); CarbonReader reader = CarbonReader .builder("./testWriteFiles", "_temp") .projection(new String[]{"name", "age"}) .filter(equalToExpression) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); // Default sort column is applied for dimensions. 
So, need to validate accordingly assert ("robot1".equals(row[0])); i++; } Assert.assertEquals(i, 60); reader.close(); FileUtils.deleteDirectory(new File("./testWriteFiles")); } @Test public void testReadSchemaFromDataFileArrayString() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[11]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "Hello#World#From#Carbon" }; writer.write(row2); } writer.close(); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } return name.endsWith("carbondata"); } }); if (dataFiles == null || dataFiles.length < 1) { throw new RuntimeException("Carbon data file not exists."); } Schema schema = CarbonSchemaReader .readSchema(dataFiles[0].getAbsolutePath()) .asOriginOrder(); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(strings) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (row[0].equals("robot" + i)); assert (row[2].equals(i)); assert (row[6].equals("2019-03-02")); Object[] arr = (Object[]) row[10]; assert (arr[0].equals("Hello")); assert (arr[3].equals("Carbon")); i++; } reader.close(); FileUtils.deleteDirectory(new File(path)); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testReadDateAndTimestampColumnInMap() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[6]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("dateField", DataTypes.DATE); fields[3] = new Field("timeField", DataTypes.TIMESTAMP); fields[4] = new Field("varcharField", DataTypes.VARCHAR); fields[5] = new Field("mapType", DataTypes.createMapType(DataTypes.TIMESTAMP, DataTypes.DATE)); CarbonWriter writer = CarbonWriter.builder().outputPath(path).withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[] { "robot" + (i % 10), String.valueOf(i % 10000), "2019-03-02", "2019-02-12 03:03:34", 
"varchar", "2019-03-30 17:22:31\u00022019-03-30" + "\u00012019-03-30 17:22:32\u00022019-03-10\u00012019-03-30 17:22:33\u00022019-03-14" + "\u00012019-03-30 17:22:36\u00022019-03-18" }; writer.write(row2); } writer.close(); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } return name.endsWith("carbondata"); } }); if (dataFiles == null || dataFiles.length < 1) { throw new RuntimeException("Carbon data file not exists."); } Schema schema = CarbonSchemaReader.readSchema(dataFiles[0].getAbsolutePath()).asOriginOrder(); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } // Read data CarbonReader reader = CarbonReader.builder(path).projection(strings).build(); Object[] mapKeValue = new Object[2]; mapKeValue[0] = new Object[] { "2019-03-30 17:22:36", "2019-03-30 17:22:33", "2019-03-30 17:22:32", "2019-03-30 17:22:31" }; mapKeValue[1] = new Object[] { "2019-03-18", "2019-03-14", "2019-03-10", "2019-03-30" }; int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (row[0].equals("robot" + i)); assert (row[2].equals("2019-03-02")); assert (row[3].equals("2019-02-12 03:03:34")); Assert.assertArrayEquals(mapKeValue, (Object[]) row[5]); i++; } Assert.assertEquals(i, 10); reader.close(); FileUtils.deleteDirectory(new File(path)); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testReadDateAndTimestampColumnInArray() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[7]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("dateField", DataTypes.DATE); fields[3] = new Field("timeField", DataTypes.TIMESTAMP); fields[4] = new Field("varcharField", DataTypes.VARCHAR); fields[5] = new Field("arrayFieldDate", DataTypes.createArrayType(DataTypes.DATE)); fields[6] = new Field("arrayFieldTimestamp", DataTypes.createArrayType(DataTypes.TIMESTAMP)); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder().outputPath(path).withLoadOptions(map) .withCsvInput(new Schema(fields)).writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[] { "robot" + (i % 10), String.valueOf(i % 10000), "2019-03-02", "2019-02-12 03:03:34", "varchar", "2019-03-02#2019-03-03#2019-03-04#2019-03-05", "2019-02-12 03:03:34#2019-02-12 03:03:38#2019-02-12 03:03:41#2019-02-12 03:12:34" }; writer.write(row2); } writer.close(); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } return name.endsWith("carbondata"); } }); if (dataFiles == null || dataFiles.length < 1) { throw new RuntimeException("Carbon data file not exists."); } Schema schema = CarbonSchemaReader.readSchema(dataFiles[0].getAbsolutePath()).asOriginOrder(); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } // Read data CarbonReader reader = CarbonReader.builder(path).projection(strings).build(); Object[] arrDate = new Object[] { "2019-03-02", "2019-03-03", 
"2019-03-04", "2019-03-05" }; Object[] arrTimestamp = new Object[] { "2019-02-12 03:03:34", "2019-02-12 03:03:38", "2019-02-12 03:03:41", "2019-02-12 03:12:34" }; int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (row[0].equals("robot" + i)); assert (row[2].equals("2019-03-02")); assert (row[3].equals("2019-02-12 03:03:34")); Assert.assertArrayEquals(arrDate, (Object[]) row[5]); Assert.assertArrayEquals(arrTimestamp, (Object[]) row[6]); i++; } Assert.assertEquals(i, 10); reader.close(); FileUtils.deleteDirectory(new File(path)); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testReadDateAndTimestampColumnInStruct() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[3]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); ArrayList<StructField> structFields = new ArrayList<>(); structFields.add(new StructField("dateField", DataTypes.DATE)); structFields.add(new StructField("timestampField", DataTypes.TIMESTAMP)); fields[2] = new Field("structField", DataTypes.createStructType(structFields)); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder().outputPath(path).withLoadOptions(map) .withCsvInput(new Schema(fields)).writtenBy("CarbonReaderTest").build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[] { "robot" + (i % 10), String.valueOf(i % 10000), "2019-03-02#2019-02-12 03:12:34" }; writer.write(row2); } writer.close(); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } return name.endsWith("carbondata"); } }); if (dataFiles == null || dataFiles.length < 1) { throw new RuntimeException("Carbon data file not exists."); } Schema schema = CarbonSchemaReader.readSchema(dataFiles[0].getAbsolutePath()).asOriginOrder(); // Transform the schema String[] strings = new String[schema.getFields().length]; for (int i = 0; i < schema.getFields().length; i++) { strings[i] = (schema.getFields())[i].getFieldName(); } // Read data CarbonReader reader = CarbonReader.builder(path).projection(strings).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (row[0].equals("robot" + i)); Object[] arr = (Object[]) row[2]; assert (arr[0].equals("2019-03-02")); assert (arr[1].equals("2019-02-12 03:12:34")); i++; } Assert.assertEquals(i, 10); reader.close(); FileUtils.deleteDirectory(new File(path)); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testReadingDateAndTimestampColumnInArrayOfStruct() throws IOException { String path = "./testWriteFilesArrayStruct"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[4]; fields[0] = new Field("id", DataTypes.STRING); fields[1] = new Field("source", DataTypes.STRING); fields[2] = new Field("usage", DataTypes.STRING); List<StructField> structFieldsList = new ArrayList<>(); structFieldsList.add(new StructField("name", DataTypes.STRING)); structFieldsList.add(new StructField("type", DataTypes.STRING)); structFieldsList.add(new StructField("creation-date", DataTypes.DATE)); structFieldsList.add(new StructField("creation-timestamp", DataTypes.TIMESTAMP)); StructField structTypeByList = new StructField("annotation", DataTypes.createStructType(structFieldsList), structFieldsList); 
List<StructField> list = new ArrayList<>(); list.add(structTypeByList); Field arrayType = new Field("annotations", "array", list); fields[3] = arrayType; try { CarbonWriter writer = CarbonWriter.builder().outputPath(path).withCsvInput(new Schema(fields)) .writtenBy("complexTest").build(); for (int i = 0; i < 15; i++) { String[] row = new String[] { "robot" + i, String.valueOf(i), i + "." + i, "sunflowers" + (i % 10) + "\002" + "modelarts/image_classification" + "\002" + "2019-03-30" + "\002" + "2019-03-30 17:22:31" + "\001" + "roses" + (i % 10) + "\002" + "modelarts/image_classification" + "\002" + "2019-03-30" + "\002" + "2019-03-30 17:22:31" }; writer.write(row); } writer.close(); } catch (Exception e) { e.printStackTrace(); Assert.fail(); } Schema schema = CarbonSchemaReader.readSchema(path).asOriginOrder(); assert (4 == schema.getFieldsLength()); CarbonReader reader = null; try { reader = CarbonReader.builder(path) .projection(new String[] { "id", "source", "usage", "annotations" }).build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); assert (4 == row.length); assert (row[0].equals("robot" + i)); int value = Integer.valueOf((String) row[1]); Float value2 = Float.valueOf((String) row[2]); assert (value > -1 || value < 15); assert (value2 > -1 || value2 < 15); Object[] annotations = (Object[]) row[3]; for (int j = 0; j < annotations.length; j++) { Object[] annotation = (Object[]) annotations[j]; assert (((String) annotation[0]).contains("sunflowers") || ((String) annotation[0]) .contains("roses")); assert (((String) annotation[1]).contains("modelarts/image_classification")); assert (annotation[2].equals("2019-03-30")); assert (annotation[3].equals("2019-03-30 17:22:31")); } i++; } assert (15 == i); reader.close(); } catch (InterruptedException e) { e.printStackTrace(); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testReadNextRowWithRowUtil() { String path = "./carbondata"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[12]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[11] = new Field("floatField", DataTypes.FLOAT); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "Hello#World#From#Carbon", "1.23" }; writer.write(row2); } writer.close(); File[] dataFiles = new File(path).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } return 
name.endsWith("carbonindex"); } }); if (dataFiles == null || dataFiles.length < 1) { throw new RuntimeException("Carbon index file not exists."); } Schema schema = CarbonSchemaReader .readSchema(dataFiles[0].getAbsolutePath()) .asOriginOrder(); // Transform the schema int count = 0; for (int i = 0; i < schema.getFields().length; i++) { if (!((schema.getFields())[i].getFieldName().contains("."))) { count++; } } String[] strings = new String[count]; int index = 0; for (int i = 0; i < schema.getFields().length; i++) { if (!((schema.getFields())[i].getFieldName().contains("."))) { strings[index] = (schema.getFields())[i].getFieldName(); index++; } } // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(strings) .build(); int i = 0; while (reader.hasNext()) { Object[] data = (Object[]) reader.readNextRow(); assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getShort(data, 1), i); assertEquals(RowUtil.getInt(data, 2), i); assertEquals(RowUtil.getLong(data, 3), Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 4), ((double) i) / 2); assert (RowUtil.getBoolean(data, 5)); assertEquals(RowUtil.getString(data, 6), "2019-03-02"); assert (RowUtil.getDecimal(data, 8).equals("12.35")); assert (RowUtil.getVarchar(data, 9).equals("varchar")); Object[] arr = RowUtil.getArray(data, 10); assert (arr[0].equals("Hello")); assert (arr[1].equals("World")); assert (arr[2].equals("From")); assert (arr[3].equals("Carbon")); assertEquals(RowUtil.getFloat(data, 11), (float) 1.23); i++; } reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } } @Test public void testReadNextRowWithProjectionAndRowUtil() { String path = "./carbondata"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[12]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[11] = new Field("floatField", DataTypes.FLOAT); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "Hello#World#From#Carbon", "1.23" }; writer.write(row2); } writer.close(); // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .withRowRecordReader() .build(); int i = 0; while (reader.hasNext()) { Object[] data = (Object[]) reader.readNextRow(); assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getString(data, 1), 
"2019-03-02"); assert (RowUtil.getVarchar(data, 3).equals("varchar")); Object[] arr = RowUtil.getArray(data, 4); assert (arr[0].equals("Hello")); assert (arr[1].equals("World")); assert (arr[2].equals("From")); assert (arr[3].equals("Carbon")); assertEquals(RowUtil.getShort(data, 5), i); assertEquals(RowUtil.getInt(data, 6), i); assertEquals(RowUtil.getLong(data, 7), Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 8), ((double) i) / 2); assert (RowUtil.getBoolean(data, 9)); assert (RowUtil.getDecimal(data, 10).equals("12.35")); assertEquals(RowUtil.getFloat(data, 11), (float) 1.23); i++; } assert (i == 10); reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } } @Test public void testVectorReader() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[12]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("byteField", DataTypes.BYTE); fields[11] = new Field("floatField", DataTypes.FLOAT); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 10; i++) { String[] row2 = new String[]{ "robot" + (i % 10), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", String.valueOf(i), "1.23" }; writer.write(row2); } writer.close(); // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .build(); int i = 0; while (reader.hasNext()) { Object[] data = (Object[]) reader.readNextRow(); assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getShort(data, 4), i); assertEquals(RowUtil.getInt(data, 5), i); assert (RowUtil.getLong(data, 6) == Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 7), ((double) i) / 2); assert (RowUtil.getBoolean(data, 8)); assertEquals(RowUtil.getString(data, 1), "2019-03-02"); assert (RowUtil.getDecimal(data, 9).equals("12.35")); assert (RowUtil.getString(data, 3).equals("varchar")); assertEquals(RowUtil.getByte(data, 10), new Byte(String.valueOf(i))); assertEquals(RowUtil.getFloat(data, 11), new Float("1.23")); i++; } assert (i == 10); reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } } @Test public void testReadNextBatchRow() { String path = "./carbondata"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[12]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", 
DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[11] = new Field("floatField", DataTypes.FLOAT); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 300; i++) { String[] row2 = new String[]{ "robot" + (i % 10000), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "Hello#World#From#Carbon", "1.23" }; writer.write(row2); } writer.close(); // Read data int batchSize = 150; CarbonReader reader = CarbonReader .builder(path, "_temp") .withBatch(batchSize) .build(); int i = 0; while (reader.hasNext()) { Object[] batch = reader.readNextBatchRow(); Assert.assertTrue(batch.length <= batchSize); for (int j = 0; j < batch.length; j++) { Object[] data = (Object[]) batch[j]; assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getString(data, 1), "2019-03-02"); assert (RowUtil.getVarchar(data, 3).equals("varchar")); Object[] arr = RowUtil.getArray(data, 4); assert (arr[0].equals("Hello")); assert (arr[1].equals("World")); assert (arr[2].equals("From")); assert (arr[3].equals("Carbon")); assertEquals(RowUtil.getShort(data, 5), i); assertEquals(RowUtil.getInt(data, 6), i); assertEquals(RowUtil.getLong(data, 7), Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 8), ((double) i) / 2); assert (RowUtil.getBoolean(data, 9)); assert (RowUtil.getDecimal(data, 10).equals("12.35")); assertEquals(RowUtil.getFloat(data, 11), (float) 1.23); i++; } System.out.println("batch is " + i); } reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); } } } @Test public void testReadNextBatchRowWithVectorReader() { String path = "./carbondata"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[11]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); // Vector don't support complex data type // fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[10] = new Field("floatField", DataTypes.FLOAT); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) 
.withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 300; i++) { String[] row2 = new String[]{ "robot" + (i % 10000), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "1.23" }; writer.write(row2); } writer.close(); // Read data int batchSize = 150; CarbonReader reader = CarbonReader .builder(path, "_temp") .withBatch(batchSize) .build(); int i = 0; while (reader.hasNext()) { Object[] batch = reader.readNextBatchRow(); Assert.assertTrue(batch.length <= batchSize); for (int j = 0; j < batch.length; j++) { Object[] data = (Object[]) batch[j]; assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getString(data, 1), "2019-03-02"); assert (RowUtil.getVarchar(data, 3).equals("varchar")); assertEquals(RowUtil.getShort(data, 4), i); assertEquals(RowUtil.getInt(data, 5), i); assertEquals(RowUtil.getLong(data, 6), Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 7), ((double) i) / 2); assert (RowUtil.getDecimal(data, 9).equals("12.35")); assertEquals(RowUtil.getFloat(data, 10), (float) 1.23); i++; } System.out.println("batch is " + i); } reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); } } } @Test public void testReadingNullValues() { String path = "./testWriteFiles"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("booleanField", DataTypes.BOOLEAN); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); for (int i = 0; i < 2; i++) { String[] row2 = new String[]{ "robot" + (i % 10), "", }; writer.write(row2); } writer.close(); // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .build(); int i = 0; while (reader.hasNext()) { reader.readNextRow(); i++; } assert (i == 2); reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } } @Test public void testSdkWriteWhenArrayOfStringIsEmpty() throws IOException, InvalidLoadOptionException { String badRecordAction = CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION); CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL"); String path = "./testSdkWriteWhenArrayOfStringIsEmpty"; String[] rec = {"aaa", "bbb", "<EMAIL>", "", "", "mmm", ""}; Field[] fields = new Field[7]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); fields[2] = new Field("stringField1", DataTypes.STRING); fields[3] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[4] = new Field("arrayField1", DataTypes.createArrayType(DataTypes.STRING)); fields[5] = new Field("arrayField2", DataTypes.createArrayType(DataTypes.STRING)); fields[6] = new Field("varcharField1", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("complex_delimiter_level_1", "#"); map.put("bad_records_logger_enable", "TRUE"); map.put("bad_record_path", 
path + "/badrec"); CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path); builder.withLoadOptions(map).withCsvInput(schema).enableLocalDictionary(false) .writtenBy("CarbonReaderTest"); CarbonWriter writer = builder.build(); writer.write(rec); writer.close(); CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction); FileUtils.deleteDirectory(new File(path)); } @Test public void testValidateBadRecordsActionWithImproperValue() throws IOException { String path = "./testValidateBadRecordsActionValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("BAD_RECORDS_ACTION", "FAL"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); Assert.fail(); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage().contains("option BAD_RECORDS_ACTION can have only either " + "FORCE or IGNORE or REDIRECT or FAIL. It shouldn't be FAL")); } catch (Exception e) { Assert.fail(); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateBadRecordsActionWithProperValue() throws IOException { String path = "./testValidateBadRecordsActionValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("BAD_RECORDS_ACTION", "FAIL"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); } catch (IllegalArgumentException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateBadRecordsLoggerEnableWithImproperValue() throws IOException { String path = "./testValidateBadRecordsLoggerEnableValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("bad_records_logger_enable", "FLSE"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); Assert.fail(); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage().contains( "Invalid value FLSE for key bad_records_logger_enable")); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateBadRecordsLoggerEnableWithProperValue() throws IOException { String path = "./testValidateBadRecordsLoggerEnableValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("bad_records_logger_enable", "FALSE"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); } catch (IllegalArgumentException e) { e.printStackTrace(); Assert.fail(); } catch (Exception e) { 
Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateQuoteCharWithImproperValue() throws IOException { String path = "./testValidateQuoteCharWithImproperValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("quotechar", "##"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); Assert.fail(); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage().contains( "QUOTECHAR cannot be more than one character.")); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateQuoteCharWithProperValue() throws IOException { String path = "./testValidateQuoteCharWithProperValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("quotechar", "#"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); } catch (IllegalArgumentException e) { e.printStackTrace(); Assert.fail(); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateEscapeCharWithImproperValue() throws IOException { String path = "./testValidateEscapeCharWithImproperValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("escapechar", "##"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); Assert.fail(); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage().contains( "ESCAPECHAR cannot be more than one character.")); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testValidateEscapeCharWithProperValue() throws IOException { String path = "./testValidateEscapeCharWithProperValue"; Field[] fields = new Field[2]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("varcharField", DataTypes.VARCHAR); Schema schema = new Schema(fields); Map map = new HashMap(); map.put("escapechar", "#"); try { CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(schema) .enableLocalDictionary(false) .writtenBy("CarbonReaderTest") .build(); } catch (IllegalArgumentException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } catch (Exception e) { Assert.fail(e.getMessage()); } finally { FileUtils.deleteDirectory(new File(path)); } } @Test public void testWriteWithDifferentDataType() { String path = "./carbondata"; try { FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[13]; fields[0] = new Field("stringField", DataTypes.STRING); fields[1] = new Field("shortField", DataTypes.SHORT); fields[2] = new Field("intField", DataTypes.INT); fields[3] = new Field("longField", DataTypes.LONG); fields[4] = new Field("doubleField", 
DataTypes.DOUBLE); fields[5] = new Field("boolField", DataTypes.BOOLEAN); fields[6] = new Field("dateField", DataTypes.DATE); fields[7] = new Field("timeField", DataTypes.TIMESTAMP); fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2)); fields[9] = new Field("varcharField", DataTypes.VARCHAR); fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING)); fields[11] = new Field("floatField", DataTypes.FLOAT); fields[12] = new Field("binaryField", DataTypes.BINARY); Map<String, String> map = new HashMap<>(); map.put("complex_delimiter_level_1", "#"); CarbonWriter writer = CarbonWriter.builder() .outputPath(path) .withLoadOptions(map) .withCsvInput(new Schema(fields)) .writtenBy("CarbonReaderTest") .build(); byte[] value = "Binary".getBytes(); for (int i = 0; i < 10; i++) { Object[] row2 = new Object[]{ "robot" + (i % 10), i % 10000, i, (Long.MAX_VALUE - i), ((double) i / 2), (true), "2019-03-02", "2019-02-12 03:03:34", 12.345, "varchar", "Hello#World#From#Carbon", 1.23, value }; writer.write(row2); } writer.close(); // Read data CarbonReader reader = CarbonReader .builder(path, "_temp") .withRowRecordReader() .build(); int i = 0; while (reader.hasNext()) { Object[] data = (Object[]) reader.readNextRow(); assert (RowUtil.getString(data, 0).equals("robot" + i)); assertEquals(RowUtil.getString(data, 1), "2019-03-02"); Assert.assertEquals(new String(value), new String(RowUtil.getBinary(data, 3))); assert (RowUtil.getVarchar(data, 4).equals("varchar")); Object[] arr = RowUtil.getArray(data, 5); assert (arr[0].equals("Hello")); assert (arr[1].equals("World")); assert (arr[2].equals("From")); assert (arr[3].equals("Carbon")); assertEquals(RowUtil.getShort(data, 6), i); assertEquals(RowUtil.getInt(data, 7), i); assertEquals(RowUtil.getLong(data, 8), Long.MAX_VALUE - i); assertEquals(RowUtil.getDouble(data, 9), ((double) i) / 2); assert (RowUtil.getBoolean(data, 10)); assert (RowUtil.getDecimal(data, 11).equals("12.35")); assertEquals(RowUtil.getFloat(data, 12), (float) 1.23); i++; } assert (i == 10); reader.close(); } catch (Throwable e) { e.printStackTrace(); Assert.fail(e.getMessage()); } finally { try { FileUtils.deleteDirectory(new File(path)); } catch (IOException e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } } @Test public void testReadBlocklet() throws IOException, InterruptedException { String path = "./testWriteFiles/" + System.nanoTime(); FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(1000 * 1000, new Schema(fields), path, null, 1, 100); InputSplit[] splits = CarbonReader.builder(path).getSplits(true); // check for 3 blocklet count (as only one carbon file will be created) Assert.assertEquals(splits.length, 3); int totalCount = 0; for (int k = 0; k < splits.length; k++) { CarbonReader reader = CarbonReader .builder(splits[k]) .build(); int i = 0; while (reader.hasNext()) { Object[] row = (Object[]) reader.readNextRow(); i++; } totalCount += i; reader.close(); } Assert.assertEquals(totalCount, 1000000); FileUtils.deleteDirectory(new File(path)); } @Test public void testGetSplits() throws IOException, InterruptedException { String path = "./testWriteFiles/" + System.nanoTime(); FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(1000 * 1000, new 
Schema(fields), path, null, 1, 100); InputSplit[] splits = CarbonReader.builder(path).getSplits(true); // check for 3 blocklet count (as only one carbon file will be created) Assert.assertEquals(splits.length, 3); InputSplit[] splits1 = CarbonReader.builder(path).getSplits(false); // check for 1 block count (as only one carbon file will be created) Assert.assertEquals(splits1.length, 1); FileUtils.deleteDirectory(new File(path)); } @Test public void testReadWithFilterNonResult() throws IOException, InterruptedException { String path = "./testWriteFiles"; FileUtils.deleteDirectory(new File(path)); Field[] fields = new Field[2]; fields[0] = new Field("name", DataTypes.STRING); fields[1] = new Field("age", DataTypes.INT); TestUtil.writeFilesAndVerify(200, new Schema(fields), path); ColumnExpression columnExpression = new ColumnExpression("age", DataTypes.INT); EqualToExpression equalToExpression = new EqualToExpression(columnExpression, new LiteralExpression("-11", DataTypes.INT)); CarbonReader reader = CarbonReader .builder(path, "_temp") .projection(new String[]{"name", "age"}) .filter(equalToExpression) .build(); int i = 0; while (reader.hasNext()) { Assert.assertTrue(false); i++; } Assert.assertEquals(i, 0); reader.close(); FileUtils.deleteDirectory(new File(path)); } }
44,580
488
<reponame>mtasaka/ox<filename>ext/ox/cache.h /* cache.h * Copyright (c) 2011, <NAME> * All rights reserved. */ #ifndef OX_CACHE_H #define OX_CACHE_H #include "ruby.h" typedef struct _cache *Cache; extern void ox_cache_new(Cache *cache); extern VALUE ox_cache_get(Cache cache, const char *key, VALUE **slot, const char **keyp); extern void ox_cache_print(Cache cache); #endif /* OX_CACHE_H */
177
792
/* * Copyright (c) Facebook, Inc. and its affiliates. * All rights reserved. * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. */ #pragma once #include <cstdint> // for std::int32_t #include "fbgemm/FbgemmBuild.h" namespace fbgemm { /** * @brief Sum a given vector. */ FBGEMM_API std::int32_t reduceAvx2(const std::uint8_t* A, int len); /** * @brief Transpose 8 rows from source matrix. */ void transpose_8rows( int N, const uint8_t* src, int ld_src, uint8_t* dst, int ld_dst); /** * @brief avx2 part of the spmdm code. */ void spmdmKernelAvx2( int N, const uint8_t* A_buffer, const int32_t* colptr, const int8_t* values, const int16_t* rowidx, int32_t* C_buffer); } // namespace fbgemm
340
956
<gh_stars>100-1000 /* SPDX-License-Identifier: BSD-3-Clause * Copyright 2020 Mellanox Technologies, Ltd */ #ifndef RTE_PMD_MLX5_REGEX_RXP_H_ #define RTE_PMD_MLX5_REGEX_RXP_H_ #define MLX5_RXP_MAX_JOB_LENGTH 16384 #define MLX5_RXP_MAX_SUBSETS 4095 #define MLX5_RXP_CSR_NUM_ENTRIES 31 #define MLX5_RXP_CTRL_TYPE_MASK 7 #define MLX5_RXP_CTRL_TYPE_JOB_DESCRIPTOR 0 #define MLX5_RXP_CTRL_TYPE_RESPONSE_DESCRIPTOR 1 #define MLX5_RXP_CTRL_TYPE_MEMORY_WRITE 4 #define MLX5_RXP_CSR_CTRL_DISABLE_L2C (1 << 7) #define MLX5_RXP_CTRL_JOB_DESC_SOF 0x0010 #define MLX5_RXP_CTRL_JOB_DESC_EOF 0x0020 #define MLX5_RXP_CTRL_JOB_DESC_HPM_ENABLE 0x0100 #define MLX5_RXP_CTRL_JOB_DESC_ANYMATCH_ENABLE 0x0200 #define MLX5_RXP_CTRL_JOB_DESC_FLAGS (MLX5_RXP_CTRL_JOB_DESC_SOF | \ MLX5_RXP_CTRL_JOB_DESC_EOF | \ MLX5_RXP_CTRL_JOB_DESC_HPM_ENABLE | \ MLX5_RXP_CTRL_JOB_DESC_ANYMATCH_ENABLE) #define MLX5_RXP_CTRL_VALID 0x8000 #define MLX5_RXP_RESP_STATUS_MAX_PRI_THREADS (1 << 3) #define MLX5_RXP_RESP_STATUS_MAX_SEC_THREADS (1 << 4) #define MLX5_RXP_RESP_STATUS_MAX_LATENCY (1 << 5) #define MLX5_RXP_RESP_STATUS_MAX_MATCH (1 << 6) #define MLX5_RXP_RESP_STATUS_MAX_PREFIX (1 << 7) #define MLX5_RXP_RESP_STATUS_HPM (1 << 8) #define MLX5_RXP_RESP_STATUS_ANYMATCH (1 << 9) #define MLX5_RXP_RESP_STATUS_PMI_SOJ (1 << 13) #define MLX5_RXP_RESP_STATUS_PMI_EOJ (1 << 14) /* This describes the header the RXP expects for any search data. */ struct mlx5_rxp_job_desc { uint32_t job_id; uint16_t ctrl; uint16_t len; uint16_t subset[4]; } __rte_packed; struct mlx5_rxp_response_desc { uint32_t job_id; uint16_t status; uint8_t detected_match_count; uint8_t match_count; uint16_t primary_thread_count; uint16_t instruction_count; uint16_t latency_count; uint16_t pmi_min_byte_ptr; } __rte_packed; struct mlx5_rxp_match_tuple { uint32_t rule_id; uint16_t start_ptr; uint16_t length; } __rte_packed; struct mlx5_rxp_response { struct mlx5_rxp_response_desc header; struct mlx5_rxp_match_tuple matches[0]; }; #define MLX5_RXP_MAX_MATCHES 254 #define MLX5_RXP_CTL_RULES_PGM 1 #define MLX5_RXP_CTL_RULES_PGM_INCR 2 #define MLX5_RXP_ROF_ENTRY_INST 0 #define MLX5_RXP_ROF_ENTRY_EQ 1 #define MLX5_RXP_ROF_ENTRY_GTE 2 #define MLX5_RXP_ROF_ENTRY_LTE 3 #define MLX5_RXP_ROF_ENTRY_CHECKSUM 4 #define MLX5_RXP_ROF_ENTRY_CHECKSUM_EX_EM 5 #define MLX5_RXP_ROF_ENTRY_IM 6 #define MLX5_RXP_ROF_ENTRY_EM 7 #define MLX5_RXP_ROF_ENTRY_TYPE_MAX 7 #define MLX5_RXP_INST_OFFSET 3 #define MLX5_RXP_INST_BLOCK_SIZE 8 #define MLX5_MAX_SIZE_RES_DES (sizeof(struct mlx5_rxp_response_desc)) #define MLX5_MAX_DB_SIZE (1u << 27u) #define MLX5_MAX_SIZE_MATCH_RESP (254 * sizeof(struct mlx5_rxp_match_tuple)) #define MLX5_RXP_SQ_NOT_BUSY false #define MLX5_RXP_SQ_BUSY true struct mlx5_rxp_ctl_hdr { uint16_t cmd; uint32_t len; }; struct mlx5_rxp_rof_entry { uint8_t type; uint32_t addr; uint64_t value; }; struct mlx5_rxp_rof { uint32_t rof_version; char *timestamp; char *rxp_compiler_version; uint32_t rof_revision; uint32_t number_of_entries; struct mlx5_rxp_rof_entry *rof_entries; }; struct mlx5_rxp_ctl_rules_pgm { struct mlx5_rxp_ctl_hdr hdr; uint32_t count; struct mlx5_rxp_rof_entry rules[0]; } __rte_packed; /* RXP programming mode setting. */ enum mlx5_rxp_program_mode { MLX5_RXP_MODE_NOT_DEFINED = 0, MLX5_RXP_SHARED_PROG_MODE, MLX5_RXP_PRIVATE_PROG_MODE, }; #define MLX5_RXP_POLL_CSR_FOR_VALUE_TIMEOUT 3000 /* Poll timeout in ms. */ #define MLX5_RXP_INITIALIZATION_TIMEOUT 60000 /* Initialize timeout in ms. */ #define MLX5_RXP_MAX_ENGINES 2u /* Number of RXP engines. 
*/ #define MLX5_RXP_EM_COUNT 1u /* Extra External Memories to use. */ #define MLX5_RXP_DB_NOT_ASSIGNED 0xFF struct mlx5_regex_umem { struct mlx5dv_devx_umem *umem; uint32_t id; uint64_t offset; }; #endif /* RTE_PMD_MLX5_REGEX_RXP_H_ */
1,941
1,754
{ "branch": "master", "files": [ "cover.html", "praise.html", "titlepage.html", "copyright.html", "toc.html", "foreword.asciidoc", "preface.asciidoc", "ch01.asciidoc", "ch02.asciidoc", "ch03.asciidoc", "ch04.asciidoc", "ch05.asciidoc", "ch06.asciidoc", "ch07.asciidoc", "ch08.asciidoc", "ch09.asciidoc", "ix.html", "author_bio.html", "colo.html" ], "formats": { "pdf": { "version": "print", "toc": true, "index": true, "syntaxhighlighting": true, "show_comments": false, "color_count": "1", "trim_size": "6inx9in", "antennahouse_version": "AHFormatterV62_64-MR4" }, "epub": { "toc": true, "index": true, "syntaxhighlighting": true, "epubcheck": true, "show_comments": false, "downsample_images": false }, "mobi": { "toc": true, "index": true, "syntaxhighlighting": true, "show_comments": false, "downsample_images": false }, "html": { "toc": true, "index": true, "syntaxhighlighting": true, "show_comments": false, "consolidated": false } }, "theme": "oreillymedia/animal_theme_sass", "title": "Practical Modern JavaScript", "export_formats": [ "html", "pdf" ], "name": "oreillymedia/practical-modern-javascript", "templating": false, "print_isbn13": "9781491943533", "lang": "en", "accent_color": "cmyk(100%, 3%, 50%, 0%)" }
748
648
{"resourceType":"DataElement","id":"CarePlan.goal","meta":{"lastUpdated":"2017-04-19T07:44:43.294+10:00"},"url":"http://hl7.org/fhir/DataElement/CarePlan.goal","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"id":"CarePlan.goal","path":"CarePlan.goal","short":"Desired outcome of plan","definition":"Describes the intended objective(s) of carrying out the care plan.","comment":"Goal can be achieving a particular change or merely maintaining a current state or even slowing a decline.","requirements":"Provides context for plan. Allows plan effectiveness to be evaluated by clinicians.","min":0,"max":"*","type":[{"code":"Reference","targetProfile":"http://hl7.org/fhir/StructureDefinition/Goal"}],"mapping":[{"identity":"v2","map":"GOL.1"},{"identity":"rim","map":".outboundRelationship[typeCode<=OBJ]."}]}]}
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_MEDIATOR_H_ #define IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_MEDIATOR_H_ #import <Foundation/Foundation.h> #include <memory> #include "base/macros.h" #include "base/observer_list.h" #import "ios/chrome/browser/ui/fullscreen/fullscreen_animator.h" #import "ios/chrome/browser/ui/fullscreen/fullscreen_model_observer.h" class FullscreenController; class FullscreenControllerObserver; @class FullscreenResetAnimator; @class FullscreenScrollEndAnimator; @class FullscreenScrollToTopAnimator; @class ToolbarRevealAnimator; // A helper object that listens to FullscreenModel changes and forwards this // information to FullscreenControllerObservers. class FullscreenMediator : public FullscreenModelObserver { public: FullscreenMediator(FullscreenController* controller, FullscreenModel* model); ~FullscreenMediator() override; // Adds and removes FullscreenControllerObservers. void AddObserver(FullscreenControllerObserver* observer) { observers_.AddObserver(observer); } void RemoveObserver(FullscreenControllerObserver* observer) { observers_.RemoveObserver(observer); } // Instructs the mediator that a scroll-to-top animation has been triggered. void ScrollToTop(); // Instructs the mediator that the app will be foregrounded. void WillEnterForeground(); // Resets the model while animating changes. void AnimateModelReset(); // Instructs the mediator to stop observing its model. void Disconnect(); private: // FullscreenModelObserver: void FullscreenModelProgressUpdated(FullscreenModel* model) override; void FullscreenModelEnabledStateChanged(FullscreenModel* model) override; void FullscreenModelScrollEventStarted(FullscreenModel* model) override; void FullscreenModelScrollEventEnded(FullscreenModel* model) override; void FullscreenModelWasReset(FullscreenModel* model) override; // Sets up |animator_| with |style|. void SetUpAnimator(FullscreenAnimatorStyle style); // Starts |animator+| if it has animations to run. |animator_| will be reset // if no animations have been added. void StartAnimator(); // Stops the current scroll end animation if one is in progress. If // |update_model| is true, the FullscreenModel will be updated with the active // animator's current progress value. void StopAnimating(bool update_model); // The controller. FullscreenController* controller_ = nullptr; // The model. FullscreenModel* model_ = nullptr; // The active animator. __strong FullscreenAnimator* animator_ = nil; // The FullscreenControllerObservers that need to get notified of model // changes. base::ObserverList<FullscreenControllerObserver> observers_; DISALLOW_COPY_AND_ASSIGN(FullscreenMediator); }; #endif // IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_MEDIATOR_H_
<gh_stars>100-1000 #if !(defined(GO) && defined(GOM) && defined(GO2) && defined(DATA)) #error meh! #endif //GO(xcb_glx_are_textures_resident, //GO(xcb_glx_are_textures_resident_data, //GO(xcb_glx_are_textures_resident_data_end, //GO(xcb_glx_are_textures_resident_data_length, //GO(xcb_glx_are_textures_resident_reply, //GO(xcb_glx_are_textures_resident_sizeof, //GO(xcb_glx_are_textures_resident_unchecked, //GO(xcb_glx_bool32_end, //GO(xcb_glx_bool32_next, //GO(xcb_glx_change_drawable_attributes, //GO(xcb_glx_change_drawable_attributes_attribs, //GO(xcb_glx_change_drawable_attributes_attribs_end, //GO(xcb_glx_change_drawable_attributes_attribs_length, //GO(xcb_glx_change_drawable_attributes_checked, //GO(xcb_glx_change_drawable_attributes_sizeof, //GO(xcb_glx_client_info, //GO(xcb_glx_client_info_checked, //GO(xcb_glx_client_info_sizeof, //GO(xcb_glx_client_info_string, //GO(xcb_glx_client_info_string_end, //GO(xcb_glx_client_info_string_length, //GO(xcb_glx_context_end, //GO(xcb_glx_context_next, //GO(xcb_glx_context_tag_end, //GO(xcb_glx_context_tag_next, //GO(xcb_glx_copy_context, //GO(xcb_glx_copy_context_checked, //GO(xcb_glx_create_context, //GO(xcb_glx_create_context_attribs_arb, //GO(xcb_glx_create_context_attribs_arb_attribs, //GO(xcb_glx_create_context_attribs_arb_attribs_end, //GO(xcb_glx_create_context_attribs_arb_attribs_length, //GO(xcb_glx_create_context_attribs_arb_checked, //GO(xcb_glx_create_context_attribs_arb_sizeof, //GO(xcb_glx_create_context_checked, //GO(xcb_glx_create_glx_pixmap, //GO(xcb_glx_create_glx_pixmap_checked, //GO(xcb_glx_create_new_context, //GO(xcb_glx_create_new_context_checked, //GO(xcb_glx_create_pbuffer, //GO(xcb_glx_create_pbuffer_attribs, //GO(xcb_glx_create_pbuffer_attribs_end, //GO(xcb_glx_create_pbuffer_attribs_length, //GO(xcb_glx_create_pbuffer_checked, //GO(xcb_glx_create_pbuffer_sizeof, //GO(xcb_glx_create_pixmap, //GO(xcb_glx_create_pixmap_attribs, //GO(xcb_glx_create_pixmap_attribs_end, //GO(xcb_glx_create_pixmap_attribs_length, //GO(xcb_glx_create_pixmap_checked, //GO(xcb_glx_create_pixmap_sizeof, //GO(xcb_glx_create_window, //GO(xcb_glx_create_window_attribs, //GO(xcb_glx_create_window_attribs_end, //GO(xcb_glx_create_window_attribs_length, //GO(xcb_glx_create_window_checked, //GO(xcb_glx_create_window_sizeof, //GO(xcb_glx_delete_lists, //GO(xcb_glx_delete_lists_checked, //GO(xcb_glx_delete_queries_arb, //GO(xcb_glx_delete_queries_arb_checked, //GO(xcb_glx_delete_queries_arb_ids, //GO(xcb_glx_delete_queries_arb_ids_end, //GO(xcb_glx_delete_queries_arb_ids_length, //GO(xcb_glx_delete_queries_arb_sizeof, //GO(xcb_glx_delete_textures, //GO(xcb_glx_delete_textures_checked, //GO(xcb_glx_delete_textures_sizeof, //GO(xcb_glx_delete_textures_textures, //GO(xcb_glx_delete_textures_textures_end, //GO(xcb_glx_delete_textures_textures_length, //GO(xcb_glx_delete_window, //GO(xcb_glx_delete_window_checked, //GO(xcb_glx_destroy_context, //GO(xcb_glx_destroy_context_checked, //GO(xcb_glx_destroy_glx_pixmap, //GO(xcb_glx_destroy_glx_pixmap_checked, //GO(xcb_glx_destroy_pbuffer, //GO(xcb_glx_destroy_pbuffer_checked, //GO(xcb_glx_destroy_pixmap, //GO(xcb_glx_destroy_pixmap_checked, //GO(xcb_glx_drawable_end, //GO(xcb_glx_drawable_next, //GO(xcb_glx_end_list, //GO(xcb_glx_end_list_checked, //GO(xcb_glx_fbconfig_end, //GO(xcb_glx_fbconfig_next, //GO(xcb_glx_feedback_buffer, //GO(xcb_glx_feedback_buffer_checked, //GO(xcb_glx_finish, //GO(xcb_glx_finish_reply, //GO(xcb_glx_finish_unchecked, //GO(xcb_glx_float32_end, //GO(xcb_glx_float32_next, 
//GO(xcb_glx_float64_end, //GO(xcb_glx_float64_next, //GO(xcb_glx_flush, //GO(xcb_glx_flush_checked, //GO(xcb_glx_gen_lists, //GO(xcb_glx_gen_lists_reply, //GO(xcb_glx_gen_lists_unchecked, //GO(xcb_glx_gen_queries_arb, //GO(xcb_glx_gen_queries_arb_data, //GO(xcb_glx_gen_queries_arb_data_end, //GO(xcb_glx_gen_queries_arb_data_length, //GO(xcb_glx_gen_queries_arb_reply, //GO(xcb_glx_gen_queries_arb_sizeof, //GO(xcb_glx_gen_queries_arb_unchecked, //GO(xcb_glx_gen_textures, //GO(xcb_glx_gen_textures_data, //GO(xcb_glx_gen_textures_data_end, //GO(xcb_glx_gen_textures_data_length, //GO(xcb_glx_gen_textures_reply, //GO(xcb_glx_gen_textures_sizeof, //GO(xcb_glx_gen_textures_unchecked, //GO(xcb_glx_get_booleanv, //GO(xcb_glx_get_booleanv_data, //GO(xcb_glx_get_booleanv_data_end, //GO(xcb_glx_get_booleanv_data_length, //GO(xcb_glx_get_booleanv_reply, //GO(xcb_glx_get_booleanv_sizeof, //GO(xcb_glx_get_booleanv_unchecked, //GO(xcb_glx_get_clip_plane, //GO(xcb_glx_get_clip_plane_data, //GO(xcb_glx_get_clip_plane_data_end, //GO(xcb_glx_get_clip_plane_data_length, //GO(xcb_glx_get_clip_plane_reply, //GO(xcb_glx_get_clip_plane_sizeof, //GO(xcb_glx_get_clip_plane_unchecked, //GO(xcb_glx_get_color_table, //GO(xcb_glx_get_color_table_data, //GO(xcb_glx_get_color_table_data_end, //GO(xcb_glx_get_color_table_data_length, //GO(xcb_glx_get_color_table_parameterfv, //GO(xcb_glx_get_color_table_parameterfv_data, //GO(xcb_glx_get_color_table_parameterfv_data_end, //GO(xcb_glx_get_color_table_parameterfv_data_length, //GO(xcb_glx_get_color_table_parameterfv_reply, //GO(xcb_glx_get_color_table_parameterfv_sizeof, //GO(xcb_glx_get_color_table_parameterfv_unchecked, //GO(xcb_glx_get_color_table_parameteriv, //GO(xcb_glx_get_color_table_parameteriv_data, //GO(xcb_glx_get_color_table_parameteriv_data_end, //GO(xcb_glx_get_color_table_parameteriv_data_length, //GO(xcb_glx_get_color_table_parameteriv_reply, //GO(xcb_glx_get_color_table_parameteriv_sizeof, //GO(xcb_glx_get_color_table_parameteriv_unchecked, //GO(xcb_glx_get_color_table_reply, //GO(xcb_glx_get_color_table_sizeof, //GO(xcb_glx_get_color_table_unchecked, //GO(xcb_glx_get_compressed_tex_image_arb, //GO(xcb_glx_get_compressed_tex_image_arb_data, //GO(xcb_glx_get_compressed_tex_image_arb_data_end, //GO(xcb_glx_get_compressed_tex_image_arb_data_length, //GO(xcb_glx_get_compressed_tex_image_arb_reply, //GO(xcb_glx_get_compressed_tex_image_arb_sizeof, //GO(xcb_glx_get_compressed_tex_image_arb_unchecked, //GO(xcb_glx_get_convolution_filter, //GO(xcb_glx_get_convolution_filter_data, //GO(xcb_glx_get_convolution_filter_data_end, //GO(xcb_glx_get_convolution_filter_data_length, //GO(xcb_glx_get_convolution_filter_reply, //GO(xcb_glx_get_convolution_filter_sizeof, //GO(xcb_glx_get_convolution_filter_unchecked, //GO(xcb_glx_get_convolution_parameterfv, //GO(xcb_glx_get_convolution_parameterfv_data, //GO(xcb_glx_get_convolution_parameterfv_data_end, //GO(xcb_glx_get_convolution_parameterfv_data_length, //GO(xcb_glx_get_convolution_parameterfv_reply, //GO(xcb_glx_get_convolution_parameterfv_sizeof, //GO(xcb_glx_get_convolution_parameterfv_unchecked, //GO(xcb_glx_get_convolution_parameteriv, //GO(xcb_glx_get_convolution_parameteriv_data, //GO(xcb_glx_get_convolution_parameteriv_data_end, //GO(xcb_glx_get_convolution_parameteriv_data_length, //GO(xcb_glx_get_convolution_parameteriv_reply, //GO(xcb_glx_get_convolution_parameteriv_sizeof, //GO(xcb_glx_get_convolution_parameteriv_unchecked, //GO(xcb_glx_get_doublev, //GO(xcb_glx_get_doublev_data, 
//GO(xcb_glx_get_doublev_data_end, //GO(xcb_glx_get_doublev_data_length, //GO(xcb_glx_get_doublev_reply, //GO(xcb_glx_get_doublev_sizeof, //GO(xcb_glx_get_doublev_unchecked, //GO(xcb_glx_get_drawable_attributes, //GO(xcb_glx_get_drawable_attributes_attribs, //GO(xcb_glx_get_drawable_attributes_attribs_end, //GO(xcb_glx_get_drawable_attributes_attribs_length, //GO(xcb_glx_get_drawable_attributes_reply, //GO(xcb_glx_get_drawable_attributes_sizeof, //GO(xcb_glx_get_drawable_attributes_unchecked, //GO(xcb_glx_get_error, //GO(xcb_glx_get_error_reply, //GO(xcb_glx_get_error_unchecked, //GO(xcb_glx_get_fb_configs, //GO(xcb_glx_get_fb_configs_property_list, //GO(xcb_glx_get_fb_configs_property_list_end, //GO(xcb_glx_get_fb_configs_property_list_length, //GO(xcb_glx_get_fb_configs_reply, //GO(xcb_glx_get_fb_configs_sizeof, //GO(xcb_glx_get_fb_configs_unchecked, //GO(xcb_glx_get_floatv, //GO(xcb_glx_get_floatv_data, //GO(xcb_glx_get_floatv_data_end, //GO(xcb_glx_get_floatv_data_length, //GO(xcb_glx_get_floatv_reply, //GO(xcb_glx_get_floatv_sizeof, //GO(xcb_glx_get_floatv_unchecked, //GO(xcb_glx_get_histogram, //GO(xcb_glx_get_histogram_data, //GO(xcb_glx_get_histogram_data_end, //GO(xcb_glx_get_histogram_data_length, //GO(xcb_glx_get_histogram_parameterfv, //GO(xcb_glx_get_histogram_parameterfv_data, //GO(xcb_glx_get_histogram_parameterfv_data_end, //GO(xcb_glx_get_histogram_parameterfv_data_length, //GO(xcb_glx_get_histogram_parameterfv_reply, //GO(xcb_glx_get_histogram_parameterfv_sizeof, //GO(xcb_glx_get_histogram_parameterfv_unchecked, //GO(xcb_glx_get_histogram_parameteriv, //GO(xcb_glx_get_histogram_parameteriv_data, //GO(xcb_glx_get_histogram_parameteriv_data_end, //GO(xcb_glx_get_histogram_parameteriv_data_length, //GO(xcb_glx_get_histogram_parameteriv_reply, //GO(xcb_glx_get_histogram_parameteriv_sizeof, //GO(xcb_glx_get_histogram_parameteriv_unchecked, //GO(xcb_glx_get_histogram_reply, //GO(xcb_glx_get_histogram_sizeof, //GO(xcb_glx_get_histogram_unchecked, //GO(xcb_glx_get_integerv, //GO(xcb_glx_get_integerv_data, //GO(xcb_glx_get_integerv_data_end, //GO(xcb_glx_get_integerv_data_length, //GO(xcb_glx_get_integerv_reply, //GO(xcb_glx_get_integerv_sizeof, //GO(xcb_glx_get_integerv_unchecked, //GO(xcb_glx_get_lightfv, //GO(xcb_glx_get_lightfv_data, //GO(xcb_glx_get_lightfv_data_end, //GO(xcb_glx_get_lightfv_data_length, //GO(xcb_glx_get_lightfv_reply, //GO(xcb_glx_get_lightfv_sizeof, //GO(xcb_glx_get_lightfv_unchecked, //GO(xcb_glx_get_lightiv, //GO(xcb_glx_get_lightiv_data, //GO(xcb_glx_get_lightiv_data_end, //GO(xcb_glx_get_lightiv_data_length, //GO(xcb_glx_get_lightiv_reply, //GO(xcb_glx_get_lightiv_sizeof, //GO(xcb_glx_get_lightiv_unchecked, //GO(xcb_glx_get_mapdv, //GO(xcb_glx_get_mapdv_data, //GO(xcb_glx_get_mapdv_data_end, //GO(xcb_glx_get_mapdv_data_length, //GO(xcb_glx_get_mapdv_reply, //GO(xcb_glx_get_mapdv_sizeof, //GO(xcb_glx_get_mapdv_unchecked, //GO(xcb_glx_get_mapfv, //GO(xcb_glx_get_mapfv_data, //GO(xcb_glx_get_mapfv_data_end, //GO(xcb_glx_get_mapfv_data_length, //GO(xcb_glx_get_mapfv_reply, //GO(xcb_glx_get_mapfv_sizeof, //GO(xcb_glx_get_mapfv_unchecked, //GO(xcb_glx_get_mapiv, //GO(xcb_glx_get_mapiv_data, //GO(xcb_glx_get_mapiv_data_end, //GO(xcb_glx_get_mapiv_data_length, //GO(xcb_glx_get_mapiv_reply, //GO(xcb_glx_get_mapiv_sizeof, //GO(xcb_glx_get_mapiv_unchecked, //GO(xcb_glx_get_materialfv, //GO(xcb_glx_get_materialfv_data, //GO(xcb_glx_get_materialfv_data_end, //GO(xcb_glx_get_materialfv_data_length, //GO(xcb_glx_get_materialfv_reply, 
//GO(xcb_glx_get_materialfv_sizeof, //GO(xcb_glx_get_materialfv_unchecked, //GO(xcb_glx_get_materialiv, //GO(xcb_glx_get_materialiv_data, //GO(xcb_glx_get_materialiv_data_end, //GO(xcb_glx_get_materialiv_data_length, //GO(xcb_glx_get_materialiv_reply, //GO(xcb_glx_get_materialiv_sizeof, //GO(xcb_glx_get_materialiv_unchecked, //GO(xcb_glx_get_minmax, //GO(xcb_glx_get_minmax_data, //GO(xcb_glx_get_minmax_data_end, //GO(xcb_glx_get_minmax_data_length, //GO(xcb_glx_get_minmax_parameterfv, //GO(xcb_glx_get_minmax_parameterfv_data, //GO(xcb_glx_get_minmax_parameterfv_data_end, //GO(xcb_glx_get_minmax_parameterfv_data_length, //GO(xcb_glx_get_minmax_parameterfv_reply, //GO(xcb_glx_get_minmax_parameterfv_sizeof, //GO(xcb_glx_get_minmax_parameterfv_unchecked, //GO(xcb_glx_get_minmax_parameteriv, //GO(xcb_glx_get_minmax_parameteriv_data, //GO(xcb_glx_get_minmax_parameteriv_data_end, //GO(xcb_glx_get_minmax_parameteriv_data_length, //GO(xcb_glx_get_minmax_parameteriv_reply, //GO(xcb_glx_get_minmax_parameteriv_sizeof, //GO(xcb_glx_get_minmax_parameteriv_unchecked, //GO(xcb_glx_get_minmax_reply, //GO(xcb_glx_get_minmax_sizeof, //GO(xcb_glx_get_minmax_unchecked, //GO(xcb_glx_get_pixel_mapfv, //GO(xcb_glx_get_pixel_mapfv_data, //GO(xcb_glx_get_pixel_mapfv_data_end, //GO(xcb_glx_get_pixel_mapfv_data_length, //GO(xcb_glx_get_pixel_mapfv_reply, //GO(xcb_glx_get_pixel_mapfv_sizeof, //GO(xcb_glx_get_pixel_mapfv_unchecked, //GO(xcb_glx_get_pixel_mapuiv, //GO(xcb_glx_get_pixel_mapuiv_data, //GO(xcb_glx_get_pixel_mapuiv_data_end, //GO(xcb_glx_get_pixel_mapuiv_data_length, //GO(xcb_glx_get_pixel_mapuiv_reply, //GO(xcb_glx_get_pixel_mapuiv_sizeof, //GO(xcb_glx_get_pixel_mapuiv_unchecked, //GO(xcb_glx_get_pixel_mapusv, //GO(xcb_glx_get_pixel_mapusv_data, //GO(xcb_glx_get_pixel_mapusv_data_end, //GO(xcb_glx_get_pixel_mapusv_data_length, //GO(xcb_glx_get_pixel_mapusv_reply, //GO(xcb_glx_get_pixel_mapusv_sizeof, //GO(xcb_glx_get_pixel_mapusv_unchecked, //GO(xcb_glx_get_polygon_stipple, //GO(xcb_glx_get_polygon_stipple_data, //GO(xcb_glx_get_polygon_stipple_data_end, //GO(xcb_glx_get_polygon_stipple_data_length, //GO(xcb_glx_get_polygon_stipple_reply, //GO(xcb_glx_get_polygon_stipple_sizeof, //GO(xcb_glx_get_polygon_stipple_unchecked, //GO(xcb_glx_get_queryiv_arb, //GO(xcb_glx_get_queryiv_arb_data, //GO(xcb_glx_get_queryiv_arb_data_end, //GO(xcb_glx_get_queryiv_arb_data_length, //GO(xcb_glx_get_queryiv_arb_reply, //GO(xcb_glx_get_queryiv_arb_sizeof, //GO(xcb_glx_get_queryiv_arb_unchecked, //GO(xcb_glx_get_query_objectiv_arb, //GO(xcb_glx_get_query_objectiv_arb_data, //GO(xcb_glx_get_query_objectiv_arb_data_end, //GO(xcb_glx_get_query_objectiv_arb_data_length, //GO(xcb_glx_get_query_objectiv_arb_reply, //GO(xcb_glx_get_query_objectiv_arb_sizeof, //GO(xcb_glx_get_query_objectiv_arb_unchecked, //GO(xcb_glx_get_query_objectuiv_arb, //GO(xcb_glx_get_query_objectuiv_arb_data, //GO(xcb_glx_get_query_objectuiv_arb_data_end, //GO(xcb_glx_get_query_objectuiv_arb_data_length, //GO(xcb_glx_get_query_objectuiv_arb_reply, //GO(xcb_glx_get_query_objectuiv_arb_sizeof, //GO(xcb_glx_get_query_objectuiv_arb_unchecked, //GO(xcb_glx_get_separable_filter, //GO(xcb_glx_get_separable_filter_reply, //GO(xcb_glx_get_separable_filter_rows_and_cols, //GO(xcb_glx_get_separable_filter_rows_and_cols_end, //GO(xcb_glx_get_separable_filter_rows_and_cols_length, //GO(xcb_glx_get_separable_filter_sizeof, //GO(xcb_glx_get_separable_filter_unchecked, //GO(xcb_glx_get_string, //GO(xcb_glx_get_string_reply, //GO(xcb_glx_get_string_sizeof, 
//GO(xcb_glx_get_string_string, //GO(xcb_glx_get_string_string_end, //GO(xcb_glx_get_string_string_length, //GO(xcb_glx_get_string_unchecked, //GO(xcb_glx_get_tex_envfv, //GO(xcb_glx_get_tex_envfv_data, //GO(xcb_glx_get_tex_envfv_data_end, //GO(xcb_glx_get_tex_envfv_data_length, //GO(xcb_glx_get_tex_envfv_reply, //GO(xcb_glx_get_tex_envfv_sizeof, //GO(xcb_glx_get_tex_envfv_unchecked, //GO(xcb_glx_get_tex_enviv, //GO(xcb_glx_get_tex_enviv_data, //GO(xcb_glx_get_tex_enviv_data_end, //GO(xcb_glx_get_tex_enviv_data_length, //GO(xcb_glx_get_tex_enviv_reply, //GO(xcb_glx_get_tex_enviv_sizeof, //GO(xcb_glx_get_tex_enviv_unchecked, //GO(xcb_glx_get_tex_gendv, //GO(xcb_glx_get_tex_gendv_data, //GO(xcb_glx_get_tex_gendv_data_end, //GO(xcb_glx_get_tex_gendv_data_length, //GO(xcb_glx_get_tex_gendv_reply, //GO(xcb_glx_get_tex_gendv_sizeof, //GO(xcb_glx_get_tex_gendv_unchecked, //GO(xcb_glx_get_tex_genfv, //GO(xcb_glx_get_tex_genfv_data, //GO(xcb_glx_get_tex_genfv_data_end, //GO(xcb_glx_get_tex_genfv_data_length, //GO(xcb_glx_get_tex_genfv_reply, //GO(xcb_glx_get_tex_genfv_sizeof, //GO(xcb_glx_get_tex_genfv_unchecked, //GO(xcb_glx_get_tex_geniv, //GO(xcb_glx_get_tex_geniv_data, //GO(xcb_glx_get_tex_geniv_data_end, //GO(xcb_glx_get_tex_geniv_data_length, //GO(xcb_glx_get_tex_geniv_reply, //GO(xcb_glx_get_tex_geniv_sizeof, //GO(xcb_glx_get_tex_geniv_unchecked, //GO(xcb_glx_get_tex_image, //GO(xcb_glx_get_tex_image_data, //GO(xcb_glx_get_tex_image_data_end, //GO(xcb_glx_get_tex_image_data_length, //GO(xcb_glx_get_tex_image_reply, //GO(xcb_glx_get_tex_image_sizeof, //GO(xcb_glx_get_tex_image_unchecked, //GO(xcb_glx_get_tex_level_parameterfv, //GO(xcb_glx_get_tex_level_parameterfv_data, //GO(xcb_glx_get_tex_level_parameterfv_data_end, //GO(xcb_glx_get_tex_level_parameterfv_data_length, //GO(xcb_glx_get_tex_level_parameterfv_reply, //GO(xcb_glx_get_tex_level_parameterfv_sizeof, //GO(xcb_glx_get_tex_level_parameterfv_unchecked, //GO(xcb_glx_get_tex_level_parameteriv, //GO(xcb_glx_get_tex_level_parameteriv_data, //GO(xcb_glx_get_tex_level_parameteriv_data_end, //GO(xcb_glx_get_tex_level_parameteriv_data_length, //GO(xcb_glx_get_tex_level_parameteriv_reply, //GO(xcb_glx_get_tex_level_parameteriv_sizeof, //GO(xcb_glx_get_tex_level_parameteriv_unchecked, //GO(xcb_glx_get_tex_parameterfv, //GO(xcb_glx_get_tex_parameterfv_data, //GO(xcb_glx_get_tex_parameterfv_data_end, //GO(xcb_glx_get_tex_parameterfv_data_length, //GO(xcb_glx_get_tex_parameterfv_reply, //GO(xcb_glx_get_tex_parameterfv_sizeof, //GO(xcb_glx_get_tex_parameterfv_unchecked, //GO(xcb_glx_get_tex_parameteriv, //GO(xcb_glx_get_tex_parameteriv_data, //GO(xcb_glx_get_tex_parameteriv_data_end, //GO(xcb_glx_get_tex_parameteriv_data_length, //GO(xcb_glx_get_tex_parameteriv_reply, //GO(xcb_glx_get_tex_parameteriv_sizeof, //GO(xcb_glx_get_tex_parameteriv_unchecked, //GO(xcb_glx_get_visual_configs, //GO(xcb_glx_get_visual_configs_property_list, //GO(xcb_glx_get_visual_configs_property_list_end, //GO(xcb_glx_get_visual_configs_property_list_length, //GO(xcb_glx_get_visual_configs_reply, //GO(xcb_glx_get_visual_configs_sizeof, //GO(xcb_glx_get_visual_configs_unchecked, DATA(xcb_glx_id, 8) //GO(xcb_glx_is_direct, //GO(xcb_glx_is_direct_reply, //GO(xcb_glx_is_direct_unchecked, //GO(xcb_glx_is_enabled, //GO(xcb_glx_is_enabled_reply, //GO(xcb_glx_is_enabled_unchecked, //GO(xcb_glx_is_list, //GO(xcb_glx_is_list_reply, //GO(xcb_glx_is_list_unchecked, //GO(xcb_glx_is_query_arb, //GO(xcb_glx_is_query_arb_reply, //GO(xcb_glx_is_query_arb_unchecked, //GO(xcb_glx_is_texture, 
//GO(xcb_glx_is_texture_reply, //GO(xcb_glx_is_texture_unchecked, //GO(xcb_glx_make_context_current, //GO(xcb_glx_make_context_current_reply, //GO(xcb_glx_make_context_current_unchecked, //GO(xcb_glx_make_current, //GO(xcb_glx_make_current_reply, //GO(xcb_glx_make_current_unchecked, //GO(xcb_glx_new_list, //GO(xcb_glx_new_list_checked, //GO(xcb_glx_pbuffer_end, //GO(xcb_glx_pbuffer_next, //GO(xcb_glx_pixel_storef, //GO(xcb_glx_pixel_storef_checked, //GO(xcb_glx_pixel_storei, //GO(xcb_glx_pixel_storei_checked, //GO(xcb_glx_pixmap_end, //GO(xcb_glx_pixmap_next, //GO(xcb_glx_query_context, //GO(xcb_glx_query_context_attribs, //GO(xcb_glx_query_context_attribs_end, //GO(xcb_glx_query_context_attribs_length, //GO(xcb_glx_query_context_reply, //GO(xcb_glx_query_context_sizeof, //GO(xcb_glx_query_context_unchecked, //GO(xcb_glx_query_extensions_string, //GO(xcb_glx_query_extensions_string_reply, //GO(xcb_glx_query_extensions_string_unchecked, //GO(xcb_glx_query_server_string, //GO(xcb_glx_query_server_string_reply, //GO(xcb_glx_query_server_string_sizeof, //GO(xcb_glx_query_server_string_string, //GO(xcb_glx_query_server_string_string_end, //GO(xcb_glx_query_server_string_string_length, //GO(xcb_glx_query_server_string_unchecked, GO(xcb_glx_query_version, pFpuu) GO(xcb_glx_query_version_reply, pFppp) GO(xcb_glx_query_version_unchecked, pFuu) //GO(xcb_glx_read_pixels, //GO(xcb_glx_read_pixels_data, //GO(xcb_glx_read_pixels_data_end, //GO(xcb_glx_read_pixels_data_length, //GO(xcb_glx_read_pixels_reply, //GO(xcb_glx_read_pixels_sizeof, //GO(xcb_glx_read_pixels_unchecked, //GO(xcb_glx_render, //GO(xcb_glx_render_checked, //GO(xcb_glx_render_data, //GO(xcb_glx_render_data_end, //GO(xcb_glx_render_data_length, //GO(xcb_glx_render_large, //GO(xcb_glx_render_large_checked, //GO(xcb_glx_render_large_data, //GO(xcb_glx_render_large_data_end, //GO(xcb_glx_render_large_data_length, //GO(xcb_glx_render_large_sizeof, //GO(xcb_glx_render_mode, //GO(xcb_glx_render_mode_data, //GO(xcb_glx_render_mode_data_end, //GO(xcb_glx_render_mode_data_length, //GO(xcb_glx_render_mode_reply, //GO(xcb_glx_render_mode_sizeof, //GO(xcb_glx_render_mode_unchecked, //GO(xcb_glx_render_sizeof, //GO(xcb_glx_select_buffer, //GO(xcb_glx_select_buffer_checked, //GO(xcb_glx_set_client_info_2arb, //GO(xcb_glx_set_client_info_2arb_checked, //GO(xcb_glx_set_client_info_2arb_gl_extension_string, //GO(xcb_glx_set_client_info_2arb_gl_extension_string_end, //GO(xcb_glx_set_client_info_2arb_gl_extension_string_length, //GO(xcb_glx_set_client_info_2arb_gl_versions, //GO(xcb_glx_set_client_info_2arb_gl_versions_end, //GO(xcb_glx_set_client_info_2arb_gl_versions_length, //GO(xcb_glx_set_client_info_2arb_glx_extension_string, //GO(xcb_glx_set_client_info_2arb_glx_extension_string_end, //GO(xcb_glx_set_client_info_2arb_glx_extension_string_length, //GO(xcb_glx_set_client_info_2arb_sizeof, //GO(xcb_glx_set_client_info_arb, //GO(xcb_glx_set_client_info_arb_checked, //GO(xcb_glx_set_client_info_arb_gl_extension_string, //GO(xcb_glx_set_client_info_arb_gl_extension_string_end, //GO(xcb_glx_set_client_info_arb_gl_extension_string_length, //GO(xcb_glx_set_client_info_arb_gl_versions, //GO(xcb_glx_set_client_info_arb_gl_versions_end, //GO(xcb_glx_set_client_info_arb_gl_versions_length, //GO(xcb_glx_set_client_info_arb_glx_extension_string, //GO(xcb_glx_set_client_info_arb_glx_extension_string_end, //GO(xcb_glx_set_client_info_arb_glx_extension_string_length, //GO(xcb_glx_set_client_info_arb_sizeof, //GO(xcb_glx_swap_buffers, 
//GO(xcb_glx_swap_buffers_checked, //GO(xcb_glx_use_x_font, //GO(xcb_glx_use_x_font_checked, //GO(xcb_glx_vendor_private, //GO(xcb_glx_vendor_private_checked, //GO(xcb_glx_vendor_private_data, //GO(xcb_glx_vendor_private_data_end, //GO(xcb_glx_vendor_private_data_length, //GO(xcb_glx_vendor_private_sizeof, //GO(xcb_glx_vendor_private_with_reply, //GO(xcb_glx_vendor_private_with_reply_data_2, //GO(xcb_glx_vendor_private_with_reply_data_2_end, //GO(xcb_glx_vendor_private_with_reply_data_2_length, //GO(xcb_glx_vendor_private_with_reply_reply, //GO(xcb_glx_vendor_private_with_reply_sizeof, //GO(xcb_glx_vendor_private_with_reply_unchecked, //GO(xcb_glx_wait_gl, //GO(xcb_glx_wait_gl_checked, //GO(xcb_glx_wait_x, //GO(xcb_glx_wait_x_checked, //GO(xcb_glx_window_end, //GO(xcb_glx_window_next,
package com.github.dockerjava.cmd.swarm; import com.github.dockerjava.api.DockerClient; import com.github.dockerjava.api.exception.DockerException; import com.github.dockerjava.api.model.Info; import com.github.dockerjava.api.model.LocalNodeState; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; public class LeaveSwarmCmdExecIT extends SwarmCmdIT { public static final Logger LOG = LoggerFactory.getLogger(LeaveSwarmCmdExecIT.class); @Test public void leaveSwarmAsMaster() throws DockerException { DockerClient dockerClient = startSwarm(); Info info = dockerClient.infoCmd().exec(); LOG.info("Inspected docker: {}", info.toString()); assertThat(info.getSwarm().getLocalNodeState(), is(LocalNodeState.ACTIVE)); dockerClient.leaveSwarmCmd() .withForceEnabled(true) .exec(); LOG.info("Left swarm"); info = dockerClient.infoCmd().exec(); LOG.info("Inspected docker: {}", info.toString()); assertThat(info.getSwarm().getLocalNodeState(), is(LocalNodeState.INACTIVE)); } @Test(expected = DockerException.class) public void leavingSwarmThrowsWhenNotInSwarm() throws Exception { DockerClient dockerClient = startDockerInDocker(); dockerClient.leaveSwarmCmd().exec(); } }
<filename>src/test/java/com/googlecode/objectify/test/EmbeddedLifecycleTests.java package com.googlecode.objectify.test; import com.googlecode.objectify.annotation.Cache; import com.googlecode.objectify.annotation.Id; import com.googlecode.objectify.annotation.Ignore; import com.googlecode.objectify.annotation.OnLoad; import com.googlecode.objectify.annotation.OnSave; import com.googlecode.objectify.test.util.TestBase; import org.junit.jupiter.api.Test; import static com.google.common.truth.Truth.assertThat; import static com.googlecode.objectify.ObjectifyService.factory; /** * Tests the lifecycle annotations on embedded classes */ class EmbeddedLifecycleTests extends TestBase { @com.googlecode.objectify.annotation.Entity @Cache private static class Outer { @Id private Long id; private HasLifecycle life; } private static class HasLifecycle { @Ignore private boolean onSaved; @Ignore private boolean onLoaded; @OnSave void onSave() { this.onSaved = true; } @OnLoad void onLoad() { this.onLoaded = true; } } /** */ @Test void lifecycleInEmbeddedClassWorks() throws Exception { factory().register(Outer.class); final Outer outer = new Outer(); outer.life = new HasLifecycle(); final Outer fetched = saveClearLoad(outer); assertThat(outer.life.onSaved).isTrue(); assertThat(fetched.life.onLoaded).isTrue(); // would fail without session clear } }
#include "../../inc.h" bool __fastcall hook::ShouldDrawFog( uintptr_t ecx, uintptr_t edx ) { return !g_vars.visuals.misc.fog; }
from ..core import WesternCalendar, FRI from ..registry_tools import iso_register @iso_register('MH') class MarshallIslands(WesternCalendar): "Marshall Islands" FIXED_HOLIDAYS = WesternCalendar.FIXED_HOLIDAYS + ( (3, 3, "Remembrance Day"), (5, 1, "Constitution Day"), (11, 17, "Presidents' Day"), (12, 31, "New Year's Eve"), ) include_good_friday = True def get_variable_days(self, year): days = super().get_variable_days(year) days.append(( MarshallIslands.get_nth_weekday_in_month(year, 7, FRI), "Fishermen's Holiday" )) days.append(( MarshallIslands.get_nth_weekday_in_month(year, 9, FRI), "Labour Day" )) days.append(( MarshallIslands.get_last_weekday_in_month(year, 9, FRI), "Manit Day" )) days.append(( MarshallIslands.get_nth_weekday_in_month(year, 12, FRI), "Gospel Day" )) return days
/*++ Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT License. Module Name: ConvSymKernelCommon.h Abstract: This module contains common kernel macros and structures for the symmetric quantized integer convolution operation. --*/ // // Define the convolution kernel flags. // #define MLAS_CONV_SYM_FLAG_INPUT_DIRECT 0x00000001 #define MLAS_CONV_SYM_FLAG_PER_CHANNEL_SCALE 0x00000002 // // Define the structure of the post process parameter block. // .equ .LConvSymPostProcessParams_Bias, 0 .equ .LConvSymPostProcessParams_Scale, 8 .equ .LConvSymPostProcessParams_MinimumValue, 16 .equ .LConvSymPostProcessParams_MaximumValue, 20 .equ .LConvSymPostProcessParams_OutputZeroPoint, 24 // // Stack frame layout for the symmetric convolution kernels. // .equ .LConvSymKernelFrame_InputChannels, 0 .equ .LConvSymKernelFrame_OutputChannels, 8 .equ .LConvSymKernelFrame_Padding, 16 .equ .LConvSymKernelFrame_SavedR15, 24 .equ .LConvSymKernelFrame_SavedR14, 32 .equ .LConvSymKernelFrame_SavedR13, 40 .equ .LConvSymKernelFrame_SavedR12, 48 .equ .LConvSymKernelFrame_SavedRbx, 56 .equ .LConvSymKernelFrame_SavedRbp, 64 .equ .LConvSymKernelFrame_ReturnAddress, 72 .equ .LConvSymKernelFrame_ChannelCount, 80 .equ .LConvSymKernelFrame_OutputCount, 88 .equ .LConvSymKernelFrame_PostProcessParams, 96 .equ .LConvSymKernelFrame_KernelFlags, 104 .equ .LConvSymDepthwiseKernelFrame_Channels, 0 .equ .LConvSymDepthwiseKernelFrame_ChannelOffset, 8 .equ .LConvSymDepthwiseKernelFrame_Padding, 16 .equ .LConvSymDepthwiseKernelFrame_SavedR15, 24 .equ .LConvSymDepthwiseKernelFrame_SavedR14, 32 .equ .LConvSymDepthwiseKernelFrame_SavedR13, 40 .equ .LConvSymDepthwiseKernelFrame_SavedR12, 48 .equ .LConvSymDepthwiseKernelFrame_SavedRbx, 56 .equ .LConvSymDepthwiseKernelFrame_SavedRbp, 64 .equ .LConvSymDepthwiseKernelFrame_ReturnAddress, 72 .equ .LConvSymDepthwiseKernelFrame_ChannelCount, 80 .equ .LConvSymDepthwiseKernelFrame_OutputCount, 88 .equ .LConvSymDepthwiseKernelFrame_PostProcessParams, 96 .equ .LConvSymDepthwiseKernelFrame_KernelFlags, 104
package dev.fiki.forgehax.api.cmd.listener; public class Listeners { public static IOnUpdate onUpdate(IOnUpdate o) { return o; } }
from corehq.apps.api.es import ReportCaseESView from pact.enums import PACT_DOMAIN from io import BytesIO from django.test.client import RequestFactory from corehq.apps.receiverwrapper.views import post from corehq.apps.es import filters from corehq.apps.es.cases import CaseES from corehq.apps.es.forms import FormES def submit_xform(url_path, domain, submission_xml_string, extra_meta=None): """ RequestFactory submitter """ rf = RequestFactory() f = BytesIO(submission_xml_string.encode('utf-8')) f.name = 'form.xml' req = rf.post(url_path, data={'xml_submission_file': f}) #, content_type='multipart/form-data') if extra_meta: req.META.update(extra_meta) return post(req, domain) def pact_script_fields(): """ This is a hack of the query to allow for the encounter date and pact_ids to show up as first class properties """ return { "script_pact_id": { "script": """if(_source['form']['note'] != null) { _source['form']['note']['pact_id']['#value']; } else if (_source['form']['pact_id'] != null) { _source['form']['pact_id']['#value']; } else { null; } """ }, "script_encounter_date": { "script": """if(_source['form']['note'] != null) { _source['form']['note']['encounter_date']['#value']; } else if (_source['form']['encounter_date'] != null) { _source['form']['encounter_date']['#value']; } else { _source['received_on']; } """ } } def get_case_id(xform): if 'case' in xform['form']: if 'case_id' in xform['form']['case']: return xform['form']['case']['case_id'] elif '@case_id' in xform['form']['case']: return xform['form']['case']['@case_id'] return None def get_patient_display_cache(case_ids): """ For a given set of case_ids, return name and pact_ids """ if len(case_ids) == 0: return {} case_es = ReportCaseESView(PACT_DOMAIN) query = ( CaseES() .remove_default_filters() .domain(PACT_DOMAIN) .source(["_id", "name"]) .size(len(case_ids)) ) query = query.add_query({"ids": {"values": case_ids}}) query["script_fields"] = { "case_id": { "script": "_source._id" }, "pactid": get_report_script_field("pactid"), "first_name": get_report_script_field("first_name"), "last_name": get_report_script_field("last_name"), } res = case_es.run_query(query.raw_query) from pact.reports.patient import PactPatientInfoReport ret = {} for entry in res['hits']['hits']: case_id = entry['case_id'] ret[case_id] = entry ret[case_id]['url'] = PactPatientInfoReport.get_url(*['pact']) + "?patient_id=%s" % case_id return ret DEFAULT_SIZE = 10 def get_base_form_es_query(start=0, size=DEFAULT_SIZE): return (FormES() .remove_default_filters() .domain(PACT_DOMAIN) .filter(filters.term('doc_type', 'XFormInstance')) .start(start) .size(size)) def get_base_case_es_query(start=0, size=DEFAULT_SIZE): return (CaseES() .remove_default_filters() .domain(PACT_DOMAIN) .start(start) .size(size)) def get_by_case_id_form_es_query(start, size, case_id): base_query = get_base_form_es_query(start, size) return (base_query .filter( filters.nested( 'form.case', filters.OR( filters.term('form.case.@case_id', case_id), filters.term('form.case.case_id', case_id) ) ) ) ) def get_report_script_field(field_path, is_known=False): """ Generate a script field string for easier querying. field_path: is the path.to.property.name in the _source is_known: if true, then query as is, if false, then it's a dynamically mapped item, so put on the #value property at the end. 
""" property_split = field_path.split('.') property_path = '_source%s' % ''.join("['%s']" % x for x in property_split) if is_known: script_string = property_path else: full_script_path = "%s['#value']" % property_path script_string = """if (%(prop_path)s != null) { %(value_path)s; } else { null; }""" % { 'prop_path': property_path, 'value_path': full_script_path } ret = {"script": script_string} return ret
<filename>src/trusted/fault_injection/test_injection.h<gh_stars>1000+ /* * Copyright (c) 2012 The Native Client Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ /* * A Simple Test Code Injection Framework. * * While small unit-tests can exercise small components in isolation, * some tests require a more systemic test where an independent "test * jig" separate from the main code can be hard to maintain. For this * kind of testing, especially those needing to add "scary" test code * that should never be linked into production binaries, we need to * have a way to inject this code into the "production" code. At the * same time, the addition of the injected test code should not * interrupt the flow of reading/auditing the production code. To * this end, we define a macro that allows us to define a function * call to the injected test code. This will be a no-op in production * linkages -- but the test for the presence of the injection will * remain, so that the *same* compilation is tested because the test * injection is done at the linkage level by supplying different * linkage units. */ #ifndef NATIVE_CLIENT_SRC_TRUSTED_FAULT_INJECTION_TEST_INJECTION_H_ #define NATIVE_CLIENT_SRC_TRUSTED_FAULT_INJECTION_TEST_INJECTION_H_ #include "native_client/src/include/portability.h" /* * The g_nacl_test_injection_functions object cannot be an array, * since the type signatures of the test functions differ -- they will * take whatever is appropriate in the local scope of the test * injection site. NB: this also makes the generality of the test * injection somewhat problematic, since new tests that want to inject * test code at the same site may potentially need additional * arguments, which means that adding such a new test would require * modifying other tests to include new arguments to maintain type * signature conformance. (If we could pass in the full stack frame / * make the function be dynamically scoped, then this problem would * disappear.) * * We could approach this by adding direct function calls and use the * linkage unit substitution idea, but that would require a naming * convention to inform auditors that a function is merely for test * injection and is a no-op in the production linkage, which is more * likely to go wrong. Using a structure in the following manner * enforces collecting all test injection functions together (though * the actual global function name / name of file/linkage unit is * still a convention). * * The NaCl convention for the use of this library is that * possibly-scary/dangerous test injection code is in a file named * <unit-under-test>_test_injection.c, with the * g_nacl_test_injection_functions global variable in a file named * nacl_<UUT>_test_injection_test.c and the non-injection version of * functions and global variable is in a single file * nacl_test_injection_null.c. Since it is a link-time error to * provide two definitions of g_nacl_test_injection_functions, we * should be safe from linking in the possibly dangerous * <UUT>_test_injection.o files into a production binary. Changing * the injection table is done NaClTestInjectionSetInjectionTable * below, to avoid cross-dynamic-library global variable accesses. 
*/ #define NACL_TEST_INJECTION(identifier, arguments) \ do { \ if (NULL != g_nacl_test_injection_functions->identifier) { \ (*g_nacl_test_injection_functions->identifier)arguments; \ } \ } while (0) struct NaClApp; struct NaClTestInjectionTable { void (*ChangeTrampolines)(struct NaClApp *); void (*BeforeMainThreadLaunches)(void); /* * Except for -Werror=missing-field-initializers, extending this * structure should not affect existing tests -- since the new * function pointer members will get zero filled, the new call sites * will just not call anything in the pre-existing tests. */ }; extern struct NaClTestInjectionTable const *g_nacl_test_injection_functions; void NaClTestInjectionSetInjectionTable( struct NaClTestInjectionTable const *new_table); #endif
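/*
 * Illustrative sketch (hypothetical file): one way a test linkage unit might
 * supply the injection table declared above. Only struct NaClTestInjectionTable
 * and NaClTestInjectionSetInjectionTable come from the header; every other name
 * here is made up for illustration.
 */
#include "native_client/src/trusted/fault_injection/test_injection.h"

static void ExampleChangeTrampolines(struct NaClApp *nap) {
  /* "Scary" test-only behaviour would go here; never linked into production. */
  (void) nap;
}

static const struct NaClTestInjectionTable g_example_injection_table = {
  ExampleChangeTrampolines,  /* ChangeTrampolines */
  NULL,                      /* BeforeMainThreadLaunches: unused by this sketch */
};

void ExampleInstallTestInjection(void) {
  NaClTestInjectionSetInjectionTable(&g_example_injection_table);
}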
/* * Copyright (C) 1999 <NAME> (<EMAIL>) * (C) 1999 <NAME> (<EMAIL>) * (C) 2001 <NAME> <EMAIL>) * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Apple Inc. All rights * reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this library; see the file COPYING.LIB. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. */ #include "third_party/blink/renderer/core/html/html_script_element.h" #include "third_party/blink/public/mojom/script/script_type.mojom-blink.h" #include "third_party/blink/renderer/bindings/core/v8/html_script_element_or_svg_script_element.h" #include "third_party/blink/renderer/bindings/core/v8/string_or_trusted_script.h" #include "third_party/blink/renderer/core/dom/attribute.h" #include "third_party/blink/renderer/core/dom/document.h" #include "third_party/blink/renderer/core/dom/events/event.h" #include "third_party/blink/renderer/core/dom/text.h" #include "third_party/blink/renderer/core/execution_context/execution_context.h" #include "third_party/blink/renderer/core/frame/csp/content_security_policy.h" #include "third_party/blink/renderer/core/html_names.h" #include "third_party/blink/renderer/core/script/script_loader.h" #include "third_party/blink/renderer/core/script/script_runner.h" #include "third_party/blink/renderer/core/trustedtypes/trusted_script.h" #include "third_party/blink/renderer/core/trustedtypes/trusted_types_util.h" #include "third_party/blink/renderer/platform/bindings/exception_state.h" #include "third_party/blink/renderer/platform/instrumentation/use_counter.h" #include "third_party/blink/renderer/platform/runtime_enabled_features.h" namespace blink { HTMLScriptElement::HTMLScriptElement(Document& document, const CreateElementFlags flags) : HTMLElement(html_names::kScriptTag, document), children_changed_by_api_(false), loader_(InitializeScriptLoader(flags)) {} const AttrNameToTrustedType& HTMLScriptElement::GetCheckedAttributeTypes() const { DEFINE_STATIC_LOCAL(AttrNameToTrustedType, attribute_map, ({{"src", SpecificTrustedType::kScriptURL}})); return attribute_map; } bool HTMLScriptElement::IsURLAttribute(const Attribute& attribute) const { return attribute.GetName() == html_names::kSrcAttr || HTMLElement::IsURLAttribute(attribute); } bool HTMLScriptElement::HasLegalLinkAttribute(const QualifiedName& name) const { return name == html_names::kSrcAttr || HTMLElement::HasLegalLinkAttribute(name); } const QualifiedName& HTMLScriptElement::SubResourceAttributeName() const { return html_names::kSrcAttr; } void HTMLScriptElement::ChildrenChanged(const ChildrenChange& change) { HTMLElement::ChildrenChanged(change); if (change.IsChildInsertion()) loader_->ChildrenChanged(); // We'll record whether the script element children were ever changed by // the API (as opposed to the parser). 
children_changed_by_api_ |= !change.ByParser(); } void HTMLScriptElement::DidMoveToNewDocument(Document& old_document) { ScriptRunner::MovePendingScript(old_document, GetDocument(), loader_.Get()); HTMLElement::DidMoveToNewDocument(old_document); } void HTMLScriptElement::ParseAttribute( const AttributeModificationParams& params) { if (params.name == html_names::kSrcAttr) { loader_->HandleSourceAttribute(params.new_value); LogUpdateAttributeIfIsolatedWorldAndInDocument("script", params); } else if (params.name == html_names::kAsyncAttr) { loader_->HandleAsyncAttribute(); } else if (params.name == html_names::kImportanceAttr && RuntimeEnabledFeatures::PriorityHintsEnabled( GetExecutionContext())) { // The only thing we need to do for the the importance attribute/Priority // Hints is count usage upon parsing. Processing the value happens when the // element loads. UseCounter::Count(GetDocument(), WebFeature::kPriorityHints); } else { HTMLElement::ParseAttribute(params); } } Node::InsertionNotificationRequest HTMLScriptElement::InsertedInto( ContainerNode& insertion_point) { if (insertion_point.isConnected() && HasSourceAttribute() && ScriptLoader::GetScriptTypeAtPrepare( TypeAttributeValue(), LanguageAttributeValue(), ScriptLoader::kDisallowLegacyTypeInTypeAttribute) == ScriptLoader::ScriptTypeAtPrepare::kInvalid) { UseCounter::Count(GetDocument(), WebFeature::kScriptElementWithInvalidTypeHasSrc); } HTMLElement::InsertedInto(insertion_point); LogAddElementIfIsolatedWorldAndInDocument("script", html_names::kSrcAttr); return kInsertionShouldCallDidNotifySubtreeInsertions; } void HTMLScriptElement::DidNotifySubtreeInsertionsToDocument() { loader_->DidNotifySubtreeInsertionsToDocument(); } void HTMLScriptElement::setText(const String& string) { setTextContent(string); } void HTMLScriptElement::text(StringOrTrustedScript& result) { result.SetString(TextFromChildren()); } void HTMLScriptElement::setInnerText( const StringOrTrustedScript& string_or_trusted_script, ExceptionState& exception_state) { String value = TrustedTypesCheckForScript( string_or_trusted_script, GetExecutionContext(), exception_state); if (!exception_state.HadException()) { // https://w3c.github.io/webappsec-trusted-types/dist/spec/#setting-slot-values // On setting, the innerText [...] perform the regular steps, and then set // content object's [[ScriptText]] internal slot value [...]. HTMLElement::setInnerText(value, exception_state); script_text_internal_slot_ = ParkableString(value.Impl()); } } void HTMLScriptElement::setTextContent(const String& string) { // https://w3c.github.io/webappsec-trusted-types/dist/spec/#setting-slot-values // On setting, [..] textContent [..] perform the regular steps, and then set // content object's [[ScriptText]] internal slot value [...]. Node::setTextContent(string); script_text_internal_slot_ = ParkableString(string.Impl()); } void HTMLScriptElement::setTextContent( const StringOrTrustedScript& string_or_trusted_script, ExceptionState& exception_state) { String value = TrustedTypesCheckForScript( string_or_trusted_script, GetExecutionContext(), exception_state); if (!exception_state.HadException()) { // https://w3c.github.io/webappsec-trusted-types/dist/spec/#setting-slot-values // On setting, [..] textContent [..] perform the regular steps, and then set // content object's [[ScriptText]] internal slot value [...]. 
Node::setTextContent(value); script_text_internal_slot_ = ParkableString(value.Impl()); } } void HTMLScriptElement::setAsync(bool async) { SetBooleanAttribute(html_names::kAsyncAttr, async); loader_->HandleAsyncAttribute(); } void HTMLScriptElement::FinishParsingChildren() { Element::FinishParsingChildren(); // We normally expect the parser to finish parsing before any script gets // a chance to manipulate the script. However, if script parsing gets // deferrred (or similar; see crbug.com/1033101) then a script might get // access to the HTMLScriptElement before. In this case, we cannot blindly // accept the current TextFromChildren as a parser result. DCHECK(children_changed_by_api_ || !script_text_internal_slot_.length()); if (!children_changed_by_api_) script_text_internal_slot_ = ParkableString(TextFromChildren().Impl()); } bool HTMLScriptElement::async() const { return FastHasAttribute(html_names::kAsyncAttr) || loader_->IsNonBlocking(); } String HTMLScriptElement::SourceAttributeValue() const { return FastGetAttribute(html_names::kSrcAttr).GetString(); } String HTMLScriptElement::CharsetAttributeValue() const { return FastGetAttribute(html_names::kCharsetAttr).GetString(); } String HTMLScriptElement::TypeAttributeValue() const { return FastGetAttribute(html_names::kTypeAttr).GetString(); } String HTMLScriptElement::LanguageAttributeValue() const { return FastGetAttribute(html_names::kLanguageAttr).GetString(); } bool HTMLScriptElement::NomoduleAttributeValue() const { return FastHasAttribute(html_names::kNomoduleAttr); } String HTMLScriptElement::ForAttributeValue() const { return FastGetAttribute(html_names::kForAttr).GetString(); } String HTMLScriptElement::EventAttributeValue() const { return FastGetAttribute(html_names::kEventAttr).GetString(); } String HTMLScriptElement::CrossOriginAttributeValue() const { return FastGetAttribute(html_names::kCrossoriginAttr); } String HTMLScriptElement::IntegrityAttributeValue() const { return FastGetAttribute(html_names::kIntegrityAttr); } String HTMLScriptElement::ReferrerPolicyAttributeValue() const { return FastGetAttribute(html_names::kReferrerpolicyAttr); } String HTMLScriptElement::ImportanceAttributeValue() const { return FastGetAttribute(html_names::kImportanceAttr); } String HTMLScriptElement::ChildTextContent() { return TextFromChildren(); } String HTMLScriptElement::ScriptTextInternalSlot() const { return script_text_internal_slot_.ToString(); } bool HTMLScriptElement::AsyncAttributeValue() const { return FastHasAttribute(html_names::kAsyncAttr); } bool HTMLScriptElement::DeferAttributeValue() const { return FastHasAttribute(html_names::kDeferAttr); } bool HTMLScriptElement::HasSourceAttribute() const { return FastHasAttribute(html_names::kSrcAttr); } bool HTMLScriptElement::IsConnected() const { return Node::isConnected(); } bool HTMLScriptElement::HasChildren() const { return Node::hasChildren(); } const AtomicString& HTMLScriptElement::GetNonceForElement() const { return ContentSecurityPolicy::IsNonceableElement(this) ? 
nonce() : g_null_atom; } bool HTMLScriptElement::AllowInlineScriptForCSP( const AtomicString& nonce, const WTF::OrdinalNumber& context_line, const String& script_content) { return GetExecutionContext() ->GetContentSecurityPolicyForCurrentWorld() ->AllowInline(ContentSecurityPolicy::InlineType::kScript, this, script_content, nonce, GetDocument().Url(), context_line); } Document& HTMLScriptElement::GetDocument() const { return Node::GetDocument(); } ExecutionContext* HTMLScriptElement::GetExecutionContext() const { return Node::GetExecutionContext(); } void HTMLScriptElement::DispatchLoadEvent() { DispatchEvent(*Event::Create(event_type_names::kLoad)); } void HTMLScriptElement::DispatchErrorEvent() { DispatchEvent(*Event::Create(event_type_names::kError)); } void HTMLScriptElement::SetScriptElementForBinding( HTMLScriptElementOrSVGScriptElement& element) { if (!IsInShadowTree()) element.SetHTMLScriptElement(this); } ScriptElementBase::Type HTMLScriptElement::GetScriptElementType() { return ScriptElementBase::Type::kHTMLScriptElement; } Element& HTMLScriptElement::CloneWithoutAttributesAndChildren( Document& factory) const { CreateElementFlags flags = CreateElementFlags::ByCloneNode().SetAlreadyStarted( loader_->AlreadyStarted()); return *factory.CreateElement(TagQName(), flags, IsValue()); } void HTMLScriptElement::Trace(Visitor* visitor) const { visitor->Trace(loader_); HTMLElement::Trace(visitor); ScriptElementBase::Trace(visitor); } } // namespace blink
package com.wepay.waltz.storage.common.message; import com.wepay.waltz.common.message.RecordHeader; public class RecordHeaderResponse extends StorageMessage { public final RecordHeader recordHeader; public RecordHeaderResponse(long sessionId, long seqNum, int partitionId, RecordHeader recordHeader) { super(sessionId, seqNum, partitionId); this.recordHeader = recordHeader; } @Override public byte type() { return StorageMessageType.RECORD_HEADER_RESPONSE; } }
// Copyright (c) 2017-2021, <NAME>. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #pragma once #include <list> #include <string> #include <type_traits> namespace utils { template <template <class, class> class Container, class T, class Allocator = std::allocator<T>> auto split(T strv, T delims = " ") { static_assert(std::is_same<T, std::string>::value || std::is_same<T, std::string_view>::value, "std::string or std::string_view expected"); Container<T, Allocator> output; size_t first = 0; while (first < strv.size()) { const auto second = strv.find_first_of(delims, first); if (first != second) output.emplace_back(strv.substr(first, second - first)); if (second == std::string_view::npos) break; first = second + 1; } return output; } } // namespace utils
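// Minimal usage sketch for utils::split above (assumes C++17 and that the
// declaration above is visible, e.g. via its header; main() is hypothetical).
#include <iostream>
#include <list>
#include <string>
#include <vector>

int main() {
    const std::string line = "alpha beta gamma";
    // The caller picks the container template; the delimiter defaults to a space.
    const auto words = utils::split<std::vector>(line);
    // A std::list, splitting on either ',' or ';'.
    const auto fields = utils::split<std::list>(std::string{"a,b;c"}, std::string{",;"});
    std::cout << words.size() << " words, " << fields.size() << " fields\n";
    return 0;
}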
//////////////////////////////////////////////////////////////////////////////////////////////////// // // Project: Embedded Learning Library (ELL) // File: CompilerOptions.cpp (emitters) // Authors: <NAME> // //////////////////////////////////////////////////////////////////////////////////////////////////// #include "CompilerOptions.h" #include <utilities/include/Exception.h> #include <map> #define ADD_TO_STRING_ENTRY(NAMESPACE, ENTRY) \ case NAMESPACE::ENTRY: \ return #ENTRY; #define BEGIN_FROM_STRING if (false) #define ADD_FROM_STRING_ENTRY(NAMESPACE, ENTRY) else if (s == #ENTRY) return NAMESPACE::ENTRY namespace ell { namespace emitters { std::string ToString(BlasType t) { switch (t) { case BlasType::unknown: return "unknown"; case BlasType::openBLAS: return "openBLAS"; case BlasType::atlas: return "atlas"; default: throw utilities::InputException(utilities::InputExceptionErrors::invalidArgument); } } /// <summary> Constructor from a property bag </summary> CompilerOptions::CompilerOptions(const utilities::PropertyBag& properties) { AddOptions(properties); } CompilerOptions CompilerOptions::AppendOptions(const utilities::PropertyBag& properties) const { CompilerOptions result = *this; result.AddOptions(properties); return result; } void CompilerOptions::AddOptions(const utilities::PropertyBag& properties) { blasType = properties.GetOrParseEntry<BlasType>("blasType", blasType); if (properties.HasEntry("positionIndependentCode")) { positionIndependentCode = properties.GetOrParseEntry<bool>("positionIndependentCode"); } optimize = properties.GetOrParseEntry("optimize", optimize); unrollLoops = properties.GetOrParseEntry("unrollLoops", unrollLoops); inlineOperators = properties.GetOrParseEntry<bool>("inlineOperators", inlineOperators); allowVectorInstructions = properties.GetOrParseEntry<bool>("allowVectorInstructions", allowVectorInstructions); vectorWidth = properties.GetOrParseEntry<int>("vectorWidth", vectorWidth); useBlas = properties.GetOrParseEntry<bool>("useBlas", useBlas); profile = properties.GetOrParseEntry<bool>("profile", profile); includeDiagnosticInfo = properties.GetOrParseEntry<bool>("includeDiagnosticInfo", includeDiagnosticInfo); parallelize = properties.GetOrParseEntry<bool>("parallelize", parallelize); useThreadPool = properties.GetOrParseEntry<bool>("useThreadPool", useThreadPool); maxThreads = properties.GetOrParseEntry<int>("maxThreads", maxThreads); useFastMath = properties.GetOrParseEntry<bool>("useFastMath", useFastMath); debug = properties.GetOrParseEntry<bool>("debug", debug); globalValueAlignment = properties.GetOrParseEntry<int>("globalValueAlignment", globalValueAlignment); skip_ellcode = properties.GetOrParseEntry<bool>("skip_ellcode", skip_ellcode); if (properties.HasEntry("deviceName")) { targetDevice = GetTargetDevice(properties.GetEntry<std::string>("deviceName")); } } } // namespace emitters namespace utilities { template <> emitters::BlasType FromString<emitters::BlasType>(const std::string& s) { static std::map<std::string, emitters::BlasType> nameMap = { { "unknown", emitters::BlasType::unknown }, { "openBLAS", emitters::BlasType::openBLAS }, { "atlas", emitters::BlasType::atlas } }; auto it = nameMap.find(s); if (it == nameMap.end()) { throw utilities::InputException(utilities::InputExceptionErrors::indexOutOfRange, "Unknown BlasType"); } return it->second; } } // namespace utilities } // namespace ell
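// Round-trip sketch for the BlasType string helpers defined above (assumes the
// ELL emitters/utilities headers declaring ToString and the FromString template
// are available; main() is hypothetical).
#include <iostream>

int main() {
    using ell::emitters::BlasType;
    // FromString<BlasType> parses a name; ToString maps the enum back to text.
    const BlasType t = ell::utilities::FromString<BlasType>("openBLAS");
    std::cout << ell::emitters::ToString(t) << std::endl;  // prints "openBLAS"
    return 0;
}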
<gh_stars>100-1000 {"nom":"Jouy-Mauvoisin","circ":"9ème circonscription","dpt":"Yvelines","inscrits":411,"abs":233,"votants":178,"blancs":19,"nuls":7,"exp":152,"res":[{"nuance":"MDM","nom":"<NAME>","voix":86},{"nuance":"LR","nom":"<NAME>","voix":66}]}
<reponame>zipated/src // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_UI_APP_LIST_SEARCH_SETTINGS_SHORTCUT_SETTINGS_SHORTCUT_METADATA_H_ #define CHROME_BROWSER_UI_APP_LIST_SEARCH_SETTINGS_SHORTCUT_SETTINGS_SHORTCUT_METADATA_H_ #include <vector> namespace gfx { struct VectorIcon; } namespace app_list { // Metadata for Settings shortcut. // TODO(wutao): Add UI controls, e.g. toggle, slider, to change the settings. struct SettingsShortcut { const char* shortcut_id; int name_string_resource_id = 0; const gfx::VectorIcon& vector_icon; // Settings subpage, e.g. bluetooth, network etc. const char* subpage; // The string used for search query in addition to the name. int searchable_string_resource_id = 0; }; // Returns a list of Settings shortcuts, which are searchable in launcher. const std::vector<SettingsShortcut>& GetSettingsShortcutList(); } // namespace app_list #endif // CHROME_BROWSER_UI_APP_LIST_SEARCH_SETTINGS_SHORTCUT_SETTINGS_SHORTCUT_METADATA_H_
/* * This file is part of the GROMACS molecular simulation package. * * Copyright (c) 2018,2019,2020, by the GROMACS development team, led by * <NAME>, <NAME>, <NAME>, and <NAME>, * and including many others, as listed in the AUTHORS file in the * top-level source directory and at http://www.gromacs.org. * * GROMACS is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2.1 * of the License, or (at your option) any later version. * * GROMACS is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with GROMACS; if not, see * http://www.gnu.org/licenses, or write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * If you want to redistribute modifications to GROMACS, please * consider that scientific software is very special. Version * control is crucial - bugs must be traceable. We will be happy to * consider code for inclusion in the official distribution, but * derived work must not be called official GROMACS. Details are found * in the README & COPYING files - if they are missing, get the * official version at http://www.gromacs.org. * * To help us fund GROMACS development, we humbly ask that you cite * the research papers on the package. Check out http://www.gromacs.org. */ #ifndef GMXAPI_GROMACSFWD_H #define GMXAPI_GROMACSFWD_H /*! \ingroup gmxapi * \file * \brief Provide forward declarations for symbols in the GROMACS public headers. * * Basic API clients only need to compile * and link against the gmxapi target, but some gmxapi classes use opaque pointers to * library classes that are forward-declared here. * Client code should not need to include this header directly. * * For maximal compatibility with other libgmxapi clients (such as third-party * Python modules), client code should use the wrappers and protocols in the * gmxapi.h header. * * Note that there is a separate CMake target to build the full * developer documentation for gmxapi. * Refer to GMXAPI developer docs for the protocols that map gmxapi interfaces to * GROMACS library interfaces. * Refer to the GROMACS developer * documentation for details on library interfaces forward-declared in this header. * * \todo Improve documentation cross-linking. */ // Forward declaration for src/gromacs/mdtypes/inputrec.h struct t_inputrec; namespace gmx { // Forward declaration for libgromacs header gromacs/restraint/restraintpotential.h class IRestraintPotential; } // end namespace gmx #endif // GMXAPI_GROMACSFWD_H
#ifndef FLYWEIGHT_CACHE_H
#define FLYWEIGHT_CACHE_H

#include <QMap>
#include <QSharedPointer>
#include <QWeakPointer>

template <class K, class T>
class FlyweightCache {
public:
    static QSharedPointer<T> Get(const K &key);

private:
    static QMap<K, QWeakPointer<T>> Map;
};

template <class K, class T>
QMap<K, QWeakPointer<T>> FlyweightCache<K, T>::Map;

template <class K, class T>
QSharedPointer<T> FlyweightCache<K, T>::Get(const K &key)
{
    auto it = Map.find(key);
    if (it != Map.end() && !it.value().isNull()) {
        return QSharedPointer<T>(it.value());
    }
    const QSharedPointer<T> shared(new T(key));
    const QWeakPointer<T> weak(shared);
    Map.insert(key, weak);
    return shared;
}

#endif // FLYWEIGHT_CACHE_H
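A minimal usage sketch for the cache above. The Brush type, key values, and include path are hypothetical illustrations, not part of the original header; the sketch only assumes Qt is available and that the header above is included.

#include <QDebug>
#include <QSharedPointer>
#include <QString>
// #include "flyweight_cache.h"  // the header above; the path is an assumption

// Hypothetical flyweight payload: FlyweightCache::Get does `new T(key)`,
// so T must be constructible from the key type.
struct Brush {
    explicit Brush(const QString &name) : name(name) {}
    QString name;
};

int main()
{
    // Both lookups return the same shared instance while at least one
    // QSharedPointer is alive; the cache itself only keeps a QWeakPointer,
    // so the Brush is destroyed once all strong references are gone.
    QSharedPointer<Brush> a = FlyweightCache<QString, Brush>::Get(QStringLiteral("red"));
    QSharedPointer<Brush> b = FlyweightCache<QString, Brush>::Get(QStringLiteral("red"));
    qDebug() << (a == b);  // true: one instance per distinct key
    return 0;
}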
//===-- ARMSelectionDAGInfo.h - ARM SelectionDAG Info -----------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the ARM subclass for TargetSelectionDAGInfo.
//
//===----------------------------------------------------------------------===//

#ifndef ARMSELECTIONDAGINFO_H
#define ARMSELECTIONDAGINFO_H

#include "MCTargetDesc/ARMAddressingModes.h"
#include "llvm/Target/TargetSelectionDAGInfo.h"

namespace llvm {

namespace ARM_AM {
  static inline ShiftOpc getShiftOpcForNode(unsigned Opcode) {
    switch (Opcode) {
    default:          return ARM_AM::no_shift;
    case ISD::SHL:    return ARM_AM::lsl;
    case ISD::SRL:    return ARM_AM::lsr;
    case ISD::SRA:    return ARM_AM::asr;
    case ISD::ROTR:   return ARM_AM::ror;
    //case ISD::ROTL:  // Only if imm -> turn into ROTR.
    // Can't handle RRX here, because it would require folding a flag into
    // the addressing mode. :( This causes us to miss certain things.
    //case ARMISD::RRX: return ARM_AM::rrx;
    }
  }
}  // end namespace ARM_AM

class ARMSelectionDAGInfo : public TargetSelectionDAGInfo {
  /// Subtarget - Keep a pointer to the ARMSubtarget around so that we can
  /// make the right decision when generating code for different targets.
  const ARMSubtarget *Subtarget;

public:
  explicit ARMSelectionDAGInfo(const TargetMachine &TM);
  ~ARMSelectionDAGInfo();

  virtual
  SDValue EmitTargetCodeForMemcpy(SelectionDAG &DAG, DebugLoc dl,
                                  SDValue Chain,
                                  SDValue Dst, SDValue Src,
                                  SDValue Size, unsigned Align,
                                  bool isVolatile, bool AlwaysInline,
                                  MachinePointerInfo DstPtrInfo,
                                  MachinePointerInfo SrcPtrInfo) const;

  // Adjust parameters for memset, see RTABI section 4.3.4
  virtual
  SDValue EmitTargetCodeForMemset(SelectionDAG &DAG, DebugLoc dl,
                                  SDValue Chain,
                                  SDValue Op1, SDValue Op2,
                                  SDValue Op3, unsigned Align,
                                  bool isVolatile,
                                  MachinePointerInfo DstPtrInfo) const;
};

}

#endif
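A small illustration of the opcode mapping above. This snippet is hypothetical and not part of the original header; it assumes the header above and LLVM's generic ISD opcode header are on the include path.

#include "llvm/CodeGen/ISDOpcodes.h"
// assumes ARMSelectionDAGInfo.h (above) has been included

using namespace llvm;

// Generic SelectionDAG shift nodes map onto ARM addressing-mode shift
// operators; any other opcode falls back to no_shift.
void ShiftOpcDemo() {
  ARM_AM::ShiftOpc Lsl  = ARM_AM::getShiftOpcForNode(ISD::SHL);  // ARM_AM::lsl
  ARM_AM::ShiftOpc Asr  = ARM_AM::getShiftOpcForNode(ISD::SRA);  // ARM_AM::asr
  ARM_AM::ShiftOpc None = ARM_AM::getShiftOpcForNode(ISD::ADD);  // ARM_AM::no_shift
  (void)Lsl; (void)Asr; (void)None;
}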
/* Copyright (C) 2005-2011 <NAME> */

#import <Cocoa/Cocoa.h>

/**
 * An LC_Arg is used to pass a simple data type via a selector.
 */
@interface LC_Arg : NSObject {
@public
    bool b;
    int i;
}

/**
 * Allocate an LC_Arg with a bool value.
 */
+ (LC_Arg*) allocBool: (BOOL)arg;

/**
 * Allocate an LC_Arg with an integer value.
 */
+ (LC_Arg*) allocInt: (int)arg;

@end

/* vim:set et sw=4 ts=4: */
#ifndef SimDataFormats_Associations_LayerClusterToCaloParticleAssociatorBaseImpl_h #define SimDataFormats_Associations_LayerClusterToCaloParticleAssociatorBaseImpl_h /** \class LayerClusterToCaloParticleAssociatorBaseImpl * * Base class for LayerClusterToCaloParticleAssociators. Methods take as input * the handle of LayerClusters and the CaloParticle collections and return an * AssociationMap (oneToManyWithQuality) * * \author <NAME> */ #include "DataFormats/Common/interface/Handle.h" #include "DataFormats/Common/interface/AssociationMap.h" #include "DataFormats/CaloRecHit/interface/CaloClusterFwd.h" #include "SimDataFormats/CaloAnalysis/interface/CaloParticleFwd.h" namespace hgcal { typedef edm::AssociationMap< edm::OneToManyWithQualityGeneric<CaloParticleCollection, reco::CaloClusterCollection, std::pair<float, float>>> SimToRecoCollection; typedef edm::AssociationMap< edm::OneToManyWithQualityGeneric<reco::CaloClusterCollection, CaloParticleCollection, float>> RecoToSimCollection; class LayerClusterToCaloParticleAssociatorBaseImpl { public: /// Constructor LayerClusterToCaloParticleAssociatorBaseImpl(); /// Destructor virtual ~LayerClusterToCaloParticleAssociatorBaseImpl(); /// Associate a LayerCluster to CaloParticles virtual hgcal::RecoToSimCollection associateRecoToSim(const edm::Handle<reco::CaloClusterCollection> &cCH, const edm::Handle<CaloParticleCollection> &cPCH) const; /// Associate a CaloParticle to LayerClusters virtual hgcal::SimToRecoCollection associateSimToReco(const edm::Handle<reco::CaloClusterCollection> &cCH, const edm::Handle<CaloParticleCollection> &cPCH) const; }; } // namespace hgcal #endif
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_tools.hxx" #define _TOOLS_RESARY_CXX #include <tools/resary.hxx> #include <tools/rcid.h> // ======================================================================= ResStringArray::ResStringArray( const ResId& rResId ) { rResId.SetRT( RSC_STRINGARRAY ); ResMgr* pMgr = rResId.GetResMgr(); if( pMgr && pMgr->GetResource( rResId ) ) { pMgr->GetClass(); pMgr->Increment( sizeof( RSHEADER_TYPE ) ); const sal_uInt32 nItems = pMgr->ReadLong(); if ( nItems ) { m_aStrings.reserve( nItems ); for ( sal_uInt32 i = 0; i < nItems; i++ ) { // load string m_aStrings.push_back( ImplResStringItem( pMgr->ReadString() ) ); // load value m_aStrings[i].m_nValue = pMgr->ReadLong(); } } } } // ----------------------------------------------------------------------- ResStringArray::~ResStringArray() { } // ----------------------------------------------------------------------- sal_uInt32 ResStringArray::FindIndex( long nValue ) const { const sal_uInt32 nItems = m_aStrings.size(); for ( sal_uInt32 i = 0; i < nItems; i++ ) { if ( m_aStrings[i].m_nValue == nValue ) return i; } return RESARRAY_INDEX_NOTFOUND; }
{"code":{"coding":[{"code":"323733007","display":"Amoxcycillin 250mg/5ml oral suspension (product)","system":"http://snomed.info/sct"},{"code":"0131314003","display":"Amoxicillin (Amoxcillin Trihydrate) 250mg/5mL","system":"http://hc-sc.gc.ca"}]},"id":"medexample002","isBrand":false,"product":{"form":{"coding":[{"code":"385024007","display":"Oral Suspension","system":"http://snomed.info/sct"}]}},"resourceType":"Medication","text":{"div":"<div>Amoxicillin 250mg/5ml Suspension</div>","status":"generated"}}
//===-- MSP430TargetMachine.h - Define TargetMachine for MSP430 -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file declares the MSP430 specific subclass of TargetMachine.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_MSP430_MSP430TARGETMACHINE_H
#define LLVM_LIB_TARGET_MSP430_MSP430TARGETMACHINE_H

#include "MSP430Subtarget.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/Target/TargetMachine.h"

namespace llvm {

/// MSP430TargetMachine
///
class MSP430TargetMachine : public LLVMTargetMachine {
  std::unique_ptr<TargetLoweringObjectFile> TLOF;
  MSP430Subtarget Subtarget;

public:
  MSP430TargetMachine(const Target &T, const Triple &TT, StringRef CPU,
                      StringRef FS, const TargetOptions &Options,
                      Optional<Reloc::Model> RM, Optional<CodeModel::Model> CM,
                      CodeGenOpt::Level OL, bool JIT);
  ~MSP430TargetMachine() override;

  const MSP430Subtarget *getSubtargetImpl(const Function &F) const override {
    return &Subtarget;
  }
  TargetPassConfig *createPassConfig(PassManagerBase &PM) override;

  TargetLoweringObjectFile *getObjFileLowering() const override {
    return TLOF.get();
  }
}; // MSP430TargetMachine.

} // end namespace llvm

#endif
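For context, a TargetMachine subclass like this is normally registered with LLVM's target registry at initialization time. The sketch below follows that pattern but is hypothetical: exact header locations and visibility macros vary between LLVM versions, and it assumes getTheMSP430Target() is declared by the target's TargetInfo header.

#include "llvm/Support/TargetRegistry.h"  // llvm/MC/TargetRegistry.h in newer trees
// assumes MSP430TargetMachine.h (above) and the MSP430 TargetInfo header are included

using namespace llvm;

extern "C" void LLVMInitializeMSP430Target() {
  // Registering the factory lets drivers such as llc construct an
  // MSP430TargetMachine when the msp430 triple is requested.
  RegisterTargetMachine<MSP430TargetMachine> X(getTheMSP430Target());
}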
<reponame>rikvb/camel /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jcache.policy; import java.util.UUID; import javax.cache.Cache; import javax.cache.CacheManager; import javax.cache.Caching; import org.apache.camel.component.jcache.support.HazelcastTest; import org.apache.camel.test.junit5.CamelTestSupport; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @HazelcastTest public class JCachePolicyTestBase extends CamelTestSupport { @BeforeEach public void before() { //Setup mock getMockEndpoint("mock:value") .whenAnyExchangeReceived(e -> e.getMessage().setBody(generateValue(e.getMessage().getBody(String.class)))); } public static String randomString() { return UUID.randomUUID().toString(); } public static Cache lookupCache(String cacheName) { //This will also open a closed cache return Caching.getCachingProvider().getCacheManager().getCache(cacheName); } public static String generateValue(String key) { return "value-" + key; } @AfterEach public void after() { //The RouteBuilder code is called for every test, so we destroy cache after each test CacheManager cacheManager = Caching.getCachingProvider().getCacheManager(); cacheManager.getCacheNames().forEach(s -> cacheManager.destroyCache(s)); Caching.getCachingProvider().close(); } }
package test;

class Int {
    {
        int r = IntKt.lll(1);
    }
}
<gh_stars>1000+ # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import collections PackageInfo = collections.namedtuple( 'PackageInfo', ['package', 'activity', 'cmdline_file', 'devtools_socket']) PACKAGE_INFO = { 'chrome_document': PackageInfo( 'com.google.android.apps.chrome.document', 'com.google.android.apps.chrome.document.ChromeLauncherActivity', 'chrome-command-line', 'chrome_devtools_remote'), 'chrome': PackageInfo( 'com.google.android.apps.chrome', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'chrome_beta': PackageInfo( 'com.chrome.beta', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'chrome_stable': PackageInfo( 'com.android.chrome', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'chrome_dev': PackageInfo( 'com.chrome.dev', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'chrome_canary': PackageInfo( 'com.chrome.canary', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'chromium': PackageInfo( 'org.chromium.chrome', 'com.google.android.apps.chrome.Main', 'chrome-command-line', 'chrome_devtools_remote'), 'content_shell': PackageInfo( 'org.chromium.content_shell_apk', '.ContentShellActivity', 'content-shell-command-line', 'content_shell_devtools_remote'), }
<filename>lib/python2.7/site-packages/samba/external/testtools/tests/test_matchers.py # Copyright (c) 2008-2011 testtools developers. See LICENSE for details. """Tests for matchers.""" import doctest import re import os import shutil import sys import tarfile import tempfile from testtools import ( Matcher, # check that Matcher is exposed at the top level for docs. skipIf, TestCase, ) from testtools.compat import ( StringIO, str_is_unicode, text_repr, _b, _u, ) from testtools.matchers import ( AfterPreprocessing, AllMatch, Annotate, AnnotatedMismatch, _BinaryMismatch, Contains, DirContains, DirExists, DocTestMatches, DoesNotEndWith, DoesNotStartWith, EndsWith, Equals, FileContains, FileExists, HasPermissions, KeysEqual, Is, IsInstance, LessThan, GreaterThan, MatchesAny, MatchesAll, MatchesException, MatchesListwise, MatchesPredicate, MatchesRegex, MatchesSetwise, MatchesStructure, Mismatch, MismatchDecorator, MismatchError, Not, NotEquals, PathExists, Raises, raises, SamePath, StartsWith, TarballContains, ) from testtools.tests.helpers import FullStackRunTest # Silence pyflakes. Matcher class TestMismatch(TestCase): run_tests_with = FullStackRunTest def test_constructor_arguments(self): mismatch = Mismatch("some description", {'detail': "things"}) self.assertEqual("some description", mismatch.describe()) self.assertEqual({'detail': "things"}, mismatch.get_details()) def test_constructor_no_arguments(self): mismatch = Mismatch() self.assertThat(mismatch.describe, Raises(MatchesException(NotImplementedError))) self.assertEqual({}, mismatch.get_details()) class TestMismatchError(TestCase): def test_is_assertion_error(self): # MismatchError is an AssertionError, so that most of the time, it # looks like a test failure, rather than an error. def raise_mismatch_error(): raise MismatchError(2, Equals(3), Equals(3).match(2)) self.assertRaises(AssertionError, raise_mismatch_error) def test_default_description_is_mismatch(self): mismatch = Equals(3).match(2) e = MismatchError(2, Equals(3), mismatch) self.assertEqual(mismatch.describe(), str(e)) def test_default_description_unicode(self): matchee = _u('\xa7') matcher = Equals(_u('a')) mismatch = matcher.match(matchee) e = MismatchError(matchee, matcher, mismatch) self.assertEqual(mismatch.describe(), str(e)) def test_verbose_description(self): matchee = 2 matcher = Equals(3) mismatch = matcher.match(2) e = MismatchError(matchee, matcher, mismatch, True) expected = ( 'Match failed. Matchee: %r\n' 'Matcher: %s\n' 'Difference: %s\n' % ( matchee, matcher, matcher.match(matchee).describe(), )) self.assertEqual(expected, str(e)) def test_verbose_unicode(self): # When assertThat is given matchees or matchers that contain non-ASCII # unicode strings, we can still provide a meaningful error. matchee = _u('\xa7') matcher = Equals(_u('a')) mismatch = matcher.match(matchee) expected = ( 'Match failed. 
Matchee: %s\n' 'Matcher: %s\n' 'Difference: %s\n' % ( text_repr(matchee), matcher, mismatch.describe(), )) e = MismatchError(matchee, matcher, mismatch, True) if str_is_unicode: actual = str(e) else: actual = unicode(e) # Using str() should still work, and return ascii only self.assertEqual( expected.replace(matchee, matchee.encode("unicode-escape")), str(e).decode("ascii")) self.assertEqual(expected, actual) class Test_BinaryMismatch(TestCase): """Mismatches from binary comparisons need useful describe output""" _long_string = "This is a longish multiline non-ascii string\n\xa7" _long_b = _b(_long_string) _long_u = _u(_long_string) def test_short_objects(self): o1, o2 = object(), object() mismatch = _BinaryMismatch(o1, "!~", o2) self.assertEqual(mismatch.describe(), "%r !~ %r" % (o1, o2)) def test_short_mixed_strings(self): b, u = _b("\xa7"), _u("\xa7") mismatch = _BinaryMismatch(b, "!~", u) self.assertEqual(mismatch.describe(), "%r !~ %r" % (b, u)) def test_long_bytes(self): one_line_b = self._long_b.replace(_b("\n"), _b(" ")) mismatch = _BinaryMismatch(one_line_b, "!~", self._long_b) self.assertEqual(mismatch.describe(), "%s:\nreference = %s\nactual = %s\n" % ("!~", text_repr(one_line_b), text_repr(self._long_b, multiline=True))) def test_long_unicode(self): one_line_u = self._long_u.replace("\n", " ") mismatch = _BinaryMismatch(one_line_u, "!~", self._long_u) self.assertEqual(mismatch.describe(), "%s:\nreference = %s\nactual = %s\n" % ("!~", text_repr(one_line_u), text_repr(self._long_u, multiline=True))) def test_long_mixed_strings(self): mismatch = _BinaryMismatch(self._long_b, "!~", self._long_u) self.assertEqual(mismatch.describe(), "%s:\nreference = %s\nactual = %s\n" % ("!~", text_repr(self._long_b, multiline=True), text_repr(self._long_u, multiline=True))) def test_long_bytes_and_object(self): obj = object() mismatch = _BinaryMismatch(self._long_b, "!~", obj) self.assertEqual(mismatch.describe(), "%s:\nreference = %s\nactual = %s\n" % ("!~", text_repr(self._long_b, multiline=True), repr(obj))) def test_long_unicode_and_object(self): obj = object() mismatch = _BinaryMismatch(self._long_u, "!~", obj) self.assertEqual(mismatch.describe(), "%s:\nreference = %s\nactual = %s\n" % ("!~", text_repr(self._long_u, multiline=True), repr(obj))) class TestMatchersInterface(object): run_tests_with = FullStackRunTest def test_matches_match(self): matcher = self.matches_matcher matches = self.matches_matches mismatches = self.matches_mismatches for candidate in matches: self.assertEqual(None, matcher.match(candidate)) for candidate in mismatches: mismatch = matcher.match(candidate) self.assertNotEqual(None, mismatch) self.assertNotEqual(None, getattr(mismatch, 'describe', None)) def test__str__(self): # [(expected, object to __str__)]. examples = self.str_examples for expected, matcher in examples: self.assertThat(matcher, DocTestMatches(expected)) def test_describe_difference(self): # [(expected, matchee, matcher), ...] examples = self.describe_examples for difference, matchee, matcher in examples: mismatch = matcher.match(matchee) self.assertEqual(difference, mismatch.describe()) def test_mismatch_details(self): # The mismatch object must provide get_details, which must return a # dictionary mapping names to Content objects. 
examples = self.describe_examples for difference, matchee, matcher in examples: mismatch = matcher.match(matchee) details = mismatch.get_details() self.assertEqual(dict(details), details) class TestDocTestMatchesInterface(TestCase, TestMatchersInterface): matches_matcher = DocTestMatches("Ran 1 test in ...s", doctest.ELLIPSIS) matches_matches = ["Ran 1 test in 0.000s", "Ran 1 test in 1.234s"] matches_mismatches = ["Ran 1 tests in 0.000s", "Ran 2 test in 0.000s"] str_examples = [("DocTestMatches('Ran 1 test in ...s\\n')", DocTestMatches("Ran 1 test in ...s")), ("DocTestMatches('foo\\n', flags=8)", DocTestMatches("foo", flags=8)), ] describe_examples = [('Expected:\n Ran 1 tests in ...s\nGot:\n' ' Ran 1 test in 0.123s\n', "Ran 1 test in 0.123s", DocTestMatches("Ran 1 tests in ...s", doctest.ELLIPSIS))] class TestDocTestMatchesInterfaceUnicode(TestCase, TestMatchersInterface): matches_matcher = DocTestMatches(_u("\xa7..."), doctest.ELLIPSIS) matches_matches = [_u("\xa7"), _u("\xa7 more\n")] matches_mismatches = ["\\xa7", _u("more \xa7"), _u("\n\xa7")] str_examples = [("DocTestMatches(%r)" % (_u("\xa7\n"),), DocTestMatches(_u("\xa7"))), ] describe_examples = [( _u("Expected:\n \xa7\nGot:\n a\n"), "a", DocTestMatches(_u("\xa7"), doctest.ELLIPSIS))] class TestDocTestMatchesSpecific(TestCase): run_tests_with = FullStackRunTest def test___init__simple(self): matcher = DocTestMatches("foo") self.assertEqual("foo\n", matcher.want) def test___init__flags(self): matcher = DocTestMatches("bar\n", doctest.ELLIPSIS) self.assertEqual("bar\n", matcher.want) self.assertEqual(doctest.ELLIPSIS, matcher.flags) def test_describe_non_ascii_bytes(self): """Even with bytestrings, the mismatch should be coercible to unicode DocTestMatches is intended for text, but the Python 2 str type also permits arbitrary binary inputs. This is a slightly bogus thing to do, and under Python 3 using bytes objects will reasonably raise an error. 
""" header = _b("\x89PNG\r\n\x1a\n...") if str_is_unicode: self.assertRaises(TypeError, DocTestMatches, header, doctest.ELLIPSIS) return matcher = DocTestMatches(header, doctest.ELLIPSIS) mismatch = matcher.match(_b("GIF89a\1\0\1\0\0\0\0;")) # Must be treatable as unicode text, the exact output matters less self.assertTrue(unicode(mismatch.describe())) class TestEqualsInterface(TestCase, TestMatchersInterface): matches_matcher = Equals(1) matches_matches = [1] matches_mismatches = [2] str_examples = [("Equals(1)", Equals(1)), ("Equals('1')", Equals('1'))] describe_examples = [("1 != 2", 2, Equals(1))] class TestNotEqualsInterface(TestCase, TestMatchersInterface): matches_matcher = NotEquals(1) matches_matches = [2] matches_mismatches = [1] str_examples = [ ("NotEquals(1)", NotEquals(1)), ("NotEquals('1')", NotEquals('1'))] describe_examples = [("1 == 1", 1, NotEquals(1))] class TestIsInterface(TestCase, TestMatchersInterface): foo = object() bar = object() matches_matcher = Is(foo) matches_matches = [foo] matches_mismatches = [bar, 1] str_examples = [("Is(2)", Is(2))] describe_examples = [("1 is not 2", 2, Is(1))] class TestIsInstanceInterface(TestCase, TestMatchersInterface): class Foo:pass matches_matcher = IsInstance(Foo) matches_matches = [Foo()] matches_mismatches = [object(), 1, Foo] str_examples = [ ("IsInstance(str)", IsInstance(str)), ("IsInstance(str, int)", IsInstance(str, int)), ] describe_examples = [ ("'foo' is not an instance of int", 'foo', IsInstance(int)), ("'foo' is not an instance of any of (int, type)", 'foo', IsInstance(int, type)), ] class TestLessThanInterface(TestCase, TestMatchersInterface): matches_matcher = LessThan(4) matches_matches = [-5, 3] matches_mismatches = [4, 5, 5000] str_examples = [ ("LessThan(12)", LessThan(12)), ] describe_examples = [ ('4 is not > 5', 5, LessThan(4)), ('4 is not > 4', 4, LessThan(4)), ] class TestGreaterThanInterface(TestCase, TestMatchersInterface): matches_matcher = GreaterThan(4) matches_matches = [5, 8] matches_mismatches = [-2, 0, 4] str_examples = [ ("GreaterThan(12)", GreaterThan(12)), ] describe_examples = [ ('5 is not < 4', 4, GreaterThan(5)), ('4 is not < 4', 4, GreaterThan(4)), ] class TestContainsInterface(TestCase, TestMatchersInterface): matches_matcher = Contains('foo') matches_matches = ['foo', 'afoo', 'fooa'] matches_mismatches = ['f', 'fo', 'oo', 'faoo', 'foao'] str_examples = [ ("Contains(1)", Contains(1)), ("Contains('foo')", Contains('foo')), ] describe_examples = [("1 not in 2", 2, Contains(1))] def make_error(type, *args, **kwargs): try: raise type(*args, **kwargs) except type: return sys.exc_info() class TestMatchesExceptionInstanceInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesException(ValueError("foo")) error_foo = make_error(ValueError, 'foo') error_bar = make_error(ValueError, 'bar') error_base_foo = make_error(Exception, 'foo') matches_matches = [error_foo] matches_mismatches = [error_bar, error_base_foo] str_examples = [ ("MatchesException(Exception('foo',))", MatchesException(Exception('foo'))) ] describe_examples = [ ("%r is not a %r" % (Exception, ValueError), error_base_foo, MatchesException(ValueError("foo"))), ("ValueError('bar',) has different arguments to ValueError('foo',).", error_bar, MatchesException(ValueError("foo"))), ] class TestMatchesExceptionTypeInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesException(ValueError) error_foo = make_error(ValueError, 'foo') error_sub = make_error(UnicodeError, 'bar') error_base_foo = make_error(Exception, 
'foo') matches_matches = [error_foo, error_sub] matches_mismatches = [error_base_foo] str_examples = [ ("MatchesException(%r)" % Exception, MatchesException(Exception)) ] describe_examples = [ ("%r is not a %r" % (Exception, ValueError), error_base_foo, MatchesException(ValueError)), ] class TestMatchesExceptionTypeReInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesException(ValueError, 'fo.') error_foo = make_error(ValueError, 'foo') error_sub = make_error(UnicodeError, 'foo') error_bar = make_error(ValueError, 'bar') matches_matches = [error_foo, error_sub] matches_mismatches = [error_bar] str_examples = [ ("MatchesException(%r)" % Exception, MatchesException(Exception, 'fo.')) ] describe_examples = [ ("'bar' does not match /fo./", error_bar, MatchesException(ValueError, "fo.")), ] class TestMatchesExceptionTypeMatcherInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesException( ValueError, AfterPreprocessing(str, Equals('foo'))) error_foo = make_error(ValueError, 'foo') error_sub = make_error(UnicodeError, 'foo') error_bar = make_error(ValueError, 'bar') matches_matches = [error_foo, error_sub] matches_mismatches = [error_bar] str_examples = [ ("MatchesException(%r)" % Exception, MatchesException(Exception, Equals('foo'))) ] describe_examples = [ ("5 != %r" % (error_bar[1],), error_bar, MatchesException(ValueError, Equals(5))), ] class TestNotInterface(TestCase, TestMatchersInterface): matches_matcher = Not(Equals(1)) matches_matches = [2] matches_mismatches = [1] str_examples = [ ("Not(Equals(1))", Not(Equals(1))), ("Not(Equals('1'))", Not(Equals('1')))] describe_examples = [('1 matches Equals(1)', 1, Not(Equals(1)))] class TestMatchersAnyInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesAny(DocTestMatches("1"), DocTestMatches("2")) matches_matches = ["1", "2"] matches_mismatches = ["3"] str_examples = [( "MatchesAny(DocTestMatches('1\\n'), DocTestMatches('2\\n'))", MatchesAny(DocTestMatches("1"), DocTestMatches("2"))), ] describe_examples = [("""Differences: [ Expected: 1 Got: 3 Expected: 2 Got: 3 ]""", "3", MatchesAny(DocTestMatches("1"), DocTestMatches("2")))] class TestMatchesAllInterface(TestCase, TestMatchersInterface): matches_matcher = MatchesAll(NotEquals(1), NotEquals(2)) matches_matches = [3, 4] matches_mismatches = [1, 2] str_examples = [ ("MatchesAll(NotEquals(1), NotEquals(2))", MatchesAll(NotEquals(1), NotEquals(2)))] describe_examples = [ ("""Differences: [ 1 == 1 ]""", 1, MatchesAll(NotEquals(1), NotEquals(2))), ("1 == 1", 1, MatchesAll(NotEquals(2), NotEquals(1), Equals(3), first_only=True)), ] class TestKeysEqual(TestCase, TestMatchersInterface): matches_matcher = KeysEqual('foo', 'bar') matches_matches = [ {'foo': 0, 'bar': 1}, ] matches_mismatches = [ {}, {'foo': 0}, {'bar': 1}, {'foo': 0, 'bar': 1, 'baz': 2}, {'a': None, 'b': None, 'c': None}, ] str_examples = [ ("KeysEqual('foo', 'bar')", KeysEqual('foo', 'bar')), ] describe_examples = [ ("['bar', 'foo'] does not match {'baz': 2, 'foo': 0, 'bar': 1}: " "Keys not equal", {'foo': 0, 'bar': 1, 'baz': 2}, KeysEqual('foo', 'bar')), ] class TestAnnotate(TestCase, TestMatchersInterface): matches_matcher = Annotate("foo", Equals(1)) matches_matches = [1] matches_mismatches = [2] str_examples = [ ("Annotate('foo', Equals(1))", Annotate("foo", Equals(1)))] describe_examples = [("1 != 2: foo", 2, Annotate('foo', Equals(1)))] def test_if_message_no_message(self): # Annotate.if_message returns the given matcher if there is no # message. 
matcher = Equals(1) not_annotated = Annotate.if_message('', matcher) self.assertIs(matcher, not_annotated) def test_if_message_given_message(self): # Annotate.if_message returns an annotated version of the matcher if a # message is provided. matcher = Equals(1) expected = Annotate('foo', matcher) annotated = Annotate.if_message('foo', matcher) self.assertThat( annotated, MatchesStructure.fromExample(expected, 'annotation', 'matcher')) class TestAnnotatedMismatch(TestCase): run_tests_with = FullStackRunTest def test_forwards_details(self): x = Mismatch('description', {'foo': 'bar'}) annotated = AnnotatedMismatch("annotation", x) self.assertEqual(x.get_details(), annotated.get_details()) class TestRaisesInterface(TestCase, TestMatchersInterface): matches_matcher = Raises() def boom(): raise Exception('foo') matches_matches = [boom] matches_mismatches = [lambda:None] # Tricky to get function objects to render constantly, and the interfaces # helper uses assertEqual rather than (for instance) DocTestMatches. str_examples = [] describe_examples = [] class TestRaisesExceptionMatcherInterface(TestCase, TestMatchersInterface): matches_matcher = Raises( exception_matcher=MatchesException(Exception('foo'))) def boom_bar(): raise Exception('bar') def boom_foo(): raise Exception('foo') matches_matches = [boom_foo] matches_mismatches = [lambda:None, boom_bar] # Tricky to get function objects to render constantly, and the interfaces # helper uses assertEqual rather than (for instance) DocTestMatches. str_examples = [] describe_examples = [] class TestRaisesBaseTypes(TestCase): run_tests_with = FullStackRunTest def raiser(self): raise KeyboardInterrupt('foo') def test_KeyboardInterrupt_matched(self): # When KeyboardInterrupt is matched, it is swallowed. matcher = Raises(MatchesException(KeyboardInterrupt)) self.assertThat(self.raiser, matcher) def test_KeyboardInterrupt_propogates(self): # The default 'it raised' propogates KeyboardInterrupt. match_keyb = Raises(MatchesException(KeyboardInterrupt)) def raise_keyb_from_match(): matcher = Raises() matcher.match(self.raiser) self.assertThat(raise_keyb_from_match, match_keyb) def test_KeyboardInterrupt_match_Exception_propogates(self): # If the raised exception isn't matched, and it is not a subclass of # Exception, it is propogated. match_keyb = Raises(MatchesException(KeyboardInterrupt)) def raise_keyb_from_match(): if sys.version_info > (2, 5): matcher = Raises(MatchesException(Exception)) else: # On Python 2.4 KeyboardInterrupt is a StandardError subclass # but should propogate from less generic exception matchers matcher = Raises(MatchesException(EnvironmentError)) matcher.match(self.raiser) self.assertThat(raise_keyb_from_match, match_keyb) class TestRaisesConvenience(TestCase): run_tests_with = FullStackRunTest def test_exc_type(self): self.assertThat(lambda: 1/0, raises(ZeroDivisionError)) def test_exc_value(self): e = RuntimeError("You lose!") def raiser(): raise e self.assertThat(raiser, raises(e)) class DoesNotStartWithTests(TestCase): run_tests_with = FullStackRunTest def test_describe(self): mismatch = DoesNotStartWith("fo", "bo") self.assertEqual("'fo' does not start with 'bo'.", mismatch.describe()) def test_describe_non_ascii_unicode(self): string = _u("A\xA7") suffix = _u("B\xA7") mismatch = DoesNotStartWith(string, suffix) self.assertEqual("%s does not start with %s." 
% ( text_repr(string), text_repr(suffix)), mismatch.describe()) def test_describe_non_ascii_bytes(self): string = _b("A\xA7") suffix = _b("B\xA7") mismatch = DoesNotStartWith(string, suffix) self.assertEqual("%r does not start with %r." % (string, suffix), mismatch.describe()) class StartsWithTests(TestCase): run_tests_with = FullStackRunTest def test_str(self): matcher = StartsWith("bar") self.assertEqual("StartsWith('bar')", str(matcher)) def test_str_with_bytes(self): b = _b("\xA7") matcher = StartsWith(b) self.assertEqual("StartsWith(%r)" % (b,), str(matcher)) def test_str_with_unicode(self): u = _u("\xA7") matcher = StartsWith(u) self.assertEqual("StartsWith(%r)" % (u,), str(matcher)) def test_match(self): matcher = StartsWith("bar") self.assertIs(None, matcher.match("barf")) def test_mismatch_returns_does_not_start_with(self): matcher = StartsWith("bar") self.assertIsInstance(matcher.match("foo"), DoesNotStartWith) def test_mismatch_sets_matchee(self): matcher = StartsWith("bar") mismatch = matcher.match("foo") self.assertEqual("foo", mismatch.matchee) def test_mismatch_sets_expected(self): matcher = StartsWith("bar") mismatch = matcher.match("foo") self.assertEqual("bar", mismatch.expected) class DoesNotEndWithTests(TestCase): run_tests_with = FullStackRunTest def test_describe(self): mismatch = DoesNotEndWith("fo", "bo") self.assertEqual("'fo' does not end with 'bo'.", mismatch.describe()) def test_describe_non_ascii_unicode(self): string = _u("A\xA7") suffix = _u("B\xA7") mismatch = DoesNotEndWith(string, suffix) self.assertEqual("%s does not end with %s." % ( text_repr(string), text_repr(suffix)), mismatch.describe()) def test_describe_non_ascii_bytes(self): string = _b("A\xA7") suffix = _b("B\xA7") mismatch = DoesNotEndWith(string, suffix) self.assertEqual("%r does not end with %r." 
% (string, suffix), mismatch.describe()) class EndsWithTests(TestCase): run_tests_with = FullStackRunTest def test_str(self): matcher = EndsWith("bar") self.assertEqual("EndsWith('bar')", str(matcher)) def test_str_with_bytes(self): b = _b("\xA7") matcher = EndsWith(b) self.assertEqual("EndsWith(%r)" % (b,), str(matcher)) def test_str_with_unicode(self): u = _u("\xA7") matcher = EndsWith(u) self.assertEqual("EndsWith(%r)" % (u,), str(matcher)) def test_match(self): matcher = EndsWith("arf") self.assertIs(None, matcher.match("barf")) def test_mismatch_returns_does_not_end_with(self): matcher = EndsWith("bar") self.assertIsInstance(matcher.match("foo"), DoesNotEndWith) def test_mismatch_sets_matchee(self): matcher = EndsWith("bar") mismatch = matcher.match("foo") self.assertEqual("foo", mismatch.matchee) def test_mismatch_sets_expected(self): matcher = EndsWith("bar") mismatch = matcher.match("foo") self.assertEqual("bar", mismatch.expected) def run_doctest(obj, name): p = doctest.DocTestParser() t = p.get_doctest( obj.__doc__, sys.modules[obj.__module__].__dict__, name, '', 0) r = doctest.DocTestRunner() output = StringIO() r.run(t, out=output.write) return r.failures, output.getvalue() class TestMatchesListwise(TestCase): run_tests_with = FullStackRunTest def test_docstring(self): failure_count, output = run_doctest( MatchesListwise, "MatchesListwise") if failure_count: self.fail("Doctest failed with %s" % output) class TestMatchesStructure(TestCase, TestMatchersInterface): class SimpleClass: def __init__(self, x, y): self.x = x self.y = y matches_matcher = MatchesStructure(x=Equals(1), y=Equals(2)) matches_matches = [SimpleClass(1, 2)] matches_mismatches = [ SimpleClass(2, 2), SimpleClass(1, 1), SimpleClass(3, 3), ] str_examples = [ ("MatchesStructure(x=Equals(1))", MatchesStructure(x=Equals(1))), ("MatchesStructure(y=Equals(2))", MatchesStructure(y=Equals(2))), ("MatchesStructure(x=Equals(1), y=Equals(2))", MatchesStructure(x=Equals(1), y=Equals(2))), ] describe_examples = [ ("""\ Differences: [ 3 != 1: x ]""", SimpleClass(1, 2), MatchesStructure(x=Equals(3), y=Equals(2))), ("""\ Differences: [ 3 != 2: y ]""", SimpleClass(1, 2), MatchesStructure(x=Equals(1), y=Equals(3))), ("""\ Differences: [ 0 != 1: x 0 != 2: y ]""", SimpleClass(1, 2), MatchesStructure(x=Equals(0), y=Equals(0))), ] def test_fromExample(self): self.assertThat( self.SimpleClass(1, 2), MatchesStructure.fromExample(self.SimpleClass(1, 3), 'x')) def test_byEquality(self): self.assertThat( self.SimpleClass(1, 2), MatchesStructure.byEquality(x=1)) def test_withStructure(self): self.assertThat( self.SimpleClass(1, 2), MatchesStructure.byMatcher(LessThan, x=2)) def test_update(self): self.assertThat( self.SimpleClass(1, 2), MatchesStructure(x=NotEquals(1)).update(x=Equals(1))) def test_update_none(self): self.assertThat( self.SimpleClass(1, 2), MatchesStructure(x=Equals(1), z=NotEquals(42)).update( z=None)) class TestMatchesRegex(TestCase, TestMatchersInterface): matches_matcher = MatchesRegex('a|b') matches_matches = ['a', 'b'] matches_mismatches = ['c'] str_examples = [ ("MatchesRegex('a|b')", MatchesRegex('a|b')), ("MatchesRegex('a|b', re.M)", MatchesRegex('a|b', re.M)), ("MatchesRegex('a|b', re.I|re.M)", MatchesRegex('a|b', re.I|re.M)), ("MatchesRegex(%r)" % (_b("\xA7"),), MatchesRegex(_b("\xA7"))), ("MatchesRegex(%r)" % (_u("\xA7"),), MatchesRegex(_u("\xA7"))), ] describe_examples = [ ("'c' does not match /a|b/", 'c', MatchesRegex('a|b')), ("'c' does not match /a\d/", 'c', MatchesRegex(r'a\d')), ("%r does not match 
/\\s+\\xa7/" % (_b('c'),), _b('c'), MatchesRegex(_b("\\s+\xA7"))), ("%r does not match /\\s+\\xa7/" % (_u('c'),), _u('c'), MatchesRegex(_u("\\s+\xA7"))), ] class TestMatchesSetwise(TestCase): run_tests_with = FullStackRunTest def assertMismatchWithDescriptionMatching(self, value, matcher, description_matcher): mismatch = matcher.match(value) if mismatch is None: self.fail("%s matched %s" % (matcher, value)) actual_description = mismatch.describe() self.assertThat( actual_description, Annotate( "%s matching %s" % (matcher, value), description_matcher)) def test_matches(self): self.assertIs( None, MatchesSetwise(Equals(1), Equals(2)).match([2, 1])) def test_mismatches(self): self.assertMismatchWithDescriptionMatching( [2, 3], MatchesSetwise(Equals(1), Equals(2)), MatchesRegex('.*There was 1 mismatch$', re.S)) def test_too_many_matchers(self): self.assertMismatchWithDescriptionMatching( [2, 3], MatchesSetwise(Equals(1), Equals(2), Equals(3)), Equals('There was 1 matcher left over: Equals(1)')) def test_too_many_values(self): self.assertMismatchWithDescriptionMatching( [1, 2, 3], MatchesSetwise(Equals(1), Equals(2)), Equals('There was 1 value left over: [3]')) def test_two_too_many_matchers(self): self.assertMismatchWithDescriptionMatching( [3], MatchesSetwise(Equals(1), Equals(2), Equals(3)), MatchesRegex( 'There were 2 matchers left over: Equals\([12]\), ' 'Equals\([12]\)')) def test_two_too_many_values(self): self.assertMismatchWithDescriptionMatching( [1, 2, 3, 4], MatchesSetwise(Equals(1), Equals(2)), MatchesRegex( 'There were 2 values left over: \[[34], [34]\]')) def test_mismatch_and_too_many_matchers(self): self.assertMismatchWithDescriptionMatching( [2, 3], MatchesSetwise(Equals(0), Equals(1), Equals(2)), MatchesRegex( '.*There was 1 mismatch and 1 extra matcher: Equals\([01]\)', re.S)) def test_mismatch_and_too_many_values(self): self.assertMismatchWithDescriptionMatching( [2, 3, 4], MatchesSetwise(Equals(1), Equals(2)), MatchesRegex( '.*There was 1 mismatch and 1 extra value: \[[34]\]', re.S)) def test_mismatch_and_two_too_many_matchers(self): self.assertMismatchWithDescriptionMatching( [3, 4], MatchesSetwise( Equals(0), Equals(1), Equals(2), Equals(3)), MatchesRegex( '.*There was 1 mismatch and 2 extra matchers: ' 'Equals\([012]\), Equals\([012]\)', re.S)) def test_mismatch_and_two_too_many_values(self): self.assertMismatchWithDescriptionMatching( [2, 3, 4, 5], MatchesSetwise(Equals(1), Equals(2)), MatchesRegex( '.*There was 1 mismatch and 2 extra values: \[[145], [145]\]', re.S)) class TestAfterPreprocessing(TestCase, TestMatchersInterface): def parity(x): return x % 2 matches_matcher = AfterPreprocessing(parity, Equals(1)) matches_matches = [3, 5] matches_mismatches = [2] str_examples = [ ("AfterPreprocessing(<function parity>, Equals(1))", AfterPreprocessing(parity, Equals(1))), ] describe_examples = [ ("1 != 0: after <function parity> on 2", 2, AfterPreprocessing(parity, Equals(1))), ("1 != 0", 2, AfterPreprocessing(parity, Equals(1), annotate=False)), ] class TestMismatchDecorator(TestCase): run_tests_with = FullStackRunTest def test_forwards_description(self): x = Mismatch("description", {'foo': 'bar'}) decorated = MismatchDecorator(x) self.assertEqual(x.describe(), decorated.describe()) def test_forwards_details(self): x = Mismatch("description", {'foo': 'bar'}) decorated = MismatchDecorator(x) self.assertEqual(x.get_details(), decorated.get_details()) def test_repr(self): x = Mismatch("description", {'foo': 'bar'}) decorated = MismatchDecorator(x) self.assertEqual( 
'<testtools.matchers.MismatchDecorator(%r)>' % (x,), repr(decorated)) class TestAllMatch(TestCase, TestMatchersInterface): matches_matcher = AllMatch(LessThan(10)) matches_matches = [ [9, 9, 9], (9, 9), iter([9, 9, 9, 9, 9]), ] matches_mismatches = [ [11, 9, 9], iter([9, 12, 9, 11]), ] str_examples = [ ("AllMatch(LessThan(12))", AllMatch(LessThan(12))), ] describe_examples = [ ('Differences: [\n' '10 is not > 11\n' '10 is not > 10\n' ']', [11, 9, 10], AllMatch(LessThan(10))), ] class PathHelpers(object): def mkdtemp(self): directory = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, directory) return directory def create_file(self, filename, contents=''): fp = open(filename, 'w') try: fp.write(contents) finally: fp.close() def touch(self, filename): return self.create_file(filename) class TestPathExists(TestCase, PathHelpers): def test_exists(self): tempdir = self.mkdtemp() self.assertThat(tempdir, PathExists()) def test_not_exists(self): doesntexist = os.path.join(self.mkdtemp(), 'doesntexist') mismatch = PathExists().match(doesntexist) self.assertThat( "%s does not exist." % doesntexist, Equals(mismatch.describe())) class TestDirExists(TestCase, PathHelpers): def test_exists(self): tempdir = self.mkdtemp() self.assertThat(tempdir, DirExists()) def test_not_exists(self): doesntexist = os.path.join(self.mkdtemp(), 'doesntexist') mismatch = DirExists().match(doesntexist) self.assertThat( PathExists().match(doesntexist).describe(), Equals(mismatch.describe())) def test_not_a_directory(self): filename = os.path.join(self.mkdtemp(), 'foo') self.touch(filename) mismatch = DirExists().match(filename) self.assertThat( "%s is not a directory." % filename, Equals(mismatch.describe())) class TestFileExists(TestCase, PathHelpers): def test_exists(self): tempdir = self.mkdtemp() filename = os.path.join(tempdir, 'filename') self.touch(filename) self.assertThat(filename, FileExists()) def test_not_exists(self): doesntexist = os.path.join(self.mkdtemp(), 'doesntexist') mismatch = FileExists().match(doesntexist) self.assertThat( PathExists().match(doesntexist).describe(), Equals(mismatch.describe())) def test_not_a_file(self): tempdir = self.mkdtemp() mismatch = FileExists().match(tempdir) self.assertThat( "%s is not a file." 
% tempdir, Equals(mismatch.describe())) class TestDirContains(TestCase, PathHelpers): def test_empty(self): tempdir = self.mkdtemp() self.assertThat(tempdir, DirContains([])) def test_not_exists(self): doesntexist = os.path.join(self.mkdtemp(), 'doesntexist') mismatch = DirContains([]).match(doesntexist) self.assertThat( PathExists().match(doesntexist).describe(), Equals(mismatch.describe())) def test_contains_files(self): tempdir = self.mkdtemp() self.touch(os.path.join(tempdir, 'foo')) self.touch(os.path.join(tempdir, 'bar')) self.assertThat(tempdir, DirContains(['bar', 'foo'])) def test_matcher(self): tempdir = self.mkdtemp() self.touch(os.path.join(tempdir, 'foo')) self.touch(os.path.join(tempdir, 'bar')) self.assertThat(tempdir, DirContains(matcher=Contains('bar'))) def test_neither_specified(self): self.assertRaises(AssertionError, DirContains) def test_both_specified(self): self.assertRaises( AssertionError, DirContains, filenames=[], matcher=Contains('a')) def test_does_not_contain_files(self): tempdir = self.mkdtemp() self.touch(os.path.join(tempdir, 'foo')) mismatch = DirContains(['bar', 'foo']).match(tempdir) self.assertThat( Equals(['bar', 'foo']).match(['foo']).describe(), Equals(mismatch.describe())) class TestFileContains(TestCase, PathHelpers): def test_not_exists(self): doesntexist = os.path.join(self.mkdtemp(), 'doesntexist') mismatch = FileContains('').match(doesntexist) self.assertThat( PathExists().match(doesntexist).describe(), Equals(mismatch.describe())) def test_contains(self): tempdir = self.mkdtemp() filename = os.path.join(tempdir, 'foo') self.create_file(filename, 'Hello World!') self.assertThat(filename, FileContains('Hello World!')) def test_matcher(self): tempdir = self.mkdtemp() filename = os.path.join(tempdir, 'foo') self.create_file(filename, 'Hello World!') self.assertThat( filename, FileContains(matcher=DocTestMatches('Hello World!'))) def test_neither_specified(self): self.assertRaises(AssertionError, FileContains) def test_both_specified(self): self.assertRaises( AssertionError, FileContains, contents=[], matcher=Contains('a')) def test_does_not_contain(self): tempdir = self.mkdtemp() filename = os.path.join(tempdir, 'foo') self.create_file(filename, 'Goodbye Cruel World!') mismatch = FileContains('Hello World!').match(filename) self.assertThat( Equals('Hello World!').match('Goodbye Cruel World!').describe(), Equals(mismatch.describe())) def is_even(x): return x % 2 == 0 class TestMatchesPredicate(TestCase, TestMatchersInterface): matches_matcher = MatchesPredicate(is_even, "%s is not even") matches_matches = [2, 4, 6, 8] matches_mismatches = [3, 5, 7, 9] str_examples = [ ("MatchesPredicate(%r, %r)" % (is_even, "%s is not even"), MatchesPredicate(is_even, "%s is not even")), ] describe_examples = [ ('7 is not even', 7, MatchesPredicate(is_even, "%s is not even")), ] class TestTarballContains(TestCase, PathHelpers): def test_match(self): tempdir = self.mkdtemp() in_temp_dir = lambda x: os.path.join(tempdir, x) self.touch(in_temp_dir('a')) self.touch(in_temp_dir('b')) tarball = tarfile.open(in_temp_dir('foo.tar.gz'), 'w') tarball.add(in_temp_dir('a'), 'a') tarball.add(in_temp_dir('b'), 'b') tarball.close() self.assertThat( in_temp_dir('foo.tar.gz'), TarballContains(['b', 'a'])) def test_mismatch(self): tempdir = self.mkdtemp() in_temp_dir = lambda x: os.path.join(tempdir, x) self.touch(in_temp_dir('a')) self.touch(in_temp_dir('b')) tarball = tarfile.open(in_temp_dir('foo.tar.gz'), 'w') tarball.add(in_temp_dir('a'), 'a') tarball.add(in_temp_dir('b'), 
'b') tarball.close() mismatch = TarballContains(['d', 'c']).match(in_temp_dir('foo.tar.gz')) self.assertEqual( mismatch.describe(), Equals(['c', 'd']).match(['a', 'b']).describe()) class TestSamePath(TestCase, PathHelpers): def test_same_string(self): self.assertThat('foo', SamePath('foo')) def test_relative_and_absolute(self): path = 'foo' abspath = os.path.abspath(path) self.assertThat(path, SamePath(abspath)) self.assertThat(abspath, SamePath(path)) def test_real_path(self): symlink = getattr(os, 'symlink', None) skipIf(symlink is None, "No symlink support") tempdir = self.mkdtemp() source = os.path.join(tempdir, 'source') self.touch(source) target = os.path.join(tempdir, 'target') symlink(source, target) self.assertThat(source, SamePath(target)) self.assertThat(target, SamePath(source)) class TestHasPermissions(TestCase, PathHelpers): def test_match(self): tempdir = self.mkdtemp() filename = os.path.join(tempdir, 'filename') self.touch(filename) permissions = oct(os.stat(filename).st_mode)[-4:] self.assertThat(filename, HasPermissions(permissions)) def test_suite(): from unittest import TestLoader return TestLoader().loadTestsFromName(__name__)
/****************************************************************************** Copyright 2019-2020 <NAME> Licensed under the Apache License, Version 2.0 (the "License"), you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ******************************************************************************* FILE: Methane/Graphics/Vulkan/ShaderVK.cpp Vulkan implementation of the shader interface. ******************************************************************************/ #include "ShaderVK.h" #include "ProgramVK.h" #include "ContextVK.h" #include "DeviceVK.h" #include <Methane/Graphics/ContextBase.h> #include <Methane/Instrumentation.h> #include <spirv_cross.hpp> #include <spirv_hlsl.hpp> namespace Methane::Graphics { static vk::ShaderStageFlagBits ConvertShaderTypeToStageFlagBits(Shader::Type shader_type) { META_FUNCTION_TASK(); switch(shader_type) { case Shader::Type::All: return vk::ShaderStageFlagBits::eAll; case Shader::Type::Vertex: return vk::ShaderStageFlagBits::eVertex; case Shader::Type::Pixel: return vk::ShaderStageFlagBits::eFragment; default: META_UNEXPECTED_ARG_RETURN(shader_type, vk::ShaderStageFlagBits::eAll); } } static vk::VertexInputRate ConvertInputBufferLayoutStepTypeToVertexInputRate(Program::InputBufferLayout::StepType step_type) { META_FUNCTION_TASK(); using StepType = Program::InputBufferLayout::StepType; switch(step_type) { case StepType::PerVertex: return vk::VertexInputRate::eVertex; case StepType::PerInstance: return vk::VertexInputRate::eInstance; default: META_UNEXPECTED_ARG_RETURN(step_type, vk::VertexInputRate::eVertex); } } static vk::Format GetFloatVectorFormat(uint32_t vector_size) { META_FUNCTION_TASK(); switch (vector_size) { case 1: return vk::Format::eR32Sfloat; case 2: return vk::Format::eR32G32Sfloat; case 3: return vk::Format::eR32G32B32Sfloat; case 4: return vk::Format::eR32G32B32A32Sfloat; default: META_UNEXPECTED_ARG_RETURN(vector_size, vk::Format::eUndefined); } } static vk::Format GetSignedIntegerVectorFormat(uint32_t vector_size) { META_FUNCTION_TASK(); switch (vector_size) { case 1: return vk::Format::eR32Sint; case 2: return vk::Format::eR32G32Sint; case 3: return vk::Format::eR32G32B32Sint; case 4: return vk::Format::eR32G32B32A32Sint; default: META_UNEXPECTED_ARG_RETURN(vector_size, vk::Format::eUndefined); } } static vk::Format GetUnsignedIntegerVectorFormat(uint32_t vector_size) { META_FUNCTION_TASK(); switch (vector_size) { case 1: return vk::Format::eR32Uint; case 2: return vk::Format::eR32G32Uint; case 3: return vk::Format::eR32G32B32Uint; case 4: return vk::Format::eR32G32B32A32Uint; default: META_UNEXPECTED_ARG_RETURN(vector_size, vk::Format::eUndefined); } } static vk::Format GetVertexAttributeFormatFromSpirvType(const spirv_cross::SPIRType& attribute_type) { META_FUNCTION_TASK(); switch(attribute_type.basetype) { case spirv_cross::SPIRType::Float: return GetFloatVectorFormat(attribute_type.vecsize); case spirv_cross::SPIRType::UInt: return GetSignedIntegerVectorFormat(attribute_type.vecsize); case spirv_cross::SPIRType::Int: return GetUnsignedIntegerVectorFormat(attribute_type.vecsize); default: 
META_UNEXPECTED_ARG_RETURN(attribute_type.basetype, vk::Format::eUndefined); } } Ptr<Shader> Shader::Create(Shader::Type shader_type, const Context& context, const Settings& settings) { META_FUNCTION_TASK(); return std::make_shared<ShaderVK>(shader_type, dynamic_cast<const ContextBase&>(context), settings); } ShaderVK::ShaderVK(Shader::Type shader_type, const ContextBase& context, const Settings& settings) : ShaderBase(shader_type, context, settings) , m_byte_code_chunk_ptr(std::make_unique<Data::Chunk>(settings.data_provider.GetData(fmt::format("{}.spirv", GetCompiledEntryFunctionName(settings))))) , m_vk_unique_module(GetContextVK().GetDeviceVK().GetNativeDevice().createShaderModuleUnique( vk::ShaderModuleCreateInfo(vk::ShaderModuleCreateFlags{}, m_byte_code_chunk_ptr->GetDataSize(), m_byte_code_chunk_ptr->GetDataPtr<uint32_t>()))) { META_FUNCTION_TASK(); } ShaderBase::ArgumentBindings ShaderVK::GetArgumentBindings(const Program::ArgumentAccessors&) const { META_FUNCTION_TASK(); ArgumentBindings argument_bindings; return argument_bindings; } const spirv_cross::Compiler& ShaderVK::GetNativeCompiler() const { META_FUNCTION_TASK(); if (m_spirv_compiler_ptr) return *m_spirv_compiler_ptr; META_CHECK_ARG_NOT_NULL(m_byte_code_chunk_ptr); m_spirv_compiler_ptr = std::make_unique<spirv_cross::Compiler>(m_byte_code_chunk_ptr->GetDataPtr<uint32_t>(), m_byte_code_chunk_ptr->GetDataSize<uint32_t>()); return *m_spirv_compiler_ptr; } vk::PipelineShaderStageCreateInfo ShaderVK::GetNativeStageCreateInfo() const { META_FUNCTION_TASK(); return vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags{}, ConvertShaderTypeToStageFlagBits(GetType()), GetNativeModule(), GetSettings().entry_function.function_name.c_str() ); } vk::PipelineVertexInputStateCreateInfo ShaderVK::GetNativeVertexInputStateCreateInfo(const ProgramVK& program) { META_FUNCTION_TASK(); META_CHECK_ARG_EQUAL(GetType(), Shader::Type::Vertex); if (!m_vertex_input_initialized) InitializeVertexInputDescriptions(program); return vk::PipelineVertexInputStateCreateInfo( vk::PipelineVertexInputStateCreateFlags{}, m_vertex_input_binding_descriptions, m_vertex_input_attribute_descriptions ); } void ShaderVK::InitializeVertexInputDescriptions(const ProgramVK& program) { META_FUNCTION_TASK(); META_CHECK_ARG_EQUAL(GetType(), Shader::Type::Vertex); META_CHECK_ARG_FALSE_DESCR(m_vertex_input_initialized, "vertex input descriptions are already initialized"); const ProgramBase::InputBufferLayouts& input_buffer_layouts = program.GetSettings().input_buffer_layouts; m_vertex_input_binding_descriptions.reserve(input_buffer_layouts.size()); uint32_t input_buffer_index = 0U; for(const Program::InputBufferLayout& input_buffer_layout : input_buffer_layouts) { m_vertex_input_binding_descriptions.emplace_back( input_buffer_index, 0U, // stride is auto calculated by vertex attributes ConvertInputBufferLayoutStepTypeToVertexInputRate(input_buffer_layout.step_type) ); input_buffer_index++; } const spirv_cross::Compiler& spirv_compiler = GetNativeCompiler(); const spirv_cross::ShaderResources shader_resources = spirv_compiler.get_shader_resources(); m_vertex_input_attribute_descriptions.reserve(shader_resources.stage_inputs.size()); for(const spirv_cross::Resource& input_resource : shader_resources.stage_inputs) { const bool has_semantic = spirv_compiler.has_decoration(input_resource.id, spv::DecorationHlslSemanticGOOGLE); const bool has_location = spirv_compiler.has_decoration(input_resource.id, spv::DecorationLocation); META_CHECK_ARG_TRUE(has_semantic && 
has_location); const std::string& semantic_name = spirv_compiler.get_decoration_string(input_resource.id, spv::DecorationHlslSemanticGOOGLE); const spirv_cross::SPIRType& attribute_type = spirv_compiler.get_type(input_resource.base_type_id); const uint32_t buffer_index = GetProgramInputBufferIndexByArgumentSemantic(program, semantic_name); META_CHECK_ARG_LESS(buffer_index, m_vertex_input_binding_descriptions.size()); vk::VertexInputBindingDescription& input_binding_desc = m_vertex_input_binding_descriptions[buffer_index]; m_vertex_input_attribute_descriptions.emplace_back( spirv_compiler.get_decoration(input_resource.id, spv::DecorationLocation), buffer_index, GetVertexAttributeFormatFromSpirvType(attribute_type), input_binding_desc.stride ); // Tight packing of attributes in vertex buffer is assumed input_binding_desc.stride += attribute_type.vecsize * 4; } m_vertex_input_initialized = true; } const IContextVK& ShaderVK::GetContextVK() const noexcept { META_FUNCTION_TASK(); return static_cast<const IContextVK&>(GetContext()); } } // namespace Methane::Graphics
<gh_stars>100-1000 #include "il2c_private.h" ///////////////////////////////////////////////////////////// // System.ValueType System_String* System_ValueType_ToString(System_ValueType* this__) { return System_Object_ToString((System_Object*)this__); } int32_t System_ValueType_GetHashCode(System_ValueType* this__) { // TODO: //il2c_assert(0); return (int32_t)(intptr_t)this__; } bool System_ValueType_Equals__System_Object(System_ValueType* this__, System_Object* obj) { // TODO: //il2c_assert(0); return false; } ///////////////////////////////////////////////// // VTable and runtime type info declarations // NOTE: // This System_ValueType's vtable unreferenced from the System.ValueType type. // Because it's abstract class and can't instantiate directly. // The vtable uses from derived types, these are all derived value types. // And the IL2C uses the vtable when value type turn to boxed instance, // these instances have the vptr fields (vptr0 and other interfaces.) // The boxed value type has data fields next to vptr0. // // The boxed value type instance: // +----------------------+ // | IL2C_REF_HEADER | // +----------------------+ <-- pBoxed --------------------------- <-------------+----+ // | vptr0__ | <-- pVTable | sizeof(System_ValueType) | | // +----------------------+ ----------- | | // | : | ^ | | // | (value data) | | The value real data | | Adjustor offset into the vtable for the interface // | : | v | | (These vtables are generated by IL2C) // +----------------------+ --------------------------- <-----+ | // | vptr_IFoo__ | | (optional implemented interface vptr) | // +----------------------+ --------------------------- <----------+ // | vptr_IBar__ | | (optional implemented interface vptr) // +----------------------+ --------------------------- System_ValueType_VTABLE_DECL__ System_ValueType_VTABLE__ = { 0, // Adjustor offset (bool(*)(void*, System_Object*))System_ValueType_Equals__System_Object, (void(*)(void*))System_Object_Finalize, (int32_t(*)(void*))System_ValueType_GetHashCode, (System_String* (*)(void*))System_ValueType_ToString }; IL2C_RUNTIME_TYPE_ABSTRACT_BEGIN( System_ValueType, "System.ValueType", sizeof(System_ValueType), System_Object, 0, 0) IL2C_RUNTIME_TYPE_END();
1,199
14,668
<gh_stars>1000+ // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/fullscreen_control/fullscreen_control_view.h" #include <memory> #include "base/callback.h" #include "cc/paint/paint_flags.h" #include "components/strings/grit/components_strings.h" #include "components/vector_icons/vector_icons.h" #include "third_party/skia/include/core/SkColor.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/metadata/metadata_header_macros.h" #include "ui/base/metadata/metadata_impl_macros.h" #include "ui/gfx/canvas.h" #include "ui/gfx/geometry/insets.h" #include "ui/gfx/geometry/point_f.h" #include "ui/gfx/geometry/size.h" #include "ui/gfx/paint_vector_icon.h" #include "ui/views/background.h" #include "ui/views/controls/button/button.h" #include "ui/views/controls/image_view.h" #include "ui/views/layout/fill_layout.h" #include "ui/views/vector_icons.h" namespace { // Partially-transparent background color. const SkColor kButtonBackgroundColor = SkColorSetARGB(0xcc, 0x28, 0x2c, 0x32); constexpr int kCloseIconSize = 24; class CloseFullscreenButton : public views::Button { public: METADATA_HEADER(CloseFullscreenButton); explicit CloseFullscreenButton(PressedCallback callback) : views::Button(std::move(callback)) { std::unique_ptr<views::ImageView> close_image_view = std::make_unique<views::ImageView>(); close_image_view->SetImage(gfx::CreateVectorIcon( views::kIcCloseIcon, kCloseIconSize, SK_ColorWHITE)); // Not focusable by default, only for accessibility. SetFocusBehavior(FocusBehavior::ACCESSIBLE_ONLY); SetAccessibleName(l10n_util::GetStringUTF16(IDS_EXIT_FULLSCREEN_MODE)); AddChildView(close_image_view.release()); SetLayoutManager(std::make_unique<views::FillLayout>()); } CloseFullscreenButton(const CloseFullscreenButton&) = delete; CloseFullscreenButton& operator=(const CloseFullscreenButton&) = delete; private: void PaintButtonContents(gfx::Canvas* canvas) override { // TODO(robliao): If we decide to move forward with this, use themes. cc::PaintFlags flags; flags.setAntiAlias(true); flags.setColor(kButtonBackgroundColor); flags.setStyle(cc::PaintFlags::kFill_Style); float radius = FullscreenControlView::kCircleButtonDiameter / 2.0f; canvas->DrawCircle(gfx::PointF(radius, radius), radius, flags); } }; BEGIN_METADATA(CloseFullscreenButton, views::Button) END_METADATA } // namespace FullscreenControlView::FullscreenControlView( views::Button::PressedCallback callback) { exit_fullscreen_button_ = AddChildView( std::make_unique<CloseFullscreenButton>(std::move(callback))); SetLayoutManager(std::make_unique<views::FillLayout>()); exit_fullscreen_button_->SetPreferredSize( gfx::Size(kCircleButtonDiameter, kCircleButtonDiameter)); } FullscreenControlView::~FullscreenControlView() = default; BEGIN_METADATA(FullscreenControlView, views::View) END_METADATA
1,078
13,249
<reponame>WilSenwish/zipkin<gh_stars>1000+ /* * Copyright 2015-2020 The OpenZipkin Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package zipkin2.storage.cassandra; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.Row; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionStage; import zipkin2.Call; import zipkin2.DependencyLink; import zipkin2.internal.DependencyLinker; import zipkin2.storage.cassandra.internal.call.ResultSetFutureCall; import static zipkin2.storage.cassandra.Schema.TABLE_DEPENDENCY; final class SelectDependencies extends ResultSetFutureCall<List<DependencyLink>> { static final class Factory { final CqlSession session; final PreparedStatement preparedStatement; Factory(CqlSession session) { this.session = session; this.preparedStatement = session.prepare("SELECT parent,child,errors,calls" + " FROM " + TABLE_DEPENDENCY + " WHERE day IN ?"); } Call<List<DependencyLink>> create(long endTs, long lookback) { List<LocalDate> days = CassandraUtil.getDays(endTs, lookback); return new SelectDependencies(this, days); } } final Factory factory; final List<LocalDate> days; SelectDependencies(Factory factory, List<LocalDate> days) { this.factory = factory; this.days = days; } @Override protected CompletionStage<AsyncResultSet> newCompletionStage() { return factory.session.executeAsync(factory.preparedStatement.boundStatementBuilder() .setList(0, days, LocalDate.class).build()); } @Override public String toString() { return "SelectDependencies{days=" + days + "}"; } @Override public SelectDependencies clone() { return new SelectDependencies(factory, days); } @Override public List<DependencyLink> map(AsyncResultSet rs) { List<DependencyLink> unmerged = new ArrayList<>(); for (Row row : rs.currentPage()) { unmerged.add(DependencyLink.newBuilder() .parent(row.getString("parent")) .child(row.getString("child")) .errorCount(row.getLong("errors")) .callCount(row.getLong("calls")) .build()); } return DependencyLinker.merge(unmerged); } }
976
339
#include "RaZ/RaZ.hpp" using namespace std::literals; int main() { Raz::Application app; Raz::World& world = app.addWorld(2); auto& render = world.addSystem<Raz::RenderSystem>(1280, 720, "RaZ"); render.getGeometryProgram().setShaders(Raz::VertexShader(RAZ_ROOT + "shaders/common.vert"s), Raz::FragmentShader(RAZ_ROOT + "shaders/cook-torrance.frag"s)); Raz::Entity& camera = world.addEntityWithComponent<Raz::Transform>(Raz::Vec3f(0.f, 0.f, -5.f)); camera.addComponent<Raz::Camera>(render.getWindow().getWidth(), render.getWindow().getHeight()); Raz::Entity& mesh = world.addEntityWithComponent<Raz::Transform>(); auto [meshData, meshRenderData] = Raz::ObjFormat::load(RAZ_ROOT + "assets/meshes/ball.obj"s); mesh.addComponent<Raz::Mesh>(std::move(meshData)); mesh.addComponent<Raz::MeshRenderer>(std::move(meshRenderData)); Raz::Entity& light = world.addEntityWithComponent<Raz::Transform>(); light.addComponent<Raz::Light>(Raz::LightType::DIRECTIONAL, // Type Raz::Vec3f(0.f, 0.f, 1.f), // Direction 1.f, // Energy Raz::Vec3f(1.f)); // Color (R/G/B) render.getWindow().addKeyCallback(Raz::Keyboard::ESCAPE, [&app] (float /* deltaTime */) noexcept { app.quit(); }); render.getWindow().setCloseCallback([&app] () noexcept { app.quit(); }); app.run(); return EXIT_SUCCESS; }
621
882
<reponame>gigliovale/h2o
import unittest, time, sys
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_import as h2i

class glm_same_parse(unittest.TestCase):
    def tearDown(self):
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        h2o.init(3)

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud(h2o.nodes)

    def test_same_parse_fvec(self):
        print "\nput and parse of same file, but both src_key and hex_key are the h2o defaults..always different"
        for trial in range (10):
            start = time.time()
            parseResult = h2i.import_parse(bucket='smalldata', path='logreg/prostate_long.csv.gz', schema='put')
            print "trial #", trial, "parse end on ", "prostate_long.csv.gz" , 'took', time.time() - start, 'seconds'
            h2o.check_sandbox_for_errors()

if __name__ == '__main__':
    h2o.unit_main()
435
343
<reponame>nzeh/syzygy // Copyright 2016 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "syzygy/refinery/analyzers/teb_analyzer.h" #include <winnt.h> #include <winternl.h> // For _TEB. #include <vector> #include "gtest/gtest.h" #include "syzygy/common/unittest_util.h" #include "syzygy/refinery/unittest_util.h" #include "syzygy/refinery/analyzers/analysis_runner.h" #include "syzygy/refinery/analyzers/analyzer_util.h" #include "syzygy/refinery/analyzers/memory_analyzer.h" #include "syzygy/refinery/analyzers/module_analyzer.h" #include "syzygy/refinery/process_state/process_state.h" #include "syzygy/refinery/process_state/process_state_util.h" #include "syzygy/refinery/symbols/symbol_provider.h" namespace refinery { namespace { bool AnalyzeMinidump(const base::FilePath& minidump_path, ProcessState* process_state) { minidump::FileMinidump minidump; if (!minidump.Open(minidump_path)) return false; AnalysisRunner runner; runner.AddAnalyzer( std::move(std::unique_ptr<Analyzer>(new refinery::MemoryAnalyzer()))); runner.AddAnalyzer( std::move(std::unique_ptr<Analyzer>(new refinery::ModuleAnalyzer()))); runner.AddAnalyzer( std::move(std::unique_ptr<Analyzer>(new refinery::TebAnalyzer()))); SimpleProcessAnalysis analysis(process_state); analysis.set_symbol_provider(new SymbolProvider()); return runner.Analyze(minidump, analysis) == Analyzer::ANALYSIS_COMPLETE; } class TebAnalyzerTest : public testing::Test { public: void SetUp() override { ASSERT_NO_FATAL_FAILURE(testing::Test::SetUp()); ASSERT_TRUE(scoped_symbol_path_.Setup()); } private: testing::ScopedSymbolPath scoped_symbol_path_; }; } // namespace TEST_F(TebAnalyzerTest, AnalyzeTeb) { testing::ScopedMinidump minidump; ASSERT_TRUE( minidump.GenerateMinidump(testing::ScopedMinidump::kMinidumpWithData)); ProcessState process_state; ASSERT_TRUE(AnalyzeMinidump(minidump.minidump_path(), &process_state)); TypedBlockLayerPtr typed_block_layer; ASSERT_TRUE(process_state.FindLayer(&typed_block_layer)); Address teb_addr = reinterpret_cast<Address>(NtCurrentTeb()); std::vector<TypedBlockRecordPtr> blocks; typed_block_layer->GetRecordsAt(teb_addr, &blocks); ASSERT_EQ(1u, blocks.size()); TypedBlockRecordPtr teb_block = blocks[0]; EXPECT_EQ("_TEB", teb_block->data().data_name()); // The winternl.h TEB declaration exposes a subset of the structure. EXPECT_LE(sizeof(_TEB), teb_block->range().size()); } } // namespace refinery
1,095
594
#include <Engine/Light/DiskLight.h>

#include <Basic/Sampler/BasicSampler.h>

using namespace Ubpa;

const rgbf DiskLight::Sample_L(const pointf3 & p, normalf & wi, float & distToLight, float & PD) const {
	if (p[1] <= 0) {
		PD = 0;
		return rgbf(0.f);
	}

	auto Xi = BasicSampler::UniformInDisk();
	pointf3 posOnLight(Xi[0] * radius, 0, Xi[1] * radius);

	const auto d = posOnLight - p;
	const float sqDist = d.norm2();

	distToLight = sqrt(sqDist);
	wi = (d / distToLight).cast_to<normalf>();

	float area = PI<float> * radius * radius;
	//float cosTheta = wi[1];
	PD = sqDist / (area * (-wi[1]));
	return Luminance();
}

float DiskLight::PDF(const pointf3 & p, const normalf & wi) const {
	if (p[1] <= 0 || wi[1] >= 0)
		return 0;

	float t = -p[1] / wi[1];
	auto pos = p + t * wi.cast_to<vecf3>();
	float r2 = radius * radius;
	if (pos.cast_to<vecf3>().norm2() >= r2)
		return 0;

	float dist2 = (p - pos).norm2();
	return dist2 / (PI<float> * r2 * (-wi[1]));
}
408
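The DiskLight entry above converts a uniform-area sample on the disk into a solid-angle PDF using pdf = dist^2 / (area * cosTheta). The sketch below restates that conversion with plain floats; the function name is hypothetical and this is not the Ubpa engine API, just the same formula in isolation.

#include <cmath>

// Solid-angle PDF of a point sampled uniformly (by area) on a disk light of the
// given radius, as seen from a shading point at squaredDistance, where cosTheta
// is the incident cosine at the light: pdf = dist^2 / (pi * r^2 * cosTheta).
inline float DiskAreaToSolidAnglePdf(float radius, float squaredDistance, float cosTheta)
{
    if (cosTheta <= 0.f)
        return 0.f; // the light only emits on its front side
    const float area = 3.14159265358979323846f * radius * radius;
    return squaredDistance / (area * cosTheta);
}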
594
<reponame>Skycoder42/QtAutoUpdater package de.skycoder42.qtautoupdater.core.plugin.qplaystore; import java.util.List; import android.content.Context; import android.content.ComponentName; import android.content.Intent; import android.content.IntentSender.SendIntentException; import android.content.pm.ResolveInfo; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.net.Uri; import android.app.Activity; import com.google.android.play.core.tasks.OnCompleteListener; import com.google.android.play.core.tasks.OnSuccessListener; import com.google.android.play.core.tasks.Task; import com.google.android.play.core.appupdate.AppUpdateInfo; import com.google.android.play.core.appupdate.AppUpdateManager; import com.google.android.play.core.install.InstallStateUpdatedListener; import com.google.android.play.core.install.InstallState; import com.google.android.play.core.install.model.UpdateAvailability; import com.google.android.play.core.install.model.AppUpdateType; import com.google.android.play.core.install.model.InstallStatus; class UpdateHelper implements InstallStateUpdatedListener { private String _id; private AppUpdateManager _manager; private native void reportCheckResult(AppUpdateInfo info); @Override public native void onStateUpdate(InstallState state); public UpdateHelper(String id, AppUpdateManager manager) { _id = id; _manager = manager; } public String id() { return _id; } public void startUpdateCheck() { _manager.getAppUpdateInfo().addOnCompleteListener(new OnCompleteListener<AppUpdateInfo>() { @Override public void onComplete(Task<AppUpdateInfo> task) { if (task.isSuccessful()) reportCheckResult(task.getResult()); else reportCheckResult(null); } }); } public void resumeStalledUpdate(final int requestCode, final Activity activity) { _manager.getAppUpdateInfo().addOnSuccessListener(new OnSuccessListener<AppUpdateInfo>() { @Override public void onSuccess(AppUpdateInfo info) { if (info.updateAvailability() == UpdateAvailability.DEVELOPER_TRIGGERED_UPDATE_IN_PROGRESS) triggerUpdate(requestCode, activity, info); } }); } public boolean triggerUpdate(int requestCode, Activity activity, AppUpdateInfo info) { try { _manager.startUpdateFlowForResult(info, AppUpdateType.IMMEDIATE, activity, requestCode); return true; } catch(SendIntentException e) { e.printStackTrace(); return false; } } // https://stackoverflow.com/a/28090925/3767076 public void openInPlay(Context context, AppUpdateInfo info) { // you can also use BuildConfig.APPLICATION_ID String appId = info.packageName(); Intent rateIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("market://details?id=" + appId)); boolean marketFound = false; // find all applications able to handle our rateIntent final List<ResolveInfo> otherApps = context.getPackageManager() .queryIntentActivities(rateIntent, 0); for (ResolveInfo otherApp: otherApps) { // look for Google Play application if (otherApp.activityInfo.applicationInfo.packageName .equals("com.android.vending")) { ActivityInfo otherAppActivity = otherApp.activityInfo; ComponentName componentName = new ComponentName( otherAppActivity.applicationInfo.packageName, otherAppActivity.name ); // make sure it does NOT open in the stack of your activity rateIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); // task reparenting if needed rateIntent.addFlags(Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED); // if the Google Play was already open in a search result // this make sure it still go to the app page you requested rateIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); 
// this make sure only the Google Play app is allowed to // intercept the intent rateIntent.setComponent(componentName); context.startActivity(rateIntent); marketFound = true; break; } } // if GP not present on device, open web browser if (!marketFound) { Intent webIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://play.google.com/store/apps/details?id="+appId)); context.startActivity(webIntent); } } public boolean startUpdate(int requestCode, Activity activity, AppUpdateInfo info) { try { _manager.registerListener(this); _manager.startUpdateFlowForResult(info, AppUpdateType.FLEXIBLE, activity, requestCode); return true; } catch(SendIntentException e) { _manager.unregisterListener(this); e.printStackTrace(); return false; } } public void completeUpdate() { _manager.completeUpdate(); } public void triggerRestart(Context context) { PackageManager packageManager = context.getPackageManager(); Intent intent = packageManager.getLaunchIntentForPackage(context.getPackageName()); ComponentName componentName = intent.getComponent(); Intent mainIntent = Intent.makeRestartActivityTask(componentName); context.startActivity(mainIntent); System.exit(0); } }
1,740
1,863
<filename>KaplaDemo/samples/sampleViewer3/IJGWin32/jdlossls.cpp /* * jdlossls.c * * Copyright (C) 1998, <NAME>. * This file is part of the Independent JPEG Group's software. * For conditions of distribution and use, see the accompanying README file. * * This file contains the control logic for the lossless JPEG decompressor. */ #include "stdafx.h" #define JPEG_INTERNALS //#include "jinclude.h" //#include "jpeglib.h" //#include "jlossls.h" #ifdef D_LOSSLESS_SUPPORTED /* * Compute output image dimensions and related values. */ METHODDEF(void) calc_output_dimensions (j_decompress_ptr cinfo) { /* Hardwire it to "no scaling" */ cinfo->output_width = cinfo->image_width; cinfo->output_height = cinfo->image_height; /* jdinput.c has already initialized codec_data_unit to 1, * and has computed unscaled downsampled_width and downsampled_height. */ } /* * Initialize for an input processing pass. */ METHODDEF(void) start_input_pass (j_decompress_ptr cinfo) { j_lossless_d_ptr losslsd = (j_lossless_d_ptr) cinfo->codec; (*losslsd->entropy_start_pass) (cinfo); (*losslsd->predict_start_pass) (cinfo); (*losslsd->scaler_start_pass) (cinfo); (*losslsd->diff_start_input_pass) (cinfo); } /* * Initialize the lossless decompression codec. * This is called only once, during master selection. */ GLOBAL(void) jinit_lossless_d_codec(j_decompress_ptr cinfo) { j_lossless_d_ptr losslsd; boolean use_c_buffer; /* Create subobject in permanent pool */ losslsd = (j_lossless_d_ptr) (*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT, SIZEOF(jpeg_lossless_d_codec)); cinfo->codec = (struct jpeg_d_codec *) losslsd; /* Initialize sub-modules */ /* Entropy decoding: either Huffman or arithmetic coding. */ if (cinfo->arith_code) { ERREXIT(cinfo, JERR_ARITH_NOTIMPL); } else { jinit_lhuff_decoder(cinfo); } /* Undifferencer */ jinit_undifferencer(cinfo); /* Scaler */ jinit_d_scaler(cinfo); use_c_buffer = cinfo->inputctl->has_multiple_scans || cinfo->buffered_image; jinit_d_diff_controller(cinfo, use_c_buffer); /* Initialize method pointers. * * Note: consume_data, start_output_pass and decompress_data are * assigned in jddiffct.c. */ losslsd->pub.calc_output_dimensions = calc_output_dimensions; losslsd->pub.start_input_pass = start_input_pass; } #endif /* D_LOSSLESS_SUPPORTED */
917
310
{ "name": "SyncMaster 2333HD", "description": "A 23 inch monitor/TV.", "url": "https://www.amazon.com/Samsung-SyncMaster-2333HD-Widescreen-Monitor/dp/B00213QXFA" }
66
2,059
/*******************************************************************
 * File automatically generated by rebuild_wrappers.py (v2.0.0.10) *
 *******************************************************************/
#ifndef __wrappedbz2DEFS_H_
#define __wrappedbz2DEFS_H_

#endif // __wrappedbz2DEFS_H_
79
335
<reponame>Safal08/Hacktoberfest-1
{
  "word": "Multinuclear",
  "definitions": [
    "Pathology and Biology= multinucleate.",
    "Chemistry= \"polynuclear\".",
    "Generally."
  ],
  "parts-of-speech": "Adjective"
}
110
4,822
<filename>server/src/test/java/org/opensearch/monitor/os/OsProbeTests.java /* * SPDX-License-Identifier: Apache-2.0 * * The OpenSearch Contributors require contributions made to * this file be licensed under the Apache-2.0 license or a * compatible open source license. */ /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * Modifications Copyright OpenSearch Contributors. See * GitHub history for details. */ package org.opensearch.monitor.os; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import java.io.IOException; import java.math.BigInteger; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.stream.Collectors; import org.apache.lucene.util.Constants; import org.opensearch.test.OpenSearchTestCase; public class OsProbeTests extends OpenSearchTestCase { public void testOsInfo() throws IOException { final int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors()); final long refreshInterval = randomBoolean() ? 
-1 : randomNonNegativeLong(); final String prettyName; if (Constants.LINUX) { prettyName = randomFrom("Fedora 28 (Workstation Edition)", "Linux", null); } else { prettyName = Constants.OS_NAME; } final OsProbe osProbe = new OsProbe() { @Override List<String> readOsRelease() throws IOException { assert Constants.LINUX : Constants.OS_NAME; if (prettyName != null) { final String quote = randomFrom("\"", "'", ""); final String space = randomFrom(" ", ""); final String prettyNameLine = String.format(Locale.ROOT, "PRETTY_NAME=%s%s%s%s", quote, prettyName, quote, space); return Arrays.asList("NAME=" + randomAlphaOfLength(16), prettyNameLine); } else { return Collections.singletonList("NAME=" + randomAlphaOfLength(16)); } } }; final OsInfo info = osProbe.osInfo(refreshInterval, allocatedProcessors); assertNotNull(info); assertThat(info.getRefreshInterval(), equalTo(refreshInterval)); assertThat(info.getName(), equalTo(Constants.OS_NAME)); if (Constants.LINUX) { if (prettyName != null) { assertThat(info.getPrettyName(), equalTo(prettyName)); } else { assertThat(info.getPrettyName(), equalTo(Constants.OS_NAME)); } } assertThat(info.getArch(), equalTo(Constants.OS_ARCH)); assertThat(info.getVersion(), equalTo(Constants.OS_VERSION)); assertThat(info.getAllocatedProcessors(), equalTo(allocatedProcessors)); assertThat(info.getAvailableProcessors(), equalTo(Runtime.getRuntime().availableProcessors())); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/66629") public void testOsStats() { final OsProbe osProbe = new OsProbe(); OsStats stats = osProbe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); assertThat( stats.getCpu().getPercent(), anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))) ); double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); } if (Constants.WINDOWS) { // load average is unavailable on Windows assertNull(loadAverage); } else if (Constants.LINUX) { // we should be able to get the load average assertNotNull(loadAverage); assertThat(loadAverage[0], greaterThanOrEqualTo((double) 0)); assertThat(loadAverage[1], greaterThanOrEqualTo((double) 0)); assertThat(loadAverage[2], greaterThanOrEqualTo((double) 0)); } else if (Constants.MAC_OS_X) { // one minute load average is available, but 10-minute and 15-minute load averages are not assertNotNull(loadAverage); assertThat(loadAverage[0], greaterThanOrEqualTo((double) 0)); assertThat(loadAverage[1], equalTo((double) -1)); assertThat(loadAverage[2], equalTo((double) -1)); } else { // unknown system, but the best case is that we have the one-minute load average if (loadAverage != null) { assertThat(loadAverage[0], anyOf(equalTo((double) -1), greaterThanOrEqualTo((double) 0))); assertThat(loadAverage[1], equalTo((double) -1)); assertThat(loadAverage[2], equalTo((double) -1)); } } assertNotNull(stats.getMem()); assertThat(stats.getMem().getTotal().getBytes(), greaterThan(0L)); assertThat(stats.getMem().getFree().getBytes(), greaterThan(0L)); assertThat(stats.getMem().getFreePercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); assertThat(stats.getMem().getUsed().getBytes(), greaterThan(0L)); assertThat(stats.getMem().getUsedPercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))); assertNotNull(stats.getSwap()); assertNotNull(stats.getSwap().getTotal()); long total = stats.getSwap().getTotal().getBytes(); if (total > 
0) { assertThat(stats.getSwap().getTotal().getBytes(), greaterThan(0L)); assertThat(stats.getSwap().getFree().getBytes(), greaterThan(0L)); assertThat(stats.getSwap().getUsed().getBytes(), greaterThanOrEqualTo(0L)); } else { // On platforms with no swap assertThat(stats.getSwap().getTotal().getBytes(), equalTo(0L)); assertThat(stats.getSwap().getFree().getBytes(), equalTo(0L)); assertThat(stats.getSwap().getUsed().getBytes(), equalTo(0L)); } if (Constants.LINUX) { if (stats.getCgroup() != null) { assertThat(stats.getCgroup().getCpuAcctControlGroup(), notNullValue()); assertThat(stats.getCgroup().getCpuAcctUsageNanos(), greaterThan(0L)); assertThat(stats.getCgroup().getCpuCfsQuotaMicros(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L))); assertThat(stats.getCgroup().getCpuCfsPeriodMicros(), greaterThanOrEqualTo(0L)); assertThat(stats.getCgroup().getCpuStat().getNumberOfElapsedPeriods(), greaterThanOrEqualTo(0L)); assertThat(stats.getCgroup().getCpuStat().getNumberOfTimesThrottled(), greaterThanOrEqualTo(0L)); assertThat(stats.getCgroup().getCpuStat().getTimeThrottledNanos(), greaterThanOrEqualTo(0L)); // These could be null if transported from a node running an older version, but shouldn't be null on the current node assertThat(stats.getCgroup().getMemoryControlGroup(), notNullValue()); assertThat(stats.getCgroup().getMemoryLimitInBytes(), notNullValue()); assertThat(new BigInteger(stats.getCgroup().getMemoryLimitInBytes()), greaterThan(BigInteger.ZERO)); assertThat(stats.getCgroup().getMemoryUsageInBytes(), notNullValue()); assertThat(new BigInteger(stats.getCgroup().getMemoryUsageInBytes()), greaterThan(BigInteger.ZERO)); } } else { assertNull(stats.getCgroup()); } } public void testGetSystemLoadAverage() { assumeTrue("test runs on Linux only", Constants.LINUX); final OsProbe probe = new OsProbe() { @Override String readProcLoadavg() { return "1.51 1.69 1.99 3/417 23251"; } }; final double[] systemLoadAverage = probe.getSystemLoadAverage(); assertNotNull(systemLoadAverage); assertThat(systemLoadAverage.length, equalTo(3)); // avoid silliness with representing doubles assertThat(systemLoadAverage[0], equalTo(Double.parseDouble("1.51"))); assertThat(systemLoadAverage[1], equalTo(Double.parseDouble("1.69"))); assertThat(systemLoadAverage[2], equalTo(Double.parseDouble("1.99"))); } public void testCgroupProbe() { assumeTrue("test runs on Linux only", Constants.LINUX); final boolean areCgroupStatsAvailable = randomBoolean(); final String hierarchy = randomAlphaOfLength(16); final OsProbe probe = buildStubOsProbe(areCgroupStatsAvailable, hierarchy); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); if (areCgroupStatsAvailable) { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); assertThat(cgroup.getCpuControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuCfsPeriodMicros(), equalTo(100000L)); assertThat(cgroup.getCpuCfsQuotaMicros(), equalTo(50000L)); assertThat(cgroup.getCpuStat().getNumberOfElapsedPeriods(), equalTo(17992L)); assertThat(cgroup.getCpuStat().getNumberOfTimesThrottled(), equalTo(1311L)); assertThat(cgroup.getCpuStat().getTimeThrottledNanos(), equalTo(139298645489L)); assertThat(cgroup.getMemoryLimitInBytes(), equalTo("18446744073709551615")); assertThat(cgroup.getMemoryUsageInBytes(), equalTo("4796416")); } else { assertNull(cgroup); } } public void testCgroupProbeWithMissingCpuAcct() { assumeTrue("test runs on Linux only", Constants.LINUX); final 
String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpuacct List<String> procSelfCgroupLines = getProcSelfGroupLines(hierarchy).stream() .map(line -> line.replaceFirst(",cpuacct", "")) .collect(Collectors.toList()); final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); assertNull(cgroup); } public void testCgroupProbeWithMissingCpu() { assumeTrue("test runs on Linux only", Constants.LINUX); final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpu List<String> procSelfCgroupLines = getProcSelfGroupLines(hierarchy).stream() .map(line -> line.replaceFirst(":cpu,", ":")) .collect(Collectors.toList()); final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); assertNull(cgroup); } public void testCgroupProbeWithMissingMemory() { assumeTrue("test runs on Linux only", Constants.LINUX); final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about memory List<String> procSelfCgroupLines = getProcSelfGroupLines(hierarchy).stream() .filter(line -> !line.contains(":memory:")) .collect(Collectors.toList()); final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); assertNull(cgroup); } private static List<String> getProcSelfGroupLines(String hierarchy) { return Arrays.asList( "10:freezer:/", "9:net_cls,net_prio:/", "8:pids:/", "7:blkio:/", "6:memory:/" + hierarchy, "5:devices:/user.slice", "4:hugetlb:/", "3:perf_event:/", "2:cpu,cpuacct,cpuset:/" + hierarchy, "1:name=systemd:/user.slice/user-1000.slice/session-2359.scope", "0::/cgroup2" ); } private static OsProbe buildStubOsProbe(final boolean areCgroupStatsAvailable, final String hierarchy) { List<String> procSelfCgroupLines = getProcSelfGroupLines(hierarchy); return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines); } /** * Builds a test instance of OsProbe. Methods that ordinarily read from the filesystem are overridden to return values based upon * the arguments to this method. * * @param areCgroupStatsAvailable whether or not cgroup data is available. Normally OsProbe establishes this for itself. * @param hierarchy a mock value used to generate a cgroup hierarchy. 
* @param procSelfCgroupLines the lines that will be used as the content of <code>/proc/self/cgroup</code> * @return a test instance */ private static OsProbe buildStubOsProbe( final boolean areCgroupStatsAvailable, final String hierarchy, List<String> procSelfCgroupLines ) { return new OsProbe() { @Override List<String> readProcSelfCgroup() { return procSelfCgroupLines; } @Override String readSysFsCgroupCpuAcctCpuAcctUsage(String controlGroup) { assertThat(controlGroup, equalTo("/" + hierarchy)); return "364869866063112"; } @Override String readSysFsCgroupCpuAcctCpuCfsPeriod(String controlGroup) { assertThat(controlGroup, equalTo("/" + hierarchy)); return "100000"; } @Override String readSysFsCgroupCpuAcctCpuAcctCfsQuota(String controlGroup) { assertThat(controlGroup, equalTo("/" + hierarchy)); return "50000"; } @Override List<String> readSysFsCgroupCpuAcctCpuStat(String controlGroup) { return Arrays.asList("nr_periods 17992", "nr_throttled 1311", "throttled_time 139298645489"); } @Override String readSysFsCgroupMemoryLimitInBytes(String controlGroup) { assertThat(controlGroup, equalTo("/" + hierarchy)); // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long return "18446744073709551615"; } @Override String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { assertThat(controlGroup, equalTo("/" + hierarchy)); return "4796416"; } @Override boolean areCgroupStatsAvailable() { return areCgroupStatsAvailable; } }; } }
6,622
763
package org.batfish.coordinator.resources;

import static com.google.common.base.MoreObjects.firstNonNull;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Objects;
import java.util.Set;
import org.batfish.referencelibrary.ServiceObjectGroup;

public class ServiceObjectGroupBean {

  /** The name of this service object group */
  public String name;

  /** The set of names of service objects or service object groups in this service object group */
  public Set<String> services;

  @JsonCreator
  private ServiceObjectGroupBean() {}

  public ServiceObjectGroupBean(ServiceObjectGroup group) {
    name = group.getName();
    services = ImmutableSet.copyOf(group.getServices());
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof ServiceObjectGroupBean)) {
      return false;
    }
    return Objects.equals(name, ((ServiceObjectGroupBean) o).name)
        && Objects.equals(services, ((ServiceObjectGroupBean) o).services);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, services);
  }

  public ServiceObjectGroup toServiceObjectGroup() {
    return new ServiceObjectGroup(
        name, ImmutableSortedSet.copyOf(firstNonNull(services, ImmutableSet.of())));
  }
}
419
14,668
<filename>chrome/browser/ui/media_router/media_router_ui_helper.cc
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/ui/media_router/media_router_ui_helper.h"

#include "base/time/time.h"
#include "extensions/browser/extension_registry.h"
#include "url/gurl.h"

namespace media_router {

namespace {

// The amount of time to wait for a response when creating a new route.
const int kCreateRouteTimeoutSeconds = 20;
const int kCreateRouteTimeoutSecondsForTab = 60;
const int kCreateRouteTimeoutSecondsForLocalFile = 60;
const int kCreateRouteTimeoutSecondsForDesktop = 120;

}  // namespace

std::string GetExtensionName(const GURL& gurl,
                             extensions::ExtensionRegistry* registry) {
  if (gurl.is_empty() || !registry)
    return std::string();

  const extensions::Extension* extension =
      registry->enabled_extensions().GetExtensionOrAppByURL(gurl);

  return extension ? extension->name() : std::string();
}

std::string GetHostFromURL(const GURL& gurl) {
  if (gurl.is_empty())
    return std::string();
  std::string host = gurl.host();
  if (base::StartsWith(host, "www.", base::CompareCase::INSENSITIVE_ASCII))
    host = host.substr(4);
  return host;
}

base::TimeDelta GetRouteRequestTimeout(MediaCastMode cast_mode) {
  switch (cast_mode) {
    case PRESENTATION:
      return base::Seconds(kCreateRouteTimeoutSeconds);
    case TAB_MIRROR:
      return base::Seconds(kCreateRouteTimeoutSecondsForTab);
    case DESKTOP_MIRROR:
      return base::Seconds(kCreateRouteTimeoutSecondsForDesktop);
    case LOCAL_FILE:
      return base::Seconds(kCreateRouteTimeoutSecondsForLocalFile);
    default:
      NOTREACHED();
      return base::TimeDelta();
  }
}

RouteParameters::RouteParameters() = default;

RouteParameters::RouteParameters(RouteParameters&& other) = default;

RouteParameters::~RouteParameters() = default;

RouteParameters& RouteParameters::operator=(RouteParameters&& other) = default;

}  // namespace media_router
714
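GetHostFromURL in the entry above trims a leading "www." before the host is shown in the Media Router UI. A self-contained sketch of that trimming using only std::string follows; the name StripWwwPrefix is made up for illustration, and unlike Chromium's base::StartsWith call the comparison here is case-sensitive for simplicity.

#include <string>

// Removes a leading "www." from a host name, mirroring GetHostFromURL above
// (case-sensitive here, whereas the original check is case-insensitive).
inline std::string StripWwwPrefix(const std::string& host)
{
    static const std::string kPrefix = "www.";
    if (host.compare(0, kPrefix.size(), kPrefix) == 0)
        return host.substr(kPrefix.size());
    return host;
}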
30,785
package jadx.plugins.input.java.data.attributes.types;

import java.util.ArrayList;
import java.util.List;

import jadx.plugins.input.java.data.attributes.IJavaAttribute;
import jadx.plugins.input.java.data.attributes.IJavaAttributeReader;
import jadx.plugins.input.java.data.attributes.types.data.RawBootstrapMethod;

public class JavaBootstrapMethodsAttr implements IJavaAttribute {

	private final List<RawBootstrapMethod> list;

	public JavaBootstrapMethodsAttr(List<RawBootstrapMethod> list) {
		this.list = list;
	}

	public List<RawBootstrapMethod> getList() {
		return list;
	}

	public static IJavaAttributeReader reader() {
		return (clsData, reader) -> {
			int len = reader.readU2();
			List<RawBootstrapMethod> list = new ArrayList<>(len);
			for (int i = 0; i < len; i++) {
				int methodHandleIdx = reader.readU2();
				int argsCount = reader.readU2();
				int[] args = new int[argsCount];
				for (int j = 0; j < argsCount; j++) {
					args[j] = reader.readU2();
				}
				list.add(new RawBootstrapMethod(methodHandleIdx, args));
			}
			return new JavaBootstrapMethodsAttr(list);
		};
	}
}
406