diff (string, 41-2.03M chars) | msg (string, 1-1.5k chars, nullable) | repo (string, 5-40 chars) | sha (string, 40 chars) | time (string, 20 chars) |
---|---|---|---|---|
mmm a / headers / common . h <nl> ppp b / headers / common . h <nl> using float32 = float ; <nl> using float64 = double ; <nl> using int32 = int ; <nl> using uint64 = unsigned long long ; <nl> + using uint8 = unsigned char ; <nl> + using uint16 = unsigned short ; <nl> <nl> # if defined ( TLANG_GPU ) <nl> # include < cuda_runtime . h > <nl> REGISTER_VEC ( float32 , 8 , __m256 ) ; <nl> REGISTER_VEC ( int32 , 8 , __m256i ) ; <nl> / / REGISTER_VEC ( uint32 , 8 , __m256u ) ; <nl> <nl> - template < typename T , int dim > <nl> - using vec = typename vec_helper < T , dim > : : type ; <nl> - <nl> - template < typename T , int dim > <nl> - inline vec < T , dim > load ( const void * ) ; <nl> + / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> <nl> template < typename T , int dim > <nl> - inline vec < T , dim > floor ( const vec < T , dim > & ) ; <nl> + using vec = typename vec_helper < T , dim > : : type ; <nl> <nl> using float32x8 = vec < float32 , 8 > ; <nl> + using int32x8 = vec < int32 , 8 > ; <nl> <nl> - template < > <nl> - inline float32x8 floor < float32 , 8 > ( const float32x8 & v ) { <nl> - return _mm256_floor_ps ( v ) ; <nl> - } ; <nl> + / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + <nl> + template < typename T , int dim > <nl> + inline vec < T , dim > load ( const void * ) ; <nl> <nl> template < > <nl> inline float32x8 load < float32 , 8 > ( const void * addr ) { <nl> return _mm256_load_ps ( ( float32 * ) addr ) ; <nl> - } ; <nl> + } <nl> <nl> template < > <nl> inline vec < int32 , 8 > load < int32 , 8 > ( const void * addr ) { <nl> return _mm256_load_si256 ( ( __m256i * ) addr ) ; <nl> + } <nl> + <nl> + / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + <nl> + template < typename T , int dim > <nl> + inline void store ( const vec < T , dim > & v , const void * ) ; <nl> + <nl> + template < > <nl> + inline void store < float32 , 8 > ( const float32x8 & v , const void * addr ) { <nl> + _mm256_store_ps ( ( float32 * ) addr , v ) ; <nl> + } <nl> + <nl> + template < > <nl> + inline void store < int32 , 8 > ( const int32x8 & v , const void * addr ) { <nl> + _mm256_store_si256 ( ( __m256i * ) addr , v ) ; <nl> + } <nl> + / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + <nl> + template < typename T , int dim > <nl> + inline vec < T , dim > floor ( const vec < T , dim > & ) ; <nl> + <nl> + template < > <nl> + inline float32x8 floor < float32 , 8 > ( const float32x8 & v ) { <nl> + return _mm256_floor_ps ( v ) ; <nl> } ; <nl> <nl> + / / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + <nl> template < typename T , int dim , int n > <nl> struct vvec { <nl> vec < T , dim > d [ n ] ; <nl> + <nl> + void store ( void * addr ) { <nl> + for ( int i = 0 ; i < n ; i + + ) { <nl> + store ( d [ i ] , ( uint8 * ) addr + i * sizeof ( vec < T , dim > ) ) ; <nl> + } <nl> + } <nl> } ; <nl> <nl> # define VVEC_BINARY_OP ( NAME , OP ) \ <nl> mmm a / lang / codegen . cpp <nl> ppp b / lang / codegen . 
cpp <nl> void CPUCodeGen : : codegen ( Program & prog , int group_size ) { <nl> <nl> FunctionType CPUCodeGen : : get ( Program & prog ) { <nl> auto group_size = prog . config . group_size ; <nl> - auto mode = CPUCodeGen : : Mode : : vv ; <nl> - / / auto mode = CPUCodeGen : : Mode : : intrinsics ; <nl> + / / auto mode = CPUCodeGen : : Mode : : vv ; <nl> + auto mode = CPUCodeGen : : Mode : : intrinsics ; <nl> auto simd_width = 8 ; <nl> this - > mode = mode ; <nl> this - > simd_width = simd_width ; <nl> mmm a / lang / codegen . h <nl> ppp b / lang / codegen . h <nl> class CPUCodeGen : public CodeGenBase { <nl> public : <nl> / / Create vectorized IR for the root node <nl> / / the vector width should be the final SIMD instruction width <nl> - std : : string get_vectorized_address ( Address addr , int extra_offset = 0 ) { <nl> + std : : string get_vectorized_address ( Address addr , <nl> + int loop_index , <nl> + int extra_offset ) { <nl> TC_ASSERT ( addr . buffer_id ! = - 1 ) ; <nl> auto buffer_name = <nl> fmt : : format ( " context . get_buffer < float32 > ( { : 02d } ) " , addr . buffer_id ) ; <nl> - auto stride = <nl> - addr . coeff_i * num_groups + <nl> - num_groups / addr . coeff_aosoa_group_size * addr . coeff_aosoa_stride ; <nl> + auto stride = addr . coeff_i * num_groups ; <nl> + if ( addr . coeff_aosoa_group_size ! = 0 ) { <nl> + stride + = <nl> + num_groups / addr . coeff_aosoa_group_size * addr . coeff_aosoa_stride ; <nl> + } <nl> auto offset = addr . coeff_const ; <nl> - return fmt : : format ( " & { } [ { } * n + { } * ( g + loop_index ) + { } + { } ] " , <nl> - buffer_name , addr . coeff_imax , stride , offset , <nl> + return fmt : : format ( " & { } [ { } * n + { } * ( b + { } ) + { } + { } ] " , buffer_name , <nl> + addr . coeff_imax , stride , loop_index , offset , <nl> extra_offset ) ; <nl> } <nl> <nl> mmm a / lang / codegen_intrinsics . cpp <nl> ppp b / lang / codegen_intrinsics . cpp <nl> void CPUCodeGen : : visit_intrinsics ( Expr & expr ) { <nl> / / TC_P ( expr - > id ) ; <nl> / / TC_P ( expr - > node_type_name ( ) ) ; <nl> auto vv_width = num_groups * expr - > group_size ( ) ; <nl> + TC_ASSERT ( vv_width % simd_width = = 0 ) ; <nl> TC_ASSERT ( expr - > is_vectorized ) ; <nl> TC_ASSERT ( expr - > members . size ( ) = = 0 | | <nl> ( int ) expr - > members . size ( ) = = group_size ) ; <nl> void CPUCodeGen : : visit_intrinsics ( Expr & expr ) { <nl> } <nl> } <nl> <nl> - if ( binary_ops . find ( expr - > type ) ! = binary_ops . end ( ) ) { <nl> - auto op = binary_ops [ expr - > type ] ; <nl> + if ( binary_ops_intrinsics . find ( expr - > type ) ! = binary_ops_intrinsics . end ( ) ) { <nl> + auto op = binary_ops_intrinsics [ expr - > type ] ; <nl> emit_code ( " auto { } = { } ( { } , { } ) ; " , expr - > var_name , op , <nl> expr - > ch [ 0 ] - > var_name , expr - > ch [ 1 ] - > var_name ) ; <nl> / / emit_code ( " { } . 
print ( ) ; " , expr - > var_name ) ; <nl> void CPUCodeGen : : visit_intrinsics ( Expr & expr ) { <nl> } <nl> } else if ( expr - > type = = NodeType : : load ) { <nl> emit_code ( " auto { } = load < { } , { } > ( { } _base , { } _offsets ) ; " , expr - > var_name , <nl> - expr - > group_size ( ) * num_groups , data_type_name ( expr - > data_type ) , <nl> - expr [ 0 ] - > var_name , expr [ 0 ] - > var_name ) ; <nl> + vv_width , data_type_name ( expr - > data_type ) , expr [ 0 ] - > var_name , <nl> + expr [ 0 ] - > var_name ) ; <nl> } else if ( expr - > type = = NodeType : : store ) { <nl> - emit_code ( " store ( { } , { } _base , { } _offsets ) ; " , expr - > ch [ 1 ] - > var_name , <nl> - expr - > ch [ 0 ] - > var_name , expr - > ch [ 0 ] - > var_name ) ; <nl> + / / TODO : analyze address here <nl> + auto addr = expr [ 0 ] [ 0 ] - > get_address ( ) ; <nl> + TC_ASSERT ( addr . coeff_aosoa_group_size = = 0 | | <nl> + addr . coeff_aosoa_group_size = = num_groups ) ; <nl> + emit_code ( " { } . store ( { } ) ; " , expr - > ch [ 1 ] - > var_name , <nl> + get_vectorized_address ( addr , 0 , 0 ) ) ; <nl> } else if ( expr - > type = = NodeType : : combine ) { <nl> / / do nothing <nl> } else if ( expr - > type = = NodeType : : imm ) { <nl> mmm a / lang / matmul . cpp <nl> ppp b / lang / matmul . cpp <nl> void run_matmatmul ( ) { <nl> } <nl> <nl> void initialize_benchmark ( ) { <nl> - CoreState : : set_trigger_gdb_when_crash ( true ) ; <nl> + / / CoreState : : set_trigger_gdb_when_crash ( true ) ; <nl> static bool initialized = false ; <nl> if ( initialized ) { <nl> return ; <nl> | . | taichi-dev/taichi | 5c0bd5054c72cfe2f7e7a14ed6f7162fe30cf9be | 2018-12-24T03:32:23Z |
mmm a / lib / Sema / CSFix . h <nl> ppp b / lib / Sema / CSFix . h <nl> enum class FixKind : uint8_t { <nl> / / / Swift version 5 . <nl> AutoClosureForwarding , <nl> <nl> - / / / Allow invalid pointer conversions for autoclosure result types as if the <nl> - / / / pointer type is a function parameter rather than an autoclosure result . <nl> - AllowAutoClosurePointerConversion , <nl> - <nl> / / / Remove ` ! ` or ` ? ` because base is not an optional type . <nl> RemoveUnwrap , <nl> <nl> class AutoClosureForwarding final : public ConstraintFix { <nl> ConstraintLocator * locator ) ; <nl> } ; <nl> <nl> + / / / Allow invalid pointer conversions for autoclosure result types as if the <nl> + / / / pointer type is a function parameter rather than an autoclosure result . <nl> class AllowAutoClosurePointerConversion final : public ContextualMismatch { <nl> AllowAutoClosurePointerConversion ( ConstraintSystem & cs , Type pointeeType , <nl> Type pointerType , ConstraintLocator * locator ) <nl> - : ContextualMismatch ( cs , FixKind : : AllowAutoClosurePointerConversion , <nl> - pointeeType , pointerType , locator ) { } <nl> + : ContextualMismatch ( cs , pointeeType , pointerType , locator ) { } <nl> <nl> public : <nl> std : : string getName ( ) const override { <nl> mmm a / lib / Sema / CSSimplify . cpp <nl> ppp b / lib / Sema / CSSimplify . cpp <nl> bool ConstraintSystem : : repairFailures ( <nl> case ConstraintLocator : : AutoclosureResult : { <nl> if ( repairByInsertingExplicitCall ( lhs , rhs ) ) <nl> return true ; <nl> + <nl> + auto result = matchTypes ( lhs , rhs , ConstraintKind : : ArgumentConversion , <nl> + TypeMatchFlags : : TMF_ApplyingFix , <nl> + locator . withPathElement ( ConstraintLocator : : FunctionArgument ) ) ; <nl> + <nl> + if ( result . isSuccess ( ) ) <nl> + conversionsOrFixes . push_back ( AllowAutoClosurePointerConversion : : create ( <nl> + * this , lhs , rhs , getConstraintLocator ( locator ) ) ) ; <nl> break ; <nl> } <nl> <nl> ConstraintSystem : : matchTypes ( Type type1 , Type type2 , ConstraintKind kind , <nl> if ( kind > = ConstraintKind : : Conversion ) { <nl> / / It is never legal to form an autoclosure that results in these <nl> / / implicit conversions to pointer types . <nl> - bool isAutoClosureArgument = false ; <nl> - if ( auto last = locator . last ( ) ) <nl> - if ( last - > getKind ( ) = = ConstraintLocator : : AutoclosureResult ) <nl> - isAutoClosureArgument = true ; <nl> + bool isAutoClosureArgument = locator . isForAutoclosureResult ( ) ; <nl> <nl> / / Pointer arguments can be converted from pointer - compatible types . <nl> if ( kind > = ConstraintKind : : ArgumentConversion ) { <nl> ConstraintSystem : : matchTypes ( Type type1 , Type type2 , ConstraintKind kind , <nl> } <nl> conversionsOrFixes . push_back ( <nl> ConversionRestrictionKind : : InoutToPointer ) ; <nl> - } else { <nl> - Type pointeeType = inoutType1 - > getObjectType ( ) ; <nl> - auto * fix = AllowAutoClosurePointerConversion : : create ( * this , <nl> - pointeeType , type2 , getConstraintLocator ( locator ) ) ; <nl> - conversionsOrFixes . push_back ( fix ) ; <nl> } <nl> } <nl> <nl> ConstraintSystem : : SolutionKind ConstraintSystem : : simplifyFixConstraint ( <nl> return matchTupleTypes ( matchingType , smaller , matchKind , subflags , locator ) ; <nl> } <nl> <nl> - case FixKind : : AllowAutoClosurePointerConversion : { <nl> - if ( recordFix ( fix ) ) <nl> - return SolutionKind : : Error ; <nl> - return matchTypes ( type1 , type2 , matchKind , subflags , <nl> - locator . 
withPathElement ( ConstraintLocator : : FunctionArgument ) ) ; <nl> - } <nl> - <nl> case FixKind : : InsertCall : <nl> case FixKind : : RemoveReturn : <nl> case FixKind : : AddConformance : <nl> mmm a / lib / Sema / ConstraintLocator . h <nl> ppp b / lib / Sema / ConstraintLocator . h <nl> class ConstraintLocatorBuilder { <nl> return ( getSummaryFlags ( ) & ConstraintLocator : : IsFunctionConversion ) ; <nl> } <nl> <nl> + bool isForAutoclosureResult ( ) const { <nl> + SmallVector < LocatorPathElt , 4 > path ; <nl> + getLocatorParts ( path ) ; <nl> + <nl> + auto last = std : : find_if ( <nl> + path . rbegin ( ) , path . rend ( ) , [ ] ( LocatorPathElt & elt ) - > bool { <nl> + return elt . getKind ( ) ! = ConstraintLocator : : OptionalPayload & & <nl> + elt . getKind ( ) ! = ConstraintLocator : : GenericArgument ; <nl> + } ) ; <nl> + <nl> + if ( last ! = path . rend ( ) ) <nl> + return last - > getKind ( ) = = ConstraintLocator : : AutoclosureResult ; <nl> + <nl> + return false ; <nl> + } <nl> + <nl> / / / Retrieve the base constraint locator , on which this builder ' s <nl> / / / path is based . <nl> ConstraintLocator * getBaseLocator ( ) const { <nl> mmm a / test / Constraints / invalid_implicit_conversions . swift <nl> ppp b / test / Constraints / invalid_implicit_conversions . swift <nl> func test ( <nl> _ ptr : UnsafePointer < Int > , <nl> _ ptrI8 : UnsafePointer < Int8 > , <nl> _ ptrU8 : UnsafePointer < UInt8 > , <nl> - _ ptrVoid : UnsafePointer < Void > / / expected - warning { { UnsafePointer < Void > has been replaced by UnsafeRawPointer } } <nl> + _ ptrVoid : UnsafePointer < Void > , / / expected - warning { { UnsafePointer < Void > has been replaced by UnsafeRawPointer } } <nl> + _ optPtr : UnsafePointer < Int > ? <nl> ) { <nl> var i : Int = 0 <nl> var a : [ Int ] = [ 0 ] <nl> + var b : [ Int ] ? = [ 0 ] <nl> let s = " string " <nl> <nl> takesAutoclosure ( rawPtr , & i ) / / expected - error { { cannot perform pointer conversion of value of type ' Int ' to autoclosure result type ' UnsafeRawPointer ' } } <nl> func test ( <nl> takesAutoclosure ( mutRawPtr , & a ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ' to autoclosure result type ' UnsafeMutableRawPointer ' } } <nl> takesAutoclosure ( mutPtr , & a ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ' to autoclosure result type ' UnsafeMutablePointer < Int > ' } } <nl> takesAutoclosure ( ptr , & a ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ' to autoclosure result type ' UnsafePointer < Int > ' } } <nl> + takesAutoclosure ( optPtr , & i ) / / expected - error { { cannot perform pointer conversion of value of type ' Int ' to autoclosure result type ' UnsafePointer < Int > ? 
' } } <nl> <nl> - takesAutoclosure ( rawPtr , a ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafeRawPointer , [ Int ] ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> - takesAutoclosure ( ptr , a ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafePointer < Int > , [ Int ] ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> + takesAutoclosure ( rawPtr , a ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ' to autoclosure result type ' UnsafeRawPointer ' } } <nl> + takesAutoclosure ( ptr , a ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ' to autoclosure result type ' UnsafePointer < Int > ' } } <nl> + takesAutoclosure ( optPtr , b ) / / expected - error { { cannot perform pointer conversion of value of type ' [ Int ] ? ' to autoclosure result type ' UnsafePointer < Int > ? ' } } <nl> <nl> - takesAutoclosure ( rawPtr , s ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafeRawPointer , String ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> - takesAutoclosure ( ptrI8 , s ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafePointer < Int8 > , String ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> - takesAutoclosure ( ptrU8 , s ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafePointer < UInt8 > , String ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> - takesAutoclosure ( ptrVoid , s ) / / expected - error { { cannot invoke ' takesAutoclosure ' with an argument list of type ' ( UnsafePointer < Void > , String ) ' } } <nl> - / / expected - note @ - 1 { { expected an argument list of type ' ( T , @ autoclosure ( ) throws - > T ) ' } } <nl> + takesAutoclosure ( rawPtr , s ) / / expected - error { { cannot perform pointer conversion of value of type ' String ' to autoclosure result type ' UnsafeRawPointer ' } } <nl> + takesAutoclosure ( ptrI8 , s ) / / expected - error { { cannot perform pointer conversion of value of type ' String ' to autoclosure result type ' UnsafePointer < Int8 > ' } } <nl> + takesAutoclosure ( ptrU8 , s ) / / expected - error { { cannot perform pointer conversion of value of type ' String ' to autoclosure result type ' UnsafePointer < UInt8 > ' } } <nl> + takesAutoclosure ( ptrVoid , s ) / / expected - error { { cannot perform pointer conversion of value of type ' String ' to autoclosure result type ' UnsafePointer < Void > ' } } <nl> } <nl> | Merge pull request from hborla / autoclosure - optional - ptr - conversion - diag | apple/swift | 0cdfda0eb9d990fc5fc54fdb96bf2345bfed5bd3 | 2019-08-20T17:58:33Z |
mmm a / tensorflow / core / BUILD <nl> ppp b / tensorflow / core / BUILD <nl> tf_gpu_kernel_library ( <nl> visibility = [ " / / visibility : public " ] , <nl> deps = [ <nl> " : cuda " , <nl> + " : framework " , <nl> " / / third_party / eigen3 " , <nl> ] , <nl> ) <nl> mmm a / tensorflow / stream_executor / BUILD <nl> ppp b / tensorflow / stream_executor / BUILD <nl> cc_library ( <nl> ) , <nl> hdrs = glob ( [ <nl> " * . h " , <nl> + " cuda / * . h " , <nl> " lib / * . h " , <nl> " platform / * * / * . h " , <nl> ] ) , <nl> | TensorFlow : fix undeclared header inclusions in two BUILD files | tensorflow/tensorflow | a4cefca9f40ae6cfe366b6187d07e5199aa74895 | 2015-12-11T22:09:09Z |
mmm a / test / core / gprpp / examine_stack_test . cc <nl> ppp b / test / core / gprpp / examine_stack_test . cc <nl> TEST ( ExamineStackTest , AbseilStackProvider ) { <nl> grpc_core : : GetCurrentStackTrace ( ) ; <nl> EXPECT_NE ( stack_trace , absl : : nullopt ) ; <nl> gpr_log ( GPR_INFO , " stack_trace = % s " , stack_trace - > c_str ( ) ) ; <nl> - # ifndef NDEBUG <nl> + # if ! defined ( NDEBUG ) & & ! defined ( GPR_MUSL_LIBC_COMPAT ) <nl> EXPECT_TRUE ( stack_trace - > find ( " GetCurrentStackTrace " ) ! = - 1 ) ; <nl> # endif <nl> } <nl> mmm a / test / core / util / stack_tracer_test . cc <nl> ppp b / test / core / util / stack_tracer_test . cc <nl> <nl> TEST ( StackTracerTest , Basic ) { <nl> std : : string stack_trace = grpc_core : : testing : : GetCurrentStackTrace ( ) ; <nl> gpr_log ( GPR_INFO , " stack_trace = % s " , stack_trace . c_str ( ) ) ; <nl> - # ifndef NDEBUG <nl> + # if ! defined ( NDEBUG ) & & ! defined ( GPR_MUSL_LIBC_COMPAT ) <nl> EXPECT_TRUE ( stack_trace . find ( " Basic " ) ! = - 1 ) ; <nl> # endif <nl> } <nl> | Merge pull request from veblush / fix - stacktrace - test2 | grpc/grpc | 5fc1a7c8cc4afb13040bda5bf659d95348baf567 | 2020-11-04T20:16:39Z |
mmm a / Marlin / Conditionals . h <nl> ppp b / Marlin / Conditionals . h <nl> <nl> / * * <nl> * MIN_Z_HEIGHT_FOR_HOMING / Z_RAISE_BETWEEN_PROBINGS <nl> * / <nl> - # ifndef MIN_Z_HEIGHT_FOR_HOMING <nl> - # ifndef Z_RAISE_BETWEEN_PROBINGS <nl> - # define MIN_Z_HEIGHT_FOR_HOMING 0 <nl> - # else <nl> - # define MIN_Z_HEIGHT_FOR_HOMING Z_RAISE_BETWEEN_PROBINGS <nl> - # endif <nl> - # endif <nl> - # ifndef Z_RAISE_BETWEEN_PROBINGS <nl> - # define Z_RAISE_BETWEEN_PROBING MIN_Z_HEIGHT_FOR_HOMING <nl> - # endif <nl> + # ifndef MIN_Z_HEIGHT_FOR_HOMING <nl> + # ifndef Z_RAISE_BETWEEN_PROBINGS <nl> + # define MIN_Z_HEIGHT_FOR_HOMING 0 <nl> + # else <nl> + # define MIN_Z_HEIGHT_FOR_HOMING Z_RAISE_BETWEEN_PROBINGS <nl> + # endif <nl> + # endif <nl> + # ifndef Z_RAISE_BETWEEN_PROBINGS <nl> + # define Z_RAISE_BETWEEN_PROBING MIN_Z_HEIGHT_FOR_HOMING <nl> + # endif <nl> <nl> # endif / / CONFIGURATION_LCD <nl> # endif / / CONDITIONALS_H <nl> mmm a / Marlin / example_configurations / Felix / DUAL / Configuration . h <nl> ppp b / Marlin / example_configurations / Felix / DUAL / Configuration . h <nl> const bool Z_MIN_PROBE_ENDSTOP_INVERTING = false ; / / set to true to invert the l <nl> / / Print job timer <nl> / / <nl> / / Enable this option to automatically start and stop the <nl> - / / print job timer when M104 and M109 commands are received . <nl> + / / print job timer when M104 / M109 / M190 commands are received . <nl> + / / M104 ( extruder without wait ) - high temp = none , low temp = stop timer <nl> + / / M109 ( extruder with wait ) - high temp = start timer , low temp = stop timer <nl> + / / M190 ( bed with wait ) - high temp = start timer , low temp = none <nl> / / <nl> / / In all cases the timer can be started and stopped using <nl> / / the following commands : <nl> diff - - git a / Marlin / example_configurations / K8400 / Dual Heads / Configuration . h b / Marlin / example_configurations / K8400 / Dual Heads / Configuration . h <nl> mmm a / Marlin / example_configurations / K8400 / Dual Heads / Configuration . h <nl> ppp b / Marlin / example_configurations / K8400 / Dual Heads / Configuration . h <nl> const bool Z_MIN_PROBE_ENDSTOP_INVERTING = true ; / / set to true to invert the lo <nl> / / Print job timer <nl> / / <nl> / / Enable this option to automatically start and stop the <nl> - / / print job timer when M104 and M109 commands are received . <nl> + / / print job timer when M104 / M109 / M190 commands are received . <nl> + / / M104 ( extruder without wait ) - high temp = none , low temp = stop timer <nl> + / / M109 ( extruder with wait ) - high temp = start timer , low temp = stop timer <nl> + / / M190 ( bed with wait ) - high temp = start timer , low temp = none <nl> / / <nl> / / In all cases the timer can be started and stopped using <nl> / / the following commands : <nl> | Merge pull request from esenapaj / Follow - up - the - PR - - etc | MarlinFirmware/Marlin | 5b87deebb6b292e1ba2fbf1b1ba51b7479852f10 | 2016-07-10T23:59:24Z |
mmm a / include / swift / SIL / SILInstruction . h <nl> ppp b / include / swift / SIL / SILInstruction . h <nl> class KeyPathInst final <nl> <nl> / / / Represents an invocation of builtin functionality provided by the code <nl> / / / generator . <nl> - class BuiltinInst <nl> + class BuiltinInst final <nl> : public InstructionBase < SILInstructionKind : : BuiltinInst , <nl> - SingleValueInstruction > { <nl> + SingleValueInstruction > , <nl> + private llvm : : TrailingObjects < BuiltinInst , Operand , Substitution > { <nl> + friend TrailingObjects ; <nl> friend SILBuilder ; <nl> <nl> / / / The name of the builtin to invoke . <nl> Identifier Name ; <nl> - <nl> - / / / The number of tail - allocated substitutions , allocated after the operand <nl> - / / / list ' s tail allocation . <nl> - unsigned NumSubstitutions ; <nl> - <nl> - / / / The value arguments to the builtin . <nl> - TailAllocatedOperandList < 0 > Operands ; <nl> - <nl> - Substitution * getSubstitutionsStorage ( ) { <nl> - return reinterpret_cast < Substitution * > ( Operands . asArray ( ) . end ( ) ) ; <nl> + <nl> + size_t numTrailingObjects ( OverloadToken < Operand > ) const { <nl> + return SILInstruction : : Bits . BuiltinInst . NumOperands ; <nl> } <nl> - const Substitution * getSubstitutionsStorage ( ) const { <nl> - return reinterpret_cast < const Substitution * > ( Operands . asArray ( ) . end ( ) ) ; <nl> + size_t numTrailingObjects ( OverloadToken < Substitution > ) const { <nl> + return SILInstruction : : Bits . BuiltinInst . NumSubstitutions ; <nl> } <nl> <nl> BuiltinInst ( SILDebugLocation DebugLoc , Identifier Name , SILType ReturnType , <nl> class BuiltinInst <nl> ArrayRef < SILValue > Args , SILModule & M ) ; <nl> <nl> public : <nl> + ~ BuiltinInst ( ) { <nl> + for ( auto & op : getAllOperands ( ) ) { <nl> + op . ~ Operand ( ) ; <nl> + } <nl> + } <nl> + <nl> / / / Return the name of the builtin operation . <nl> Identifier getName ( ) const { return Name ; } <nl> void setName ( Identifier I ) { Name = I ; } <nl> class BuiltinInst <nl> / / / True if this builtin application has substitutions , which represent type <nl> / / / parameters to the builtin . <nl> bool hasSubstitutions ( ) const { <nl> - return NumSubstitutions ! = 0 ; <nl> + return SILInstruction : : Bits . BuiltinInst . NumSubstitutions ! = 0 ; <nl> } <nl> <nl> / / / Return the type parameters to the builtin . <nl> SubstitutionList getSubstitutions ( ) const { <nl> - return { getSubstitutionsStorage ( ) , NumSubstitutions } ; <nl> + return { getTrailingObjects < Substitution > ( ) , <nl> + SILInstruction : : Bits . BuiltinInst . NumSubstitutions } ; <nl> } <nl> / / / Return the type parameters to the builtin . <nl> MutableArrayRef < Substitution > getSubstitutions ( ) { <nl> - return { getSubstitutionsStorage ( ) , NumSubstitutions } ; <nl> + return { getTrailingObjects < Substitution > ( ) , <nl> + SILInstruction : : Bits . BuiltinInst . NumSubstitutions } ; <nl> } <nl> <nl> / / / The arguments to the builtin . <nl> ArrayRef < Operand > getAllOperands ( ) const { <nl> - return Operands . asArray ( ) ; <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . BuiltinInst . NumOperands } ; <nl> } <nl> / / / The arguments to the builtin . <nl> MutableArrayRef < Operand > getAllOperands ( ) { <nl> - return Operands . asArray ( ) ; <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . BuiltinInst . NumOperands } ; <nl> } <nl> / / / The arguments to the builtin . 
<nl> OperandValueArrayRef getArguments ( ) const { <nl> - return Operands . asValueArray ( ) ; <nl> + return OperandValueArrayRef ( getAllOperands ( ) ) ; <nl> } <nl> } ; <nl> <nl> class StringLiteralInst final <nl> } ; <nl> <nl> private : <nl> - unsigned Length ; <nl> - Encoding TheEncoding ; <nl> - <nl> StringLiteralInst ( SILDebugLocation DebugLoc , StringRef text , <nl> Encoding encoding , SILType ty ) ; <nl> <nl> class StringLiteralInst final <nl> public : <nl> / / / getValue - Return the string data for the literal , in UTF - 8 . <nl> StringRef getValue ( ) const { <nl> - return { getTrailingObjects < char > ( ) , Length } ; <nl> + return { getTrailingObjects < char > ( ) , <nl> + SILInstruction : : Bits . StringLiteralInst . Length } ; <nl> } <nl> <nl> / / / getEncoding - Return the desired encoding of the text . <nl> - Encoding getEncoding ( ) const { return TheEncoding ; } <nl> + Encoding getEncoding ( ) const { <nl> + return Encoding ( SILInstruction : : Bits . StringLiteralInst . TheEncoding ) ; <nl> + } <nl> <nl> / / / getCodeUnitCount - Return encoding - based length of the string <nl> / / / literal in code units . <nl> class ConstStringLiteralInst final <nl> } ; <nl> <nl> private : <nl> - unsigned Length ; <nl> - Encoding TheEncoding ; <nl> - <nl> ConstStringLiteralInst ( SILDebugLocation DebugLoc , StringRef text , <nl> Encoding encoding , SILType ty ) ; <nl> <nl> class ConstStringLiteralInst final <nl> <nl> public : <nl> / / / getValue - Return the string data for the literal , in UTF - 8 . <nl> - StringRef getValue ( ) const { return { getTrailingObjects < char > ( ) , Length } ; } <nl> + StringRef getValue ( ) const { <nl> + return { getTrailingObjects < char > ( ) , <nl> + SILInstruction : : Bits . ConstStringLiteralInst . Length } ; <nl> + } <nl> <nl> / / / getEncoding - Return the desired encoding of the text . <nl> - Encoding getEncoding ( ) const { return TheEncoding ; } <nl> + Encoding getEncoding ( ) const { <nl> + return Encoding ( SILInstruction : : Bits . ConstStringLiteralInst . TheEncoding ) ; <nl> + } <nl> <nl> / / / getCodeUnitCount - Return encoding - based length of the string <nl> / / / literal in code units . <nl> class UnconditionalCheckedCastValueInst final <nl> } ; <nl> <nl> / / / StructInst - Represents a constructed loadable struct . <nl> - class StructInst <nl> + class StructInst final <nl> : public InstructionBase < SILInstructionKind : : StructInst , <nl> - SingleValueInstruction > { <nl> + SingleValueInstruction > , <nl> + private llvm : : TrailingObjects < StructInst , Operand > { <nl> + friend TrailingObjects ; <nl> friend SILBuilder ; <nl> <nl> - TailAllocatedOperandList < 0 > Operands ; <nl> - <nl> / / / Because of the storage requirements of StructInst , object <nl> / / / creation goes through ' create ( ) ' . <nl> StructInst ( SILDebugLocation DebugLoc , SILType Ty , <nl> class StructInst <nl> ArrayRef < SILValue > Elements , SILModule & M ) ; <nl> <nl> public : <nl> + ~ StructInst ( ) { <nl> + for ( auto & op : getAllOperands ( ) ) { <nl> + op . ~ Operand ( ) ; <nl> + } <nl> + } <nl> + <nl> / / / The elements referenced by this StructInst . <nl> MutableArrayRef < Operand > getElementOperands ( ) { <nl> - return Operands . getDynamicAsArray ( ) ; <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . StructInst . NumOperands } ; <nl> } <nl> <nl> / / / The elements referenced by this StructInst . <nl> OperandValueArrayRef getElements ( ) const { <nl> - return Operands . 
getDynamicValuesAsArray ( ) ; <nl> + return OperandValueArrayRef ( { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . StructInst . NumOperands } ) ; <nl> } <nl> <nl> - ArrayRef < Operand > getAllOperands ( ) const { return Operands . asArray ( ) ; } <nl> - MutableArrayRef < Operand > getAllOperands ( ) { return Operands . asArray ( ) ; } <nl> + ArrayRef < Operand > getAllOperands ( ) const { <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . StructInst . NumOperands } ; <nl> + } <nl> + MutableArrayRef < Operand > getAllOperands ( ) { <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . StructInst . NumOperands } ; <nl> + } <nl> <nl> SILValue getFieldValue ( const VarDecl * V ) const { <nl> return getOperandForField ( V ) - > get ( ) ; <nl> class ObjectInst <nl> <nl> <nl> / / / TupleInst - Represents a constructed loadable tuple . <nl> - class TupleInst <nl> + class TupleInst final <nl> : public InstructionBase < SILInstructionKind : : TupleInst , <nl> - SingleValueInstruction > { <nl> + SingleValueInstruction > , <nl> + private llvm : : TrailingObjects < TupleInst , Operand > { <nl> + friend TrailingObjects ; <nl> friend SILBuilder ; <nl> <nl> - TailAllocatedOperandList < 0 > Operands ; <nl> - <nl> / / / Because of the storage requirements of TupleInst , object <nl> / / / creation goes through ' create ( ) ' . <nl> TupleInst ( SILDebugLocation DebugLoc , SILType Ty , <nl> class TupleInst <nl> ArrayRef < SILValue > Elements , SILModule & M ) ; <nl> <nl> public : <nl> + ~ TupleInst ( ) { <nl> + for ( auto & op : getAllOperands ( ) ) { <nl> + op . ~ Operand ( ) ; <nl> + } <nl> + } <nl> + <nl> / / / The elements referenced by this TupleInst . <nl> MutableArrayRef < Operand > getElementOperands ( ) { <nl> - return Operands . getDynamicAsArray ( ) ; <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . TupleInst . NumOperands } ; <nl> } <nl> <nl> / / / The elements referenced by this TupleInst . <nl> OperandValueArrayRef getElements ( ) const { <nl> - return Operands . getDynamicValuesAsArray ( ) ; <nl> + return OperandValueArrayRef ( { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . TupleInst . NumOperands } ) ; <nl> } <nl> <nl> / / / Return the i ' th value referenced by this TupleInst . <nl> class TupleInst <nl> return operand - > getOperandNumber ( ) ; <nl> } <nl> <nl> - ArrayRef < Operand > getAllOperands ( ) const { return Operands . asArray ( ) ; } <nl> - MutableArrayRef < Operand > getAllOperands ( ) { return Operands . asArray ( ) ; } <nl> + ArrayRef < Operand > getAllOperands ( ) const { <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . TupleInst . NumOperands } ; <nl> + } <nl> + MutableArrayRef < Operand > getAllOperands ( ) { <nl> + return { getTrailingObjects < Operand > ( ) , <nl> + SILInstruction : : Bits . TupleInst . NumOperands } ; <nl> + } <nl> <nl> TupleType * getTupleType ( ) const { <nl> return getType ( ) . getSwiftRValueType ( ) - > castTo < TupleType > ( ) ; <nl> mmm a / include / swift / SIL / SILNode . h <nl> ppp b / include / swift / SIL / SILNode . 
h <nl> class alignas ( 8 ) SILNode { <nl> SWIFT_INLINE_BITFIELD_EMPTY ( LiteralInst , SingleValueInstruction ) ; <nl> SWIFT_INLINE_BITFIELD_EMPTY ( AllocationInst , SingleValueInstruction ) ; <nl> <nl> + SWIFT_INLINE_BITFIELD_FULL ( StructInst , SingleValueInstruction , 32 , <nl> + : NumPadBits , <nl> + NumOperands : 32 <nl> + ) ; <nl> + <nl> + SWIFT_INLINE_BITFIELD_FULL ( TupleInst , SingleValueInstruction , 32 , <nl> + : NumPadBits , <nl> + NumOperands : 32 <nl> + ) ; <nl> + <nl> + SWIFT_INLINE_BITFIELD_FULL ( BuiltinInst , SingleValueInstruction , <nl> + 64 - NumSingleValueInstructionBits , <nl> + NumSubstitutions : 32 - NumSingleValueInstructionBits , <nl> + NumOperands : 32 <nl> + ) ; <nl> + <nl> SWIFT_INLINE_BITFIELD_FULL ( IntegerLiteralInst , LiteralInst , 32 , <nl> : NumPadBits , <nl> numBits : 32 <nl> class alignas ( 8 ) SILNode { <nl> numBits : 32 <nl> ) ; <nl> <nl> + SWIFT_INLINE_BITFIELD_FULL ( StringLiteralInst , LiteralInst , 2 + 32 , <nl> + TheEncoding : 2 , <nl> + : NumPadBits , <nl> + Length : 32 <nl> + ) ; <nl> + <nl> + SWIFT_INLINE_BITFIELD_FULL ( ConstStringLiteralInst , LiteralInst , 1 + 32 , <nl> + TheEncoding : 1 , <nl> + : NumPadBits , <nl> + Length : 32 <nl> + ) ; <nl> + <nl> SWIFT_INLINE_BITFIELD ( DeallocRefInst , DeallocationInst , 1 , <nl> OnStack : 1 <nl> ) ; <nl> class alignas ( 8 ) SILNode { <nl> SWIFT_INLINE_BITS ( BeginAccessInst ) ; <nl> SWIFT_INLINE_BITS ( EndAccessInst ) ; <nl> SWIFT_INLINE_BITS ( MetatypeInst ) ; <nl> + SWIFT_INLINE_BITS ( BuiltinInst ) ; <nl> + SWIFT_INLINE_BITS ( StringLiteralInst ) ; <nl> + SWIFT_INLINE_BITS ( ConstStringLiteralInst ) ; <nl> + SWIFT_INLINE_BITS ( StructInst ) ; <nl> + SWIFT_INLINE_BITS ( TupleInst ) ; <nl> } Bits ; <nl> <nl> private : <nl> mmm a / lib / SIL / SILInstructions . cpp <nl> ppp b / lib / SIL / SILInstructions . cpp <nl> BuiltinInst * BuiltinInst : : create ( SILDebugLocation Loc , Identifier Name , <nl> SubstitutionList Substitutions , <nl> ArrayRef < SILValue > Args , <nl> SILModule & M ) { <nl> - void * Buffer = M . allocateInst ( <nl> - sizeof ( BuiltinInst ) <nl> - + decltype ( Operands ) : : getExtraSize ( Args . size ( ) ) <nl> - + sizeof ( Substitution ) * Substitutions . size ( ) , <nl> - alignof ( BuiltinInst ) ) ; <nl> + auto Size = totalSizeToAlloc < swift : : Operand , Substitution > ( Args . size ( ) , <nl> + Substitutions . size ( ) ) ; <nl> + auto Buffer = M . allocateInst ( Size , alignof ( BuiltinInst ) ) ; <nl> return : : new ( Buffer ) BuiltinInst ( Loc , Name , ReturnType , Substitutions , <nl> Args ) ; <nl> } <nl> BuiltinInst * BuiltinInst : : create ( SILDebugLocation Loc , Identifier Name , <nl> BuiltinInst : : BuiltinInst ( SILDebugLocation Loc , Identifier Name , <nl> SILType ReturnType , SubstitutionList Subs , <nl> ArrayRef < SILValue > Args ) <nl> - : InstructionBase ( Loc , ReturnType ) , Name ( Name ) , <nl> - NumSubstitutions ( Subs . size ( ) ) , Operands ( this , Args ) { <nl> - static_assert ( IsTriviallyCopyable < Substitution > : : value , <nl> - " assuming Substitution is trivially copyable " ) ; <nl> - memcpy ( getSubstitutionsStorage ( ) , Subs . begin ( ) , <nl> - sizeof ( Substitution ) * Subs . size ( ) ) ; <nl> + : InstructionBase ( Loc , ReturnType ) , Name ( Name ) { <nl> + SILInstruction : : Bits . BuiltinInst . NumSubstitutions = Subs . size ( ) ; <nl> + assert ( SILInstruction : : Bits . BuiltinInst . NumSubstitutions = = Subs . size ( ) & & <nl> + " Truncation " ) ; <nl> + SILInstruction : : Bits . BuiltinInst . NumOperands = Args . 
size ( ) ; <nl> + Operand * dynamicSlot = getTrailingObjects < Operand > ( ) ; <nl> + for ( auto value : Args ) { <nl> + new ( dynamicSlot + + ) Operand ( this , value ) ; <nl> + } <nl> + std : : uninitialized_copy ( Subs . begin ( ) , Subs . end ( ) , <nl> + getTrailingObjects < Substitution > ( ) ) ; <nl> } <nl> <nl> InitBlockStorageHeaderInst * <nl> APFloat FloatLiteralInst : : getValue ( ) const { <nl> <nl> StringLiteralInst : : StringLiteralInst ( SILDebugLocation Loc , StringRef Text , <nl> Encoding encoding , SILType Ty ) <nl> - : InstructionBase ( Loc , Ty ) , Length ( Text . size ( ) ) , <nl> - TheEncoding ( encoding ) { <nl> + : InstructionBase ( Loc , Ty ) { <nl> + SILInstruction : : Bits . StringLiteralInst . TheEncoding = unsigned ( encoding ) ; <nl> + SILInstruction : : Bits . StringLiteralInst . Length = Text . size ( ) ; <nl> memcpy ( getTrailingObjects < char > ( ) , Text . data ( ) , Text . size ( ) ) ; <nl> } <nl> <nl> StringLiteralInst * StringLiteralInst : : create ( SILDebugLocation Loc , <nl> } <nl> <nl> uint64_t StringLiteralInst : : getCodeUnitCount ( ) { <nl> - if ( TheEncoding = = Encoding : : UTF16 ) <nl> + auto E = unsigned ( Encoding : : UTF16 ) ; <nl> + if ( SILInstruction : : Bits . StringLiteralInst . TheEncoding = = E ) <nl> return unicode : : getUTF16Length ( getValue ( ) ) ; <nl> - return Length ; <nl> + return SILInstruction : : Bits . StringLiteralInst . Length ; <nl> } <nl> <nl> ConstStringLiteralInst : : ConstStringLiteralInst ( SILDebugLocation Loc , <nl> StringRef Text , <nl> Encoding encoding , SILType Ty ) <nl> - : InstructionBase ( Loc , Ty ) , <nl> - Length ( Text . size ( ) ) , TheEncoding ( encoding ) { <nl> + : InstructionBase ( Loc , Ty ) { <nl> + SILInstruction : : Bits . ConstStringLiteralInst . TheEncoding = unsigned ( encoding ) ; <nl> + SILInstruction : : Bits . ConstStringLiteralInst . Length = Text . size ( ) ; <nl> memcpy ( getTrailingObjects < char > ( ) , Text . data ( ) , Text . size ( ) ) ; <nl> } <nl> <nl> ConstStringLiteralInst * ConstStringLiteralInst : : create ( SILDebugLocation Loc , <nl> } <nl> <nl> uint64_t ConstStringLiteralInst : : getCodeUnitCount ( ) { <nl> - if ( TheEncoding = = Encoding : : UTF16 ) <nl> + auto E = unsigned ( Encoding : : UTF16 ) ; <nl> + if ( SILInstruction : : Bits . ConstStringLiteralInst . TheEncoding = = E ) <nl> return unicode : : getUTF16Length ( getValue ( ) ) ; <nl> - return Length ; <nl> + return SILInstruction : : Bits . ConstStringLiteralInst . Length ; <nl> } <nl> <nl> StoreInst : : StoreInst ( <nl> UnconditionalCheckedCastAddrInst : : UnconditionalCheckedCastAddrInst ( <nl> <nl> StructInst * StructInst : : create ( SILDebugLocation Loc , SILType Ty , <nl> ArrayRef < SILValue > Elements , SILModule & M ) { <nl> - void * Buffer = M . allocateInst ( sizeof ( StructInst ) + <nl> - decltype ( Operands ) : : getExtraSize ( Elements . size ( ) ) , <nl> - alignof ( StructInst ) ) ; <nl> + auto Size = totalSizeToAlloc < swift : : Operand > ( Elements . size ( ) ) ; <nl> + auto Buffer = M . allocateInst ( Size , alignof ( StructInst ) ) ; <nl> return : : new ( Buffer ) StructInst ( Loc , Ty , Elements ) ; <nl> } <nl> <nl> StructInst : : StructInst ( SILDebugLocation Loc , SILType Ty , <nl> ArrayRef < SILValue > Elems ) <nl> - : InstructionBase ( Loc , Ty ) , Operands ( this , Elems ) { <nl> + : InstructionBase ( Loc , Ty ) { <nl> + SILInstruction : : Bits . StructInst . NumOperands = Elems . 
size ( ) ; <nl> + Operand * dynamicSlot = getTrailingObjects < Operand > ( ) ; <nl> + for ( auto value : Elems ) { <nl> + new ( dynamicSlot + + ) Operand ( this , value ) ; <nl> + } <nl> assert ( ! Ty . getStructOrBoundGenericStruct ( ) - > hasUnreferenceableStorage ( ) ) ; <nl> } <nl> <nl> ObjectInst : : ObjectInst ( SILDebugLocation Loc , SILType Ty , <nl> <nl> TupleInst * TupleInst : : create ( SILDebugLocation Loc , SILType Ty , <nl> ArrayRef < SILValue > Elements , SILModule & M ) { <nl> - void * Buffer = M . allocateInst ( sizeof ( TupleInst ) + <nl> - decltype ( Operands ) : : getExtraSize ( Elements . size ( ) ) , <nl> - alignof ( TupleInst ) ) ; <nl> + auto Size = totalSizeToAlloc < swift : : Operand > ( Elements . size ( ) ) ; <nl> + auto Buffer = M . allocateInst ( Size , alignof ( TupleInst ) ) ; <nl> return : : new ( Buffer ) TupleInst ( Loc , Ty , Elements ) ; <nl> } <nl> <nl> TupleInst : : TupleInst ( SILDebugLocation Loc , SILType Ty , <nl> ArrayRef < SILValue > Elems ) <nl> - : InstructionBase ( Loc , Ty ) , Operands ( this , Elems ) { } <nl> + : InstructionBase ( Loc , Ty ) { <nl> + SILInstruction : : Bits . TupleInst . NumOperands = Elems . size ( ) ; <nl> + Operand * dynamicSlot = getTrailingObjects < Operand > ( ) ; <nl> + for ( auto value : Elems ) { <nl> + new ( dynamicSlot + + ) Operand ( this , value ) ; <nl> + } <nl> + } <nl> <nl> MetatypeInst : : MetatypeInst ( SILDebugLocation Loc , SILType Metatype , <nl> ArrayRef < SILValue > TypeDependentOperands ) <nl> | Merge pull request from davezarzycki / nfc_misc_silnode_cleanup | apple/swift | 4a9eb4af413f3814eb4820c3acc0c2dbfc1c9680 | 2017-12-22T19:56:25Z |
mmm a / Documentation / Books / Cookbook / Monitoring / Collectd . md <nl> ppp b / Documentation / Books / Cookbook / Monitoring / Collectd . md <nl> For the RRD we will go pretty much with defaults : <nl> ` Collectd ` comes with a wide range of metric aggregation plug - ins . <nl> Many tools today use [ JSON ] ( http : / / json . org ) as data formatting grammar ; so does ArangoDB . <nl> <nl> - Therefore a plug - in offering to fetch JSON documents via HTTP is the perfect match to query ArangoDBs administrative statistics interface : <nl> + Therefore a plug - in offering to fetch JSON documents via HTTP is the perfect match to query ArangoDBs [ administrative Statistics interface ] ( . . / . . / HTTP / AdministrationAndMonitoring / index . html # read - the - statistics ) : <nl> <nl> # Load the plug - in : <nl> LoadPlugin curl_json <nl> mmm a / Documentation / Books / build . sh <nl> ppp b / Documentation / Books / build . sh <nl> function ppbook - check - html - link ( ) <nl> echo " $ { ALLBOOKS } " | tr " " " \ n " | sed - e ' s ; ^ ; / ; ' - e ' s ; $ ; / ; ' > / tmp / books . regex <nl> <nl> set + e <nl> - egrep - r ' \ [ . * \ ] \ ( . * \ ) ' " ppbooks / $ { NAME } " | \ <nl> + grep - r - E ' \ [ . * \ ] \ ( . * \ ) ' " ppbooks / $ { NAME } " | \ <nl> grep ' \ . md : ' | grep ' html ' | \ <nl> grep - v ' http : / / ' | \ <nl> grep - v ' https : / / ' | \ <nl> function ppbook - check - directory - link ( ) <nl> NAME = " $ 1 " <nl> echo " $ { STD_COLOR } # # # # # checking for invalid md links in $ { NAME } $ { RESET } " <nl> set + e <nl> - ERRORS = $ ( egrep - r ' \ [ . * \ ] \ ( . * \ ) ' " ppbooks / $ { NAME } " | \ <nl> + ERRORS = $ ( grep - r - E ' \ [ . * \ ] \ ( . * \ ) ' " ppbooks / $ { NAME } " | \ <nl> grep ' \ . md : ' | \ <nl> grep - v html | \ <nl> grep - v http : / / | \ <nl> function check - dangling - anchors ( ) <nl> <nl> echo " $ { STD_COLOR } # # # # # fetching anchors from generated http files $ { RESET } " <nl> grep - R " a href . * # " books / | \ <nl> - egrep - v " ( styles / header \ . js | / app \ . js | class = \ " navigation | https * : / / | href = \ " # \ " ) " | \ <nl> + grep - v - E " ( styles / header \ \ . js | / app \ \ . js | class = \ " navigation | https * : / / | href = \ " # \ " ) " | \ <nl> sed ' s ; \ ( . * \ . html \ ) : . * a href = " \ ( . * \ ) # \ ( . * \ ) " > . * < / a > . * ; \ 1 , \ 2 , \ 3 ; ' | grep - v " " > / tmp / anchorlist . txt <nl> <nl> echo " $ { STD_COLOR } # # # # # cross checking anchors $ { RESET } " <nl> function check - dangling - anchors ( ) <nl> if test " $ { NO } " - gt 0 ; then <nl> echo " $ { ERR_COLOR } " <nl> echo " $ { NO } Dangling anchors found ! " <nl> + echo " $ { WRN_COLOR } " <nl> + echo " $ { 1 } " <nl> echo " $ { RESET } " <nl> exit 1 <nl> fi <nl> function build - book ( ) <nl> RELEASE_DIRECTORY = devel <nl> else <nl> VERSION = " $ { newVersionNumber } " <nl> - RELEASE_DIRECTORY = $ ( sed " s ; \ . [ 0 - 9 ] * $ ; ; " < < < " $ { newVersionNumber } " ) <nl> + RELEASE_DIRECTORY = $ ( sed " s ; \ \ . [ 0 - 9 ] * $ ; ; " < < < " $ { newVersionNumber } " ) <nl> fi <nl> export VERSION <nl> <nl> function check - docublocks ( ) <nl> echo " $ { ERR_COLOR } " <nl> echo " Duplicate entry found in the source trees : " <nl> comm - 3 / tmp / inprog_raw . txt / tmp / inprog . txt <nl> + echo " $ { WRN_COLOR } " <nl> + echo " $ { 1 } " <nl> echo " $ { RESET } " <nl> exit 1 <nl> fi <nl> function check - docublocks ( ) <nl> grep " $ grepit " / tmp / rawinprog . 
txt | sed " s ; / / / @ startDocuBlock ; \ t \ t ; " <nl> done <nl> fi <nl> + echo " $ { WRN_COLOR } " <nl> + echo " $ { 1 } " <nl> echo " $ { RESET } " <nl> exit 1 <nl> fi <nl> function build - books ( ) <nl> ppbook - check - html - link " $ { book } " <nl> done <nl> <nl> - check - docublocks <nl> - check - dangling - anchors <nl> + check - docublocks " " <nl> + check - dangling - anchors " " <nl> echo " $ { STD_COLOR } # # # # # Generating redirect index . html $ { RESET } " ; \ <nl> echo ' < html > < head > < meta http - equiv = " refresh " content = " 0 ; url = Manual / " > < / head > < body > < / body > < / html > ' > books / index . html <nl> } <nl> case " $ VERB " in <nl> exit 1 <nl> fi <nl> build - book " $ NAME " <nl> + check - docublocks " some of the above errors may be because of referenced books weren ' t rebuilt . " <nl> + check - dangling - anchors " some of the above errors may be because of referenced books weren ' t rebuilt . " <nl> ; ; <nl> check - book ) <nl> check - summary " $ { NAME } " <nl> mmm a / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md <nl> ppp b / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md <nl> <nl> @ startDocuBlock put_read_all_documents <nl> @ brief reads all documents from collection <nl> <nl> - @ RESTHEADER { PUT / _api / simple / all - keys , Read all documents } <nl> + @ RESTHEADER { PUT / _api / simple / all - keys , Read all documents } <nl> + <nl> + @ RESTQUERYPARAMETERS <nl> + <nl> + @ RESTQUERYPARAM { collection , string , optional } <nl> + The name of the collection . <nl> + * * This parameter is only for an easier migration path from old versions . * * <nl> + In ArangoDB versions < 3 . 0 , the URL path was * / _api / document * and <nl> + this was passed in via the query parameter " collection " . <nl> + This combination was removed . The collection name can be passed to <nl> + * / _api / simple / all - keys * as body parameter ( preferred ) or as query parameter . <nl> <nl> @ RESTBODYPARAM { collection , string , required , } <nl> The collection that should be queried <nl> mmm a / Documentation / Scripts / generateMdFiles . py <nl> ppp b / Documentation / Scripts / generateMdFiles . py <nl> def validatePathParameters ( ) : <nl> if thisVerb [ ' parameters ' ] [ nParam ] [ ' in ' ] = = ' path ' : <nl> break <nl> else : <nl> - raise Exception ( " @ RESTPATHPARAMETERS found without any parameter following in % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> + raise Exception ( " @ RESTPATHPARAMETERS found in Swagger data without any parameter following in % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> <nl> def validateQueryParams ( ) : <nl> # print thisVerb <nl> def validateQueryParams ( ) : <nl> if thisVerb [ ' parameters ' ] [ nParam ] [ ' in ' ] = = ' query ' : <nl> break <nl> else : <nl> - raise Exception ( " @ RESTQUERYPARAMETERS found without any parameter following in % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> + raise Exception ( " @ RESTQUERYPARAMETERS found in Swagger data without any parameter following in % s " % json . 
dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> <nl> def validateHeaderParams ( ) : <nl> # print thisVerb <nl> def validateHeaderParams ( ) : <nl> if thisVerb [ ' parameters ' ] [ nParam ] [ ' in ' ] = = ' header ' : <nl> break <nl> else : <nl> - raise Exception ( " @ RESTHEADERPARAMETERS found without any parameter following in % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> + raise Exception ( " @ RESTHEADERPARAMETERS found in Swagger data without any parameter following in % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> <nl> def validateReturnCodes ( ) : <nl> # print thisVerb <nl> def validateReturnCodes ( ) : <nl> if len ( thisVerb [ ' responses ' ] . keys ( ) ) ! = 0 : <nl> break <nl> else : <nl> - raise Exception ( " @ RESTRETURNCODES found without any documented returncodes % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> + raise Exception ( " @ RESTRETURNCODES found in Swagger data without any documented returncodes % s " % json . dumps ( thisVerb , indent = 4 , separators = ( ' , ' , ' : ' ) , sort_keys = True ) ) <nl> <nl> def validateExamples ( ) : <nl> pass <nl> mmm a / js / apps / system / _admin / aardvark / APP / api - docs . json <nl> ppp b / js / apps / system / _admin / aardvark / APP / api - docs . json <nl> <nl> " startVertex " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph Traversal / HTTP_API_TRAVERSAL . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph Traversal / HTTP_API_TRAVERSAL . md " <nl> } , <nl> " PostApiQueryProperties " : { <nl> " properties " : { <nl> <nl> " query " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PostApiQueryProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PostApiQueryProperties . md " <nl> } , <nl> " PutApiQueryCacheProperties " : { <nl> " properties " : { <nl> <nl> " maxResults " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PutApiQueryCacheProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PutApiQueryCacheProperties . md " <nl> } , <nl> " PutApiQueryProperties " : { <nl> " properties " : { <nl> <nl> " maxQueryStringLength " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PutApiQueryProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PutApiQueryProperties . md " <nl> } , <nl> " RestLookupByKeys " : { <nl> " properties " : { <nl> <nl> " keys " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / RestLookupByKeys . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / RestLookupByKeys . 
md " <nl> } , <nl> " RestRemoveByKeys " : { <nl> " properties " : { <nl> <nl> " keys " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> } , <nl> " UserHandling_create " : { <nl> " properties " : { <nl> <nl> " passwd " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " UserHandling_grantCollection " : { <nl> " properties " : { <nl> <nl> " grant " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " UserHandling_grantDatabase " : { <nl> " properties " : { <nl> <nl> " grant " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " UserHandling_modify " : { <nl> " properties " : { <nl> <nl> " passwd " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " UserHandling_replace " : { <nl> " properties " : { <nl> <nl> " passwd " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " admin_echo_client_struct " : { <nl> " description " : " attributes of the client connection \ n \ n " , <nl> <nl> " client " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> } , <nl> " admin_echo_server_struct " : { <nl> " description " : " \ n " , <nl> <nl> " server " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> } , <nl> " admin_statistics_figures_struct " : { <nl> " description " : " " , <nl> <nl> " bytesReceived " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " cluster_endpoints_struct " : { <nl> " description " : " " , <nl> <nl> " indexes " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_alive " : { <nl> " description " : " the currently active figures \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_compactors " : { <nl> " description " : " \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_datafiles " : { <nl> " description " : " Metrics regarding the datafiles \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_dead " : { <nl> " description " : " the items waiting to be swept away by the cleaner \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_indexes " : { <nl> " description " : " " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_journals " : { <nl> " description " : " Metrics regarding the journal files \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_readcache " : { <nl> " description " : " \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " collection_figures_revisions " : { <nl> " description " : " \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " compactionStatus_attributes " : { <nl> " description " : " " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " delete_api_aqlfunction_rc_200 " : { <nl> " properties " : { <nl> <nl> " deletedCount " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> } , <nl> " delete_api_aqlfunction_rc_400 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> } , <nl> " delete_api_aqlfunction_rc_404 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> } , <nl> " delete_api_tasks_rc_200 " : { <nl> " properties " : { <nl> <nl> " error " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> } , <nl> " delete_api_tasks_rc_404 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> } , <nl> " explain_options " : { <nl> " description " : " Options for the query \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_explain . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_explain . 
md " <nl> } , <nl> " general_graph_create_http_examples " : { <nl> " properties " : { <nl> <nl> " name " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . md " <nl> } , <nl> " general_graph_edge_definition_add_http_examples " : { <nl> " properties " : { <nl> <nl> " to " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_add_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_add_http_examples . md " <nl> } , <nl> " general_graph_edge_definition_modify_http_examples " : { <nl> " properties " : { <nl> <nl> " to " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_modify_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_modify_http_examples . md " <nl> } , <nl> " get_admin_log_rc_200 " : { <nl> " properties " : { <nl> <nl> " totalAmount " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> } , <nl> " get_admin_server_role_rc_200 " : { <nl> " properties " : { <nl> <nl> " role " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_server_role . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_server_role . md " <nl> } , <nl> " get_admin_statistics_description_rc_200 " : { <nl> " properties " : { <nl> <nl> " error " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics_description . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics_description . md " <nl> } , <nl> " get_admin_statistics_rc_200 " : { <nl> " properties " : { <nl> <nl> " enabled " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " get_admin_time_rc_200 " : { <nl> " properties " : { <nl> <nl> " time " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_time . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_time . 
md " <nl> } , <nl> " get_api_aqlfunction_rc_200 " : { <nl> " properties " : { <nl> <nl> " result " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> } , <nl> " get_api_aqlfunction_rc_400 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> } , <nl> " get_api_cluster_endpoints_rc_200 " : { <nl> " properties " : { <nl> <nl> " endpoints " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_cluster_endpoints . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_cluster_endpoints . md " <nl> } , <nl> " get_api_collection_figures_rc_200 " : { <nl> " properties " : { <nl> <nl> " journalSize " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } , <nl> " get_api_database_new " : { <nl> " properties " : { <nl> <nl> " name " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_new . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_new . md " <nl> } , <nl> " get_api_database_new_USERS " : { <nl> " description " : " " , <nl> <nl> " version " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> } , <nl> " get_api_tasks_all_rc_200 " : { <nl> " description " : " a list of all tasks \ n \ n " , <nl> <nl> " name " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_engine . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_engine . md " <nl> } , <nl> " http_statistics_struct " : { <nl> " description " : " the numbers of requests by Verb \ n \ n " , <nl> <nl> " http " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " patch_api_view_link_props " : { <nl> " description " : " The link properties . 
If specified , then * properties * \ nshould be a JSON object containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_link_props_field_props " : { <nl> " description " : " The field properties . If specified , then * properties * \ nshould be a JSON object containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_links " : { <nl> " description " : " The set of collection names associated with the properties . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_properties_iresearch " : { <nl> " properties " : { <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_props_commit " : { <nl> " description " : " Commit options for regular operations . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_props_consolidation " : { <nl> " description " : " \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_props_consolidation_bytes_accum " : { <nl> " description " : " Use empty object for default values , i . e . { } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_props_consolidation_count " : { <nl> " description " : " Use empty object for default values , i . e . 
{ } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " patch_api_view_props_consolidation_fill " : { <nl> " description " : " Use empty object for default values , i . e . { } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " post_admin_echo_rc_200 " : { <nl> " properties " : { <nl> <nl> " rawRequestBody " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> } , <nl> " post_api_aqlfunction " : { <nl> " properties " : { <nl> <nl> " code " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> } , <nl> " post_api_aqlfunction_rc_200 " : { <nl> " properties " : { <nl> <nl> " code " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> } , <nl> " post_api_aqlfunction_rc_201 " : { <nl> " properties " : { <nl> <nl> " code " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> } , <nl> " post_api_aqlfunction_rc_400 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> } , <nl> " post_api_collection " : { <nl> " properties " : { <nl> <nl> " name " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> } , <nl> " post_api_collection_opts " : { <nl> " description " : " additional options for key generation . 
If specified , then * keyOptions * \ nshould be a JSON array containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> } , <nl> " post_api_cursor " : { <nl> " properties " : { <nl> <nl> " query " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> } , <nl> " post_api_cursor_opts " : { <nl> " description " : " key / value object with extra options for the query . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> } , <nl> " post_api_cursor_rc_201 " : { <nl> " properties " : { <nl> <nl> " cached " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> } , <nl> " post_api_cursor_rc_400 " : { <nl> " properties " : { <nl> <nl> " errorMessage " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> } , <nl> " post_api_explain " : { <nl> " properties " : { <nl> <nl> " query " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_explain . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_explain . md " <nl> } , <nl> " post_api_export " : { <nl> " properties " : { <nl> <nl> " ttl " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / post_api_export . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / post_api_export . md " <nl> } , <nl> " post_api_export_restrictions " : { <nl> " description " : " an object containing an array of attribute names that will be \ nincluded or excluded when returning result documents . \ n \ nNot specifying * restrict * will by default return all attributes of each document . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / post_api_export . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / post_api_export . md " <nl> } , <nl> " post_api_gharial_create_opts " : { <nl> " description " : " a JSON object which is only useful in the Enterprise version and with isSmart set to true .
\ nIt can contain the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . md " <nl> } , <nl> " post_api_index_fulltext " : { <nl> " properties " : { <nl> <nl> " minLength " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_fulltext . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_fulltext . md " <nl> } , <nl> " post_api_index_geo " : { <nl> " properties " : { <nl> <nl> " geoJson " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_geo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_geo . md " <nl> } , <nl> " post_api_index_hash " : { <nl> " properties " : { <nl> <nl> " sparse " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_hash . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_hash . md " <nl> } , <nl> " post_api_index_persistent " : { <nl> " properties " : { <nl> <nl> " sparse " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_persistent . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_persistent . md " <nl> } , <nl> " post_api_index_skiplist " : { <nl> " properties " : { <nl> <nl> " sparse " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_skiplist . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_skiplist . md " <nl> } , <nl> " post_api_new_tasks " : { <nl> " properties " : { <nl> <nl> " params " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> } , <nl> " post_api_new_tasks_rc_200 " : { <nl> " properties " : { <nl> <nl> " error " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> } , <nl> " post_api_transaction " : { <nl> " properties " : { <nl> <nl> " action " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Transactions / post_api_transaction . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Transactions / post_api_transaction . md " <nl> } , <nl> " post_api_view_iresearch " : { <nl> " properties " : { <nl> <nl> " type " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . md " <nl> } , <nl> " post_api_view_link_props " : { <nl> " description " : " The link properties . If specified , then * properties * \ nshould be a JSON object containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_link_props_field_props " : { <nl> " description " : " The field properties . If specified , then * properties * \ nshould be a JSON object containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_links " : { <nl> " description " : " The set of collection names associated with the properties . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_props " : { <nl> " description " : " The view properties . If specified , then * properties * \ nshould be a JSON object containing the following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . md " <nl> } , <nl> " post_api_view_props_commit " : { <nl> " description " : " Commit options for regular operations . \ n \ nCommit options for regular operations . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_props_consolidation " : { <nl> " description " : " \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_props_consolidation_bytes_accum " : { <nl> " description " : " Use empty object for default values , i . e . { } \ n \ nUse empty object for default values , i . e . { } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_props_consolidation_count " : { <nl> " description " : " Use empty object for default values , i . e . { } \ n \ nUse empty object for default values , i . e . { } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_api_view_props_consolidation_fill " : { <nl> " description " : " Use empty object for default values , i . e . { } \ n \ nUse empty object for default values , i . e . { } \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " post_batch_replication " : { <nl> " properties " : { <nl> <nl> " ttl " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / post_batch_replication . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / post_batch_replication . md " <nl> } , <nl> " put_admin_loglevel " : { <nl> " properties " : { <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> } , <nl> " put_api_new_tasks " : { <nl> " properties " : { <nl> <nl> " params " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / put_api_new_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / put_api_new_tasks . md " <nl> } , <nl> " put_api_replication_applier_adjust " : { <nl> " properties " : { <nl> <nl> " restrictType " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_adjust . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_adjust . 
md " <nl> } , <nl> " put_api_replication_makeSlave " : { <nl> " properties " : { <nl> <nl> " includeSystem " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_makeSlave . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_makeSlave . md " <nl> } , <nl> " put_api_replication_synchronize " : { <nl> " properties " : { <nl> <nl> " password " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_synchronize . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_synchronize . md " <nl> } , <nl> " put_api_simple_any " : { <nl> " properties " : { <nl> <nl> " collection " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_any . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_any . md " <nl> } , <nl> " put_api_simple_by_example " : { <nl> " properties " : { <nl> <nl> " limit " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_by_example . md " <nl> } , <nl> " put_api_simple_first_example " : { <nl> " properties " : { <nl> <nl> " example " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_first_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_first_example . md " <nl> } , <nl> " put_api_simple_fulltext " : { <nl> " properties " : { <nl> <nl> " index " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_fulltext . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_fulltext . md " <nl> } , <nl> " put_api_simple_near " : { <nl> " properties " : { <nl> <nl> " geo " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_near . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_near . md " <nl> } , <nl> " put_api_simple_range " : { <nl> " properties " : { <nl> <nl> " skip " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_range . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_range . 
md " <nl> } , <nl> " put_api_simple_remove_by_example " : { <nl> " properties " : { <nl> <nl> " example " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> } , <nl> " put_api_simple_remove_by_example_opts " : { <nl> " description " : " a json object which can contains following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> } , <nl> " put_api_simple_remove_by_keys_opts " : { <nl> " description " : " a json object which can contains following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> } , <nl> " put_api_simple_replace_by_example " : { <nl> " properties " : { <nl> <nl> " newValue " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> } , <nl> " put_api_simple_replace_by_example_options " : { <nl> " description " : " a json object which can contain following attributes \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> } , <nl> " put_api_simple_update_by_example " : { <nl> " properties " : { <nl> <nl> " newValue " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . md " <nl> } , <nl> " put_api_simple_update_by_example_options " : { <nl> " description " : " a json object which can contains following attributes : \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . 
md " <nl> } , <nl> " put_api_simple_within " : { <nl> " properties " : { <nl> <nl> " geo " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within . md " <nl> } , <nl> " put_api_simple_within_rectangle " : { <nl> " properties " : { <nl> <nl> " geo " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within_rectangle . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within_rectangle . md " <nl> } , <nl> " put_api_view_properties_iresearch " : { <nl> " properties " : { <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } , <nl> " put_batch_replication " : { <nl> " properties " : { <nl> <nl> " ttl " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_batch_replication . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_batch_replication . md " <nl> } , <nl> " put_read_all_documents " : { <nl> " properties " : { <nl> " collection " : { <nl> - " description " : " The name of the collection . This is only for backward compatibility . \ nIn ArangoDB versions < 3 . 0 , the URL path was * / _api / document * and \ nthis was passed in via the query parameter \ " collection \ " . \ nThis combination was removed . \ n \ n " , <nl> + " description " : " The collection that should be queried \ n \ n " , <nl> " type " : " string " <nl> } , <nl> " type " : { <nl> <nl> " type " : " string " <nl> } <nl> } , <nl> + " required " : [ <nl> + " collection " <nl> + ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md " <nl> } , <nl> " server_statistics_struct " : { <nl> " description " : " statistics of the server \ n \ n " , <nl> <nl> " threads " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " server_threads_struct " : { <nl> " description " : " Statistics about the server worker threads ( excluding V8 specific or jemalloc specific threads and system threads ) \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " setof_statistics_struct " : { <nl> " description " : " total connection times \ n \ nthe system time \ n \ nthe request times \ n \ nthe time requests were queued waiting for processing \ n \ nIO Time \ n \ nnumber of bytes sent to the clients \ n \ nnumber of bytes received from the clients \ n \ n \ n " , <nl> <nl> " system " <nl> ] , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " v8_context_struct " : { <nl> " description " : " Statistics about the V8 javascript contexts \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> } , <nl> " version_details_struct " : { <nl> " description " : " an optional JSON object with additional details . This is \ nreturned only if the * details * query parameter is set to * true * in the \ nrequest . \ n \ n " , <nl> <nl> } <nl> } , <nl> " type " : " object " , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> } <nl> } , <nl> " info " : { <nl> <nl> " Cluster " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cluster / get_cluster_health . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cluster / get_cluster_health . md " <nl> } <nl> } , <nl> " / _admin / clusterStatistics " : { <nl> <nl> " Cluster " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cluster / get_cluster_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cluster / get_cluster_statistics . md " <nl> } <nl> } , <nl> " / _admin / database / target - version " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_database_version . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_database_version . md " <nl> } <nl> } , <nl> " / _admin / echo " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_admin_echo . 
md " <nl> } <nl> } , <nl> " / _admin / execute " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_admin_execute . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_admin_execute . md " <nl> } <nl> } , <nl> " / _admin / log " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> } <nl> } , <nl> " / _admin / log / level " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nModifies and returns the server ' s current loglevel settings . \ nThe request body must be a JSON object with the log topics being the object keys \ nand the log levels being the object values . \ n \ nThe result is a JSON object with the adjusted log topics being the object keys , and \ nthe adjusted log levels being the object values . \ n \ nIt can set the loglevel of all facilities by only specifying the loglevel as string without json . \ n \ nPossible loglevels are : \ n - FATAL - There will be no way out of this . ArangoDB will go down after this message . \ n - ERROR - This is an error . you should investigate and fix it . It may harm your production . \ n - WARNING - This may be serious application - wise , but we don ' t know . \ n - INFO - Something has happened , take notice , but no drama attached . \ n - DEBUG - output debug messages \ n - TRACE - trace - prepare your log to be flooded - don ' t use in production . \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * audit - service * * : One of the possible loglevels . \ n - * * cache * * : One of the possible loglevels . \ n - * * syscall * * : One of the possible loglevels . \ n - * * communication * * : One of the possible loglevels . \ n - * * audit - authentication * * : One of the possible loglevels . \ n - * * agencycomm * * : One of the possible loglevels . \ n - * * startup * * : One of the possible loglevels . \ n - * * general * * : One of the possible loglevels . \ n - * * cluster * * : One of the possible loglevels . \ n - * * audit - view * * : One of the possible loglevels . \ n - * * collector * * : One of the possible loglevels . \ n - * * audit - documentation * * : One of the possible loglevels . \ n - * * engines * * : One of the possible loglevels . \ n - * * trx * * : One of the possible loglevels . \ n - * * mmap * * : One of the possible loglevels . \ n - * * agency * * : One of the possible loglevels . \ n - * * authentication * * : One of the possible loglevels . \ n - * * memory * * : One of the possible loglevels . \ n - * * performance * * : One of the possible loglevels . \ n - * * config * * : One of the possible loglevels . \ n - * * authorization * * : One of the possible loglevels . 
\ n - * * development * * : One of the possible loglevels . \ n - * * datafiles * * : One of the possible loglevels . \ n - * * views * * : One of the possible loglevels . \ n - * * ldap * * : One of the possible loglevels . \ n - * * replication * * : One of the possible loglevels . \ n - * * threads * * : One of the possible loglevels . \ n - * * audit - database * * : One of the possible loglevels . \ n - * * v8 * * : One of the possible loglevels . \ n - * * ssl * * : One of the possible loglevels . \ n - * * pregel * * : One of the possible loglevels . \ n - * * audit - collection * * : One of the possible loglevels . \ n - * * rocksdb * * : One of the possible loglevels . \ n - * * supervision * * : One of the possible loglevels . \ n - * * graphs * * : One of the possible loglevels . \ n - * * compactor * * : One of the possible loglevels . \ n - * * queries * * : One of the possible loglevels . \ n - * * heartbeat * * : One of the possible loglevels . \ n - * * requests * * : One of the possible loglevels . \ n \ n \ n " , <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_modules_flush . md " <nl> } <nl> } , <nl> " / _admin / routing / reload " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_routing_reloads . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_routing_reloads . md " <nl> } <nl> } , <nl> " / _admin / server / id " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_server_id . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_server_id . md " <nl> } <nl> } , <nl> " / _admin / server / role " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_server_role . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_server_role . md " <nl> } <nl> } , <nl> " / _admin / shutdown " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / delete_api_shutdown . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / delete_api_shutdown . md " <nl> } <nl> } , <nl> " / _admin / statistics " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics . 
md " <nl> } <nl> } , <nl> " / _admin / statistics - description " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics_description . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_statistics_description . md " <nl> } <nl> } , <nl> " / _admin / status " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / JSF_get_admin_status . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / JSF_get_admin_status . md " <nl> } <nl> } , <nl> " / _admin / time " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_admin_time . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_admin_time . md " <nl> } <nl> } , <nl> " / _admin / wal / flush " : { <nl> <nl> " wal " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / wal / put_admin_wal_flush . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / wal / put_admin_wal_flush . md " <nl> } <nl> } , <nl> " / _admin / wal / properties " : { <nl> <nl> " wal " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / wal / get_admin_wal_properties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / wal / get_admin_wal_properties . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nConfigures the behavior of the write - ahead log . The body of the request \ nmust be a JSON object with the following attributes : \ n - * allowOversizeEntries * : whether or not operations that are bigger than a \ n single logfile can be executed and stored \ n - * logfileSize * : the size of each write - ahead logfile \ n - * historicLogfiles * : the maximum number of historic logfiles to keep \ n - * reserveLogfiles * : the maximum number of reserve logfiles that ArangoDB \ n allocates in the background \ n - * throttleWait * : the maximum wait time that operations will wait before \ n they get aborted if case of write - throttling ( in milliseconds ) \ n - * throttleWhenPending * : the number of unprocessed garbage - collection \ n operations that , when reached , will activate write - throttling . A value of \ n * 0 * means that write - throttling will not be triggered . \ n \ nSpecifying any of the above attributes is optional . Not specified attributes \ nwill be ignored and the configuration for them will not be modified . 
\ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _admin / wal / properties & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " logfileSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " allowOversizeEntries \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " allowOversizeEntries \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " logfileSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " historicLogfiles \ " < / span > : < span class = \ " hljs - number \ " > 10 < / span > , \ n < span class = \ " hljs - string \ " > \ " reserveLogfiles \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " syncInterval \ " < / span > : < span class = \ " hljs - number \ " > 100 < / span > , \ n < span class = \ " hljs - string \ " > \ " throttleWait \ " < / span > : < span class = \ " hljs - number \ " > 15000 < / span > , \ n < span class = \ " hljs - string \ " > \ " throttleWhenPending \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " wal " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / wal / put_admin_wal_properties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / wal / put_admin_wal_properties . md " <nl> } <nl> } , <nl> " / _admin / wal / transactions " : { <nl> <nl> " wal " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / wal / get_admin_wal_transactions . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / wal / get_admin_wal_transactions . md " <nl> } <nl> } , <nl> " / _api / aqlfunction " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / get_api_aqlfunction . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * isDeterministic * * : an optional boolean value to indicate whether the function \ n results are fully deterministic ( function return value solely depends on \ n the input value and return value is the same for repeated calls with same \ n input ) . The * isDeterministic * attribute is currently not used but may be \ n used later for optimisations . \ n - * * code * * : a string representation of the function body . \ n - * * name * * : the fully qualified name of the user functions . 
\ n \ n \ n \ n \ n \ nIn case of success , HTTP 200 is returned . \ nIf the function isn ' t valid etc . HTTP 400 including a detailed error message will be returned . \ n \ n \ n # # # # HTTP 200 \ n * A json document with these Properties is returned : * \ n \ nIf the function already existed and was replaced by the \ ncall , the server will respond with * HTTP 200 * . \ n \ n - * * code * * : the HTTP status code \ n - * * error * * : boolean flag to indicate whether an error occurred ( * false * in this case ) \ n \ n \ n # # # # HTTP 201 \ n * A json document with these Properties is returned : * \ n \ nIf the function can be registered by the server , the server will respond with \ n * HTTP 201 * . \ n \ n - * * code * * : the HTTP status code \ n - * * error * * : boolean flag to indicate whether an error occurred ( * false * in this case ) \ n \ n \ n # # # # HTTP 400 \ n * A json document with these Properties is returned : * \ n \ nIf the JSON representation is malformed or mandatory data is missing from the \ nrequest , the server will respond with * HTTP 400 * . \ n \ n - * * errorMessage * * : a descriptive error message \ n - * * errorNum * * : the server error number \ n - * * code * * : the HTTP status code \ n - * * error * * : boolean flag to indicate whether an error occurred ( * true * in this case ) \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / aqlfunction & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myfunctions : : temperature : : celsiustofahrenheit \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - string \ " > \ " function ( celsius ) { return celsius * 1 . 8 + 32 ; } \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " isDeterministic \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_aqlfunction . md " <nl> } <nl> } , <nl> " / _api / aqlfunction / { name } " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / delete_api_aqlfunction . 
md " <nl> } <nl> } , <nl> " / _api / batch " : { <nl> <nl> " Bulk " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / batch_processing . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / batch_processing . md " <nl> } <nl> } , <nl> " / _api / cluster / endpoints " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_cluster_endpoints . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_cluster_endpoints . md " <nl> } <nl> } , <nl> " / _api / collection " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collections . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collections . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nCreates a new collection with a given name . The request must contain an \ nobject with the following attributes . \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * journalSize * * : The maximal size of a journal or datafile in bytes . The value \ n must be at least ` 1048576 ` ( 1 MiB ) . ( The default is a configuration parameter ) \ n This option is meaningful for the MMFiles storage engine only . \ n - * * replicationFactor * * : ( The default is * 1 * ) : in a cluster , this attribute determines how many copies \ n of each shard are kept on different DBServers . The value 1 means that only one \ n copy ( no synchronous replication ) is kept . A value of k means that k - 1 replicas \ n are kept . Any two copies reside on different DBServers . Replication between them is \ n synchronous , that is , every write operation to the \ " leader \ " copy will be replicated \ n to all \ " follower \ " replicas , before the write operation is reported successful . \ n If a server fails , this is detected automatically and one of the servers holding \ n copies take over , usually without an error being reported . \ n - * * keyOptions * * : \ n - * * allowUserKeys * * : if set to * true * , then it is allowed to supply own key values in the \ n * _key * attribute of a document . If set to * false * , then the key generator \ n will solely be responsible for generating keys and supplying own key values \ n in the * _key * attribute of documents is considered an error . \ n - * * type * * : specifies the type of the key generator . The currently available generators are \ n * traditional * and * autoincrement * . \ n - * * increment * * : increment value for * autoincrement * key generator . Not used for other key \ n generator types . \ n - * * offset * * : Initial offset value for * autoincrement * key generator . \ n Not used for other key generator types . \ n - * * name * * : The name of the collection . \ n - * * waitForSync * * : If * true * then the data is synchronized to disk before returning from a \ n document create , update , replace or removal operation . ( default : false ) \ n - * * doCompact * * : whether or not the collection will be compacted ( default is * true * ) \ n This option is meaningful for the MMFiles storage engine only . 
\ n - * * isVolatile * * : If * true * then the collection data is kept in - memory only and not made persistent . \ n Unloading the collection will cause the collection data to be discarded . Stopping \ n or re - starting the server will also cause full loss of data in the \ n collection . Setting this option will make the resulting collection be \ n slightly faster than regular collections because ArangoDB does not \ n enforce any synchronization to disk and does not calculate any CRC \ n checksums for datafiles ( as there are no datafiles ) . This option \ n should therefore be used for cache - type collections only , and not \ n for data that cannot be re - created otherwise . \ n ( The default is * false * ) \ n This option is meaningful for the MMFiles storage engine only . \ n - * * shardKeys * * : ( The default is * [ \ " _key \ " ] * ) : in a cluster , this attribute determines \ n which document attributes are used to determine the target shard for documents . \ n Documents are sent to shards based on the values of their shard key attributes . \ n The values of all shard key attributes in a document are hashed , \ n and the hash value is used to determine the target shard . \ n * * Note * * : Values of shard key attributes cannot be changed once set . \ n This option is meaningless in a single server setup . \ n - * * numberOfShards * * : ( The default is * 1 * ) : in a cluster , this value determines the \ n number of shards to create for the collection . In a single \ n server setup , this option is meaningless . \ n - * * isSystem * * : If * true * , create a system collection . In this case * collection - name * \ n should start with an underscore . End users should normally create non - system \ n collections only . API implementors may be required to create system \ n collections in very special occasions , but normally a regular collection will do . \ n ( The default is * false * ) \ n - * * type * * : ( The default is * 2 * ) : the type of the collection to create . \ n The following values for * type * are valid : \ n - * 2 * : document collection \ n - * 3 * : edges collection \ n - * * indexBuckets * * : The number of buckets into which indexes using a hash \ n table are split . The default is 16 and this number has to be a \ n power of 2 and less than or equal to 1024 . \ n For very large collections one should increase this to avoid long pauses \ n when the hash table has to be initially built or resized , since buckets \ n are resized individually and can be initially built in parallel . For \ n example , 64 might be a sensible value for a collection with 100 \ n 000 000 documents . Currently , only the edge index respects this \ n value , but other index types might follow in future ArangoDB versions . \ n Changes ( see below ) are applied when the collection is loaded the next \ n time . \ n This option is meaningful for the MMFiles storage engine only . \ n - * * distributeShardsLike * * : ( The default is * \ " \ " * ) : in an enterprise cluster , this attribute binds \ n the specifics of sharding for the newly created collection to follow that of a \ n specified existing collection . \ n * * Note * * : Using this parameter has consequences for the prototype \ n collection . It can no longer be dropped , before sharding imitating \ n collections are dropped . Equally , backups and restores of imitating \ n collections alone will generate warnings , which can be overridden , \ n about missing sharding prototype . 
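Before the server transcripts below, a small Python sketch of the same POST under stated assumptions (local server, requests library, hypothetical collection name); only name is required, the remaining attributes are optional per the list above.

import requests

# Create an edge collection (type 3) with an autoincrement key generator;
# "testRelations" is a hypothetical name, everything except "name" is optional.
body = {
    "name": "testRelations",
    "type": 3,
    "keyOptions": {"type": "autoincrement", "increment": 5, "allowUserKeys": True},
    "waitForSync": False,
}
resp = requests.post("http://localhost:8529/_api/collection", json=body)
info = resp.json()
print(info["status"], info["statusString"])  # e.g. 3 "loaded"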
\ n \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionBasics \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " status \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " statusString \ " < / span > : < span class = \ " hljs - string \ " > \ " loaded \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionBasics \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " keyOptions \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " traditional \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " allowUserKeys \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " lastValue \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " indexBuckets \ " < / span > : < span class = \ " hljs - number \ " > 8 < / span > , \ n < span class = \ " hljs - string \ " > \ " globallyUniqueId \ " < / span > : < span class = \ " hljs - string \ " > \ " h1AA24B099AC2 / 11464 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " doCompact \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " waitForSync \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 11464 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " isSystem \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " journalSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " isVolatile \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ nshell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionEdges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " status \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " statusString \ " < / span > : < span class = \ " hljs - string \ " > \ " loaded \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionEdges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " keyOptions \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " traditional \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " allowUserKeys \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " lastValue \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " indexBuckets \ " < / span > : < span class = \ " hljs - number \ " > 8 < / span > , \ n < span class = \ " hljs - string \ " > \ " globallyUniqueId \ " < / span > : < span class = \ " hljs - string \ " > \ " h1AA24B099AC2 / 11467 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " doCompact \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " waitForSync \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 11467 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " isSystem \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " journalSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " isVolatile \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionUsers \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " keyOptions \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " autoincrement \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " increment \ " < / span > : < span class = \ " hljs - number \ " > 5 < / span > , \ n < span class = \ " hljs - string \ " > \ " allowUserKeys \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ 
" > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " status \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " statusString \ " < / span > : < span class = \ " hljs - string \ " > \ " loaded \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " testCollectionUsers \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " keyOptions \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " autoincrement \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " allowUserKeys \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " offset \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " increment \ " < / span > : < span class = \ " hljs - number \ " > 5 < / span > , \ n < span class = \ " hljs - string \ " > \ " lastValue \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " indexBuckets \ " < / span > : < span class = \ " hljs - number \ " > 8 < / span > , \ n < span class = \ " hljs - string \ " > \ " globallyUniqueId \ " < / span > : < span class = \ " hljs - string \ " > \ " h1AA24B099AC2 / 11472 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " doCompact \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " waitForSync \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 11472 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " isSystem \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " journalSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " isVolatile \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / post_api_collection . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / delete_api_collection . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / delete_api_collection . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nThe result is an object describing the collection with the following \ nattributes : \ n \ n - * id * : The identifier of the collection . \ n \ n - * name * : The name of the collection . \ n \ n - * status * : The status of the collection as number . \ n - 1 : new born collection \ n - 2 : unloaded \ n - 3 : loaded \ n - 4 : in the process of being unloaded \ n - 5 : deleted \ n - 6 : loading \ n \ nEvery other status indicates a corrupted collection . \ n \ n - * type * : The type of the collection as number . \ n - 2 : document collection ( normal case ) \ n - 3 : edges collection \ n \ n - * isSystem * : If * true * then the collection is a system collection . \ n \ n " , <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_name . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_name . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / checksum " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_checksum . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_checksum . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / count " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_count . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_count . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / figures " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_figures . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / load " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_load . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_load . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / loadIndexesIntoMemory " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_load_indexes_in_memory . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_load_indexes_in_memory . 
md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / properties " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_properties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_properties . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nChanges the properties of a collection . Expects an object with the \ nattribute ( s ) \ n \ n - * waitForSync * : If * true * then creating or changing a \ n document will wait until the data has been synchronized to disk . \ n \ n - * journalSize * : The maximal size of a journal or datafile in bytes . \ n The value must be at least ` 1048576 ` ( 1 MB ) . Note that when \ n changing the journalSize value , it will only have an effect for \ n additional journals or datafiles that are created . Already \ n existing journals or datafiles will not be affected . \ n \ nOn success an object with the following attributes is returned : \ n \ n - * id * : The identifier of the collection . \ n \ n - * name * : The name of the collection . \ n \ n - * waitForSync * : The new value . \ n \ n - * journalSize * : The new value . \ n \ n - * status * : The status of the collection as number . \ n \ n - * type * : The collection type . Valid types are : \ n - 2 : document collection \ n - 3 : edges collection \ n \ n - * isSystem * : If * true * then the collection is a system collection . \ n \ n - * isVolatile * : If * true * then the collection data will be \ n kept in memory only and ArangoDB will not write or sync the data \ n to disk . \ n \ n - * doCompact * : Whether or not the collection will be compacted . \ n \ n - * keyOptions * : JSON object which contains key generation options : \ n - * type * : specifies the type of the key generator . The currently \ n available generators are * traditional * and * autoincrement * . \ n - * allowUserKeys * : if set to * true * , then it is allowed to supply \ n own key values in the * _key * attribute of a document . If set to \ n * false * , then the key generator is solely responsible for \ n generating keys and supplying own key values in the * _key * attribute \ n of documents is considered an error . \ n \ n * * Note * * : except for * waitForSync * , * journalSize * and * name * , collection \ nproperties * * cannot be changed * * once a collection is created . To rename \ na collection , the rename endpoint must be used . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / properties & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " waitForSync \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _api / < / span > collection / products / properties \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " status \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " statusString \ " < / span > : < span class = \ " hljs - string \ " > \ " loaded \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " keyOptions \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " traditional \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " allowUserKeys \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " lastValue \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " indexBuckets \ " < / span > : < span class = \ " hljs - number \ " > 8 < / span > , \ n < span class = \ " hljs - string \ " > \ " globallyUniqueId \ " < / span > : < span class = \ " hljs - string \ " > \ " h1AA24B099AC2 / 11863 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " doCompact \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " waitForSync \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 11863 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " isSystem \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " journalSize \ " < / span > : < span class = \ " hljs - number \ " > 33554432 < / span > , \ n < span class = \ " hljs - string \ " > \ " isVolatile \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_properties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_properties . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / rename " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_rename . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_rename . 
md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / revision " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / get_api_collection_revision . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / get_api_collection_revision . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / rotate " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_rotate . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_rotate . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / truncate " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_truncate . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_truncate . md " <nl> } <nl> } , <nl> " / _api / collection / { collection - name } / unload " : { <nl> <nl> " Collections " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Collections / put_api_collection_unload . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Collections / put_api_collection_unload . md " <nl> } <nl> } , <nl> " / _api / cursor " : { <nl> <nl> " Cursors " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor . md " <nl> } <nl> } , <nl> " / _api / cursor / { cursor - identifier } " : { <nl> <nl> " Cursors " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor_delete . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor_delete . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nIf the cursor is still alive , returns an object with the following \ nattributes : \ n \ n - * id * : the * cursor - identifier * \ n - * result * : a list of documents for the current batch \ n - * hasMore * : * false * if this was the last batch \ n - * count * : if present the total number of elements \ n \ nNote that even if * hasMore * returns * true * , the next call might \ nstill return no documents . If , however , * hasMore * is * false * , then \ nthe cursor is exhausted . Once the * hasMore * attribute has a value of \ n * false * , the client can stop . 
\ n \ n \ n \ n \ n * * Example : * * \ n Valid request for next batch \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " query \ " < / span > : < span class = \ " hljs - string \ " > \ " FOR p IN products LIMIT 5 RETURN p \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " count \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " batchSize \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > \ n } \ nEOF \ n \ nshell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 12019 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12013 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12013 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW0km - - F \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " hello4 \ " < / span > : < span class = \ " hljs - string \ " > \ " world1 \ " < / span > \ n } , \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12003 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12003 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW0km - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " hello1 \ " < / span > : < span class = \ " hljs - string \ " > \ " world1 \ " < / span > \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " hasMore \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 12019 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " count \ " < / span > : < span class = \ " hljs - number \ " > 5 < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " stats \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " writesExecuted \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " writesIgnored \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedFull \ " < / span > : < span class = \ " hljs - number \ " > 5 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedIndex \ " < / span > : < span class = \ " 
hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " filtered \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " httpRequests \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " executionTime \ " < / span > : < span class = \ " hljs - number \ " > 0 . 00010180473327636719 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " warnings \ " < / span > : [ ] \ n } , \ n < span class = \ " hljs - string \ " > \ " cached \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Missing identifier \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 400 < / span > Bad Request \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " expecting PUT / _api / cursor / & lt ; cursor - id & gt ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Unknown identifier \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 123123 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " cursor not found \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1600 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Cursors " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor_identifier . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Cursors / post_api_cursor_identifier . md " <nl> } <nl> } , <nl> " / _api / database " : { <nl> <nl> " Database " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_list . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_list . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * name * * : Has to contain a valid database name . \ n - * * users * * : Has to be an array of user objects to initially create for the new database . \ n User information will not be changed for users that already exist . \ n If * users * is not specified or does not contain any users , a default user \ n * root * will be created with an empty string password . This ensures that the \ n new database will be accessible after it is created . \ n Each user object can contain the following attributes : \ n - * * username * * : Loginname of the user to be created \ n - * * passwd * * : The user password as a string . If not specified , it will default to an empty string . \ n - * * active * * : A flag indicating whether the user account should be activated or not . \ n The default value is * true * . If set to * false * , the user won ' t be able to \ n log into the database . \ n - * * extra * * : A JSON object with extra user information . The data contained in * extra * \ n will be stored for the user but not be interpreted further by ArangoDB . \ n \ n \ n \ n \ nCreates a new database \ n \ nThe response is a JSON object with the attribute * result * set to * true * . \ n \ n * * Note * * : creating a new database is only possible from within the * _system * database . \ n \ n \ n \ n \ n * * Example : * * \ n Creating a database named * example * . \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / database & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " example \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > , \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Creating a database named * mydb * with two users . 
\ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / database & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " mydb \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " users \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " username \ " < / span > : < span class = \ " hljs - string \ " > \ " admin \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " passwd \ " < / span > : < span class = \ " hljs - string \ " > \ " secret \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " active \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } , \ n { \ n < span class = \ " hljs - string \ " > \ " username \ " < / span > : < span class = \ " hljs - string \ " > \ " tester \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " passwd \ " < / span > : < span class = \ " hljs - string \ " > \ " test001 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " active \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n ] \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > , \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Database " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_new . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_new . md " <nl> } <nl> } , <nl> " / _api / database / current " : { <nl> <nl> " Database " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_current . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_current . md " <nl> } <nl> } , <nl> " / _api / database / user " : { <nl> <nl> " Database " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_user . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_user . md " <nl> } <nl> } , <nl> " / _api / database / { database - name } " : { <nl> <nl> " Database " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Database / get_api_database_delete . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Database / get_api_database_delete . 
md " <nl> } <nl> } , <nl> " / _api / document / { collection } " : { <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / delete_mutliple_documents . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / delete_mutliple_documents . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nPartially updates documents , the documents to update are specified \ nby the * _key * attributes in the body objects . The body of the \ nrequest must contain a JSON array of document updates with the \ nattributes to patch ( the patch documents ) . All attributes from the \ npatch documents will be added to the existing documents if they do \ nnot yet exist , and overwritten in the existing documents if they do \ nexist there . \ n \ nSetting an attribute value to * null * in the patch documents will cause a \ nvalue of * null * to be saved for the attribute by default . \ n \ nIf * ignoreRevs * is * false * and there is a * _rev * attribute in a \ ndocument in the body and its value does not match the revision of \ nthe corresponding document in the database , the precondition is \ nviolated . \ n \ nIf the document exists and can be updated , then an * HTTP 201 * or \ nan * HTTP 202 * is returned ( depending on * waitForSync * , see below ) . \ n \ nOptionally , the query parameter * waitForSync * can be used to force \ nsynchronization of the document replacement operation to disk even in case \ nthat the * waitForSync * flag had been disabled for the entire collection . \ nThus , the * waitForSync * query parameter can be used to force synchronization \ nof just specific operations . To use this , set the * waitForSync * parameter \ nto * true * . If the * waitForSync * parameter is not specified or set to \ n * false * , then the collection ' s default * waitForSync * behavior is \ napplied . The * waitForSync * query parameter cannot be used to disable \ nsynchronization for collections that have a default * waitForSync * value \ nof * true * . \ n \ nThe body of the response contains a JSON array of the same length \ nas the input array with the information about the handle and the \ nrevision of the updated documents . In each entry , the attribute \ n * _id * contains the known * document - handle * of each updated document , \ n * _key * contains the key which uniquely identifies a document in a \ ngiven collection , and the attribute * _rev * contains the new document \ nrevision . In case of an error or violated precondition , an error \ nobject with the attribute * error * set to * true * and the attribute \ n * errorCode * set to the error code is built . \ n \ nIf the query parameter * returnOld * is * true * , then , for each \ ngenerated document , the complete previous revision of the document \ nis returned under the * old * attribute in the result . \ n \ nIf the query parameter * returnNew * is * true * , then , for each \ ngenerated document , the complete new document is returned under \ nthe * new * attribute in the result . 
\ n \ nNote that if any precondition is violated or an error occurred with \ nsome of the documents , the return code is still 201 or 202 , but \ nthe additional HTTP header * X - Arango - Error - Codes * is set , which \ ncontains a map of the error codes that occurred together with their \ nmultiplicities , as in : * 1200 : 17 , 1205 : 10 * which means that in 17 \ ncases the error 1200 \ " revision conflict \ " and in 10 cases the error \ n1205 \ " illegal document handle \ " occurred . \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / patch_update_multiple_documents . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / patch_update_multiple_documents . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nCreates a new document from the document given in the body , unless there \ nis already a document with the * _key * given . If no * _key * is given , a new \ nunique * _key * is generated automatically . \ n \ nThe body can be an array of documents , in which case all \ ndocuments in the array are inserted with the same semantics as for a \ nsingle document . The result body will contain a JSON array of the \ nsame length as the input array , and each entry contains the result \ nof the operation for the corresponding input . In case of an error \ nthe entry is a document with attributes * error * set to * true * and \ nerrorCode set to the error code that occurred . \ n \ nAny * _id * and * _rev * attributes given in the body are always ignored ; \ nthe URL part or the query parameter * collection * , respectively , counts . \ n \ nIf the document was created successfully , then the * Location * header \ ncontains the path to the newly created document . The * Etag * header field \ ncontains the revision of the document . Both are only set in the single \ ndocument case . \ n \ nIf * silent * is not set to * true * , the body of the response contains a \ nJSON object ( single document case ) with the following attributes : \ n \ n - * _id * contains the document handle of the newly created document \ n - * _key * contains the document key \ n - * _rev * contains the document revision \ n \ nIn the multi case the body is an array of such objects . \ n \ nIf the collection parameter * waitForSync * is * false * , then the call \ nreturns as soon as the document has been accepted . It will not wait \ nuntil the documents have been synced to disk . \ n \ nOptionally , the query parameter * waitForSync * can be used to force \ nsynchronization of the document creation operation to disk even in \ ncase that the * waitForSync * flag had been disabled for the entire \ ncollection . Thus , the * waitForSync * query parameter can be used to \ nforce synchronization of just this specific operation . To use this , \ nset the * waitForSync * parameter to * true * . If the * waitForSync * \ nparameter is not specified or set to * false * , then the collection ' s \ ndefault * waitForSync * behavior is applied . The * waitForSync * query \ nparameter cannot be used to disable synchronization for collections \ nthat have a default * waitForSync * value of * true * . \ n \ nIf the query parameter * returnNew * is * true * , then , for each \ ngenerated document , the complete new document is returned under \ nthe * new * attribute in the result .
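Ahead of the generated transcripts that follow, a brief Python sketch of both the single-document and the array form; the host and the requests library are assumptions, and the payloads mirror the examples.

import requests

BASE = "http://localhost:8529"  # assumed local server

# Single-document form: Location and Etag headers are set, and the body holds
# _id/_key/_rev (plus "new", because returnNew=true is passed here).
one = requests.post(BASE + "/_api/document/products",
                    params={"returnNew": "true"}, json={"Hello": "World"})
print(one.headers.get("location"), one.json().get("new"))

# Array form: one result entry per input document, errors reported per entry.
many = requests.post(BASE + "/_api/document/products",
                     json=[{"Hello": "Earth"}, {"Hello": "Venus"}])
print([d.get("_key") for d in many.json()])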
\ n \ n \ n \ n \ n * * Example : * * \ n Create a document in a collection named * products * . Note that the \ nrevision identifier might or might not by equal to the auto - generated \ nkey . \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1Ey - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12271 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12271 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12271 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Ey - - _ \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Create a document in a collection named * products * with a collection - level \ n * waitForSync * value of * false * . \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1DC - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12259 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12259 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12259 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1DC - - _ \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Create a document in a collection with a collection - level * waitForSync * \ nvalue of * false * , but using the * waitForSync * query parameter . \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products ? waitForSync = true & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1G6 - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12301 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12301 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12301 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1G6 - - _ \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Unknown collection name \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " collection or view not found : products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1203 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Illegal document \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products & lt ; & lt ; EOF \ n { 1 : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 400 < / span > Bad Request \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " VPackError error : Expecting ' \ \ \ " ' or ' } ' \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 600 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Insert multiple documents : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products & lt ; & lt ; EOF \ n [ { \ " Hello \ " : \ " Earth \ " } , { \ " Hello \ " : \ " Venus \ " } , { \ " Hello \ " : \ " Mars \ " } ] \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n [ \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12279 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12279 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Fm - - _ \ " < / span > \ n } , \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12283 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12283 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Fm - - B \ " < / span > \ n } , \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12285 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12285 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Fm - - D \ " < / span > \ n } \ n ] \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Use of returnNew : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products ? returnNew = true & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " World \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1GO - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12293 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12293 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12293 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1GO - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " new \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12293 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12293 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1GO - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " Hello \ " < / span > : < span class = \ " hljs - string \ " > \ " World \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / post_create_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / post_create_document . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces multiple documents in the specified collection with the \ nones in the body , the replaced documents are specified by the * _key * \ nattributes in the body documents . \ n \ nIf * ignoreRevs * is * false * and there is a * _rev * attribute in a \ ndocument in the body and its value does not match the revision of \ nthe corresponding document in the database , the precondition is \ nviolated . \ n \ nIf the document exists and can be updated , then an * HTTP 201 * or \ nan * HTTP 202 * is returned ( depending on * waitForSync * , see below ) . \ n \ nOptionally , the query parameter * waitForSync * can be used to force \ nsynchronization of the document replacement operation to disk even in case \ nthat the * waitForSync * flag had been disabled for the entire collection . \ nThus , the * waitForSync * query parameter can be used to force synchronization \ nof just specific operations . To use this , set the * waitForSync * parameter \ nto * true * . If the * waitForSync * parameter is not specified or set to \ n * false * , then the collection ' s default * waitForSync * behavior is \ napplied . The * waitForSync * query parameter cannot be used to disable \ nsynchronization for collections that have a default * waitForSync * value \ nof * true * . \ n \ nThe body of the response contains a JSON array of the same length \ nas the input array with the information about the handle and the \ nrevision of the replaced documents . 
In each entry , the attribute \ n * _id * contains the known * document - handle * of each updated document , \ n * _key * contains the key which uniquely identifies a document in a \ ngiven collection , and the attribute * _rev * contains the new document \ nrevision . In case of an error or violated precondition , an error \ nobject with the attribute * error * set to * true * and the attribute \ n * errorCode * set to the error code is built . \ n \ nIf the query parameter * returnOld * is * true * , then , for each \ ngenerated document , the complete previous revision of the document \ nis returned under the * old * attribute in the result . \ n \ nIf the query parameter * returnNew * is * true * , then , for each \ ngenerated document , the complete new document is returned under \ nthe * new * attribute in the result . \ n \ nNote that if any precondition is violated or an error occurred with \ nsome of the documents , the return code is still 201 or 202 , but \ nthe additional HTTP header * X - Arango - Error - Codes * is set , which \ ncontains a map of the error codes that occurred together with their \ nmultiplicities , as in : * 1200 : 17 , 1205 : 10 * which means that in 17 \ ncases the error 1200 \ " revision conflict \ " and in 10 cases the error \ n1205 \ " illegal document handle \ " has happened . \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / put_replace_multiple_documents . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / put_replace_multiple_documents . md " <nl> } <nl> } , <nl> " / _api / document / { document - handle } " : { <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / delete_remove_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / delete_remove_document . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nReturns the document identified by * document - handle * . The returned \ ndocument contains three special attributes : * _id * containing the document \ nhandle , * _key * containing key which uniquely identifies a document \ nin a given collection and * _rev * containing the revision . \ n \ n \ n \ n \ n * * Example : * * \ n Use a document handle : \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12309 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1Hq - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12309 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12309 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Hq - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " hello \ " < / span > : < span class = \ " hljs - string \ " > \ " world \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Use a document handle and an Etag : \ n \ n < pre > < code class = \ " json \ " > shell > curl - - header ' If - None - Match : \ " _WnWW1KC - - _ \ " ' - - dump - http : / / localhost : 8529 / _api / document / products / 12357 \ n \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Unknown document handle : \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / document / products / unknownhandle \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " collection or view not found : products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1203 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / get_read_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / get_read_document . md " <nl> } , <nl> " head " : { <nl> " description " : " \ n \ nLike * GET * , but only returns the header fields and not the body . You \ ncan use this call to get the current revision of a document or check if \ nthe document was deleted . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X HEAD - - dump - http : / / localhost : 8529 / _api / document / products / 12348 \ n \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / head_read_document_header . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / head_read_document_header . 
md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nPartially updates the document identified by * document - handle * . \ nThe body of the request must contain a JSON document with the \ nattributes to patch ( the patch document ) . All attributes from the \ npatch document will be added to the existing document if they do not \ nyet exist , and overwritten in the existing document if they do exist \ nthere . \ n \ nSetting an attribute value to * null * in the patch document will cause a \ nvalue of * null * to be saved for the attribute by default . \ n \ nIf the * If - Match * header is specified and the revision of the \ ndocument in the database is unequal to the given revision , the \ nprecondition is violated . \ n \ nIf * If - Match * is not given and * ignoreRevs * is * false * and there \ nis a * _rev * attribute in the body and its value does not match \ nthe revision of the document in the database , the precondition is \ nviolated . \ n \ nIf a precondition is violated , an * HTTP 412 * is returned . \ n \ nIf the document exists and can be updated , then an * HTTP 201 * or \ nan * HTTP 202 * is returned ( depending on * waitForSync * , see below ) , \ nthe * Etag * header field contains the new revision of the document \ n ( in double quotes ) and the * Location * header contains a complete URL \ nunder which the document can be queried . \ n \ nOptionally , the query parameter * waitForSync * can be used to force \ nsynchronization of the updated document operation to disk even in case \ nthat the * waitForSync * flag had been disabled for the entire collection . \ nThus , the * waitForSync * query parameter can be used to force synchronization \ nof just specific operations . To use this , set the * waitForSync * parameter \ nto * true * . If the * waitForSync * parameter is not specified or set to \ n * false * , then the collection ' s default * waitForSync * behavior is \ napplied . The * waitForSync * query parameter cannot be used to disable \ nsynchronization for collections that have a default * waitForSync * value \ nof * true * . \ n \ nIf * silent * is not set to * true * , the body of the response contains a JSON \ nobject with the information about the handle and the revision . The attribute \ n * _id * contains the known * document - handle * of the updated document , * _key * \ ncontains the key which uniquely identifies a document in a given collection , \ nand the attribute * _rev * contains the new document revision . \ n \ nIf the query parameter * returnOld * is * true * , then \ nthe complete previous revision of the document \ nis returned under the * old * attribute in the result . \ n \ nIf the query parameter * returnNew * is * true * , then \ nthe complete new document is returned under \ nthe * new * attribute in the result . \ n \ nIf the document does not exist , then a * HTTP 404 * is returned and the \ nbody of the response contains an error document . \ n \ n \ n \ n \ n * * Example : * * \ n Patches an existing document with new content . \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12228 & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " hello \ " < / span > : < span class = \ " hljs - string \ " > \ " world \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1_u - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12228 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_u - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_q - - _ \ " < / span > \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12228 & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " numbers \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " one \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " two \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " three \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " empty \ " < / span > : < span class = \ " hljs - literal \ " > null < / span > \ n } \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1_6 - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12228 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_6 - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_u - - _ \ " < / span > \ n } \ nshell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12228 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1_6 - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_6 - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " one \ " < / span > : < span class = \ " hljs - string \ " > \ " world \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " hello \ " < / span > : < span class = \ " hljs - string \ " > \ " world \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " numbers \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " one \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " two \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " three \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " empty \ " < / span > : < span class = \ " hljs - literal \ " > null < / span > \ n } \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12228 ? keepNull = false & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " hello \ " < / span > : < span class = \ " hljs - literal \ " > null < / span > , \ n < span class = \ " hljs - string \ " > \ " numbers \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " four \ " < / span > : < span class = \ " hljs - number \ " > 4 < / span > \ n } \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1AS - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12228 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1AS - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1_6 - - _ \ " < / span > \ n } \ nshell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12228 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1AS - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12228 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1AS - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " one \ " < / span > : < span class = \ " hljs - string \ " > \ " world \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " numbers \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " empty \ " < / span > : < span class = \ " hljs - literal \ " > null < / span > , \ n < span class = \ " hljs - string \ " > \ " one \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " three \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " two \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " four \ " < / span > : < span class = \ " hljs - number \ " > 4 < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Merging attributes of an object using ` mergeObjects ` : \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12244 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1Ba - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Ba - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " inhabitants \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " china \ " < / span > : < span class = \ " hljs - number \ " > 1366980000 < / span > , \ n < span class = \ " hljs - string \ " > \ " india \ " < / span > : < span class = \ " hljs - number \ " > 1263590000 < / span > , \ n < span class = \ " hljs - string \ " > \ " usa \ " < / span > : < span class = \ " hljs - number \ " > 319220000 < / span > \ n } \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12244 ? 
mergeObjects = true & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " inhabitants \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " indonesia \ " < / span > : < span class = \ " hljs - number \ " > 252164800 < / span > , \ n < span class = \ " hljs - string \ " > \ " brazil \ " < / span > : < span class = \ " hljs - number \ " > 203553000 < / span > \ n } \ n } \ nEOF \ n \ nshell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12244 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1Bq - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Bq - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " inhabitants \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " china \ " < / span > : < span class = \ " hljs - number \ " > 1366980000 < / span > , \ n < span class = \ " hljs - string \ " > \ " india \ " < / span > : < span class = \ " hljs - number \ " > 1263590000 < / span > , \ n < span class = \ " hljs - string \ " > \ " usa \ " < / span > : < span class = \ " hljs - number \ " > 319220000 < / span > , \ n < span class = \ " hljs - string \ " > \ " indonesia \ " < / span > : < span class = \ " hljs - number \ " > 252164800 < / span > , \ n < span class = \ " hljs - string \ " > \ " brazil \ " < / span > : < span class = \ " hljs - number \ " > 203553000 < / span > \ n } \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12244 ? mergeObjects = false & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " inhabitants \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " pakistan \ " < / span > : < span class = \ " hljs - number \ " > 188346000 < / span > \ n } \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1B6 - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12244 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1B6 - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Bq - - _ \ " < / span > \ n } \ nshell > curl - - dump - http : / / localhost : 8529 / _api / document / products / 12244 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1B6 - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12244 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1B6 - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " inhabitants \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " pakistan \ " < / span > : < span class = \ " hljs - number \ " > 188346000 < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / patch_update_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / patch_update_document . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces the document with handle < document - handle > with the one in \ nthe body , provided there is such a document and no precondition is \ nviolated . \ n \ nIf the * If - Match * header is specified and the revision of the \ ndocument in the database is unequal to the given revision , the \ nprecondition is violated . \ n \ nIf * If - Match * is not given and * ignoreRevs * is * false * and there \ nis a * _rev * attribute in the body and its value does not match \ nthe revision of the document in the database , the precondition is \ nviolated . \ n \ nIf a precondition is violated , an * HTTP 412 * is returned . 
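The *ignoreRevs* parameter described above is not exercised by the examples further below. A minimal sketch, assuming *ignoreRevs=true* makes the server skip the *_rev* comparison just described; the handle products/12366 is borrowed from the first example below, and the stale revision value is invented:

# assumption: ignoreRevs=true bypasses the revision precondition check
shell> curl -X PUT --data-binary @- --dump - "http://localhost:8529/_api/document/products/12366?ignoreRevs=true" <<EOF
{ "_rev" : "some-stale-revision", "Hello" : "again" }
EOF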
\ n \ nIf the document exists and can be updated , then an * HTTP 201 * or \ nan * HTTP 202 * is returned ( depending on * waitForSync * , see below ) , \ nthe * Etag * header field contains the new revision of the document \ nand the * Location * header contains a complete URL under which the \ ndocument can be queried . \ n \ nOptionally , the query parameter * waitForSync * can be used to force \ nsynchronization of the document replacement operation to disk even in case \ nthat the * waitForSync * flag had been disabled for the entire collection . \ nThus , the * waitForSync * query parameter can be used to force synchronization \ nof just specific operations . To use this , set the * waitForSync * parameter \ nto * true * . If the * waitForSync * parameter is not specified or set to \ n * false * , then the collection ' s default * waitForSync * behavior is \ napplied . The * waitForSync * query parameter cannot be used to disable \ nsynchronization for collections that have a default * waitForSync * value \ nof * true * . \ n \ nIf * silent * is not set to * true * , the body of the response contains a JSON \ nobject with the information about the handle and the revision . The attribute \ n * _id * contains the known * document - handle * of the updated document , * _key * \ ncontains the key which uniquely identifies a document in a given collection , \ nand the attribute * _rev * contains the new document revision . \ n \ nIf the query parameter * returnOld * is * true * , then \ nthe complete previous revision of the document \ nis returned under the * old * attribute in the result . \ n \ nIf the query parameter * returnNew * is * true * , then \ nthe complete new document is returned under \ nthe * new * attribute in the result . \ n \ nIf the document does not exist , then a * HTTP 404 * is returned and the \ nbody of the response contains an error document . \ n \ n \ n \ n \ n * * Example : * * \ n Using a document handle \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12366 & lt ; & lt ; EOF \ n { \ " Hello \ " : \ " you \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1LG - - _ \ " < / span > \ nlocation : < span class = \ " hljs - regexp \ " > / _db / < / span > _system / _api / < span class = \ " hljs - built_in \ " > document < / span > / products / < span class = \ " hljs - number \ " > 12366 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12366 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12366 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1LG - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1LC - - _ \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Unknown document handle \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12388 & lt ; & lt ; EOF \ n { } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " document not found \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1202 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Produce a revision conflict \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' If - Match : \ " _WnWW1Lu - - B \ " ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 12376 & lt ; & lt ; EOF \ n { \ " other \ " : \ " content \ " } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 412 < / span > Precondition Failed \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : < span class = \ " hljs - string \ " > \ " _WnWW1Lu - - _ \ " < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 412 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1200 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " precondition failed \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 12376 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 12376 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWW1Lu - - _ \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / put_replace_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / put_replace_document . md " <nl> } <nl> } , <nl> " / _api / edges / { collection - id } " : { <nl> <nl> " Graph Edges " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph Edges / get_read_in_out_edges . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph Edges / get_read_in_out_edges . md " <nl> } <nl> } , <nl> " / _api / endpoint " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_endpoint . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_endpoint . md " <nl> } <nl> } , <nl> " / _api / engine " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_engine . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_engine . md " <nl> } <nl> } , <nl> " / _api / explain " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / post_api_explain . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / post_api_explain . md " <nl> } <nl> } , <nl> " / _api / export " : { <nl> <nl> " Bulk " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / post_api_export . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / post_api_export . 
md " <nl> } <nl> } , <nl> " / _api / foxx " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_list . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_list . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nInstalls the given new service at the given mount path . \ n \ nThe request body can be any of the following formats : \ n \ n - ` application / zip ` : a raw zip bundle containing a service \ n - ` application / javascript ` : a standalone JavaScript file \ n - ` application / json ` : a service definition as JSON \ n - ` multipart / form - data ` : a service definition as a multipart form \ n \ nA service definition is an object or form with the following properties or fields : \ n \ n - * configuration * : a JSON object describing configuration values \ n - * dependencies * : a JSON object describing dependency settings \ n - * source * : a fully qualified URL or an absolute path on the server ' s file system \ n \ nWhen using multipart data , the * source * field can also alternatively be a file field \ ncontaining either a zip bundle or a standalone JavaScript file . \ n \ nWhen using a standalone JavaScript file the given file will be executed \ nto define our service ' s HTTP endpoints . It is the same which would be defined \ nin the field ` main ` of the service manifest . \ n \ nIf * source * is a URL , the URL must be reachable from the server . \ nIf * source * is a file system path , the path will be resolved on the server . \ nIn either case the path or URL is expected to resolve to a zip bundle , \ nJavaScript file or ( in case of a file system path ) directory . \ n \ nNote that when using file system paths in a cluster with multiple coordinators \ nthe file system path must resolve to equivalent files on every coordinator . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_install . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_install . md " <nl> } <nl> } , <nl> " / _api / foxx / commit " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_commit . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_commit . md " <nl> } <nl> } , <nl> " / _api / foxx / configuration " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_get . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_get . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nReplaces the given service ' s configuration . \ n \ nReturns an object mapping all configuration option names to their new values . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_update . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_update . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces the given service ' s configuration completely . \ n \ nReturns an object mapping all configuration option names to their new values . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_replace . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_configuration_replace . md " <nl> } <nl> } , <nl> " / _api / foxx / dependencies " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_get . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_get . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nReplaces the given service ' s dependencies . \ n \ nReturns an object mapping all dependency names to their new mount paths . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_update . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_update . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces the given service ' s dependencies completely . \ n \ nReturns an object mapping all dependency names to their new mount paths . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_replace . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_dependencies_replace . md " <nl> } <nl> } , <nl> " / _api / foxx / development " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_development_disable . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_development_disable . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nPuts the service into development mode . \ n \ nWhile the service is running in development mode the service will be reloaded \ nfrom the filesystem and its setup script ( if any ) will be re - executed every \ ntime the service handles a request . \ n \ nWhen running ArangoDB in a cluster with multiple coordinators note that changes \ nto the filesystem on one coordinator will not be reflected across the other \ ncoordinators . This means you should treat your coordinators as inconsistent \ nas long as any service is running in development mode . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_development_enable . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_development_enable . 
md " <nl> } <nl> } , <nl> " / _api / foxx / download " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_bundle . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_bundle . md " <nl> } <nl> } , <nl> " / _api / foxx / readme " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_readme . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_readme . md " <nl> } <nl> } , <nl> " / _api / foxx / scripts " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_scripts_list . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_scripts_list . md " <nl> } <nl> } , <nl> " / _api / foxx / scripts / { name } " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_scripts_run . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_scripts_run . md " <nl> } <nl> } , <nl> " / _api / foxx / service " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_uninstall . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_uninstall . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nFetches detailed information for the service at the given mount path . \ n \ nReturns an object with the following attributes : \ n \ n - * mount * : the mount path of the service \ n - * path * : the local file system path of the service \ n - * development * : * true * if the service is running in development mode \ n - * legacy * : * true * if the service is running in 2 . 8 legacy compatibility mode \ n - * manifest * : the normalized JSON manifest of the service \ n \ nAdditionally the object may contain the following attributes if they have been set on the manifest : \ n \ n - * name * : a string identifying the service type \ n - * version * : a semver - compatible version string \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_details . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_details . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nInstalls the given new service on top of the service currently installed at the given mount path . \ nThis is only recommended for switching between different versions of the same service . \ n \ nUnlike replacing a service , upgrading a service retains the old service ' s configuration \ nand dependencies ( if any ) and should therefore only be used to migrate an existing service \ nto a newer or equivalent service . 
\ n \ nThe request body can be any of the following formats : \ n \ n - ` application / zip ` : a raw zip bundle containing a service \ n - ` application / javascript ` : a standalone JavaScript file \ n - ` application / json ` : a service definition as JSON \ n - ` multipart / form - data ` : a service definition as a multipart form \ n \ nA service definition is an object or form with the following properties or fields : \ n \ n - * configuration * : a JSON object describing configuration values \ n - * dependencies * : a JSON object describing dependency settings \ n - * source * : a fully qualified URL or an absolute path on the server ' s file system \ n \ nWhen using multipart data , the * source * field can also alternatively be a file field \ ncontaining either a zip bundle or a standalone JavaScript file . \ n \ nWhen using a standalone JavaScript file the given file will be executed \ nto define our service ' s HTTP endpoints . It is the same which would be defined \ nin the field ` main ` of the service manifest . \ n \ nIf * source * is a URL , the URL must be reachable from the server . \ nIf * source * is a file system path , the path will be resolved on the server . \ nIn either case the path or URL is expected to resolve to a zip bundle , \ nJavaScript file or ( in case of a file system path ) directory . \ n \ nNote that when using file system paths in a cluster with multiple coordinators \ nthe file system path must resolve to equivalent files on every coordinator . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_upgrade . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_upgrade . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nRemoves the service at the given mount path from the database and file system . \ nThen installs the given new service at the same mount path . \ n \ nThis is a slightly safer equivalent to performing an uninstall of the old service \ nfollowed by installing the new service . The new service ' s main and script files \ n ( if any ) will be checked for basic syntax errors before the old service is removed . \ n \ nThe request body can be any of the following formats : \ n \ n - ` application / zip ` : a raw zip bundle containing a service \ n - ` application / javascript ` : a standalone JavaScript file \ n - ` application / json ` : a service definition as JSON \ n - ` multipart / form - data ` : a service definition as a multipart form \ n \ nA service definition is an object or form with the following properties or fields : \ n \ n - * configuration * : a JSON object describing configuration values \ n - * dependencies * : a JSON object describing dependency settings \ n - * source * : a fully qualified URL or an absolute path on the server ' s file system \ n \ nWhen using multipart data , the * source * field can also alternatively be a file field \ ncontaining either a zip bundle or a standalone JavaScript file . \ n \ nWhen using a standalone JavaScript file the given file will be executed \ nto define our service ' s HTTP endpoints . It is the same which would be defined \ nin the field ` main ` of the service manifest . \ n \ nIf * source * is a URL , the URL must be reachable from the server . \ nIf * source * is a file system path , the path will be resolved on the server . 
\ nIn either case the path or URL is expected to resolve to a zip bundle , \ nJavaScript file or ( in case of a file system path ) directory . \ n \ nNote that when using file system paths in a cluster with multiple coordinators \ nthe file system path must resolve to equivalent files on every coordinator . \ n \ n " , <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_replace . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_service_replace . md " <nl> } <nl> } , <nl> " / _api / foxx / swagger " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_swagger . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_swagger . md " <nl> } <nl> } , <nl> " / _api / foxx / tests " : { <nl> <nl> " Foxx " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Foxx / api_foxx_tests_run . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Foxx / api_foxx_tests_run . md " <nl> } <nl> } , <nl> " / _api / gharial " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_list_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_list_http_examples . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nThe creation of a graph requires the name of the graph and a \ ndefinition of its edges . \ n [ See also edge definitions ] ( . . / . . / Manual / Graphs / GeneralGraphs / Management . html # edge - definitions ) . \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * orphanCollections * * : An array of additional vertex collections . \ n - * * edgeDefinitions * * : An array of definitions for the edge \ n - * * name * * : Name of the graph . \ n - * * isSmart * * : Define if the created graph should be smart . \ n This only has effect in Enterprise version . \ n - * * options * * : \ n - * * smartGraphAttribute * * : The attribute name that is used to smartly shard the vertices of a graph . \ n Every vertex in this Graph has to have this attribute . \ n Cannot be modified later . \ n - * * numberOfShards * * : The number of shards that is used for every collection within this graph . \ n Cannot be modified later . 
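A minimal sketch of the creation call, mirroring the first generated example below; it assumes a server on localhost:8529 and defines edges running from startVertices to endVertices:

```sh
# Create a graph from a single edge definition: edges in the
# "edges" collection run from startVertices to endVertices.
curl -X POST \
  -H 'content-type: application/json' \
  -d '{"name": "myGraph",
       "edgeDefinitions": [{
         "collection": "edges",
         "from": ["startVertices"],
         "to":   ["endVertices"]
       }]}' \
  http://localhost:8529/_api/gharial
```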
\ n \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " edges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " startVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " endVertices \ " < / span > \ n ] \ n } \ n ] \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvIG - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " edges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " startVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " endVertices \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvIG - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myGraph \ " < / span > , \ n < 
span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " edges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " startVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " endVertices \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " options \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 9 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " region \ " < / span > \ n } \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvJ6 - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " edges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " startVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " endVertices \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvJ6 - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_create_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_drop_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_drop_http_examples . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nGets a graph from the collection * _graphs * . \ nReturns the definition content of this graph . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / gharial / myGraph \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvdS - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " edges \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " startVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " endVertices \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / myGraph \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvdS - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_get_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_get_http_examples . 
md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / edge " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_list_edge_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_list_edge_http_examples . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nAdds an additional edge definition to the graph . \ n \ nThis edge definition has to contain a * collection * and an array of \ neach * from * and * to * vertex collections . An edge definition can only \ nbe added if this definition is either not used in any other graph , or \ nit is used with exactly the same definition . It is not possible to \ nstore a definition \ " e \ " from \ " v1 \ " to \ " v2 \ " in the one graph , and \ " e \ " \ nfrom \ " v2 \ " to \ " v1 \ " in the other graph . \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * to * * ( string ) : One or many vertex collections that can contain target vertices . \ n - * * from * * ( string ) : One or many vertex collections that can contain source vertices . \ n - * * collection * * : The name of the edge collection to be used . \ n \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / edge & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " works_in \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " city \ " < / span > \ n ] \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvBS - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " relation \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] \ n } , \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " works_in \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " city \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvBS - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_add_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_add_http_examples . 
md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / edge / { collection - name } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_create_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_create_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / edge / { collection - name } / { edge - key } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_delete_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_delete_http_examples . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nGets an edge from the given collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / gharial / social / edge / relation / 10223 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvai - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " edge \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 10223 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " relation / 10223 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _from \ " < / span > : < span class = \ " hljs - string \ " > \ " female / alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _to \ " < / span > : < span class = \ " hljs - string \ " > \ " male / charly \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvai - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " friend \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " vertex \ " < / span > : < span class = \ " hljs - string \ " > \ " alice \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_get_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_get_http_examples . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nUpdates the data of the specific edge in the collection . 
\ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / edge / relation / 10799 & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " since \ " < / span > : < span class = \ " hljs - string \ " > \ " 01 . 01 . 2001 \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWv4u - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " edge \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " relation / 10799 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 10799 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWv4u - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWv4O - - B \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_modify_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_modify_http_examples . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces the data of an edge in the collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / edge / relation / 10868 & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " divorced \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _from \ " < / span > : < span class = \ " hljs - string \ " > \ " female / alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _to \ " < / span > : < span class = \ " hljs - string \ " > \ " male / bob \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWv8G - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " edge \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " relation / 10868 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 10868 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWv8G - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWv8mmmL \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_replace_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_replace_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / edge / { definition - name } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_remove_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_remove_http_examples . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nChange one specific edge definition . \ nThis will modify all occurrences of this definition in all graphs known to your database . \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * to * * ( string ) : One or many vertex collections that can contain target vertices . \ n - * * from * * ( string ) : One or many vertex collections that can contain source vertices . \ n - * * collection * * : The name of the edge collection to be used . 
\ n \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / edge / relation & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " relation \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " animal \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " animal \ " < / span > \ n ] \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWwIa - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " relation \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " animal \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " animal \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWwIa - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n " , <nl> 
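Condensed from the example above, a minimal sketch of the same definition change; because the definition is shared, adding the animal collection here affects every graph that uses relation:

```sh
# Rewrite the relation edge definition so that edges may also
# start or end at vertices in the animal collection.
curl -X PUT \
  -H 'content-type: application/json' \
  -d '{"collection": "relation",
       "from": ["female", "male", "animal"],
       "to":   ["female", "male", "animal"]}' \
  http://localhost:8529/_api/gharial/social/edge/relation
```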
<nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_modify_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_edge_definition_modify_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / vertex " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_list_vertex_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_list_vertex_http_examples . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nAdds a vertex collection to the set of collections of the graph . If \ nthe collection does not exist , it will be created . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / vertex & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " otherVertices \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvF6 - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " graph \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " edgeDefinitions \ " < / span > : [ \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " relation \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " from \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " to \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " female \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " male \ " < / span > \ n ] \ n } \ n ] , \ n < span class = \ " hljs - string \ " > \ " orphanCollections \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " otherVertices \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " isSmart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " numberOfShards \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " replicationFactor \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " smartGraphAttribute \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , 
\ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " _graphs / social \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvF6 - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_collection_add_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_collection_add_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / vertex / { collection - name } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_collection_remove_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_collection_remove_http_examples . md " <nl> } , <nl> " post " : { <nl> " description " : " \ n \ nAdds a vertex to the given collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / vertex / male & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " Francis \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvEO - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " vertex \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " male / 9743 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " 9743 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvEO - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_create_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_create_http_examples . md " <nl> } <nl> } , <nl> " / _api / gharial / { graph - name } / vertex / { collection - name } / { vertex - key } " : { <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_delete_http_examples . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_delete_http_examples . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nGets a vertex from the given collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / gharial / social / vertex / female / alice \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWvhO - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > , \ n < span class = \ " hljs - string \ " > \ " vertex \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " female / alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvhO - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " Alice \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_get_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_get_http_examples . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n \ nUpdates the data of the specific vertex in the collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / vertex / female / alice & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " age \ " < / span > : < span class = \ " hljs - number \ " > 26 < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWv0S - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " vertex \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " female / alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWv0S - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWvzi - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_modify_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_modify_http_examples . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReplaces the data of a vertex in the collection . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / gharial / social / vertex / female / alice & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " Alice Cooper \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " age \ " < / span > : < span class = \ " hljs - number \ " > 26 < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 202 < / span > Accepted \ nx - content - type - options : nosniff \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ netag : _WnWWwNq - - _ \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 202 < / span > , \ n < span class = \ " hljs - string \ " > \ " vertex \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " _id \ " < / span > : < span class = \ " hljs - string \ " > \ " female / alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > : < span class = \ " hljs - string \ " > \ " alice \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _rev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWwNq - - _ \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " _oldRev \ " < / span > : < span class = \ " hljs - string \ " > \ " _WnWWwNW - - _ \ " < / span > \ n } \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Graph " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_replace_http_examples . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph / general_graph_vertex_replace_http_examples . md " <nl> } <nl> } , <nl> " / _api / import # document " : { <nl> <nl> " Bulk " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / import_document . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / import_document . md " <nl> } <nl> } , <nl> " / _api / import # json " : { <nl> <nl> " Bulk " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Bulk / import_json . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Bulk / import_json . md " <nl> } <nl> } , <nl> " / _api / index " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / get_api_index . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / get_api_index . md " <nl> } <nl> } , <nl> " / _api / index # fulltext " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_fulltext . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_fulltext . md " <nl> } <nl> } , <nl> " / _api / index # general " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index . 
md " <nl> } <nl> } , <nl> " / _api / index # geo " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_geo . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_geo . md " <nl> } <nl> } , <nl> " / _api / index # hash " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_hash . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_hash . md " <nl> } <nl> } , <nl> " / _api / index # persistent " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_persistent . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_persistent . md " <nl> } <nl> } , <nl> " / _api / index # skiplist " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_skiplist . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_skiplist . md " <nl> } <nl> } , <nl> " / _api / index / { index - handle } " : { <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / post_api_index_delete . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / post_api_index_delete . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nThe result is an object describing the index . It has at least the following \ nattributes : \ n \ n - * id * : the identifier of the index \ n \ n - * type * : the index type \ n \ nAll other attributes are type - dependent . For example , some indexes provide \ n * unique * or * sparse * flags , whereas others don ' t . Some indexes also provide \ na selectivity estimate in the * selectivityEstimate * attribute of the result . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / index / products / 0 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " fields \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " _key \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " products / 0 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " selectivityEstimate \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " sparse \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " primary \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " unique \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Indexes " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Indexes / get_api_reads_index . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Indexes / get_api_reads_index . md " <nl> } <nl> } , <nl> " / _api / job / { job - id } " : { <nl> <nl> " job " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / job / job_getStatusById . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / job / job_getStatusById . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nReturns the result of an async job identified by job - id . If the async job \ nresult is present on the server , the result will be removed from the list of \ nresult . That means this method can be called for each job - id once . \ nThe method will return the original job result ' s headers and body , plus the \ nadditional HTTP header x - arango - async - job - id . If this header is present , \ nthen \ nthe job was found and the response contains the original job ' s result . If \ nthe header is not present , the job was not found and the response contains \ nstatus information from the job manager . \ n \ n \ n \ n \ n * * Example : * * \ n Not providing a job - id : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / job \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 400 < / span > Bad Request \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " bad parameter \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Providing a job - id for a non - existing job : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / job / notthere \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " not found \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Fetching the result of an HTTP GET job : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' x - arango - async : store ' - - dump - http : / / localhost : 8529 / _api / version \ n \ nHTTP / 1 . 1 202 Accepted \ nx - content - type - options : nosniff \ nx - arango - async - id : 152293662584438 \ ncontent - type : text / plain ; charset = utf - 8 \ n \ nshell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / job / 152293662584438 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ nx - content - type - options : nosniff \ nx - arango - < span class = \ " hljs - keyword \ " > async < / span > - id : < span class = \ " hljs - number \ " > 152293662584438 < / span > \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " server \ " < / span > : < span class = \ " hljs - string \ " > \ " arango \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " version \ " < / span > : < span class = \ " hljs - string \ " > \ " 3 . 4 . 
devel \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " license \ " < / span > : < span class = \ " hljs - string \ " > \ " community \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Fetching the result of an HTTP POST job that failed : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' x - arango - async : store ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " this name is invalid \ " < / span > \ n } \ nEOF \ n \ nHTTP / 1 . 1 202 Accepted \ nx - content - type - options : nosniff \ nx - arango - async - id : 152293662584443 \ ncontent - type : text / plain ; charset = utf - 8 \ n \ nshell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / job / 152293662584443 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 400 < / span > Bad Request \ nx - content - type - options : nosniff \ nx - arango - < span class = \ " hljs - keyword \ " > async < / span > - id : < span class = \ " hljs - number \ " > 152293662584443 < / span > \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " expected PUT / _api / collection / & lt ; collection - name & gt ; / & lt ; action & gt ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 400 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " job " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / job / job_fetch_result . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / job / job_fetch_result . md " <nl> } <nl> } , <nl> " / _api / job / { job - id } / cancel " : { <nl> <nl> " job " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / job / job_cancel . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / job / job_cancel . md " <nl> } <nl> } , <nl> " / _api / job / { type } " : { <nl> <nl> " job " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / job / job_delete . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / job / job_delete . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nReturns the list of ids of async jobs with a specific status ( either done or \ npending ) . \ nThe list can be used by the client to get an overview of the job system \ nstatus and \ nto retrieve completed job results later . 
\ n \ n \ n \ n \ n * * Example : * * \ n Fetching the list of done jobs : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' x - arango - async : store ' - - dump - http : / / localhost : 8529 / _api / version \ n \ nHTTP / 1 . 1 202 Accepted \ nx - content - type - options : nosniff \ nx - arango - async - id : 152293662584448 \ ncontent - type : text / plain ; charset = utf - 8 \ n \ nshell > curl - - dump - http : / / localhost : 8529 / _api / job / done \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n [ \ n < span class = \ " hljs - string \ " > \ " 152293662584448 \ " < / span > \ n ] \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Fetching the list of pending jobs : \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' x - arango - async : store ' - - dump - http : / / localhost : 8529 / _api / version \ n \ nHTTP / 1 . 1 202 Accepted \ nx - content - type - options : nosniff \ nx - arango - async - id : 152293662584453 \ ncontent - type : text / plain ; charset = utf - 8 \ n \ nshell > curl - - dump - http : / / localhost : 8529 / _api / job / pending \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n [ ] \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Querying the status of a pending job : \ n ( we create a sleep job therefore . . . ) \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - header ' x - arango - async : store ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / transaction & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collections \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " read \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " _frontend \ " < / span > \ n ] \ n } , \ n < span class = \ " hljs - string \ " > \ " action \ " < / span > : < span class = \ " hljs - string \ " > \ " function ( ) { require ( ' internal ' ) . sleep ( 15 . 0 ) ; } \ " < / span > \ n } \ nEOF \ n \ nHTTP / 1 . 1 202 Accepted \ nx - content - type - options : nosniff \ nx - arango - async - id : 152293662584458 \ ncontent - type : text / plain ; charset = utf - 8 \ n \ nshell > curl - - dump - http : / / localhost : 8529 / _api / job / pending \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n [ \ n < span class = \ " hljs - string \ " > \ " 152293662584458 \ " < / span > \ n ] \ nshell > curl - X DELETE - - dump - http : / / localhost : 8529 / _api / job / 152293662584458 \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " job " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / job / job_getByType . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / job / job_getByType . md " <nl> } <nl> } , <nl> " / _api / query " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PostApiQueryProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PostApiQueryProperties . md " <nl> } <nl> } , <nl> " / _api / query - cache " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / DeleteApiQueryCache . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / DeleteApiQueryCache . md " <nl> } <nl> } , <nl> " / _api / query - cache / properties " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / GetApiQueryCacheProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / GetApiQueryCacheProperties . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n \ nAfter the properties have been changed , the current set of properties will \ nbe returned in the HTTP response . \ n \ nNote : changing the properties may invalidate all results in the cache . \ nThe global properties for AQL query cache . \ nThe properties need to be passed in the attribute * properties * in the body \ nof the HTTP request . * properties * needs to be a JSON object with the following \ nproperties : \ n \ n \ n * * A JSON object with these properties is required : * * \ n \ n - * * mode * * : the mode the AQL query cache should operate in . Possible values are * off * , * on * or * demand * . \ n - * * maxResults * * : the maximum number of query results that will be stored per database - specific cache . \ n \ n \ n " , <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PutApiQueryCacheProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PutApiQueryCacheProperties . md " <nl> } <nl> } , <nl> " / _api / query / current " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / GetApiQueryCurrent . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / GetApiQueryCurrent . 
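A minimal sketch of adjusting the cache configuration via the PUT endpoint of this section; the body nests the settings in the *properties* attribute as the description above requires, and the *mode* and *maxResults* values shown are illustrative:

    curl -X PUT --data-binary @- --dump - http://localhost:8529/_api/query-cache/properties <<EOF
    { "properties" : { "mode" : "demand", "maxResults" : 128 } }
    EOF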
md " <nl> } <nl> } , <nl> " / _api / query / properties " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / GetApiQueryProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / GetApiQueryProperties . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * maxSlowQueries * * : The maximum number of slow queries to keep in the list \ n of slow queries . If the list of slow queries is full , the oldest entry in \ n it will be discarded when additional slow queries occur . \ n - * * slowQueryThreshold * * : The threshold value for treating a query as slow . A \ n query with a runtime greater or equal to this threshold value will be \ n put into the list of slow queries when slow query tracking is enabled . \ n The value for * slowQueryThreshold * is specified in seconds . \ n - * * enabled * * : If set to * true * , then queries will be tracked . If set to \ n * false * , neither queries nor slow queries will be tracked . \ n - * * maxQueryStringLength * * : The maximum query string length to keep in the list of queries . \ n Query strings can have arbitrary lengths , and this property \ n can be used to save memory in case very long query strings are used . The \ n value is specified in bytes . \ n - * * trackSlowQueries * * : If set to * true * , then slow queries will be tracked \ n in the list of slow queries if their runtime exceeds the value set in \ n * slowQueryThreshold * . In order for slow queries to be tracked , the * enabled * \ n property must also be set to * true * . \ n - * * trackBindVars * * : If set to * true * , then the bind variables used in queries will be tracked \ n along with queries . \ n \ n \ n \ n \ nThe properties need to be passed in the attribute * properties * in the body \ nof the HTTP request . * properties * needs to be a JSON object . \ n \ nAfter the properties have been changed , the current set of properties will \ nbe returned in the HTTP response . \ n \ n " , <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / PutApiQueryProperties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / PutApiQueryProperties . md " <nl> } <nl> } , <nl> " / _api / query / slow " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / DeleteApiQuerySlow . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / DeleteApiQuerySlow . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nReturns an array containing the last AQL queries that are finished and \ nhave exceeded the slow query threshold in the selected database . \ nThe maximum amount of queries in the list can be controlled by setting \ nthe query tracking property ` maxSlowQueries ` . The threshold for treating \ na query as * slow * can be adjusted by setting the query tracking property \ n ` slowQueryThreshold ` . 
\ n \ nEach query is a JSON object with the following attributes : \ n \ n - * id * : the query ' s id \ n \ n - * query * : the query string ( potentially truncated ) \ n \ n - * bindVars * : the bind parameter values used by the query \ n \ n - * started * : the date and time when the query was started \ n \ n - * runTime * : the query ' s total run time \ n \ n - * state * : the query ' s current execution state ( will always be \ " finished \ " \ n for the list of slow queries ) \ n \ n " , <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / GetApiQuerySlow . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / GetApiQuerySlow . md " <nl> } <nl> } , <nl> " / _api / query / { query - id } " : { <nl> <nl> " AQL " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / AQL / DeleteApiQueryKill . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / AQL / DeleteApiQueryKill . md " <nl> } <nl> } , <nl> " / _api / replication / applier - config " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * username * * : an optional ArangoDB username to use when connecting to the endpoint . \ n - * * includeSystem * * : whether or not system collection operations will be applied \ n - * * endpoint * * : the logger server to connect to ( e . g . \ " tcp : / / 192 . 168 . 173 . 13 : 8529 \ " ) . The endpoint must be specified . \ n - * * verbose * * : if set to * true * , then a log line will be emitted for all operations \ n performed by the replication applier . This should be used for debugging replication \ n problems only . \ n - * * connectTimeout * * : the timeout ( in seconds ) when attempting to connect to the \ n endpoint . This value is used for each connection attempt . \ n - * * autoResync * * : whether or not the slave should perform a full automatic resynchronization \ n with the master in case the master cannot serve log data requested by the \ n slave , \ n or when the replication is started and no tick value can be found . \ n - * * database * * : the name of the database on the endpoint . If not specified , defaults to the current local database name . \ n - * * idleMinWaitTime * * : the minimum wait time ( in seconds ) that the applier will intentionally idle \ n before fetching more log data from the master in case the master has \ n already sent all its log data . This wait time can be used to control the \ n frequency with which the replication applier sends HTTP log fetch requests \ n to the master in case there is no write activity on the master . \ n This value will be ignored if set to * 0 * . \ n - * * requestTimeout * * : the timeout ( in seconds ) for individual requests to the endpoint . 
\ n - * * requireFromPresent * * : if set to * true * , then the replication applier will check \ n at start whether the start tick from which it starts or resumes replication is \ n still present on the master . If not , then there would be data loss . If \ n * requireFromPresent * is * true * , the replication applier will abort with an \ n appropriate error message . If set to * false * , then the replication applier will \ n still start , and ignore the data loss . \ n - * * idleMaxWaitTime * * : the maximum wait time ( in seconds ) that the applier will intentionally idle \ n before fetching more log data from the master in case the master has \ n already sent all its log data and there have been previous log fetch attempts \ n that resulted in no more log data . This wait time can be used to control the \ n maximum frequency with which the replication applier sends HTTP log fetch \ n requests to the master in case there is no write activity on the master for \ n longer periods . This configuration value will only be used if the option \ n * adaptivePolling * is set to * true * . \ n This value will be ignored if set to * 0 * . \ n - * * restrictCollections * * ( string ) : the array of collections to include or exclude , \ n based on the setting of * restrictType * \ n - * * restrictType * * : the configuration for * restrictCollections * ; Has to be either * include * or * exclude * \ n - * * initialSyncMaxWaitTime * * : the maximum wait time ( in seconds ) that the initial synchronization will \ n wait for a response from the master when fetching initial collection data . \ n This wait time can be used to control after what time the initial \ n synchronization \ n will give up waiting for a response and fail . This value is relevant even \ n for continuous replication when * autoResync * is set to * true * because this \ n may re - start the initial synchronization when the master cannot provide \ n log data the slave requires . \ n This value will be ignored if set to * 0 * . \ n - * * maxConnectRetries * * : the maximum number of connection attempts the applier \ n will make in a row . If the applier cannot establish a connection to the \ n endpoint in this number of attempts , it will stop itself . \ n - * * autoStart * * : whether or not to auto - start the replication applier on \ n ( next and following ) server starts \ n - * * adaptivePolling * * : if set to * true * , the replication applier will fall \ n to sleep for an increasingly long period in case the logger server at the \ n endpoint does not have any more replication events to apply . Using \ n adaptive polling is thus useful to reduce the amount of work for both the \ n applier and the logger server for cases when there are only infrequent \ n changes . The downside is that when using adaptive polling , it might take \ n longer for the replication applier to detect that there are new replication \ n events on the logger server . \ n Setting * adaptivePolling * to false will make the replication applier \ n contact the logger server in a constant interval , regardless of whether \ n the logger server provides updates frequently or seldom . \ n - * * password * * : the password to use when connecting to the endpoint . \ n - * * connectionRetryWaitTime * * : the time ( in seconds ) that the applier will intentionally idle before \ n it retries connecting to the master in case of connection problems . \ n This value will be ignored if set to * 0 * . 
\ n - * * autoResyncRetries * * : number of resynchronization retries that will be performed in a row when \ n automatic resynchronization is enabled and kicks in . Setting this to * 0 * \ n will \ n effectively disable * autoResync * . Setting it to some other value will limit \ n the number of retries that are performed . This helps prevent endless \ n retries \ n in case resynchronizations always fail . \ n - * * chunkSize * * : the requested maximum size for log transfer packets that \ n is used when the endpoint is contacted . \ n \ n \ n \ n \ nSets the configuration of the replication applier . The configuration can \ nonly be changed while the applier is not running . The updated configuration \ nwill be saved immediately but only become active with the next start of the \ napplier . \ n \ nIn case of success , the body of the response is a JSON object with the updated \ nconfiguration . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / applier - config & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " endpoint \ " < / span > : < span class = \ " hljs - string \ " > \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " username \ " < / span > : < span class = \ " hljs - string \ " > \ " replicationApplier \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " password \ " < / span > : < span class = \ " hljs - string \ " > \ " applier1234 @ foxx \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " chunkSize \ " < / span > : < span class = \ " hljs - number \ " > 4194304 < / span > , \ n < span class = \ " hljs - string \ " > \ " autoStart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " adaptivePolling \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " endpoint \ " < / span > : < span class = \ " hljs - string \ " > \ " tcp : / / 127 . 0 . 0 .
1 : 8529 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " database \ " < / span > : < span class = \ " hljs - string \ " > \ " _system \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " username \ " < / span > : < span class = \ " hljs - string \ " > \ " replicationApplier \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " requestTimeout \ " < / span > : < span class = \ " hljs - number \ " > 600 < / span > , \ n < span class = \ " hljs - string \ " > \ " connectTimeout \ " < / span > : < span class = \ " hljs - number \ " > 10 < / span > , \ n < span class = \ " hljs - string \ " > \ " ignoreErrors \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " maxConnectRetries \ " < / span > : < span class = \ " hljs - number \ " > 100 < / span > , \ n < span class = \ " hljs - string \ " > \ " lockTimeoutRetries \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " sslProtocol \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " chunkSize \ " < / span > : < span class = \ " hljs - number \ " > 4194304 < / span > , \ n < span class = \ " hljs - string \ " > \ " skipCreateDrop \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " autoStart \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " adaptivePolling \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " autoResync \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " autoResyncRetries \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " includeSystem \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " requireFromPresent \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " verbose \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " incremental \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " restrictType \ " < / span > : < span class = \ " hljs - string \ " > \ " \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " restrictCollections \ " < / span > : [ ] , \ n < span class = \ " hljs - string \ " > \ " connectionRetryWaitTime \ " < / span > : < span class = \ " hljs - number \ " > 15 < / span > , \ n < span class = \ " hljs - string \ " > \ " initialSyncMaxWaitTime \ " < / span > : < span class = \ " hljs - number \ " > 300 < / span > , \ n < span class = \ " hljs - string \ " > \ " idleMinWaitTime \ " < / span > : < span class = \ " hljs - number \ " > 1 < / span > , \ n < span class = \ " hljs - string \ " > \ " idleMaxWaitTime \ " < / span > : < span class = \ " hljs - number \ " > 2 . 
5 < / span > , \ n < span class = \ " hljs - string \ " > \ " force32mode \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_adjust . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_adjust . md " <nl> } <nl> } , <nl> " / _api / replication / applier - start " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_start . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_start . md " <nl> } <nl> } , <nl> " / _api / replication / applier - state " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_applier_state . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_applier_state . md " <nl> } <nl> } , <nl> " / _api / replication / applier - stop " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_stop . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_applier_stop . md " <nl> } <nl> } , <nl> " / _api / replication / batch " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / post_batch_replication . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / post_batch_replication . md " <nl> } <nl> } , <nl> " / _api / replication / batch / { id } " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / delete_batch_replication . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / delete_batch_replication . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * ttl * * : the time - to - live for the new batch ( in seconds ) \ n \ n \ n \ n \ nExtends the ttl of an existing dump batch , using the batch ' s id and \ nthe provided ttl value . \ n \ nIf the batch ' s ttl can be extended successfully , the response is empty . \ n \ n * * Note * * : on a coordinator , this request must have the query parameter \ n * DBserver * which must be an ID of a DBserver . \ nThe very same request is forwarded synchronously to that DBserver . \ nIt is an error if this attribute is not bound in the coordinator case . 
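A minimal sketch of extending a batch's ttl per the description above; the batch id placeholder and the ttl value are illustrative:

    curl -X PUT --data-binary @- --dump - http://localhost:8529/_api/replication/batch/<batch-id> <<EOF
    { "ttl" : 3600 }
    EOF

On success the response body is empty, as noted above; on a coordinator, the *DBserver* query parameter must be passed as well.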
\ n \ n " , <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_batch_replication . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_batch_replication . md " <nl> } <nl> } , <nl> " / _api / replication / clusterInventory " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_cluster_inventory . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_cluster_inventory . md " <nl> } <nl> } , <nl> " / _api / replication / dump " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_dump . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_dump . md " <nl> } <nl> } , <nl> " / _api / replication / inventory " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_inventory . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_inventory . md " <nl> } <nl> } , <nl> " / _api / replication / logger - first - tick " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_first_tick . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_first_tick . md " <nl> } <nl> } , <nl> " / _api / replication / logger - follow " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_returns . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_returns . md " <nl> } <nl> } , <nl> " / _api / replication / logger - state " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_return_state . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_return_state . md " <nl> } <nl> } , <nl> " / _api / replication / logger - tick - ranges " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_tick_ranges . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / get_api_replication_logger_tick_ranges . 
md " <nl> } <nl> } , <nl> " / _api / replication / make - slave " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_makeSlave . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_makeSlave . md " <nl> } <nl> } , <nl> " / _api / replication / server - id " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_serverID . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_serverID . md " <nl> } <nl> } , <nl> " / _api / replication / sync " : { <nl> <nl> " Replication " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Replication / put_api_replication_synchronize . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Replication / put_api_replication_synchronize . md " <nl> } <nl> } , <nl> " / _api / simple / all " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_all . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_all . md " <nl> } <nl> } , <nl> " / _api / simple / all - keys " : { <nl> " put " : { <nl> - " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * type * * : The type of the result . The following values are allowed : \ n - * id * : returns an array of document ids ( * _id * attributes ) \ n - * key * : returns an array of document keys ( * _key * attributes ) \ n - * path * : returns an array of document URI paths . This is the default . \ n - * * collection * * : The name of the collection . This is only for backward compatibility . \ n In ArangoDB versions < 3 . 0 , the URL path was * / _api / document * and \ n this was passed in via the query parameter \ " collection \ " . \ n This combination was removed . \ n \ n \ n \ n \ nReturns an array of all keys , ids , or URI paths for all documents in the \ ncollection identified by * collection * . The type of the result array is \ ndetermined by the * type * attribute . \ n \ nNote that the results have no defined order and thus the order should \ nnot be relied on . \ n \ n \ n \ n \ n * * Example : * * \ n Return all document paths \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all - keys & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12337 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12340 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12333 \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " hasMore \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " cached \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " stats \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " writesExecuted \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " writesIgnored \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedFull \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedIndex \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " filtered \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " httpRequests \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " executionTime \ " < / span > : < span class = \ " hljs - number \ " > 0 . 00011205673217773438 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " warnings \ " < / span > : [ ] \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Return all document keys \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all - keys & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " id \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " products / 12325 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " products / 12318 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " products / 12322 \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " hasMore \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " cached \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " stats \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " writesExecuted \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " writesIgnored \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedFull \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedIndex \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " filtered \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " httpRequests \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " executionTime \ " < / span > : < span class = \ " hljs - number \ " > 0 . 00011396408081054688 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " warnings \ " < / span > : [ ] \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Collection does not exist \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / document / doesnotexist \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " expecting GET / _api / document / & lt ; document - handle & gt ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1203 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> + " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * type * * : The type of the result . 
The following values are allowed : \ n - * id * : returns an array of document ids ( * _id * attributes ) \ n - * key * : returns an array of document keys ( * _key * attributes ) \ n - * path * : returns an array of document URI paths . This is the default . \ n - * * collection * * : The collection that should be queried \ n \ n \ n \ n \ nReturns an array of all keys , ids , or URI paths for all documents in the \ ncollection identified by * collection * . The type of the result array is \ ndetermined by the * type * attribute . \ n \ nNote that the results have no defined order and thus the order should \ nnot be relied on . \ n \ n \ n \ n \ n * * Example : * * \ n Return all document paths \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all - keys & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12337 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12340 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " / _db / _system / _api / document / products / 12333 \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " hasMore \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " cached \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " stats \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " writesExecuted \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " writesIgnored \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedFull \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedIndex \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " filtered \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " httpRequests \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " executionTime \ " < / span > : < span class = \ " hljs - number \ " > 0 . 
00011205673217773438 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " warnings \ " < / span > : [ ] \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Return all document keys \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all - keys & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " collection \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " id \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 201 < / span > Created \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " result \ " < / span > : [ \ n < span class = \ " hljs - string \ " > \ " products / 12325 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " products / 12318 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " products / 12322 \ " < / span > \ n ] , \ n < span class = \ " hljs - string \ " > \ " hasMore \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " cached \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " stats \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " writesExecuted \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " writesIgnored \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedFull \ " < / span > : < span class = \ " hljs - number \ " > 3 < / span > , \ n < span class = \ " hljs - string \ " > \ " scannedIndex \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " filtered \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " httpRequests \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " executionTime \ " < / span > : < span class = \ " hljs - number \ " > 0 . 00011396408081054688 < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " warnings \ " < / span > : [ ] \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 201 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Collection does not exist \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / document / doesnotexist \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " expecting GET / _api / document / & lt ; document - handle & gt ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1203 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> " parameters " : [ <nl> + { <nl> + " description " : " The name of the collection . \ n * * This parameter is only for an easier migration path from old versions . * * \ nIn ArangoDB versions < 3 . 0 , the URL path was * / _api / document * and \ nthis was passed in via the query parameter \ " collection \ " . \ nThis combination was removed . The collection name can be passed to \ n * / _api / simple / all - keys * as body parameter ( preferred ) or as query parameter . \ n \ n " , <nl> + " in " : " query " , <nl> + " name " : " collection " , <nl> + " required " : false , <nl> + " type " : " string " <nl> + } , <nl> { <nl> " in " : " body " , <nl> " name " : " Json Request Body " , <nl> <nl> " Documents " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Documents / put_read_all_documents . md " <nl> } <nl> } , <nl> " / _api / simple / any " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_any . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_any . md " <nl> } <nl> } , <nl> " / _api / simple / by - example " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_by_example . md " <nl> } <nl> } , <nl> " / _api / simple / first - example " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_first_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_first_example . md " <nl> } <nl> } , <nl> " / _api / simple / fulltext " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_fulltext . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_fulltext . md " <nl> } <nl> } , <nl> " / _api / simple / lookup - by - keys " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / RestLookupByKeys . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / RestLookupByKeys . md " <nl> } <nl> } , <nl> " / _api / simple / near " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_near . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_near . md " <nl> } <nl> } , <nl> " / _api / simple / range " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_range . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_range . md " <nl> } <nl> } , <nl> " / _api / simple / remove - by - example " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_remove_by_example . md " <nl> } <nl> } , <nl> " / _api / simple / remove - by - keys " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / RestRemoveByKeys . md " <nl> } <nl> } , <nl> " / _api / simple / replace - by - example " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_replace_by_example . md " <nl> } <nl> } , <nl> " / _api / simple / update - by - example " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_update_by_example . md " <nl> } <nl> } , <nl> " / _api / simple / within " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within . 
md " <nl> } <nl> } , <nl> " / _api / simple / within - rectangle " : { <nl> <nl> " Simple Queries " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within_rectangle . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Simple Queries / put_api_simple_within_rectangle . md " <nl> } <nl> } , <nl> " / _api / tasks " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / post_api_new_tasks . md " <nl> } <nl> } , <nl> " / _api / tasks / " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_tasks_all . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_tasks_all . md " <nl> } <nl> } , <nl> " / _api / tasks / { id } " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / delete_api_tasks . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nfetches one existing task on the server specified by * id * \ n \ n \ n # # # # HTTP 200 \ n * A json document with these Properties is returned : * \ n \ nThe requested task \ n \ n - * * name * * : The fully qualified name of the user function \ n - * * created * * : The timestamp when this task was created \ n - * * database * * : the database this task belongs to \ n - * * period * * : this task should run each ` period ` seconds \ n - * * command * * : the javascript function for this dask \ n - * * offset * * : time offset in seconds from the created timestamp \ n - * * type * * : What type of task is this [ ` periodic ` , ` timed ` ] \ n - periodic are tasks that repeat periodically \ n - timed are tasks that execute once at a specific time \ n - * * id * * : A string identifying the task \ n \ n \ n \ n \ n * * Example : * * \ n Fetching a single task by its id \ n \ n < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / tasks & lt ; & lt ; EOF \ n { \ " id \ " : \ " testTask \ " , \ " command \ " : \ " console . log ( ' Hello from task ! ' ) ; \ " , \ " offset \ " : 10000 } \ nEOF \ n \ nshell > curl - - dump - http : / / localhost : 8529 / _api / tasks / testTask \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " testTask \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " user - defined task \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " created \ " < / span > : < span class = \ " hljs - number \ " > 1522936640 . 674124 < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " timed \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " offset \ " < / span > : < span class = \ " hljs - number \ " > 10000 < / span > , \ n < span class = \ " hljs - string \ " > \ " command \ " < / span > : < span class = \ " hljs - string \ " > \ " ( function ( params ) { console . log ( ' Hello from task ! ' ) ; } ) ( params ) ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " database \ " < / span > : < span class = \ " hljs - string \ " > \ " _system \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n * * Example : * * \ n Trying to fetch a non - existing task \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / tasks / non - existing - task \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 404 < / span > Not Found \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 404 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorNum \ " < / span > : < span class = \ " hljs - number \ " > 1852 < / span > , \ n < span class = \ " hljs - string \ " > \ " errorMessage \ " < / span > : < span class = \ " hljs - string \ " > \ " task not found \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_tasks . 
md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * params * * : The parameters to be passed into command \ n - * * offset * * : Number of seconds initial delay \ n - * * command * * : The JavaScript code to be executed \ n - * * name * * : The name of the task \ n - * * period * * : number of seconds between the executions \ n \ n \ n \ n \ nregisters a new task with the specified id \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / tasks / sampleTask & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " SampleTask \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " SampleTask \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " command \ " < / span > : < span class = \ " hljs - string \ " > \ " ( function ( params ) { require ( ' @ arangodb ' ) . print ( params ) ; } ) ( params ) \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " params \ " < / span > : { \ n < span class = \ " hljs - string \ " > \ " foo \ " < / span > : < span class = \ " hljs - string \ " > \ " bar \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " bar \ " < / span > : < span class = \ " hljs - string \ " > \ " foo \ " < / span > \ n } , \ n < span class = \ " hljs - string \ " > \ " period \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " sampleTask \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " SampleTask \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " created \ " < / span > : < span class = \ " hljs - number \ " > 1522936640 . 6792622 < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " periodic \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " period \ " < / span > : < span class = \ " hljs - number \ " > 2 < / span > , \ n < span class = \ " hljs - string \ " > \ " offset \ " < / span > : < span class = \ " hljs - number \ " > 0 < / span > , \ n < span class = \ " hljs - string \ " > \ " command \ " < / span > : < span class = \ " hljs - string \ " > \ " ( function ( params ) { ( function ( params ) { require ( ' @ arangodb ' ) . 
print ( params ) ; } ) ( params ) } ) ( params ) ; \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " database \ " < / span > : < span class = \ " hljs - string \ " > \ " _system \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / put_api_new_tasks . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / put_api_new_tasks . md " <nl> } <nl> } , <nl> " / _api / transaction " : { <nl> <nl> " Transactions " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Transactions / post_api_transaction . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Transactions / post_api_transaction . md " <nl> } <nl> } , <nl> " / _api / traversal " : { <nl> <nl> " Graph Traversal " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Graph Traversal / HTTP_API_TRAVERSAL . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Graph Traversal / HTTP_API_TRAVERSAL . md " <nl> } <nl> } , <nl> " / _api / user " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / user / " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / user / { user } " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nFetches data about the specified user . You can fetch information about \ nyourself or you need the * Administrate * server access level in order to \ nexecute this REST call . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - - dump - http : / / localhost : 8529 / _api / user / admin @ myapp \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 
1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " user \ " < / span > : < span class = \ " hljs - string \ " > \ " admin @ myapp \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " active \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n < ! - - mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - - > \ n \ n " , <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " patch " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * passwd * * : The user password as a string . Specifying a password is mandatory , but \ n the empty string is allowed for passwords \ n - * * active * * : An optional flag that specifies whether the user is active . If not \ n specified , this will default to true \ n - * * extra * * : An optional JSON object with arbitrary extra data about the user . \ n \ n \ n \ n \ nPartially updates the data of an existing user . The name of an existing user \ nmust be specified in * user * . You need server access level * Administrate * in \ norder to execute this REST call . Additionally , a user can change his / her own \ ndata . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / user / admin @ myapp & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " passwd \ " < / span > : < span class = \ " hljs - string \ " > \ " secure \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " user \ " < / span > : < span class = \ " hljs - string \ " > \ " admin @ myapp \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " active \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n \ n \ n < ! 
- - mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - - > \ n \ n " , <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * passwd * * : The user password as a string . Specifying a password is mandatory , but \ n the empty string is allowed for passwords \ n - * * active * * : An optional flag that specifies whether the user is active . If not \ n specified , this will default to true \ n - * * extra * * : An optional JSON object with arbitrary extra data about the user . \ n \ n \ n \ n \ nReplaces the data of an existing user . The name of an existing user must be \ nspecified in * user * . You need server access level * Administrate * in order to \ nexecute this REST call . Additionally , a user can change his / her own data . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / user / admin @ myapp & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " passwd \ " < / span > : < span class = \ " hljs - string \ " > \ " secure \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " user \ " < / span > : < span class = \ " hljs - string \ " > \ " admin @ myapp \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " active \ " < / span > : < span class = \ " hljs - literal \ " > true < / span > , \ n < span class = \ " hljs - string \ " > \ " extra \ " < / span > : { \ n } , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n < ! - - mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - - > \ n \ n " , <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / user / { user } / database / " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . 
md " <nl> } <nl> } , <nl> " / _api / user / { user } / database / { database } " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / user / { user } / database / { database } / { collection } " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / user / { user } / database / { dbname } " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * grant * * : Use \ " rw \ " to set the database access level to * Administrate * . \ n Use \ " ro \ " to set the database access level to * Access * . \ n Use \ " none \ " to set the database access level to * No access * . \ n \ n \ n \ n \ nSets the database access levels for the database * dbname * of user * user * . You \ nneed the * Administrate * server access level in order to execute this REST \ ncall . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / user / admin @ myapp / database / _system & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " grant \ " < / span > : < span class = \ " hljs - string \ " > \ " rw \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _system \ " < / span > : < span class = \ " hljs - string \ " > \ " rw \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n < ! - - mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - - > \ n \ n " , <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . 
md " <nl> } <nl> } , <nl> " / _api / user / { user } / database / { dbname } / { collection } " : { <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * grant * * : Use \ " rw \ " to set the collection level access to * Read / Write * . \ n Use \ " ro \ " to set the collection level access to * Read Only * . \ n Use \ " none \ " to set the collection level access to * No access * . \ n \ n \ n \ n \ nSets the collection access level for the * collection * in the database * dbname * \ nfor user * user * . You need the * Administrate * server access level in order to \ nexecute this REST call . \ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / user / admin @ myapp / database / _system / reports & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " grant \ " < / span > : < span class = \ " hljs - string \ " > \ " rw \ " < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " _system / reports \ " < / span > : < span class = \ " hljs - string \ " > \ " rw \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " error \ " < / span > : < span class = \ " hljs - literal \ " > false < / span > , \ n < span class = \ " hljs - string \ " > \ " code \ " < / span > : < span class = \ " hljs - number \ " > 200 < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n \ n < ! - - mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - - > \ n \ n " , <nl> <nl> " User Management " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / User Management / README . md " <nl> } <nl> } , <nl> " / _api / version " : { <nl> <nl> " Administration " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Administration / get_api_return . md " <nl> } <nl> } , <nl> " / _api / view " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / get_api_views . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / get_api_views . md " <nl> } <nl> } , <nl> " / _api / view # arangosearch " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . 
md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / post_api_view_iresearch . md " <nl> } <nl> } , <nl> " / _api / view / { view - name } " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / delete_api_view . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / delete_api_view . md " <nl> } , <nl> " get " : { <nl> " description " : " \ n \ nThe result is an object describing the view with the following \ nattributes : \ n \ n - * id * : The identifier of the view . \ n \ n - * name * : The name of the view . \ n \ n - * type * : The type of the view as string \ n - arangosearch : ArangoSearch view \ n \ n - * properties * : The properties of the view . \ n \ n " , <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / get_api_view_name . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / get_api_view_name . md " <nl> } <nl> } , <nl> " / _api / view / { view - name } / properties " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / get_api_view_properties . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / get_api_view_properties . md " <nl> } <nl> } , <nl> " / _api / view / { view - name } / properties # ArangoSearch " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / patch_api_view_properties_iresearch . md " <nl> } , <nl> " put " : { <nl> " description " : " \ n * * A JSON object with these properties is required : * * \ n \ n - * * locale * * : The default locale used for queries on analyzed string values ( default : * C * ) . \ n - * * commit * * : \ n - * * consolidate * * : \ n - * * count * * : \ n - * * threshold * * : Consolidate IFF { threshold } > segment_docs { valid } / ( all_segment_docs { valid } / # segments ) ( default : 0 . 85 ) \ n - * * segmentThreshold * * : Apply consolidation policy IFF { segmentThreshold } > = # segments ( default : 300 , to disable use : 0 ) \ n - * * bytes * * : \ n - * * threshold * * : Consolidate IFF { threshold } > segment_bytes / ( all_segment_bytes / # segments ) ( default : 0 . 85 ) \ n - * * segmentThreshold * * : Apply consolidation policy IFF { segmentThreshold } > = # segments ( default : 300 , to disable use : 0 ) \ n - * * bytes_accum * * : \ n - * * threshold * * : Consolidate IFF { threshold } > ( segment_bytes + sum_of_merge_candidate_segment_bytes ) / all_segment_bytes ( default : 0 . 85 ) \ n - * * segmentThreshold * * : Apply consolidation policy IFF { segmentThreshold } > = # segments ( default : 300 , to disable use : 0 ) \ n - * * fill * * : \ n - * * threshold * * : Consolidate IFF { threshold } > # segment_docs { valid } / ( # segment_docs { valid } + # segment_docs { removed } ) ( default : 0 . 
85 ) \ n - * * segmentThreshold * * : Apply consolidation policy IFF { segmentThreshold } > = # segments ( default : 300 , to disable use : 0 ) \ n - * * commitIntervalMsec * * : Wait at least this many milliseconds between committing index data changes and \ n making them visible to queries ( default : 60000 , to disable use : 0 ) . \ n For the case where there are a lot of inserts / updates , a lower value , until commit , will cause the index not to account for them and \ n memory usage would continue to grow . \ n For the case where there are a few inserts / updates , a higher value will impact performance and waste disk space for each \ n commit call without any added benefits . \ n - * * cleanupIntervalStep * * : Wait at least this many commits between removing unused files in data directory ( default : 10 , \ n to disable use : 0 ) . \ n For the case where the consolidation policies merge segments often ( i . e . a lot of commit + consolidate ) , a lower value will cause a \ n lot of disk space to be wasted . \ n For the case where the consolidation policies rarely merge segments ( i . e . few inserts / deletes ) , a higher value will impact \ n performance without any added benefits . \ n - * * threadMaxTotal * * : Maximum total number of threads ( > 0 ) for single - run tasks ( default : 5 ) . \ n For the case where there are a lot of parallelizable tasks and an abundance of resources , a lower value would limit performance . \ n For the case where there are limited resources CPU / memory , a higher value will negatively impact performance . \ n - * * threadMaxIdle * * : Maximum idle number of threads for single - run tasks ( default : 5 ) . \ n For the case where there are a lot of short - lived asynchronous tasks , a lower value will cause a lot of thread creation / deletion calls . \ n For the case where there are no short - lived asynchronous tasks , a higher value will only waste memory . \ n - * * links * * : \ n - * * [ collection - name ] * * : \ n - * * analyzers * * ( string ) : The list of analyzers to be used for indexing of string values ( default : [ \ " identity \ " ] ) . \ n - * * [ field - name ] * * : \ n - * * analyzers * * ( string ) : The list of analyzers to be used for indexing of string values ( default : [ \ " identity \ " ] ) . \ n - * * [ field - name ] * * : Specify properties for nested fields here \ n - * * includeAllFields * * : The flag determines whether or not to index all fields on a particular level of depth ( default : false ) . \ n - * * trackListPositions * * : The flag determines whether or not values in a lists should be treated separate ( default : false ) . \ n - * * includeAllFields * * : The flag determines whether or not to index all fields on a particular level of depth ( default : false ) . \ n - * * trackListPositions * * : The flag determines whether or not values in a lists should be treated separate ( default : false ) . \ n \ n \ n \ n \ nChanges the properties of a view . \ n \ nOn success an object with the following attributes is returned : \ n - * id * : The identifier of the view . \ n - * name * : The name of the view . \ n - * type * : The view type . Valid types are : \ n - arangosearch : ArangoSearch view \ n - * properties * : The updated properties of the view . 
\ n \ n \ n \ n \ n * * Example : * * \ n \ n \ n < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / view / products / properties & lt ; & lt ; EOF \ n { \ n < span class = \ " hljs - string \ " > \ " threadMaxIdle \ " < / span > : < span class = \ " hljs - number \ " > 10 < / span > \ n } \ nEOF \ n \ nHTTP / < span class = \ " hljs - number \ " > 1 . 1 < / span > < span class = \ " hljs - number \ " > 200 < / span > OK \ ncontent - type : application / json ; charset = utf < span class = \ " hljs - number \ " > - 8 < / span > \ nx - content - type - options : nosniff \ n \ n { \ n < span class = \ " hljs - string \ " > \ " id \ " < / span > : < span class = \ " hljs - string \ " > \ " 12656 \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " name \ " < / span > : < span class = \ " hljs - string \ " > \ " products \ " < / span > , \ n < span class = \ " hljs - string \ " > \ " type \ " < / span > : < span class = \ " hljs - string \ " > \ " arangosearch \ " < / span > \ n } \ n < / code > < / pre > \ n \ n \ n \ n \ n " , <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_properties_iresearch . md " <nl> } <nl> } , <nl> " / _api / view / { view - name } / rename " : { <nl> <nl> " Views " <nl> ] , <nl> " x - examples " : [ ] , <nl> - " x - filename " : " / home / willi / src / devel / Documentation / DocuBlocks / Rest / Views / put_api_view_rename . md " <nl> + " x - filename " : " / home / willi / src / devel2 / Documentation / DocuBlocks / Rest / Views / put_api_view_rename . md " <nl> } <nl> } <nl> } , <nl> | Documentation / re add link ( ) | arangodb/arangodb | 7646d02702f5733f8975ef661ea45ab1ff0acd94 | 2018-05-02T09:09:12Z |
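Every hunk in the row above applies the same mechanical rewrite: the "x-filename" fields of a generated swagger document move from /home/willi/src/devel/ to /home/willi/src/devel2/. Below is a minimal Python sketch of how such a bulk prefix rewrite could be scripted; the input file name and both prefixes are assumptions for illustration, not part of the commit.

# Minimal sketch of the bulk "x-filename" prefix rewrite seen in this row.
# The file name "api-docs.json" is hypothetical.
import json

OLD_PREFIX = "/home/willi/src/devel/"
NEW_PREFIX = "/home/willi/src/devel2/"

def rewrite_filenames(node):
    # Walk the parsed swagger tree and rewrite matching x-filename values.
    if isinstance(node, dict):
        for key, value in node.items():
            if key == "x-filename" and isinstance(value, str) and value.startswith(OLD_PREFIX):
                node[key] = NEW_PREFIX + value[len(OLD_PREFIX):]
            else:
                rewrite_filenames(value)
    elif isinstance(node, list):
        for item in node:
            rewrite_filenames(item)

with open("api-docs.json") as f:
    spec = json.load(f)
rewrite_filenames(spec)
with open("api-docs.json", "w") as f:
    json.dump(spec, f, indent=2)
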
mmm a / tensorflow / core / public / version . h <nl> ppp b / tensorflow / core / public / version . h <nl> limitations under the License . <nl> <nl> # define TF_GRAPH_DEF_VERSION_MIN_PRODUCER 0 <nl> # define TF_GRAPH_DEF_VERSION_MIN_CONSUMER 0 <nl> - # define TF_GRAPH_DEF_VERSION 425 / / Updated : 2020 / 6 / 7 <nl> + # define TF_GRAPH_DEF_VERSION 426 / / Updated : 2020 / 6 / 8 <nl> <nl> / / Checkpoint compatibility versions ( the versions field in SavedSliceMeta ) . <nl> / / <nl> | Update GraphDef version to 426 . | tensorflow/tensorflow | bd0f0821f77f29a8a817bb0c21096cc24db302b5 | 2020-06-08T09:07:21Z |
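The version.h hunk above bumps TF_GRAPH_DEF_VERSION by one per day while both MIN_PRODUCER and MIN_CONSUMER floors stay at 0. A hedged Python sketch of the forward/backward compatibility check these three numbers imply; TensorFlow's real check lives in its C++ core and is more involved.

# Hedged sketch of the producer/consumer check implied by the three macros.
TF_GRAPH_DEF_VERSION = 426
TF_GRAPH_DEF_VERSION_MIN_PRODUCER = 0
TF_GRAPH_DEF_VERSION_MIN_CONSUMER = 0

def graph_def_is_compatible(graph_producer, graph_min_consumer=0):
    # Reject a graph written by a producer older than this binary allows,
    # or one that demands a newer consumer than this binary is.
    if graph_producer < TF_GRAPH_DEF_VERSION_MIN_PRODUCER:
        return False
    if TF_GRAPH_DEF_VERSION < graph_min_consumer:
        return False
    return True

assert graph_def_is_compatible(425)  # yesterday's graphs still load
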
mmm a / python / pywatchman / compat . py <nl> ppp b / python / pywatchman / compat . py <nl> <nl> " " " Compatibility module across Python 2 and 3 . " " " <nl> <nl> <nl> + PYTHON2 = sys . version_info < ( 3 , 0 ) <nl> PYTHON3 = sys . version_info > = ( 3 , 0 ) <nl> <nl> # This is adapted from https : / / bitbucket . org / gutworth / six , and used under the <nl> mmm a / tests / integration / path_utils . py <nl> ppp b / tests / integration / path_utils . py <nl> def get_canonical_filesystem_path ( name ) : <nl> <nl> # The first four chars are / / ? / <nl> if result < = numwchars : <nl> - return buf . value [ 4 : ] . replace ( " \ \ " , " / " ) . encode ( " utf8 " ) <nl> + path = buf . value [ 4 : ] . replace ( " \ \ " , " / " ) <nl> + if compat . PYTHON2 : <nl> + path = path . encode ( " utf8 " ) <nl> + return path <nl> <nl> # Not big enough ; the result is the amount we need <nl> numwchars = result + 1 <nl> | Fix integration tests on Windows with Python 3 | facebook/watchman | d19aab94315d4771421783595bffa1a09b91965b | 2018-11-10T03:21:49Z |
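The compat.py change adds a PYTHON2 flag mirroring the existing PYTHON3 one, and path_utils.py uses it so the canonical path comes back as text on Python 3 and is encoded to UTF-8 bytes only on Python 2. A sketch of that version-gated pattern; the helper name and sample path are illustrative, not watchman's API.

# Sketch of the version-gated encoding pattern introduced above.
import sys

PYTHON2 = sys.version_info < (3, 0)

def normalize_win_path(raw):
    # Drop the 4-char "\\?\" extended-length prefix, use forward slashes.
    path = raw[4:].replace("\\", "/")
    if PYTHON2:
        path = path.encode("utf8")  # Python 2 callers expect UTF-8 bytes
    return path

print(normalize_win_path(u"\\\\?\\C:\\Users\\watchman"))  # C:/Users/watchman
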
mmm a / tensorflow / tools / dockerfiles / partials / ubuntu / devel - nvidia . partial . Dockerfile <nl> ppp b / tensorflow / tools / dockerfiles / partials / ubuntu / devel - nvidia . partial . Dockerfile <nl> RUN apt - get update & & apt - get install - y - - no - install - recommends \ <nl> rm / usr / lib / $ { LIB_DIR_PREFIX } - linux - gnu / libcudnn_static_v7 . a <nl> <nl> RUN [ [ " $ { ARCH } " = " ppc64le " ] ] | | { apt - get update & & \ <nl> - apt - get install nvinfer - runtime - trt - repo - ubuntu1604 - 5 . 0 . 2 - ga - cuda $ { CUDA } \ <nl> + apt - get install nvinfer - runtime - trt - repo - ubuntu1804 - 5 . 0 . 2 - ga - cuda $ { CUDA } \ <nl> & & apt - get update \ <nl> & & apt - get install - y - - no - install - recommends \ <nl> libnvinfer5 = 5 . 0 . 2 - 1 + cuda $ { CUDA } \ <nl> mmm a / tensorflow / tools / dockerfiles / partials / ubuntu / nvidia . partial . Dockerfile <nl> ppp b / tensorflow / tools / dockerfiles / partials / ubuntu / nvidia . partial . Dockerfile <nl> RUN apt - get update & & apt - get install - y - - no - install - recommends \ <nl> unzip <nl> <nl> RUN [ $ { ARCH } = ppc64le ] | | ( apt - get update & & \ <nl> - apt - get install nvinfer - runtime - trt - repo - ubuntu1604 - 5 . 0 . 2 - ga - cuda $ { CUDA } \ <nl> + apt - get install nvinfer - runtime - trt - repo - ubuntu1804 - 5 . 0 . 2 - ga - cuda $ { CUDA } \ <nl> & & apt - get update \ <nl> & & apt - get install - y - - no - install - recommends libnvinfer5 = 5 . 0 . 2 - 1 + cuda $ { CUDA } \ <nl> & & apt - get clean \ <nl> mmm a / tensorflow / tools / dockerfiles / partials / ubuntu / version . partial . Dockerfile <nl> ppp b / tensorflow / tools / dockerfiles / partials / ubuntu / version . partial . Dockerfile <nl> @ @ - 1 + 1 @ @ <nl> - ARG UBUNTU_VERSION = 16 . 04 <nl> + ARG UBUNTU_VERSION = 18 . 04 <nl> | Merge pull request from angersson : master | tensorflow/tensorflow | ecd5279e11d7f3bebf2248d297663464a24b84f9 | 2019-05-15T21:25:01Z |
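The partials above must stay in step: the nvinfer-runtime-trt-repo package name encodes the Ubuntu release, so bumping UBUNTU_VERSION from 16.04 to 18.04 without renaming the repo would leave apt pointing at the wrong TensorRT repository. A hedged Python sketch of a consistency check over assembled Dockerfile text; the check itself is an illustration, not part of TensorFlow's dockerfile tooling.

# Hedged sketch: verify the TensorRT repo suffix in assembled Dockerfile
# text matches the declared UBUNTU_VERSION, the drift this commit fixes.
import re

def trt_repo_matches_ubuntu(dockerfile_text):
    version = re.search(r"ARG UBUNTU_VERSION=(\d+)\.(\d+)", dockerfile_text)
    suffixes = re.findall(r"nvinfer-runtime-trt-repo-ubuntu(\d{4})", dockerfile_text)
    if not version:
        return True  # nothing to check in this fragment
    expected = version.group(1) + version.group(2)  # "18.04" -> "1804"
    return all(s == expected for s in suffixes)

sample = (
    "ARG UBUNTU_VERSION=18.04\n"
    "RUN apt-get install nvinfer-runtime-trt-repo-ubuntu1804-5.0.2-ga-cuda10.0\n"
)
assert trt_repo_matches_ubuntu(sample)
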
mmm a / cereal <nl> ppp b / cereal <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit 32300930f65425bbba136461446149af8f8b42d9 <nl> + Subproject commit 29099e87a1372694fb81b426faaa57e832bbe87a <nl> mmm a / selfdrive / controls / controlsd . py <nl> ppp b / selfdrive / controls / controlsd . py <nl> def __init__ ( self , sm = None , pm = None , can_sock = None ) : <nl> self . can_sock = messaging . sub_sock ( ' can ' , timeout = can_timeout ) <nl> <nl> # wait for one health and one CAN packet <nl> - while not sm . updated [ ' health ' ] : <nl> - sm . update ( ) <nl> - hw_type = sm [ ' health ' ] . hwType <nl> + hw_type = messaging . recv_one ( self . sm . sock [ ' health ' ] ) . health . hwType <nl> has_relay = hw_type in [ HwType . blackPanda , HwType . uno , HwType . dos ] <nl> print ( " Waiting for CAN messages . . . " ) <nl> messaging . get_one_can ( self . can_sock ) <nl> mmm a / selfdrive / test / process_replay / camera_replay . py <nl> ppp b / selfdrive / test / process_replay / camera_replay . py <nl> def camera_replay ( lr , fr ) : <nl> spinner = Spinner ( ) <nl> <nl> pm = messaging . PubMaster ( [ ' frame ' , ' liveCalibration ' ] ) <nl> - model_sock = messaging . sub_sock ( " model " , conflate = False ) <nl> + sm = messaging . SubMaster ( [ ' model ' ] ) <nl> <nl> # TODO : add dmonitoringmodeld <nl> print ( " preparing procs " ) <nl> def camera_replay ( lr , fr ) : <nl> frame_idx + = 1 <nl> <nl> pm . send ( msg . which ( ) , f ) <nl> - log_msgs . append ( messaging . recv_one ( model_sock ) ) <nl> + log_msgs . append ( messaging . recv_one ( sm . sock [ ' model ' ] ) ) <nl> <nl> spinner . update ( " modeld replay % d / % d " % ( frame_idx , fr . frame_count ) ) <nl> <nl> | Revert " can ' t directly access sockets anymore " , was supposed to be on branch | commaai/openpilot | acb25751b75059a6e212192064528afe0ebd5e03 | 2020-07-29T01:41:39Z |
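The controlsd.py hunk reverts a SubMaster polling loop back to a single blocking read of the first health packet off the raw socket. Below is a sketch contrasting the two styles, using only the cereal messaging calls visible in the diff; it runs only inside an openpilot checkout, and the standalone sm variable stands in for the class attribute self.sm.

# Sketch of the two ways to wait for the first health packet, mirroring
# the revert above (cereal messaging API, simplified for illustration).
import cereal.messaging as messaging

sm = messaging.SubMaster(['health'])

# Polling style (the version being reverted): spin update() until a
# health message has been seen.
#   while not sm.updated['health']:
#       sm.update()
#   hw_type = sm['health'].hwType

# Blocking style (restored by this commit): read one message off the socket.
hw_type = messaging.recv_one(sm.sock['health']).health.hwType
print(hw_type)
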
mmm a / dbms / src / AggregateFunctions / AggregateFunctionFactory . cpp <nl> ppp b / dbms / src / AggregateFunctions / AggregateFunctionFactory . cpp <nl> AggregateFunctionPtr AggregateFunctionFactory : : get ( <nl> nested_argument_types . reserve ( argument_types . size ( ) ) ; <nl> <nl> for ( const auto & arg_type : argument_types ) <nl> - { <nl> - if ( arg_type - > isNullable ( ) ) <nl> - { <nl> - const DataTypeNullable & actual_type = static_cast < const DataTypeNullable & > ( * arg_type . get ( ) ) ; <nl> - const DataTypePtr & nested_type = actual_type . getNestedType ( ) ; <nl> - nested_argument_types . push_back ( nested_type ) ; <nl> - } <nl> - else <nl> - nested_argument_types . push_back ( arg_type ) ; <nl> - } <nl> + nested_argument_types . push_back ( removeNullable ( arg_type ) ) ; <nl> <nl> nested_function = getImpl ( name , nested_argument_types , parameters , recursion_level ) ; <nl> } <nl> mmm a / dbms / src / AggregateFunctions / AggregateFunctionGroupUniqArray . h <nl> ppp b / dbms / src / AggregateFunctions / AggregateFunctionGroupUniqArray . h <nl> class AggreagteFunctionGroupUniqArrayGeneric <nl> StringRef str_serialized = getSerialization ( * columns [ 0 ] , row_num , * arena ) ; <nl> set . emplace ( str_serialized , it , inserted ) ; <nl> <nl> - if ( ! is_plain_column ) <nl> + if constexpr ( ! is_plain_column ) <nl> { <nl> if ( ! inserted ) <nl> arena - > rollback ( str_serialized . size ) ; <nl> mmm a / dbms / src / AggregateFunctions / AggregateFunctionTopK . h <nl> ppp b / dbms / src / AggregateFunctions / AggregateFunctionTopK . h <nl> class AggregateFunctionTopKGeneric : public IAggregateFunctionDataHelper < Aggrega <nl> { <nl> auto & set = this - > data ( place ) . value ; <nl> if ( set . capacity ( ) ! = reserved ) <nl> - { <nl> set . resize ( reserved ) ; <nl> - } <nl> <nl> - const char * begin = nullptr ; <nl> - StringRef str_serialized = columns [ 0 ] - > serializeValueIntoArena ( row_num , * arena , begin ) ; <nl> - set . insert ( str_serialized ) ; <nl> - arena - > rollback ( str_serialized . size ) ; <nl> + if constexpr ( is_plain_column ) <nl> + { <nl> + const char * begin = nullptr ; <nl> + StringRef str_serialized = columns [ 0 ] - > serializeValueIntoArena ( row_num , * arena , begin ) ; <nl> + set . insert ( str_serialized ) ; <nl> + arena - > rollback ( str_serialized . size ) ; <nl> + } <nl> + else <nl> + { <nl> + set . insert ( columns [ 0 ] - > getDataAt ( row_num ) ) ; <nl> + } <nl> } <nl> <nl> void merge ( AggregateDataPtr place , ConstAggregateDataPtr rhs , Arena * ) const override <nl> class AggregateFunctionTopKGeneric : public IAggregateFunctionDataHelper < Aggrega <nl> <nl> for ( auto & elem : result_vec ) <nl> { <nl> - deserializeAndInsert ( elem . key , data_to ) ; <nl> + if constexpr ( is_plain_column ) <nl> + data_to . insertData ( elem . key . data , elem . key . size ) ; <nl> + else <nl> + data_to . deserializeAndInsertFromArena ( elem . key . data ) ; <nl> } <nl> } <nl> <nl> class AggregateFunctionTopKGeneric : public IAggregateFunctionDataHelper < Aggrega <nl> } ; <nl> <nl> <nl> - template < > <nl> - inline void AggregateFunctionTopKGeneric < false > : : deserializeAndInsert ( StringRef str , IColumn & data_to ) <nl> - { <nl> - data_to . deserializeAndInsertFromArena ( str . data ) ; <nl> - } <nl> - <nl> - template < > <nl> - inline void AggregateFunctionTopKGeneric < true > : : deserializeAndInsert ( StringRef str , IColumn & data_to ) <nl> - { <nl> - data_to . insertData ( str . data , str . 
size ) ; <nl> - } <nl> - <nl> - <nl> # undef TOP_K_LOAD_FACTOR <nl> <nl> } <nl> | Simplification of aggregate functions : development [ # CLICKHOUSE - 2 ] . | ClickHouse/ClickHouse | ec3ec48309871138decd9bbf41d86fa46933fb6b | 2017-12-21T02:08:25Z |
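Both ClickHouse hunks specialize on is_plain_column with if constexpr: plain columns now hand their raw bytes (getDataAt / insertData) straight to the set, while complex columns keep the serialize-into-arena round trip, and the redundant deserializeAndInsert specializations are dropped. For context, topK itself is built on a space-saving-style bounded counter; a minimal Python sketch of that idea follows, ignoring the arena management and error bounds the C++ version tracks.

# Minimal sketch of a space-saving-style bounded counter behind topK.
def space_saving_topk(stream, k):
    counters = {}
    for item in stream:
        if item in counters:
            counters[item] += 1
        elif len(counters) < k:
            counters[item] = 1
        else:
            victim = min(counters, key=counters.get)  # smallest counter
            count = counters.pop(victim)
            counters[item] = count + 1                # inherit its count
    return sorted(counters, key=counters.get, reverse=True)

print(space_saving_topk("aabbbcddddde", 3))  # ['d', 'b', 'e']
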
mmm a / . gitignore <nl> ppp b / . gitignore <nl> python / * * / * . egg <nl> python / . eggs / <nl> python / . tox <nl> python / build / <nl> + python / docs / _build / <nl> <nl> src / js_embed <nl> src / protoc <nl> new file mode 100644 <nl> index 0000000000 . . 298ea9e213 <nl> mmm / dev / null <nl> ppp b / python / docs / Makefile <nl> <nl> + # Minimal makefile for Sphinx documentation <nl> + # <nl> + <nl> + # You can set these variables from the command line . <nl> + SPHINXOPTS = <nl> + SPHINXBUILD = sphinx - build <nl> + SOURCEDIR = . <nl> + BUILDDIR = _build <nl> + <nl> + # Put it first so that " make " without argument is like " make help " . <nl> + help : <nl> + @ $ ( SPHINXBUILD ) - M help " $ ( SOURCEDIR ) " " $ ( BUILDDIR ) " $ ( SPHINXOPTS ) $ ( O ) <nl> + <nl> + . PHONY : help Makefile <nl> + <nl> + # Catch - all target : route all unknown targets to Sphinx using the new <nl> + # " make mode " option . $ ( O ) is meant as a shortcut for $ ( SPHINXOPTS ) . <nl> + % : Makefile <nl> + @ $ ( SPHINXBUILD ) - M $ @ " $ ( SOURCEDIR ) " " $ ( BUILDDIR ) " $ ( SPHINXOPTS ) $ ( O ) <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 0000000000 . . 0b42b55d50 <nl> mmm / dev / null <nl> ppp b / python / docs / conf . py <nl> <nl> + # - * - coding : utf - 8 - * - <nl> + # Protocol Buffers - Google ' s data interchange format <nl> + # Copyright 2019 Google LLC . All rights reserved . <nl> + # https : / / developers . google . com / protocol - buffers / <nl> + # <nl> + # Redistribution and use in source and binary forms , with or without <nl> + # modification , are permitted provided that the following conditions are <nl> + # met : <nl> + # <nl> + # * Redistributions of source code must retain the above copyright <nl> + # notice , this list of conditions and the following disclaimer . <nl> + # * Redistributions in binary form must reproduce the above <nl> + # copyright notice , this list of conditions and the following disclaimer <nl> + # in the documentation and / or other materials provided with the <nl> + # distribution . <nl> + # * Neither the name of Google Inc . nor the names of its <nl> + # contributors may be used to endorse or promote products derived from <nl> + # this software without specific prior written permission . <nl> + # <nl> + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + # " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + # LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + # A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + # SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + # LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + # DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + # THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + # ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + # OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + <nl> + # Configuration file for the Sphinx documentation builder . <nl> + # <nl> + # This file does only contain a selection of the most common options . For a <nl> + # full list see the documentation : <nl> + # http : / / www . sphinx - doc . 
org / en / master / config <nl> + <nl> + # - - Path setup mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + <nl> + # If extensions ( or modules to document with autodoc ) are in another directory , <nl> + # add these directories to sys . path here . If the directory is relative to the <nl> + # documentation root , use os . path . abspath to make it absolute , like shown here . <nl> + # <nl> + # import os <nl> + # import sys <nl> + # sys . path . insert ( 0 , os . path . abspath ( ' . ' ) ) <nl> + import google . protobuf <nl> + <nl> + # - - Project information mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + <nl> + project = u " Protocol Buffers " <nl> + copyright = u " 2008 , Google LLC " <nl> + author = u " Google LLC " <nl> + <nl> + # The short X . Y version <nl> + version = u " " <nl> + # The full version , including alpha / beta / rc tags <nl> + release = google . protobuf . __version__ <nl> + <nl> + <nl> + # - - General configuration mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + # If your documentation needs a minimal Sphinx version , state it here . <nl> + # <nl> + # needs_sphinx = ' 1 . 0 ' <nl> + <nl> + # Add any Sphinx extension module names here , as strings . They can be <nl> + # extensions coming with Sphinx ( named ' sphinx . ext . * ' ) or your custom <nl> + # ones . <nl> + extensions = [ <nl> + " sphinx . ext . autosummary " , <nl> + " sphinx . ext . intersphinx " , <nl> + " sphinxcontrib . napoleon " , <nl> + ] <nl> + <nl> + # Add any paths that contain templates here , relative to this directory . <nl> + templates_path = [ " _templates " ] <nl> + <nl> + # The suffix ( es ) of source filenames . <nl> + # You can specify multiple suffix as a list of string : <nl> + # <nl> + # source_suffix = [ ' . rst ' , ' . md ' ] <nl> + source_suffix = " . rst " <nl> + <nl> + # The master toctree document . <nl> + master_doc = " index " <nl> + <nl> + # The language for content autogenerated by Sphinx . Refer to documentation <nl> + # for a list of supported languages . <nl> + # <nl> + # This is also used if you do content translation via gettext catalogs . <nl> + # Usually you set " language " from the command line for these cases . <nl> + language = None <nl> + <nl> + # List of patterns , relative to source directory , that match files and <nl> + # directories to ignore when looking for source files . <nl> + # This pattern also affects html_static_path and html_extra_path . <nl> + exclude_patterns = [ u " _build " , " Thumbs . db " , " . DS_Store " ] <nl> + <nl> + # The name of the Pygments ( syntax highlighting ) style to use . <nl> + pygments_style = None <nl> + <nl> + <nl> + # - - Options for HTML output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + # The theme to use for HTML and HTML Help pages . See the documentation for <nl> + # a list of builtin themes . <nl> + # <nl> + html_theme = " alabaster " <nl> + <nl> + # Remove JavaScript . <nl> + html_js_files = [ ] <nl> + <nl> + # Theme options are theme - specific and customize the look and feel of a theme <nl> + # further . For a list of options available for each theme , see the <nl> + # documentation . <nl> + # <nl> + # html_theme_options = { } <nl> + <nl> + # Add any paths that contain custom static files ( such as style sheets ) here , <nl> + # relative to this directory . They are copied after the builtin static files , <nl> + # so a file named " default . css " will overwrite the builtin " default . css " . 
<nl> + html_static_path = [ " _static " ] <nl> + <nl> + html_show_sourcelink = True <nl> + <nl> + # Custom sidebar templates , must be a dictionary that maps document names <nl> + # to template names . <nl> + # <nl> + # The default sidebars ( for documents that don ' t match any pattern ) are <nl> + # defined by theme itself . Builtin themes are using these templates by <nl> + # default : ` ` [ ' localtoc . html ' , ' relations . html ' , ' sourcelink . html ' , <nl> + # ' searchbox . html ' ] ` ` . <nl> + <nl> + # Remove searchbox . html to avoid embedded JavaScript . <nl> + html_sidebars = { <nl> + " * * " : [ <nl> + " globaltoc . html " , " localtoc . html " , " relations . html " , " sourcelink . html " , <nl> + ] , <nl> + } <nl> + <nl> + <nl> + # - - Options for HTMLHelp output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + # Output file base name for HTML help builder . <nl> + htmlhelp_basename = " ProtocolBuffersdoc " <nl> + <nl> + <nl> + # - - Options for LaTeX output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + latex_elements = { <nl> + # The paper size ( ' letterpaper ' or ' a4paper ' ) . <nl> + # <nl> + # ' papersize ' : ' letterpaper ' , <nl> + # The font size ( ' 10pt ' , ' 11pt ' or ' 12pt ' ) . <nl> + # <nl> + # ' pointsize ' : ' 10pt ' , <nl> + # Additional stuff for the LaTeX preamble . <nl> + # <nl> + # ' preamble ' : ' ' , <nl> + # Latex figure ( float ) alignment <nl> + # <nl> + # ' figure_align ' : ' htbp ' , <nl> + } <nl> + <nl> + # Grouping the document tree into LaTeX files . List of tuples <nl> + # ( source start file , target name , title , <nl> + # author , documentclass [ howto , manual , or own class ] ) . <nl> + latex_documents = [ <nl> + ( <nl> + master_doc , <nl> + " ProtocolBuffers . tex " , <nl> + " Protocol Buffers Documentation " , <nl> + " Google LLC " , <nl> + " manual " , <nl> + ) <nl> + ] <nl> + <nl> + <nl> + # - - Options for manual page output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + # One entry per manual page . List of tuples <nl> + # ( source start file , name , description , authors , manual section ) . <nl> + man_pages = [ <nl> + ( <nl> + master_doc , # source start file <nl> + " protocolbuffers " , # name <nl> + " Protocol Buffers Documentation " , # description <nl> + [ author ] , # authors <nl> + 1 , # manual section <nl> + ) <nl> + ] <nl> + <nl> + <nl> + # - - Options for Texinfo output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + # Grouping the document tree into Texinfo files . List of tuples <nl> + # ( source start file , target name , title , author , <nl> + # dir menu entry , description , category ) <nl> + texinfo_documents = [ <nl> + ( <nl> + master_doc , <nl> + " ProtocolBuffers " , <nl> + u " Protocol Buffers Documentation " , <nl> + author , <nl> + " ProtocolBuffers " , <nl> + " One line description of project . " , <nl> + " Miscellaneous " , <nl> + ) <nl> + ] <nl> + <nl> + <nl> + # - - Options for Epub output mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + # Bibliographic Dublin Core info . <nl> + epub_title = project <nl> + <nl> + # The unique identifier of the text . This can be a ISBN number <nl> + # or the project homepage . <nl> + # <nl> + # epub_identifier = ' ' <nl> + <nl> + # A unique identification for the text . <nl> + # <nl> + # epub_uid = ' ' <nl> + <nl> + # A list of files that should not be packed into the epub file . <nl> + epub_exclude_files = [ " search . 
html " ] <nl> + <nl> + <nl> + # - - Extension configuration mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + # - - Options for autosummary extension mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + autosummary_generate = True <nl> + <nl> + # - - Options for intersphinx extension mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + # Example configuration for intersphinx : refer to the Python standard library . <nl> + intersphinx_mapping = { " https : / / docs . python . org / " : None } <nl> new file mode 100644 <nl> index 0000000000 . . 2c7a8d93ef <nl> mmm / dev / null <nl> ppp b / python / docs / environment . yml <nl> <nl> + name : protobuf <nl> + channels : <nl> + - conda - forge <nl> + - defaults <nl> + dependencies : <nl> + - libprotobuf = 3 . 11 . 2 <nl> + - make = 4 . 2 . 1 <nl> + - pip = 19 . 3 . 1 <nl> + - python = 3 . 7 . 6 <nl> + - sphinx = 2 . 3 . 1 <nl> + - sphinx_rtd_theme = 0 . 4 . 3 <nl> + - sphinxcontrib - napoleon = 0 . 7 <nl> new file mode 100755 <nl> index 0000000000 . . 9b76e0cac1 <nl> mmm / dev / null <nl> ppp b / python / docs / generate_docs . py <nl> <nl> + # ! / usr / bin / env python <nl> + # Protocol Buffers - Google ' s data interchange format <nl> + # Copyright 2008 Google Inc . All rights reserved . <nl> + # https : / / developers . google . com / protocol - buffers / <nl> + # <nl> + # Redistribution and use in source and binary forms , with or without <nl> + # modification , are permitted provided that the following conditions are <nl> + # met : <nl> + # <nl> + # * Redistributions of source code must retain the above copyright <nl> + # notice , this list of conditions and the following disclaimer . <nl> + # * Redistributions in binary form must reproduce the above <nl> + # copyright notice , this list of conditions and the following disclaimer <nl> + # in the documentation and / or other materials provided with the <nl> + # distribution . <nl> + # * Neither the name of Google Inc . nor the names of its <nl> + # contributors may be used to endorse or promote products derived from <nl> + # this software without specific prior written permission . <nl> + # <nl> + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + # " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + # LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + # A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + # SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + # LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + # DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + # THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + # ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + # OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + <nl> + " " " Script to generate a list of all modules to use in autosummary . <nl> + <nl> + This script creates a ReStructured Text file for each public module in the <nl> + protobuf Python package . The script also updates the table of contents in <nl> + ` ` docs / index . rst ` ` to point to these module references . <nl> + <nl> + To build the docs with Sphinx : <nl> + <nl> + 1 . Install the needed packages ( ` ` sphinx ` ` , ` ` sphinxcontrib - napoleon ` ` for <nl> + Google - style docstring support ) . 
I ' ve created a conda environment file to <nl> + make this easier : <nl> + <nl> + . . code : : bash <nl> + <nl> + conda env create - f python / docs / environment . yml <nl> + <nl> + 2 . ( Optional ) Generate reference docs files and regenerate index : <nl> + <nl> + . . code : : bash <nl> + <nl> + cd python / docs <nl> + python generate_docs . py <nl> + <nl> + 3 . Run Sphinx . <nl> + <nl> + . . code : : bash <nl> + <nl> + make html <nl> + " " " <nl> + <nl> + import pathlib <nl> + import re <nl> + <nl> + <nl> + DOCS_DIR = pathlib . Path ( __file__ ) . parent . resolve ( ) <nl> + PYTHON_DIR = DOCS_DIR . parent <nl> + SOURCE_DIR = PYTHON_DIR / " google " / " protobuf " <nl> + SOURCE_POSIX = SOURCE_DIR . as_posix ( ) <nl> + IGNORED_PACKAGES = ( <nl> + " compiler " , <nl> + " internal " , <nl> + " pyext " , <nl> + " util " , <nl> + ) <nl> + IGNORED_MODULES = ( <nl> + " any_test_pb2 " , <nl> + " api_pb2 " , <nl> + " unittest " , <nl> + " source_context_pb2 " , <nl> + " test_messages_proto3_pb2 " , <nl> + " test_messages_proto2 " , <nl> + ) <nl> + TOC_REGEX = re . compile ( <nl> + r " \ . \ . START REFTOC . * \ . \ . END REFTOC \ . \ n " , <nl> + flags = re . DOTALL , <nl> + ) <nl> + TOC_TEMPLATE = " " " . . START REFTOC , generated by generate_docs . py . <nl> + . . toctree : : <nl> + <nl> + { toctree } <nl> + <nl> + . . END REFTOC . <nl> + " " " <nl> + <nl> + AUTOMODULE_TEMPLATE = " " " . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + { module } <nl> + { underline } <nl> + <nl> + . . automodule : : { module } <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> + " " " <nl> + <nl> + <nl> + def find_modules ( ) : <nl> + modules = [ ] <nl> + for module_path in SOURCE_DIR . glob ( " * * / * . py " ) : <nl> + package_posix = module_path . parent . as_posix ( ) <nl> + if any ( ignored in package_posix for ignored in IGNORED_PACKAGES ) : <nl> + continue <nl> + if any ( ignored in module_path . stem for ignored in IGNORED_MODULES ) : <nl> + continue <nl> + <nl> + package_name = " google . protobuf { } " . format ( <nl> + package_posix [ len ( SOURCE_POSIX ) : ] . replace ( " / " , " . " ) <nl> + ) <nl> + if module_path . name = = " __init__ . py " : <nl> + modules . append ( package_name ) <nl> + else : <nl> + module_name = module_path . stem <nl> + modules . append ( " { } . { } " . format ( package_name , module_name ) ) <nl> + <nl> + return modules <nl> + <nl> + <nl> + def write_automodule ( module ) : <nl> + contents = AUTOMODULE_TEMPLATE . format ( module = module , underline = " = " * len ( module ) , ) <nl> + automodule_path = DOCS_DIR . joinpath ( * module . split ( " . " ) ) . with_suffix ( " . rst " ) <nl> + try : <nl> + automodule_path . parent . mkdir ( parents = True ) <nl> + except FileExistsError : <nl> + pass <nl> + with open ( automodule_path , " w " ) as automodule_file : <nl> + automodule_file . write ( contents ) <nl> + <nl> + <nl> + def replace_toc ( modules ) : <nl> + toctree = [ module . replace ( " . " , " / " ) for module in modules ] <nl> + with open ( DOCS_DIR / " index . rst " , " r " ) as index_file : <nl> + index_contents = index_file . read ( ) <nl> + toc = TOC_TEMPLATE . format ( <nl> + toctree = " \ n " . join ( toctree ) <nl> + ) <nl> + index_contents = re . sub ( TOC_REGEX , toc , index_contents ) <nl> + with open ( DOCS_DIR / " index . rst " , " w " ) as index_file : <nl> + index_file . 
write ( index_contents ) <nl> + <nl> + <nl> + def main ( ) : <nl> + modules = list ( sorted ( find_modules ( ) ) ) <nl> + for module in modules : <nl> + print ( " Generating reference for { } " . format ( module ) ) <nl> + write_automodule ( module ) <nl> + print ( " Generating index . rst " ) <nl> + replace_toc ( modules ) <nl> + <nl> + if __name__ = = " __main__ " : <nl> + main ( ) <nl> new file mode 100644 <nl> index 0000000000 . . 0982bf31a8 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf <nl> + = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 42198c8cf6 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / any_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . any_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . any_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 949b97f61a <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / descriptor . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . descriptor <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . descriptor <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 912439d1f7 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / descriptor_database . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . descriptor_database <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . descriptor_database <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 6b18b44bb9 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / descriptor_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . descriptor_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . descriptor_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 8b1ab9943a <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / descriptor_pool . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . descriptor_pool <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . descriptor_pool <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 7e39f9e94e <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / duration_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . duration_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . 
duration_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 39cbb6431e <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / empty_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . empty_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . empty_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 3a0c40cd3b <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / field_mask_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . field_mask_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . field_mask_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 8f436e154d <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / json_format . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . json_format <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . json_format <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 03d0dda86f <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / message . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . message <nl> + = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . message <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . b098a8d392 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / message_factory . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . message_factory <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . message_factory <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 5cc17409d3 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / proto_builder . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . proto_builder <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . proto_builder <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 0bee3e5f45 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / reflection . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . reflection <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . reflection <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 9b3a7b3267 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / service . rst <nl> <nl> + . . 
DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . service <nl> + = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . service <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 1fb688531e <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / service_reflection . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . service_reflection <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . service_reflection <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 617a580f78 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / struct_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . struct_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . struct_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 76bcf2f4e5 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / symbol_database . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . symbol_database <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . symbol_database <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 0e3a5d97ba <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / text_encoding . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . text_encoding <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . text_encoding <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 6b4c40a661 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / text_format . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . text_format <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . text_format <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 4160fe0731 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / timestamp_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . timestamp_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . timestamp_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 8f0fb1481c <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / type_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . type_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . 
type_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . 037af966f0 <nl> mmm / dev / null <nl> ppp b / python / docs / google / protobuf / wrappers_pb2 . rst <nl> <nl> + . . DO NOT EDIT , generated by generate_docs . py . <nl> + <nl> + google . protobuf . wrappers_pb2 <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + . . automodule : : google . protobuf . wrappers_pb2 <nl> + : members : <nl> + : inherited - members : <nl> + : undoc - members : <nl> new file mode 100644 <nl> index 0000000000 . . d5bb39e780 <nl> mmm / dev / null <nl> ppp b / python / docs / index . rst <nl> <nl> + . . Protocol Buffers documentation master file , created by <nl> + sphinx - quickstart on Thu Aug 15 13 : 56 : 43 2019 . <nl> + You can adapt this file completely to your liking , but it should at least <nl> + contain the root ` toctree ` directive . <nl> + <nl> + Protocol Buffers Python API Reference <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> + <nl> + The complete documentation for Protocol Buffers is available via the web at : <nl> + <nl> + https : / / developers . google . com / protocol - buffers / <nl> + <nl> + <nl> + Modules and Packages <nl> + mmmmmmmmmmmmmmmmmm - - <nl> + <nl> + . . START REFTOC , generated by generate_docs . py . <nl> + . . toctree : : <nl> + <nl> + google / protobuf <nl> + google / protobuf / any_pb2 <nl> + google / protobuf / descriptor <nl> + google / protobuf / descriptor_database <nl> + google / protobuf / descriptor_pb2 <nl> + google / protobuf / descriptor_pool <nl> + google / protobuf / duration_pb2 <nl> + google / protobuf / empty_pb2 <nl> + google / protobuf / field_mask_pb2 <nl> + google / protobuf / json_format <nl> + google / protobuf / message <nl> + google / protobuf / message_factory <nl> + google / protobuf / proto_builder <nl> + google / protobuf / reflection <nl> + google / protobuf / service <nl> + google / protobuf / service_reflection <nl> + google / protobuf / struct_pb2 <nl> + google / protobuf / symbol_database <nl> + google / protobuf / text_encoding <nl> + google / protobuf / text_format <nl> + google / protobuf / timestamp_pb2 <nl> + google / protobuf / type_pb2 <nl> + google / protobuf / wrappers_pb2 <nl> + <nl> + . . END REFTOC . <nl> + <nl> + Indices and tables <nl> + mmmmmmmmmmmmmmmmmm <nl> + <nl> + * : ref : ` genindex ` <nl> + * : ref : ` modindex ` <nl> new file mode 100644 <nl> index 0000000000 . . 27f573b87a <nl> mmm / dev / null <nl> ppp b / python / docs / make . bat <nl> <nl> + @ ECHO OFF <nl> + <nl> + pushd % ~ dp0 <nl> + <nl> + REM Command file for Sphinx documentation <nl> + <nl> + if " % SPHINXBUILD % " = = " " ( <nl> + set SPHINXBUILD = sphinx - build <nl> + ) <nl> + set SOURCEDIR = . <nl> + set BUILDDIR = _build <nl> + <nl> + if " % 1 " = = " " goto help <nl> + <nl> + % SPHINXBUILD % > NUL 2 > NUL <nl> + if errorlevel 9009 ( <nl> + echo . <nl> + echo . The ' sphinx - build ' command was not found . Make sure you have Sphinx <nl> + echo . installed , then set the SPHINXBUILD environment variable to point <nl> + echo . to the full path of the ' sphinx - build ' executable . Alternatively you <nl> + echo . may add the Sphinx directory to PATH . <nl> + echo . <nl> + echo . If you don ' t have Sphinx installed , grab it from <nl> + echo . http : / / sphinx - doc . 
org / <nl> + exit / b 1 <nl> + ) <nl> + <nl> + % SPHINXBUILD % - M % 1 % SOURCEDIR % % BUILDDIR % % SPHINXOPTS % <nl> + goto end <nl> + <nl> + : help <nl> + % SPHINXBUILD % - M help % SOURCEDIR % % BUILDDIR % % SPHINXOPTS % <nl> + <nl> + : end <nl> + popd <nl> new file mode 100644 <nl> index 0000000000 . . 2b3e98925e <nl> mmm / dev / null <nl> ppp b / python / docs / requirements . txt <nl> <nl> + sphinx = = 2 . 3 . 1 <nl> + sphinx_rtd_theme = = 0 . 4 . 3 <nl> + sphinxcontrib - napoleon = = 0 . 7 <nl> mmm a / tests . sh <nl> ppp b / tests . sh <nl> build_cpp_distcheck ( ) { <nl> # List all files that should be included in the distribution package . <nl> git ls - files | grep " ^ \ ( java \ | python \ | objectivec \ | csharp \ | js \ | ruby \ | php \ | cmake \ | examples \ | src / google / protobuf / . * \ . proto \ ) " | \ <nl> grep - v " . gitignore " | grep - v " java / compatibility_tests " | grep - v " java / lite / proguard . pgcfg " | \ <nl> - grep - v " python / compatibility_tests " | grep - v " csharp / compatibility_tests " > dist . lst <nl> + grep - v " python / compatibility_tests " | grep - v " python / docs " | grep - v " csharp / compatibility_tests " > dist . lst <nl> # Unzip the dist tar file . <nl> DIST = ` ls * . tar . gz ` <nl> tar - xf $ DIST <nl> | python : add sphinx docs ( ) | protocolbuffers/protobuf | 29c83baeccf4b5dc27308999e5cca65eb3774e86 | 2020-02-11T19:40:17Z |
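The protobuf row above adds generate_docs.py, which drives the Sphinx reference docs through a discover/render/reindex loop: glob the package for .py files, write one ".. automodule::" stub per module, then rewrite the toctree between the START/END REFTOC markers in index.rst. The following is a minimal runnable sketch of that same pattern; the mypkg/docs paths and the helper names are illustrative assumptions, not protobuf's actual layout.

    # Sketch of the discover/render/reindex pattern used by generate_docs.py
    # above. Paths and names are illustrative assumptions.
    import pathlib
    import re

    DOCS = pathlib.Path("docs")   # assumed output directory for .rst stubs
    PKG = pathlib.Path("mypkg")   # assumed package to document
    TOC_REGEX = re.compile(r"\.\. START REFTOC.*\.\. END REFTOC\.\n", re.DOTALL)

    def find_modules():
        # Every .py file maps to a dotted module name; __init__.py stands
        # for its package.
        mods = set()
        for path in PKG.glob("**/*.py"):
            rel = path.relative_to(PKG.parent).with_suffix("")
            parts = list(rel.parts)
            if parts[-1] == "__init__":
                parts = parts[:-1]
            mods.add(".".join(parts))
        return sorted(mods)

    def write_stub(module):
        # The RST title underline must be exactly as long as the title,
        # hence the "=" * len(module) in generate_docs.py as well.
        rst = "{m}\n{u}\n\n.. automodule:: {m}\n   :members:\n".format(
            m=module, u="=" * len(module))
        out = DOCS.joinpath(*module.split(".")).with_suffix(".rst")
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(rst)

    def reindex(modules):
        # Rewrite the toctree between the REFTOC markers in place; assumes
        # index.rst already contains the marker pair.
        index = DOCS / "index.rst"
        entries = "\n   ".join(m.replace(".", "/") for m in modules)
        toc = (".. START REFTOC.\n.. toctree::\n\n   "
               + entries + "\n\n.. END REFTOC.\n")
        index.write_text(TOC_REGEX.sub(toc, index.read_text()))

    if __name__ == "__main__":
        modules = find_modules()
        for module in modules:
            write_stub(module)
        reindex(modules)

Because the REFTOC regex rewrites the marker block in place rather than appending, the script stays idempotent, which is why generate_docs.py can simply be re-run whenever a module is added.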
mmm a / AUTHORS <nl> ppp b / AUTHORS <nl> Developers : <nl> kyokomi <nl> Fix the bug that UIButton doesn ' t support TTF font <nl> Fix a bug of TextReader <nl> + <nl> + gin0606 <nl> + Add a new line at the end of a file <nl> <nl> Retired Core Developers : <nl> WenSheng Yang <nl> | [ ci skip ] | cocos2d/cocos2d-x | 2f959a3f87810b8513d3fb2c3fc2cfe2bcd6229e | 2014-06-19T12:48:45Z |
mmm a / hphp / runtime / vm / jit / memory - effects . cpp <nl> ppp b / hphp / runtime / vm / jit / memory - effects . cpp <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> return may_load_store ( AHeapAny , AEmpty / * Note * / ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / Instructions that allocate new objects , so any effects they have on some <nl> - / / types of memory locations we track are isolated from anything else we care <nl> - / / about . <nl> + / / Instructions that allocate new objects , without reading any other memory <nl> + / / at all , so any effects they have on some types of memory locations we <nl> + / / track are isolated from anything else we care about . <nl> <nl> case NewArray : <nl> case NewCol : <nl> case NewInstanceRaw : <nl> - case NewLikeArray : <nl> case NewMixedArray : <nl> case AllocPackedArray : <nl> case ConvBoolToArr : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ConvDblToArr : <nl> case ConvIntToArr : <nl> case ConvIntToStr : <nl> - case ConvResToStr : <nl> - case CreateSSWH : <nl> case Box : / / conditional allocation <nl> return IrrelevantEffects { } ; <nl> <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case Ceil : <nl> case Floor : <nl> case DefLabel : <nl> - case DecRefNZ : <nl> case CheckInit : <nl> case Nop : <nl> case ClsNeq : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Instructions that technically do some things w / memory , but not in any way <nl> - / / we currently care about . <nl> + / / we currently care about . They however don ' t return IrrelevantEffects <nl> + / / because we assume ( in refcount - opts ) that IrrelevantEffects instructions <nl> + / / can ' t even inspect Countable reference count fields , and several of these <nl> + / / can . All GeneralEffects instructions are assumed to possibly do so . 
<nl> <nl> - case CheckRefs : <nl> + case DecRefNZ : <nl> case AFWHBlockOn : <nl> + case IncRef : <nl> + case IncRefCtx : <nl> + case StClosureArg : <nl> + case StClosureCtx : <nl> + case StContArKey : <nl> + case StContArValue : <nl> + case StRetVal : <nl> + case ConcatIntStr : <nl> + case ConcatStr3 : <nl> + case ConcatStr4 : <nl> + case ConcatStrInt : <nl> + case ConcatStrStr : <nl> + case CoerceStrToDbl : <nl> + case CoerceStrToInt : <nl> + case ConvStrToInt : <nl> + case OrdStr : <nl> + case CreateSSWH : <nl> + case ConvResToStr : <nl> + case NewLikeArray : <nl> + case CheckRefs : <nl> case LdClsCctx : <nl> case BeginCatch : <nl> case CheckSurpriseFlags : <nl> case CheckType : <nl> case FreeActRec : <nl> - case IncRef : <nl> - case IncRefCtx : <nl> case LdRetAddr : <nl> case RegisterLiveObj : <nl> case RetAdjustStk : <nl> - case StClosureArg : <nl> - case StClosureCtx : <nl> case StClosureFunc : <nl> - case StContArKey : <nl> case StContArResume : <nl> case StContArState : <nl> - case StContArValue : <nl> - case StRetVal : <nl> case ZeroErrorLevel : <nl> case RestoreErrorLevel : <nl> case CheckCold : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ContArIncKey : <nl> case ContArUpdateIdx : <nl> case ContValid : <nl> - case ConcatIntStr : <nl> - case ConcatStr3 : <nl> - case ConcatStr4 : <nl> - case ConcatStrInt : <nl> - case ConcatStrStr : <nl> - case CoerceStrToDbl : <nl> - case CoerceStrToInt : <nl> - case ConvStrToInt : <nl> - case OrdStr : <nl> case IncProfCounter : <nl> case IncStat : <nl> case IncStatGrouped : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case LookupClsRDSHandle : <nl> case CoerceCellToDbl : <nl> case CoerceCellToInt : <nl> - return IrrelevantEffects { } ; <nl> + return may_load_store ( AEmpty , AEmpty ) ; <nl> <nl> / / Some that touch memory we might care about later , but currently don ' t : <nl> case CheckStaticLocInit : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case VectorHasImmCopy : <nl> case CheckPackedArrayBounds : <nl> case LdColArray : <nl> - return IrrelevantEffects { } ; <nl> + return may_load_store ( AEmpty , AEmpty ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Instructions that can re - enter the VM and touch most heap things . They <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> / / Could re - enter to run a destructor . <nl> return may_reenter ( may_load_store ( AEmpty , AEmpty ) ) ; <nl> } <nl> - return IrrelevantEffects { } ; <nl> + return may_load_store ( AEmpty , AEmpty ) ; <nl> <nl> case LdArrFPushCuf : / / autoloads <nl> case LdArrFuncCtx : / / autoloads <nl> | Make some instructions that can read memory use AEmpty GeneralEffects | facebook/hhvm | d12decbf7fea04274a1a5e2b8a5c30d7bcb95ae5 | 2015-03-10T20:00:52Z |
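The hhvm row above encodes an invariant from refcount-opts: an instruction may only be classified IrrelevantEffects if it cannot even inspect a Countable's reference-count field; anything that can (DecRefNZ, IncRef, the string concats, and so on) is demoted to a GeneralEffects that loads and stores nothing, the may_load_store(AEmpty, AEmpty) in the diff. The toy Python classifier below models that split; the effect classes and the opcode tables are simplified stand-ins for illustration, not HHVM's real memory-effects API.

    # Toy model of the memory-effects split made in the hhvm diff above.
    # Effect classes here are simplified stand-ins, not HHVM's types.
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class IrrelevantEffects:
        """Touches no memory; must not even inspect refcount fields."""

    @dataclass(frozen=True)
    class GeneralEffects:
        loads: frozenset    # alias classes possibly read
        stores: frozenset   # alias classes possibly written

    def may_load_store(loads=frozenset(), stores=frozenset()):
        # may_load_store(AEmpty, AEmpty) in the diff: reads and writes no
        # tracked location, but is NOT assumed to be refcount-blind.
        return GeneralEffects(loads, stores)

    # Pure allocations: read nothing at all, so they stay irrelevant.
    PURE = {"NewArray", "NewCol", "NewMixedArray", "AllocPackedArray"}

    # These touch no tracked memory but can inspect a Countable's
    # reference count, so the diff demotes them from IrrelevantEffects.
    REFCOUNT_AWARE = {"DecRefNZ", "IncRef", "ConcatStrStr", "OrdStr",
                      "NewLikeArray", "ConvResToStr"}

    def memory_effects(opcode):
        if opcode in PURE:
            return IrrelevantEffects()
        if opcode in REFCOUNT_AWARE:
            return may_load_store()
        raise KeyError("unclassified opcode: " + opcode)

    assert isinstance(memory_effects("NewArray"), IrrelevantEffects)
    assert isinstance(memory_effects("DecRefNZ"), GeneralEffects)

The weaker class matters for soundness: passes that treat IrrelevantEffects instructions as freely movable can keep doing so, while refcount-inspecting operations are held in place by their (empty) GeneralEffects.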
mmm a / addons / skin . confluence / language / Catalan / strings . xml <nl> ppp b / addons / skin . confluence / language / Catalan / strings . xml <nl> <nl> < string id = " 31352 " > Atura < / string > <nl> < string id = " 31353 " > Avança ràpid < / string > <nl> < string id = " 31354 " > Rebobina < / string > <nl> - < string id = " 31355 " > Menú de pel · lícules < / string > <nl> + < string id = " 31355 " > Menú de la pel · lícula < / string > <nl> < string id = " 31356 " > Descarrega els subtítols < / string > <nl> < string id = " 31357 " > < / string > <nl> <nl> mmm a / language / Catalan / strings . xml <nl> ppp b / language / Catalan / strings . xml <nl> <nl> < string id = " 420 " > HDMI < / string > <nl> <nl> < string id = " 422 " > Esborra la informació de l ' àlbum < / string > <nl> - < string id = " 423 " > Esborraa la informació del CD < / string > <nl> + < string id = " 423 " > Esborra la informació del CD < / string > <nl> < string id = " 424 " > Seleccioneu < / string > <nl> < string id = " 425 " > No s ' ha trobat la informació de l ' àlbum . < / string > <nl> < string id = " 426 " > No s ' ha trobat la informació del CD . < / string > <nl> <nl> < string id = " 492 " > Redimensiona els subtítols VOB < / string > <nl> < string id = " 493 " > Opcions avançades ( Només experts ! ) < / string > <nl> < string id = " 494 " > Overall audio headroom < / string > <nl> - < string id = " 495 " > Reescalar Videos a la resolució del entorn ( GUI ) < / string > <nl> + < string id = " 495 " > Reescala els vídeos a la resolució de l ' entorn ( GUI ) < / string > <nl> < string id = " 496 " > Calibratge < / string > <nl> - < string id = " 497 " > Mostra les extensions d ' arxiu < / string > <nl> + < string id = " 497 " > Mostra les extensions de fitxer < / string > <nl> < string id = " 498 " > Ordena per : Tipus < / string > <nl> < string id = " 499 " > No es pot connectar al servei de cerca en línia < / string > <nl> - < string id = " 500 " > Descarrega d ' informació d ' àlbum fallida < / string > <nl> + < string id = " 500 " > Descarrega de la informació de l ' àlbum fallida < / string > <nl> < string id = " 501 " > S ' està cercant el nom dels àlbums . . . < / string > <nl> < string id = " 502 " > Obre < / string > <nl> < string id = " 503 " > Ocupat < / string > <nl> <nl> < string id = " 607 " > A : < / string > <nl> < string id = " 608 " > No es pot extreure el CD o la pista < / string > <nl> < string id = " 609 " > CDDARipPath no està definit . < / string > <nl> - < string id = " 610 " > Extreu pista d ' àudio < / string > <nl> + < string id = " 610 " > Extreu la pista d ' àudio < / string > <nl> < string id = " 611 " > Introduïu el número < / string > <nl> < string id = " 612 " > Bits / Mostra < / string > <nl> < string id = " 613 " > Freqüència de mostreig < / string > <nl> <nl> < string id = " 641 " > Nivell del preamplificador - Replay gained files < / string > <nl> < string id = " 642 " > Nivell del preamplificador - Non replay gained files < / string > <nl> < string id = " 643 " > Avoid clipping on replay gained files < / string > <nl> - < string id = " 644 " > Crop black bars < / string > <nl> - < string id = " 645 " > Need to unpack a big file . Continue ? < / string > <nl> + < string id = " 644 " > Elimina les vores negres < / string > <nl> + < string id = " 645 " > Es necessita descomprimir un fitxer gran . Voleu continuar ? 
< / string > <nl> < string id = " 646 " > Elimina de la biblioteca < / string > <nl> < string id = " 647 " > Exporta la biblioteca de vídeo < / string > <nl> < string id = " 648 " > Importa la biblioteca de vídeo < / string > <nl> <nl> <nl> < string id = " 1260 " > Anuncia aquests serveis a altres sistemes a través de Zeroconf < / string > <nl> <nl> - < string id = " 1270 " > Allow XBMC to receive AirPlay content < / string > <nl> + < string id = " 1270 " > Permet que l ' XBMC rebi contingut de l ' AirPlay < / string > <nl> + < string id = " 1271 " > Nom del dispositiu < / string > <nl> <nl> < string id = " 1300 " > Dispositiu d ' àudio personalitzat < / string > <nl> < string id = " 1301 " > Custom passthrough device < / string > <nl> <nl> < string id = " 2100 " > L ' script ha fallat : % s < / string > <nl> < string id = " 2101 " > Es necessita un versió més recent - Veure el registre < / string > <nl> <nl> - < string id = " 4501 " > Activaa l ' LCD / VFD < / string > <nl> + < string id = " 4501 " > Activa l ' LCD / VFD < / string > <nl> <nl> < string id = " 10000 " > Inici < / string > <nl> < string id = " 10001 " > Programes < / string > <nl> <nl> < string id = " 13022 " > La extracció del dispositiu no és segura < / string > <nl> < string id = " 13023 " > El dispositiu s ' ha extret amb èxit < / string > <nl> < string id = " 13024 " > S ' ha connectat un joystick < / string > <nl> - < string id = " 13025 " > S ' ha desconnectat un joystick unplugged < / string > <nl> + < string id = " 13025 " > S ' ha desconnectat el joystick < / string > <nl> <nl> < string id = " 13050 " > La càrrega de la bateria és baixa < / string > <nl> <nl> <nl> < string id = " 13421 " > VDPAU < / string > <nl> < string id = " 13422 " > Comença la presentació aquí < / string > <nl> < string id = " 13423 " > Recorda aquest camí < / string > <nl> - < string id = " 13424 " > Use pixel buffer objects < / string > <nl> + < string id = " 13424 " > Utilitza objectes de memòria intermèdia de píxels < / string > <nl> < string id = " 13425 " > Permet l ' acceleració per maquinari ( VDPAU ) < / string > <nl> < string id = " 13426 " > Permet l ' acceleració per maquinari ( VAAPI ) < / string > <nl> < string id = " 13427 " > Permet l ' acceleració per maquinari ( DXVA2 ) < / string > <nl> <nl> < string id = " 13501 " > Rellotge d ' àudio < / string > <nl> < string id = " 13502 " > Rellotge del vídeo ( Drop / Dupe àudio ) < / string > <nl> < string id = " 13503 " > Rellotge del vídeo ( Remostreja l ' àudio ) < / string > <nl> - < string id = " 13504 " > Maximum resample amount ( % ) < / string > <nl> + < string id = " 13504 " > Quantitat màxima de remostreig ( % ) < / string > <nl> < string id = " 13505 " > Qualitat del remostreig < / string > <nl> < string id = " 13506 " > Baix ( ràpid ) < / string > <nl> < string id = " 13507 " > Mitja < / string > <nl> <nl> < string id = " 16315 " > Lanczos3 optimitzat < / string > <nl> < string id = " 16316 " > Auto < / string > <nl> < string id = " 16317 " > Temporal ( Half ) < / string > <nl> - < string id = " 16318 " > Temporal / Spatial ( Half ) < / string > <nl> + < string id = " 16318 " > Temporal / Espacial ( Half ) < / string > <nl> < string id = " 16319 " > DXVA < / string > <nl> + < string id = " 16320 " > DXVA Bob < / string > <nl> + < string id = " 16321 " > DXVA Best < / string > <nl> + < string id = " 16322 " > Spline36 < / string > <nl> + < string id = " 16323 " > Spline36 optimitzat < / string > <nl> <nl> < string id = " 16400 " > Postprocessament < / string > <nl> <nl> 
<nl> < string id = " 20258 " > Client de MythTV < / string > <nl> < string id = " 20259 " > Network Filesystem ( NFS ) < / string > <nl> < string id = " 20260 " > Secure Shell ( SSH / SFTP ) < / string > <nl> + < string id = " 20261 " > Apple File Protocol ( AFP ) < / string > <nl> <nl> < string id = " 20300 " > Directori del servidor web ( HTTP ) < / string > <nl> < string id = " 20301 " > Directori dels servidor web ( HTTPS ) < / string > <nl> <nl> < string id = " 20454 " > Oient < / string > <nl> < string id = " 20455 " > Oients < / string > <nl> < string id = " 20456 " > Set movieset fanart < / string > <nl> + < string id = " 20457 " > Movie set < / string > <nl> < ! - - up to 21329 is reserved for the video db ! ! ! - - > <nl> <nl> < string id = " 21330 " > Mostra els fitxers i directoris ocults < / string > <nl> | updated catalan translation | xbmc/xbmc | 7717148eab7d80d3da6548b63bd2f7974786326e | 2011-09-14T18:37:37Z |
mmm a / arangod / Utils / Transaction . h <nl> ppp b / arangod / Utils / Transaction . h <nl> <nl> <nl> # include " Cluster / ServerState . h " <nl> <nl> + # include " Utils / Exception . h " <nl> # include " VocBase / barrier . h " <nl> # include " VocBase / collection . h " <nl> # include " VocBase / document - collection . h " <nl> namespace triagens { <nl> return TRI_ERROR_OUT_OF_MEMORY ; <nl> } <nl> <nl> - int res = TRI_ReadShapedJsonDocumentCollection ( trxCollection , <nl> - ( TRI_voc_key_t ) key . c_str ( ) , <nl> - mptr , <nl> - ! isLocked ( trxCollection , TRI_TRANSACTION_READ ) ) ; <nl> - <nl> - return res ; <nl> + try { <nl> + return TRI_ReadShapedJsonDocumentCollection ( trxCollection , <nl> + ( TRI_voc_key_t ) key . c_str ( ) , <nl> + mptr , <nl> + ! isLocked ( trxCollection , TRI_TRANSACTION_READ ) ) ; <nl> + } <nl> + catch ( triagens : : arango : : Exception const & ex ) { <nl> + return ex . code ( ) ; <nl> + } <nl> + catch ( . . . ) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> <nl> bool lock = ! isLocked ( trxCollection , TRI_TRANSACTION_WRITE ) ; <nl> <nl> - int res = TRI_InsertShapedJsonDocumentCollection ( trxCollection , <nl> - key , <nl> - rid , <nl> - mptr , <nl> - shaped , <nl> - static_cast < TRI_document_edge_t const * > ( data ) , <nl> - lock , <nl> - forceSync , <nl> - false ) ; <nl> - <nl> - return res ; <nl> + try { <nl> + return TRI_InsertShapedJsonDocumentCollection ( trxCollection , <nl> + key , <nl> + rid , <nl> + mptr , <nl> + shaped , <nl> + static_cast < TRI_document_edge_t const * > ( data ) , <nl> + lock , <nl> + forceSync , <nl> + false ) ; <nl> + } <nl> + catch ( triagens : : arango : : Exception const & ex ) { <nl> + return ex . code ( ) ; <nl> + } <nl> + catch ( . . . ) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> policy , <nl> expectedRevision , <nl> actualRevision , <nl> - forceSync ) ; <nl> - <nl> + forceSync ) ; <nl> + <nl> TRI_FreeShapedJson ( zone , shaped ) ; <nl> - <nl> return res ; <nl> } <nl> <nl> namespace triagens { <nl> return TRI_ERROR_OUT_OF_MEMORY ; <nl> } <nl> <nl> - int res = TRI_UpdateShapedJsonDocumentCollection ( trxCollection , <nl> - ( const TRI_voc_key_t ) key . c_str ( ) , <nl> - rid , <nl> - mptr , <nl> - shaped , <nl> - & updatePolicy , <nl> - ! isLocked ( trxCollection , TRI_TRANSACTION_WRITE ) , <nl> - forceSync ) ; <nl> - <nl> - return res ; <nl> + try { <nl> + return TRI_UpdateShapedJsonDocumentCollection ( trxCollection , <nl> + ( const TRI_voc_key_t ) key . c_str ( ) , <nl> + rid , <nl> + mptr , <nl> + shaped , <nl> + & updatePolicy , <nl> + ! isLocked ( trxCollection , TRI_TRANSACTION_WRITE ) , <nl> + forceSync ) ; <nl> + } <nl> + catch ( triagens : : arango : : Exception const & ex ) { <nl> + return ex . code ( ) ; <nl> + } <nl> + catch ( . . . 
) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> bool forceSync ) { <nl> <nl> TRI_doc_update_policy_t updatePolicy ( policy , expectedRevision , actualRevision ) ; <nl> - <nl> - int res = TRI_RemoveShapedJsonDocumentCollection ( trxCollection , <nl> - ( TRI_voc_key_t ) key . c_str ( ) , <nl> - rid , <nl> - & updatePolicy , <nl> - ! isLocked ( trxCollection , TRI_TRANSACTION_WRITE ) , <nl> - forceSync ) ; <nl> - <nl> - return res ; <nl> + <nl> + try { <nl> + return TRI_RemoveShapedJsonDocumentCollection ( trxCollection , <nl> + ( TRI_voc_key_t ) key . c_str ( ) , <nl> + rid , <nl> + & updatePolicy , <nl> + ! isLocked ( trxCollection , TRI_TRANSACTION_WRITE ) , <nl> + forceSync ) ; <nl> + } <nl> + catch ( triagens : : arango : : Exception const & ex ) { <nl> + return ex . code ( ) ; <nl> + } <nl> + catch ( . . . ) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> return res ; <nl> } <nl> <nl> - for ( auto it = ids . begin ( ) ; it ! = ids . end ( ) ; + + it ) { <nl> - res = TRI_RemoveShapedJsonDocumentCollection ( trxCollection , <nl> - ( TRI_voc_key_t ) ( * it ) . c_str ( ) , <nl> - 0 , <nl> - nullptr , / / policy <nl> - false , <nl> - forceSync ) ; <nl> + try { <nl> + for ( auto it = ids . begin ( ) ; it ! = ids . end ( ) ; + + it ) { <nl> + res = TRI_RemoveShapedJsonDocumentCollection ( trxCollection , <nl> + ( TRI_voc_key_t ) ( * it ) . c_str ( ) , <nl> + 0 , <nl> + nullptr , / / policy <nl> + false , <nl> + forceSync ) ; <nl> <nl> - if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> - / / halt on first error <nl> - break ; <nl> + if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> + / / halt on first error <nl> + break ; <nl> + } <nl> } <nl> } <nl> + catch ( triagens : : arango : : Exception const & ex ) { <nl> + res = ex . code ( ) ; <nl> + } <nl> + catch ( . . . ) { <nl> + res = TRI_ERROR_INTERNAL ; <nl> + } <nl> <nl> this - > unlock ( trxCollection , TRI_TRANSACTION_WRITE ) ; <nl> / / WRITE - LOCK END <nl> mmm a / arangod / VocBase / document - collection . cpp <nl> ppp b / arangod / VocBase / document - collection . cpp <nl> <nl> # include " Utils / transactions . h " <nl> # include " Utils / CollectionReadLocker . h " <nl> # include " Utils / CollectionWriteLocker . h " <nl> + # include " Utils / Exception . h " <nl> # include " VocBase / edge - collection . h " <nl> # include " VocBase / index . h " <nl> # include " VocBase / key - generator . h " <nl> static int InsertDocument ( TRI_transaction_collection_t * trxCollection , <nl> DeletePrimaryIndex ( document , header , true ) ; <nl> return res ; <nl> } <nl> + <nl> + document - > _numberDocuments + + ; <nl> <nl> operation . indexed ( ) ; <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoOperation " ) { <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoOperationExcept " ) { <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> res = TRI_AddOperationTransaction ( operation , syncRequested ) ; <nl> <nl> static int UpdateDocument ( TRI_transaction_collection_t * trxCollection , <nl> } <nl> <nl> operation . 
indexed ( ) ; <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoOperation " ) { <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoOperationExcept " ) { <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> res = TRI_AddOperationTransaction ( operation , syncRequested ) ; <nl> <nl> bool TRI_IsFullyCollectedDocumentCollection ( TRI_document_collection_t * document <nl> TRI_READ_LOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION ( document ) ; <nl> <nl> int64_t uncollected = document - > _uncollectedLogfileEntries ; <nl> - <nl> + <nl> TRI_READ_UNLOCK_DOCUMENTS_INDEXES_PRIMARY_COLLECTION ( document ) ; <nl> + <nl> return ( uncollected = = 0 ) ; <nl> } <nl> <nl> int TRI_ReadShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxColle <nl> mptr - > setDataPtr ( nullptr ) ; / / PROTECTED by trx in trxCollection <nl> <nl> { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " ReadDocumentNoLock " ) { <nl> + / / test what happens if no lock can be acquired <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " ReadDocumentNoLockExcept " ) { <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> + <nl> + <nl> TRI_document_collection_t * document = trxCollection - > _collection - > _collection ; <nl> triagens : : arango : : CollectionReadLocker collectionLocker ( document , lock ) ; <nl> <nl> int TRI_RemoveShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> TRI_ASSERT ( key ! = nullptr ) ; <nl> <nl> TRI_document_collection_t * document = trxCollection - > _collection - > _collection ; <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " RemoveDocumentNoMarker " ) { <nl> + / / test what happens when no marker can be created <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " RemoveDocumentNoMarkerExcept " ) { <nl> + / / test what happens if no marker can be created <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> triagens : : wal : : Marker * marker = new triagens : : wal : : RemoveMarker ( document - > _vocbase - > _id , <nl> document - > _info . _cid , <nl> int TRI_RemoveShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> TRI_doc_mptr_t * header ; <nl> int res ; <nl> { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " RemoveDocumentNoLock " ) { <nl> + / / test what happens if no lock can be acquired <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> triagens : : arango : : CollectionWriteLocker collectionLocker ( document , lock ) ; <nl> <nl> triagens : : wal : : DocumentOperation operation ( marker , trxCollection , TRI_VOC_DOCUMENT_OPERATION_REMOVE , rid ) ; <nl> int TRI_RemoveShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> } <nl> <nl> operation . 
indexed ( ) ; <nl> + <nl> + document - > _headersPtr - > unlink ( header ) ; / / PROTECTED by trx in trxCollection <nl> + document - > _numberDocuments - - ; <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " RemoveDocumentNoOperation " ) { <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " RemoveDocumentNoOperationExcept " ) { <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> res = TRI_AddOperationTransaction ( operation , forceSync ) ; <nl> } <nl> int TRI_InsertShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> / / construct a legend for the shaped json <nl> triagens : : basics : : JsonLegend legend ( document - > getShaper ( ) ) ; / / PROTECTED by trx in trxCollection <nl> <nl> - TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentInvalidShape " ) { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoLegend " ) { <nl> + / / test what happens when no legend can be created <nl> return TRI_ERROR_DEBUG ; <nl> } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoLegendExcept " ) { <nl> + / / test what happens if no legend can be created <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> int res = legend . addShape ( shaped - > _sid , & shaped - > _data ) ; <nl> <nl> int TRI_InsertShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> return res ; <nl> } <nl> <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoMarker " ) { <nl> + / / test what happens when no marker can be created <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoMarkerExcept " ) { <nl> + / / test what happens if no marker can be created <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> + <nl> triagens : : wal : : Marker * marker = nullptr ; <nl> <nl> if ( edge = = nullptr ) { <nl> int TRI_InsertShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> <nl> / / now insert into indexes <nl> { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoLock " ) { <nl> + / / test what happens if no lock can be acquired <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> triagens : : arango : : CollectionWriteLocker collectionLocker ( document , lock ) ; <nl> <nl> triagens : : wal : : DocumentOperation operation ( marker , trxCollection , TRI_VOC_DOCUMENT_OPERATION_INSERT , rid ) ; <nl> <nl> TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoHeader " ) { <nl> + / / test what happens if no header can be acquired <nl> return TRI_ERROR_DEBUG ; <nl> } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " InsertDocumentNoHeaderExcept " ) { <nl> + / / test what happens if no header can be acquired <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> / / create a new header <nl> TRI_doc_mptr_t * header = operation . 
header = document - > _headersPtr - > request ( marker - > size ( ) ) ; / / PROTECTED by trx in trxCollection <nl> int TRI_UpdateShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> <nl> TRI_document_collection_t * document = trxCollection - > _collection - > _collection ; <nl> / / TRI_ASSERT_EXPENSIVE ( lock | | TRI_IsLockedCollectionTransaction ( trxCollection , TRI_TRANSACTION_WRITE , 0 ) ) ; <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoLegend " ) { <nl> + / / test what happens when no legend can be created <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoLegendExcept " ) { <nl> + / / test what happens when no legend can be created <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> / / create legend <nl> triagens : : basics : : JsonLegend legend ( document - > getShaper ( ) ) ; / / PROTECTED by trx in trxCollection <nl> int TRI_UpdateShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> } <nl> <nl> { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoLock " ) { <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> triagens : : arango : : CollectionWriteLocker collectionLocker ( document , lock ) ; <nl> <nl> / / get the header pointer of the previous revision <nl> int TRI_UpdateShapedJsonDocumentCollection ( TRI_transaction_collection_t * trxCol <nl> if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> return res ; <nl> } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoMarker " ) { <nl> + / / test what happens when no marker can be created <nl> + return TRI_ERROR_DEBUG ; <nl> + } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " UpdateDocumentNoMarkerExcept " ) { <nl> + / / test what happens when no marker can be created <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> triagens : : wal : : Marker * marker = nullptr ; <nl> TRI_df_marker_t const * original = static_cast < TRI_df_marker_t const * > ( oldHeader - > getDataPtr ( ) ) ; / / PROTECTED by trx in trxCollection <nl> mmm a / arangod / VocBase / transaction . cpp <nl> ppp b / arangod / VocBase / transaction . cpp <nl> <nl> # include " BasicsC / logging . h " <nl> # include " BasicsC / tri - strings . h " <nl> <nl> + # include " Utils / Exception . h " <nl> # include " VocBase / collection . h " <nl> # include " VocBase / document - collection . h " <nl> # include " VocBase / replication - logger . h " <nl> int TRI_AddOperationTransaction ( triagens : : wal : : DocumentOperation & operation , <nl> trx - > _waitForSync = true ; <nl> } <nl> <nl> - TRI_DEBUG_INTENTIONAL_FAIL_IF ( " AddTransactionOperationNoSlot " ) { <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " TransactionOperationNoSlot " ) { <nl> return TRI_ERROR_DEBUG ; <nl> } <nl> + <nl> + TRI_DEBUG_INTENTIONAL_FAIL_IF ( " TransactionOperationNoSlotExcept " ) { <nl> + THROW_ARANGO_EXCEPTION ( TRI_ERROR_DEBUG ) ; <nl> + } <nl> <nl> triagens : : wal : : SlotInfoCopy slotInfo = triagens : : wal : : LogfileManager : : instance ( ) - > allocateAndWrite ( operation . marker - > mem ( ) , operation . marker - > size ( ) , waitForSync ) ; <nl> <nl> int TRI_AddOperationTransaction ( triagens : : wal : : DocumentOperation & operation , <nl> return slotInfo . errorCode ; <nl> } <nl> <nl> - <nl> - <nl> if ( operation . type = = TRI_VOC_DOCUMENT_OPERATION_INSERT | | <nl> operation . type = = TRI_VOC_DOCUMENT_OPERATION_UPDATE ) { <nl> / / adjust the data position in the header <nl> mmm a / arangod / Wal / CollectorThread . 
cpp <nl> ppp b / arangod / Wal / CollectorThread . cpp <nl> static bool ScanMarker ( TRI_df_marker_t const * marker , <nl> <nl> if ( state - > failedTransactions . find ( transactionId ) ! = state - > failedTransactions . end ( ) ) { <nl> / / transaction had failed <nl> + state - > operationsCount [ collectionId ] + + ; <nl> break ; <nl> } <nl> <nl> static bool ScanMarker ( TRI_df_marker_t const * marker , <nl> <nl> if ( state - > failedTransactions . find ( transactionId ) ! = state - > failedTransactions . end ( ) ) { <nl> / / transaction had failed <nl> + state - > operationsCount [ collectionId ] + + ; <nl> break ; <nl> } <nl> <nl> static bool ScanMarker ( TRI_df_marker_t const * marker , <nl> <nl> if ( state - > failedTransactions . find ( transactionId ) ! = state - > failedTransactions . end ( ) ) { <nl> / / transaction had failed <nl> + state - > operationsCount [ collectionId ] + + ; <nl> break ; <nl> } <nl> <nl> mmm a / arangod / Wal / DocumentOperation . h <nl> ppp b / arangod / Wal / DocumentOperation . h <nl> namespace triagens { <nl> <nl> TRI_document_collection_t * document = trxCollection - > _collection - > _collection ; <nl> <nl> - if ( type = = TRI_VOC_DOCUMENT_OPERATION_INSERT ) { <nl> - / / nothing special to do for insert <nl> - document - > _numberDocuments + + ; <nl> - } <nl> - else if ( type = = TRI_VOC_DOCUMENT_OPERATION_UPDATE ) { <nl> + if ( type = = TRI_VOC_DOCUMENT_OPERATION_UPDATE ) { <nl> / / move header to the end of the list <nl> document - > _headersPtr - > moveBack ( header , & oldHeader ) ; / / PROTECTED by trx in trxCollection <nl> } <nl> - else if ( type = = TRI_VOC_DOCUMENT_OPERATION_REMOVE ) { <nl> - / / unlink the header <nl> - document - > _headersPtr - > unlink ( header ) ; / / PROTECTED by trx in trxCollection <nl> - document - > _numberDocuments - - ; <nl> - } <nl> <nl> / / free the local marker buffer <nl> delete [ ] marker - > steal ( ) ; <nl> mmm a / arangod / Wal / LogfileManager . cpp <nl> ppp b / arangod / Wal / LogfileManager . cpp <nl> int LogfileManager : : readShutdownInfo ( ) { <nl> int LogfileManager : : writeShutdownInfo ( bool writeShutdownTime ) { <nl> std : : string const filename = shutdownFilename ( ) ; <nl> <nl> - std : : string content ; <nl> - content . append ( " { \ " tick \ " : \ " " ) ; <nl> - content . append ( basics : : StringUtils : : itoa ( TRI_CurrentTickServer ( ) ) ) ; <nl> - content . append ( " \ " , \ " lastCollected \ " : \ " " ) ; <nl> - content . append ( basics : : StringUtils : : itoa ( _lastCollectedId ) ) ; <nl> - content . append ( " \ " , \ " lastSealed \ " : \ " " ) ; <nl> - content . append ( basics : : StringUtils : : itoa ( _lastSealedId ) ) ; <nl> + TRI_json_t * json = TRI_CreateArrayJson ( TRI_UNKNOWN_MEM_ZONE ) ; <nl> <nl> - if ( writeShutdownTime ) { <nl> - content . append ( " \ " , \ " shutdownTime \ " : \ " " ) ; <nl> - content . append ( getTimeString ( ) ) ; <nl> + if ( json = = nullptr ) { <nl> + LOG_ERROR ( " unable to write WAL state file ' % s ' " , filename . c_str ( ) ) ; <nl> + return TRI_ERROR_OUT_OF_MEMORY ; <nl> } <nl> <nl> - content . append ( " \ " } \ n " ) ; <nl> + std : : string val ; <nl> + <nl> + val = basics : : StringUtils : : itoa ( TRI_CurrentTickServer ( ) ) ; <nl> + TRI_Insert3ArrayJson ( TRI_UNKNOWN_MEM_ZONE , json , " tick " , TRI_CreateString2CopyJson ( TRI_UNKNOWN_MEM_ZONE , val . c_str ( ) , val . 
size ( ) ) ) ; <nl> + <nl> + val = basics : : StringUtils : : itoa ( _lastCollectedId ) ; <nl> + TRI_Insert3ArrayJson ( TRI_UNKNOWN_MEM_ZONE , json , " lastCollected " , TRI_CreateString2CopyJson ( TRI_UNKNOWN_MEM_ZONE , val . c_str ( ) , val . size ( ) ) ) ; <nl> <nl> - try { <nl> - basics : : FileUtils : : spit ( filename , content ) ; <nl> + val = basics : : StringUtils : : itoa ( _lastSealedId ) ; <nl> + TRI_Insert3ArrayJson ( TRI_UNKNOWN_MEM_ZONE , json , " lastSealed " , TRI_CreateString2CopyJson ( TRI_UNKNOWN_MEM_ZONE , val . c_str ( ) , val . size ( ) ) ) ; <nl> + <nl> + if ( writeShutdownTime ) { <nl> + std : : string const t ( getTimeString ( ) ) ; <nl> + TRI_Insert3ArrayJson ( TRI_UNKNOWN_MEM_ZONE , json , " shutdownTime " , TRI_CreateString2CopyJson ( TRI_UNKNOWN_MEM_ZONE , t . c_str ( ) , t . size ( ) ) ) ; <nl> } <nl> - catch ( std : : exception & ex ) { <nl> + <nl> + if ( ! TRI_SaveJson ( filename . c_str ( ) , json , false ) ) { <nl> + TRI_FreeJson ( TRI_UNKNOWN_MEM_ZONE , json ) ; <nl> LOG_ERROR ( " unable to write WAL state file ' % s ' " , filename . c_str ( ) ) ; <nl> + <nl> return TRI_ERROR_CANNOT_WRITE_FILE ; <nl> } <nl> - <nl> + <nl> + TRI_FreeJson ( TRI_UNKNOWN_MEM_ZONE , json ) ; <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> <nl> mmm a / js / server / tests / shell - transactions - noncluster . js <nl> ppp b / js / server / tests / shell - transactions - noncluster . js <nl> var internal = require ( " internal " ) ; <nl> var arangodb = require ( " org / arangodb " ) ; <nl> var helper = require ( " org / arangodb / aql - helper " ) ; <nl> var db = arangodb . db ; <nl> - <nl> + var testHelper = require ( " org / arangodb / test - helper " ) . Helper ; <nl> <nl> var compareStringIds = function ( l , r ) { <nl> if ( l . length ! = r . length ) { <nl> function transactionCrossCollectionSuite ( ) { <nl> TRANSACTION ( obj ) ; <nl> assertEqual ( 0 , c1 . count ( ) ) ; <nl> assertEqual ( 0 , c2 . count ( ) ) ; <nl> - <nl> - c1 . unload ( ) ; <nl> - c2 . unload ( ) ; <nl> - internal . wait ( 4 ) ; <nl> + <nl> + testHelper . waitUnload ( c1 ) ; <nl> + testHelper . waitUnload ( c2 ) ; <nl> <nl> assertEqual ( 0 , c1 . count ( ) ) ; <nl> assertEqual ( 0 , c2 . count ( ) ) ; <nl> function transactionCrossCollectionSuite ( ) { <nl> assertEqual ( 9 , c1 . count ( ) ) ; <nl> assertEqual ( 9 , c2 . count ( ) ) ; <nl> <nl> - c1 . unload ( ) ; <nl> - c2 . unload ( ) ; <nl> - <nl> - internal . wait ( 4 ) ; <nl> + testHelper . waitUnload ( c1 ) ; <nl> + testHelper . waitUnload ( c2 ) ; <nl> <nl> assertEqual ( 9 , c1 . count ( ) ) ; <nl> assertEqual ( 9 , c2 . count ( ) ) ; <nl> function transactionCrossCollectionSuite ( ) { <nl> <nl> c1 . unload ( ) ; <nl> c2 . unload ( ) ; <nl> - <nl> - internal . wait ( 4 ) ; <nl> + testHelper . waitUnload ( c1 ) ; <nl> + testHelper . waitUnload ( c2 ) ; <nl> <nl> assertEqual ( 1 , c1 . count ( ) ) ; <nl> assertEqual ( 1 , c2 . 
count ( ) ) ; <nl> function transactionCrossCollectionSuite ( ) { <nl> / / / @ brief test suite <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - function transactionServerFailuresSuite ( ) { <nl> + function transactionConstraintsSuite ( ) { <nl> var cn = " UnitTestsTransaction " ; <nl> <nl> var c = null ; <nl> function transactionServerFailuresSuite ( ) { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> setUp : function ( ) { <nl> - internal . debugClearFailAt ( ) ; <nl> db . _drop ( cn ) ; <nl> } , <nl> <nl> function transactionServerFailuresSuite ( ) { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> tearDown : function ( ) { <nl> - internal . debugClearFailAt ( ) ; <nl> - <nl> if ( c ! = = null ) { <nl> c . drop ( ) ; <nl> } <nl> function transactionServerFailuresSuite ( ) { <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackInsertSingle1 : function ( ) { <nl> + testMultiHashConstraintInsert1 : function ( ) { <nl> c = db . _create ( cn ) ; <nl> + var idx1 = c . ensureUniqueConstraint ( " value1 " ) ; <nl> + var idx2 = c . ensureUniqueConstraint ( " value2 " ) ; <nl> + <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , value1 : i , value2 : i } ) ; <nl> + } <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> <nl> - internal . debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> try { <nl> - c . save ( { _key : " foo " } ) ; <nl> + c . save ( { value1 : 9 , value2 : 17 } ) ; <nl> fail ( ) ; <nl> } <nl> catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + assertEqual ( internal . errors . ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value1 ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value2 ) ; <nl> + <nl> + var doc ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleHash ( idx1 . id , { value1 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> + } <nl> + <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleHash ( idx2 . id , { value2 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . 
value2 ) ; <nl> } <nl> } , <nl> <nl> function transactionServerFailuresSuite ( ) { <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackInsertSingle2 : function ( ) { <nl> + testMultiHashConstraintInsert2 : function ( ) { <nl> c = db . _create ( cn ) ; <nl> + var idx1 = c . ensureUniqueConstraint ( " value1 " ) ; <nl> + var idx2 = c . ensureUniqueConstraint ( " value2 " ) ; <nl> + <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , value1 : i , value2 : i } ) ; <nl> + } <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> <nl> - c . save ( { _key : " foo " } ) ; <nl> - internal . debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> try { <nl> - c . save ( { _key : " foo2 " } ) ; <nl> + c . save ( { value1 : 17 , value2 : 9 } ) ; <nl> fail ( ) ; <nl> } <nl> catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + assertEqual ( internal . errors . ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value1 ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value2 ) ; <nl> + <nl> + var doc ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleHash ( idx1 . id , { value1 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> + } <nl> + <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleHash ( idx2 . id , { value2 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> } <nl> } , <nl> <nl> function transactionServerFailuresSuite ( ) { <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackInsertMulti1 : function ( ) { <nl> + testMultiSkipConstraintInsert1 : function ( ) { <nl> c = db . _create ( cn ) ; <nl> - c . save ( { _key : " baz " } ) ; <nl> + var idx1 = c . ensureUniqueSkiplist ( " value1 " ) ; <nl> + var idx2 = c . ensureUniqueSkiplist ( " value2 " ) ; <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - c . save ( { _key : " foo " } ) ; <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> - fail ( ) ; <nl> - } <nl> - } ; <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , value1 : i , value2 : i } ) ; <nl> + } <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> <nl> try { <nl> - TRANSACTION ( obj ) ; <nl> + c . save ( { value1 : 9 , value2 : 17 } ) ; <nl> fail ( ) ; <nl> } <nl> catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + assertEqual ( internal . errors . ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED . code , err . 
errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value1 ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value2 ) ; <nl> + <nl> + var doc ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleSkiplist ( idx1 . id , { value1 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> } <nl> <nl> - assertEqual ( 1 , c . count ( ) ) ; <nl> - assertEqual ( " baz " , c . document ( " baz " ) . _key ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleSkiplist ( idx2 . id , { value2 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> + } <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackInsertMulti2 : function ( ) { <nl> + testMultiSkipConstraintInsert2 : function ( ) { <nl> c = db . _create ( cn ) ; <nl> + var idx1 = c . ensureUniqueSkiplist ( " value1 " ) ; <nl> + var idx2 = c . ensureUniqueSkiplist ( " value2 " ) ; <nl> <nl> var i ; <nl> - for ( i = 0 ; i < 100 ; + + i ) { <nl> - c . save ( { _key : " key " + i } ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , value1 : i , value2 : i } ) ; <nl> } <nl> - <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - for ( i = 0 ; i < 100 ; + + i ) { <nl> - c . save ( { _key : " foo " + i } ) ; <nl> - } <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> - fail ( ) ; <nl> - } <nl> - } ; <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> <nl> try { <nl> - TRANSACTION ( obj ) ; <nl> + c . save ( { value1 : 17 , value2 : 9 } ) ; <nl> fail ( ) ; <nl> } <nl> catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + assertEqual ( internal . errors . ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value1 ) ; <nl> + assertEqual ( 9 , c . document ( " test9 " ) . value2 ) ; <nl> + <nl> + var doc ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleSkiplist ( idx1 . id , { value1 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> } <nl> <nl> - assertEqual ( 100 , c . count ( ) ) ; <nl> - assertEqual ( " key0 " , c . document ( " key0 " ) . _key ) ; <nl> - assertEqual ( " key99 " , c . document ( " key99 " ) . _key ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + doc = c . byExampleSkiplist ( idx2 . id , { value2 : i } ) . toArray ( ) ; <nl> + assertEqual ( 1 , doc . 
length ) ; <nl> + doc = doc [ 0 ] ; <nl> + assertEqual ( " test " + i , doc . _key ) ; <nl> + assertEqual ( i , doc . value1 ) ; <nl> + assertEqual ( i , doc . value2 ) ; <nl> + } <nl> + } <nl> + <nl> + } ; <nl> + } <nl> + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + / / - - SECTION - - test suite <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test suite <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + function transactionServerFailuresSuite ( ) { <nl> + var cn = " UnitTestsTransaction " ; <nl> + <nl> + var c = null ; <nl> + <nl> + return { <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief set up <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + setUp : function ( ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief test : rollback in case of a server - side fail <nl> + / / / @ brief tear down <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - testRollbackUpdateSingle1 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> <nl> - c . save ( { _key : " foo " , value : 1 } ) ; <nl> + tearDown : function ( ) { <nl> + internal . debugClearFailAt ( ) ; <nl> <nl> - internal . debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> - try { <nl> - c . update ( " foo " , { value : 2 } ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + if ( c ! = = null ) { <nl> + c . drop ( ) ; <nl> } <nl> <nl> - assertEqual ( 1 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( 1 , c . document ( " foo " ) . value ) ; <nl> + c = null ; <nl> + internal . wait ( 0 ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - testRollbackUpdateSingle2 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> <nl> - c . save ( { _key : " foo " , value : 1 } ) ; <nl> - c . save ( { _key : " bar " , value : " a " } ) ; <nl> + testReadServerFailures : function ( ) { <nl> + var failures = [ " ReadDocumentNoLock " , " ReadDocumentNoLockExcept " ] ; <nl> <nl> - c . update ( " foo " , { value : 2 } ) ; <nl> - internal . 
debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> - try { <nl> - c . update ( " bar " , { value : " b " } ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( 2 , c . document ( " foo " ) . value ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> - assertEqual ( " a " , c . document ( " bar " ) . value ) ; <nl> + internal . debugSetFailAt ( f ) ; <nl> + <nl> + try { <nl> + c . document ( " foo " ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackUpdateMulti1 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> - <nl> - c . save ( { _key : " foo " , value : 1 } ) ; <nl> - c . save ( { _key : " bar " , value : " a " } ) ; <nl> + testInsertServerFailuresEmpty : function ( ) { <nl> + var failures = [ " InsertDocumentNoLegend " , " InsertDocumentNoLegendExcept " , " InsertDocumentNoMarker " , " InsertDocumentNoMarkerExcept " , " InsertDocumentNoHeader " , " InsertDocumentNoHeaderExcept " , " InsertDocumentNoLock " , " InsertDocumentNoOperation " , " InsertDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - c . update ( " foo " , { value : 2 } ) ; <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . update ( " bar " , { value : " b " } ) ; <nl> - fail ( ) ; <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> + <nl> + try { <nl> + c . save ( { _key : " foo " , a : 1 } ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> } <nl> - } ; <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> + assertEqual ( 0 , c . 
count ( ) ) ; <nl> + } ) ; <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : rollback in case of a server - side fail <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testInsertServerFailuresNonEmpty : function ( ) { <nl> + var failures = [ " InsertDocumentNoLegend " , " InsertDocumentNoLegendExcept " , " InsertDocumentNoMarker " , " InsertDocumentNoMarkerExcept " , " InsertDocumentNoHeader " , " InsertDocumentNoHeaderExcept " , " InsertDocumentNoLock " , " InsertDocumentNoOperation " , " InsertDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> + <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + c . save ( { _key : " bar " , foo : " bar " } ) ; <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " bar " ) . foo ) ; <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( 1 , c . document ( " foo " ) . value ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> - assertEqual ( " a " , c . document ( " bar " ) . value ) ; <nl> + try { <nl> + c . save ( { _key : " foo " , a : 1 } ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " bar " ) . foo ) ; <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackUpdateMulti2 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> - <nl> - c . save ( { _key : " foo " , value : 1 } ) ; <nl> - c . save ( { _key : " bar " , value : " a " } ) ; <nl> + testInsertServerFailuresConstraint : function ( ) { <nl> + var failures = [ " InsertDocumentNoLegend " , " InsertDocumentNoLegendExcept " , " InsertDocumentNoMarker " , " InsertDocumentNoMarkerExcept " , " InsertDocumentNoHeader " , " InsertDocumentNoHeaderExcept " , " InsertDocumentNoLock " ] ; <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . update ( " foo " , { value : 2 } ) ; <nl> - fail ( ) ; <nl> - } <nl> - } ; <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + c . save ( { _key : " foo " , foo : " bar " } ) ; <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . 
ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> + internal . debugSetFailAt ( f ) ; <nl> <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( 1 , c . document ( " foo " ) . value ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> - assertEqual ( " a " , c . document ( " bar " ) . value ) ; <nl> + try { <nl> + c . save ( { _key : " foo " , foo : " baz " } ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - testRollbackRemoveSingle1 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> <nl> - c . save ( { _key : " foo " } ) ; <nl> + testInsertServerFailuresMulti : function ( ) { <nl> + var failures = [ " InsertDocumentNoLegend " , " InsertDocumentNoLegendExcept " , " InsertDocumentNoMarker " , " InsertDocumentNoMarkerExcept " , " InsertDocumentNoHeader " , " InsertDocumentNoHeaderExcept " , " InsertDocumentNoLock " , " InsertDocumentNoOperation " , " InsertDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - internal . debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> - try { <nl> - c . remove ( " foo " ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> <nl> - assertEqual ( 1 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( var i = 0 ; i < 10 ; + + i ) { <nl> + if ( i = = 9 ) { <nl> + internal . debugSetFailAt ( f ) ; <nl> + } <nl> + c . save ( { _key : " test " + i , a : 1 } ) ; <nl> + } <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 0 , c . count ( ) ) ; <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - testRollbackRemoveSingle2 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> <nl> - c . save ( { _key : " foo " } ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> + testRemoveServerFailuresEmpty : function ( ) { <nl> + var failures = [ " RemoveDocumentNoMarker " , " RemoveDocumentNoMarkerExcept " , " RemoveDocumentNoLock " ] ; <nl> <nl> - internal . 
debugSetFailAt ( " TRI_WriteOperationDocumentCollection " ) ; <nl> - try { <nl> - c . remove ( " foo " ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> + internal . debugSetFailAt ( f ) ; <nl> + <nl> + try { <nl> + c . remove ( " foo " ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 0 , c . count ( ) ) ; <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackRemoveMulti1 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> - <nl> - c . save ( { _key : " foo " } ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> + testRemoveServerFailuresNonEmpty : function ( ) { <nl> + var failures = [ " RemoveDocumentNoMarker " , " RemoveDocumentNoMarkerExcept " , " RemoveDocumentNoLock " , " RemoveDocumentNoOperation " , " RemoveDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + c . save ( { _key : " foo " , foo : " bar " } ) ; <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> + <nl> + try { <nl> c . remove ( " foo " ) ; <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . remove ( " bar " ) ; <nl> - fail ( ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> } <nl> - } ; <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> - <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackRemoveMulti2 : function ( ) { <nl> - c = db . 
_create ( cn ) ; <nl> - <nl> - c . save ( { _key : " foo " } ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> + testRemoveServerFailuresMulti : function ( ) { <nl> + var failures = [ " RemoveDocumentNoMarker " , " RemoveDocumentNoMarkerExcept " , " RemoveDocumentNoLock " , " RemoveDocumentNoOperation " , " RemoveDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . remove ( " foo " ) ; <nl> - fail ( ) ; <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> } <nl> - } ; <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> - <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( var i = 0 ; i < 10 ; + + i ) { <nl> + if ( i = = 9 ) { <nl> + internal . debugSetFailAt ( f ) ; <nl> + } <nl> + c . remove ( " test " + i ) ; <nl> + } <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + assertEqual ( i , c . document ( " test " + i ) . a ) ; <nl> + } <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackRemoveMixed1 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> + testUpdateServerFailuresNonEmpty : function ( ) { <nl> + var failures = [ " UpdateDocumentNoLegend " , " UpdateDocumentNoLegendExcept " , " UpdateDocumentNoMarker " , " UpdateDocumentNoMarkerExcept " , " UpdateDocumentNoLock " , " UpdateDocumentNoOperation " , " UpdateDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> + <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + c . save ( { _key : " foo " , foo : " bar " } ) ; <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> <nl> - var i ; <nl> + try { <nl> + c . update ( " foo " , { bar : " baz " } ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> <nl> - for ( i = 0 ; i < 100 ; + + i ) { <nl> - c . save ( { _key : " key " + i , value : i } ) ; <nl> - } <nl> + assertEqual ( 1 , c . 
count ( ) ) ; <nl> + assertEqual ( " bar " , c . document ( " foo " ) . foo ) ; <nl> + assertEqual ( undefined , c . document ( " foo " ) . bar ) ; <nl> + } ) ; <nl> + } , <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : rollback in case of a server - side fail <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - for ( i = 0 ; i < 50 ; + + i ) { <nl> - c . remove ( " key " + i ) ; <nl> - } <nl> + testUpdateServerFailuresMulti : function ( ) { <nl> + var failures = [ " UpdateDocumentNoLegend " , " UpdateDocumentNoLegendExcept " , " UpdateDocumentNoMarker " , " UpdateDocumentNoMarkerExcept " , " UpdateDocumentNoLock " , " UpdateDocumentNoOperation " , " UpdateDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - for ( i = 50 ; i < 100 ; + + i ) { <nl> - c . update ( " key " + i , { value : i - 50 } ) ; <nl> - } <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> + } <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( var i = 0 ; i < 10 ; + + i ) { <nl> + if ( i = = 9 ) { <nl> + internal . debugSetFailAt ( f ) ; <nl> + } <nl> + c . update ( " test " + i , { a : i + 1 } ) ; <nl> + } <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . remove ( " key50 " ) ; <nl> - fail ( ) ; <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + assertEqual ( i , c . document ( " test " + i ) . a ) ; <nl> } <nl> - } ; <nl> + } ) ; <nl> + } , <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> - <nl> - assertEqual ( 100 , c . count ( ) ) ; <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : rollback in case of a server - side fail <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testUpdateServerFailuresMultiUpdate : function ( ) { <nl> + var failures = [ " UpdateDocumentNoLegend " , " UpdateDocumentNoLegendExcept " , " UpdateDocumentNoMarker " , " UpdateDocumentNoMarkerExcept " , " UpdateDocumentNoLock " , " UpdateDocumentNoOperation " , " UpdateDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> + <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . 
_create ( cn ) ; <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> + } <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( var i = 0 ; i < 10 ; + + i ) { <nl> + if ( i = = 9 ) { <nl> + internal . debugSetFailAt ( f ) ; <nl> + } <nl> + / / double update <nl> + c . update ( " test " + i , { a : i + 1 } ) ; <nl> + c . update ( " test " + i , { a : i + 2 , b : 2 } ) ; <nl> + } <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + assertEqual ( i , c . document ( " test " + i ) . a ) ; <nl> + assertEqual ( undefined , c . document ( " test " + i ) . b ) ; <nl> + } <nl> + } ) ; <nl> } , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief test : rollback in case of a server - side fail <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - testRollbackRemoveMixed2 : function ( ) { <nl> - c = db . _create ( cn ) ; <nl> + testTruncateServerFailures : function ( ) { <nl> + var failures = [ " RemoveDocumentNoMarker " , " RemoveDocumentNoMarkerExcept " , " RemoveDocumentNoLock " , " RemoveDocumentNoOperation " , " RemoveDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> + <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + <nl> + var i ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> + } <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> <nl> - c . save ( { _key : " foo " } ) ; <nl> - c . save ( { _key : " bar " } ) ; <nl> + try { <nl> + c . truncate ( ) ; <nl> + fail ( ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> <nl> - var obj = { <nl> - collections : { <nl> - write : [ cn ] <nl> - } , <nl> - action : function ( ) { <nl> - var i ; <nl> + assertEqual ( 10 , c . count ( ) ) ; <nl> + for ( i = 0 ; i < 10 ; + + i ) { <nl> + assertEqual ( i , c . document ( " test " + i ) . a ) ; <nl> + } <nl> + } ) ; <nl> + } , <nl> <nl> - for ( i = 0 ; i < 10 ; + + i ) { <nl> - c . save ( { _key : " key " + i , value : i } ) ; <nl> - } <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : rollback in case of a server - side fail <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - for ( i = 0 ; i < 5 ; + + i ) { <nl> - c . 
remove ( " key " + i ) ; <nl> - } <nl> + testMixedServerFailures : function ( ) { <nl> + var failures = [ " UpdateDocumentNoLegend " , " UpdateDocumentNoLegendExcept " , " UpdateDocumentNoMarker " , " UpdateDocumentNoMarkerExcept " , " UpdateDocumentNoLock " , " UpdateDocumentNoOperation " , " UpdateDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " , " RemoveDocumentNoMarker " , " RemoveDocumentNoMarkerExcept " , " RemoveDocumentNoLock " , " RemoveDocumentNoOperation " , " RemoveDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " , " InsertDocumentNoLegend " , " InsertDocumentNoLegendExcept " , " InsertDocumentNoMarker " , " InsertDocumentNoMarkerExcept " , " InsertDocumentNoHeader " , " InsertDocumentNoHeaderExcept " , " InsertDocumentNoLock " , " InsertDocumentNoOperation " , " InsertDocumentNoOperationExcept " , " TransactionOperationNoSlot " , " TransactionOperationNoSlotExcept " ] ; <nl> <nl> - for ( i = 5 ; i < 10 ; + + i ) { <nl> - c . update ( " key " + i , { value : i - 5 } ) ; <nl> - } <nl> + failures . forEach ( function ( f ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + var i ; <nl> + for ( i = 0 ; i < 100 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> + } <nl> + assertEqual ( 100 , c . count ( ) ) ; <nl> + <nl> + internal . debugSetFailAt ( f ) ; <nl> <nl> - internal . debugSetFailAt ( " AddCollectionOperation - OOM " ) ; <nl> - c . remove ( " key5 " ) ; <nl> - fail ( ) ; <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( i = 100 ; i < 150 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : i } ) ; <nl> + } <nl> + assertEqual ( 150 , c . count ( ) ) ; <nl> + <nl> + for ( i = 0 ; i < 50 ; + + i ) { <nl> + c . remove ( " test " + i ) ; <nl> + } <nl> + assertEqual ( 100 , c . count ( ) ) ; <nl> + <nl> + for ( i = 50 ; i < 100 ; + + i ) { <nl> + c . update ( " test " + i , { a : i - 50 , b : " foo " } ) ; <nl> + } <nl> + assertEqual ( 100 , c . count ( ) ) ; <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> } <nl> - } ; <nl> <nl> - try { <nl> - TRANSACTION ( obj ) ; <nl> - fail ( ) ; <nl> - } <nl> - catch ( err ) { <nl> - assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> - } <nl> - <nl> - assertEqual ( 2 , c . count ( ) ) ; <nl> - assertEqual ( " foo " , c . document ( " foo " ) . _key ) ; <nl> - assertEqual ( " bar " , c . document ( " bar " ) . _key ) ; <nl> + assertEqual ( 100 , c . count ( ) ) ; <nl> + for ( i = 0 ; i < 100 ; + + i ) { <nl> + assertEqual ( i , c . document ( " test " + i ) . a ) ; <nl> + assertEqual ( undefined , c . document ( " test " + i ) . b ) ; <nl> + } <nl> + } ) ; <nl> } <nl> <nl> } ; <nl> jsunity . run ( transactionGraphSuite ) ; <nl> jsunity . run ( transactionRollbackSuite ) ; <nl> jsunity . run ( transactionCountSuite ) ; <nl> jsunity . run ( transactionCrossCollectionSuite ) ; <nl> + jsunity . run ( transactionConstraintsSuite ) ; <nl> <nl> / / only run this test suite if server - side failures are enabled <nl> if ( internal . debugCanUseFailAt ( ) ) { <nl> - / / jsunity . run ( transactionServerFailuresSuite ) ; <nl> + jsunity . run ( transactionServerFailuresSuite ) ; <nl> } <nl> <nl> return jsunity . 
done ( ) ; <nl> + / * <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test : rollback in case of a server - side fail <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + testInsertServerFailuresMultiConstraint : function ( ) { <nl> + internal . debugClearFailAt ( ) ; <nl> + db . _drop ( cn ) ; <nl> + c = db . _create ( cn ) ; <nl> + c . save ( { _key : " test9 " , a : 1 } ) ; <nl> + <nl> + try { <nl> + TRANSACTION ( { <nl> + collections : { <nl> + write : [ cn ] , <nl> + } , <nl> + action : function ( ) { <nl> + for ( var i = 0 ; i < 10 ; + + i ) { <nl> + c . save ( { _key : " test " + i , a : 10 } ) ; <nl> + } <nl> + } <nl> + } ) ; <nl> + } <nl> + catch ( err ) { <nl> + assertEqual ( internal . errors . ERROR_DEBUG . code , err . errorNum ) ; <nl> + } <nl> + <nl> + assertEqual ( 1 , c . count ( ) ) ; <nl> + assertEqual ( 1 , c . document ( " test9 " ) . a ) ; <nl> + } , <nl> + * / <nl> <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> / / - - SECTION - - END - OF - FILE <nl> mmm a / lib / BasicsC / json . c <nl> ppp b / lib / BasicsC / json . c <nl> bool TRI_PrintJson ( int fd , TRI_json_t const * object ) { <nl> <nl> bool TRI_SaveJson ( char const * filename , <nl> TRI_json_t const * object , <nl> - const bool syncFile ) { <nl> + bool syncFile ) { <nl> char * tmp ; <nl> int fd ; <nl> int res ; <nl> mmm a / lib / BasicsC / json . h <nl> ppp b / lib / BasicsC / json . h <nl> bool TRI_PrintJson ( int fd , TRI_json_t const * ) ; <nl> / / / @ brief saves a json object <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - bool TRI_SaveJson ( char const * , TRI_json_t const * , const bool ) ; <nl> + bool TRI_SaveJson ( char const * , TRI_json_t const * , bool ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief copies a json object into a given buffer <nl> | added many tests | arangodb/arangodb | 2a0e0b18d089b9b1ff32b99889fe9b1d1c3de2fc | 2014-06-16T12:39:26Z |
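The constraint tests added above (testMultiHashConstraintInsert2, testMultiSkipConstraintInsert1/2) all assert one invariant: when a save trips ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED on either value1 or value2, the collection count and the contents of both unique indexes are exactly what they were before the failed insert. A minimal C++ sketch of that invariant — not ArangoDB code; std::set stands in for a unique hash or skiplist index:

```cpp
// Two "unique indexes"; a collision in the second must roll back the
// already-applied change to the first, so a rejected insert leaves no trace.
#include <cassert>
#include <set>

struct TwoIndexStore {
    std::set<int> idx1;  // unique index on value1
    std::set<int> idx2;  // unique index on value2

    // Returns false (and changes nothing) if either value collides.
    bool insert(int value1, int value2) {
        auto r1 = idx1.insert(value1);
        if (!r1.second) return false;        // value1 not unique
        if (!idx2.insert(value2).second) {   // value2 not unique:
            idx1.erase(r1.first);            // undo the first index update
            return false;
        }
        return true;
    }
};

int main() {
    TwoIndexStore s;
    for (int i = 0; i < 10; ++i) s.insert(i, i);  // like the 10 c.save() calls
    bool accepted = s.insert(17, 9);              // value2 == 9 collides
    assert(!accepted);
    assert(s.idx1.size() == 10 && s.idx2.size() == 10);  // nothing leaked
}
```

The assertEqual(10, c.count()) and byExampleHash/byExampleSkiplist loops in the tests check exactly this: the partial index update from the failed document must have been undone.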
mmm a / src / mongo / db / auth / address_restriction . h <nl> ppp b / src / mongo / db / auth / address_restriction . h <nl> class AddressRestriction : public NamedRestriction { <nl> } <nl> } <nl> <nl> - / * * <nl> - * If the given BSONElement represents a valid CIDR range , constructs and returns the <nl> - * AddressRestriction . Otherwise returns an error . <nl> - * / <nl> - static StatusWith < AddressRestriction < T > > parse ( BSONElement from ) { <nl> - auto cidr = CIDR : : parse ( from ) ; <nl> - if ( ! cidr . isOK ( ) ) { <nl> - return cidr . getStatus ( ) ; <nl> - } <nl> - return AddressRestriction < T > ( std : : move ( cidr . getValue ( ) ) ) ; <nl> - } <nl> - <nl> - / * * <nl> - * If the given string represents a valid CIDR range , constructs and returns the <nl> - * AddressRestriction . Otherwise returns an error . <nl> - * / <nl> - static StatusWith < AddressRestriction < T > > parse ( StringData from ) { <nl> - auto cidr = CIDR : : parse ( from ) ; <nl> - if ( ! cidr . isOK ( ) ) { <nl> - return cidr . getStatus ( ) ; <nl> - } <nl> - return AddressRestriction < T > ( std : : move ( cidr . getValue ( ) ) ) ; <nl> - } <nl> - <nl> / * * <nl> * Returns true if the Environment ' s client / server ' s address <nl> * satisfies this restriction set . <nl> | SERVER - 30209 Remove ` AddressRestriction < T > : : parse ` | mongodb/mongo | fcdd2fc9e86f39938fe10e4fd340fe1ba9989f55 | 2017-07-27T16:03:43Z |
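The two deleted overloads were thin factories: run CIDR::parse on a BSONElement or StringData, propagate the failure, otherwise wrap the value in an AddressRestriction<T>. A sketch of that shape in plain C++ — std::optional stands in for MongoDB's StatusWith<T> (which also carries an error Status), and the CIDR body here is invented for illustration:

```cpp
#include <optional>
#include <string>
#include <utility>

struct CIDR {
    std::string range;
    // Illustrative stand-in; the real CIDR::parse returns StatusWith<CIDR>.
    static std::optional<CIDR> parse(const std::string& s) {
        if (s.find('/') == std::string::npos) return std::nullopt;
        return CIDR{s};
    }
};

template <typename T>
struct AddressRestriction {
    explicit AddressRestriction(CIDR c) : cidr(std::move(c)) {}
    CIDR cidr;

    // Shape of the removed convenience: validate first, construct on success.
    static std::optional<AddressRestriction> parse(const std::string& s) {
        auto cidr = CIDR::parse(s);
        if (!cidr) return std::nullopt;  // propagate the parse error
        return AddressRestriction(std::move(*cidr));
    }
};

int main() {
    struct ClientSource {};
    auto r = AddressRestriction<ClientSource>::parse("127.0.0.0/8");
    return r ? 0 : 1;
}
```

Dropping the wrappers shrinks the template's public surface; callers needing the checked path can compose CIDR::parse with the CIDR constructor themselves.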
mmm a / . travis . yml <nl> ppp b / . travis . yml <nl> <nl> sudo : false <nl> # Enabling test on Linux and OS X <nl> os : <nl> + - linux <nl> - osx <nl> <nl> # Use Build Matrix to do lint and build seperately <nl> mmm a / scripts / travis_osx_install . sh <nl> ppp b / scripts / travis_osx_install . sh <nl> conda update - q conda <nl> # Useful for debugging any issues with conda <nl> conda info - a <nl> <nl> - if [ $ { TASK } = = " python - package3 " ] ; then <nl> + if [ $ { TASK } = = " package3 " ] ; then <nl> conda create - n myenv python = 3 . 4 <nl> - alias python3 = python <nl> else <nl> conda create - n myenv python = 2 . 7 <nl> fi <nl> source activate myenv <nl> conda install numpy scipy matplotlib nose <nl> - python - m pip install graphviz <nl> + python - m pip install graphviz <nl> \ No newline at end of file <nl> mmm a / scripts / travis_script . sh <nl> ppp b / scripts / travis_script . sh <nl> fi <nl> # prereqs for things that need make <nl> cp make / config . mk config . mk <nl> <nl> + export NOSE3 = nosetests3 <nl> + export PYTHON3 = python3 <nl> if [ $ { TRAVIS_OS_NAME } = = " osx " ] ; then <nl> source scripts / travis_osx_install . sh <nl> echo " USE_BLAS = apple " > > config . mk <nl> echo " USE_OPENMP = 0 " > > config . mk <nl> - alias nosetests = ' python - m noise ' <nl> - alias nosetests3 = ' python - m noise ' <nl> + alias nosetests = ' python - m nose ' <nl> + export NOSE3 = ' python - m nose ' <nl> + export PYTHON3 = python <nl> else <nl> echo " USE_BLAS = blas " > > config . mk <nl> echo " USE_CUDNN = 0 " > > config . mk <nl> fi <nl> if [ $ { TASK } = = " python " ] ; then <nl> echo " USE_CUDA = 0 " > > config . mk <nl> make all | | exit - 1 <nl> + python - - version <nl> export MXNET_ENGINE_TYPE = ThreadedEngine <nl> nosetests tests / python / unittest | | exit - 1 <nl> nosetests tests / python / train | | exit - 1 <nl> if [ $ { TASK } = = " python3 " ] ; then <nl> echo " USE_CUDA = 0 " > > config . mk <nl> make all | | exit - 1 <nl> export MXNET_ENGINE_TYPE = ThreadedEngine <nl> - nosetests3 tests / python / unittest | | exit - 1 <nl> - nosetests3 tests / python / train | | exit - 1 <nl> + $ { PYTHON3 } - - version <nl> + $ { NOSE3 } tests / python / unittest | | exit - 1 <nl> + $ { NOSE3 } tests / python / train | | exit - 1 <nl> fi <nl> <nl> if [ $ { TASK } = = " python_naive " ] ; then <nl> echo " USE_CUDA = 0 " > > config . mk <nl> make all | | exit - 1 <nl> export MXNET_ENGINE_TYPE = NaiveEngine <nl> + python - - version <nl> nosetests tests / python / unittest | | exit - 1 <nl> nosetests tests / python / train | | exit - 1 <nl> fi <nl> if [ $ { TASK } = = " python_perdev " ] ; then <nl> echo " USE_CUDA = 0 " > > config . mk <nl> make all | | exit - 1 <nl> export MXNET_ENGINE_TYPE = ThreadedEnginePerDevice <nl> + python - - version <nl> nosetests tests / python / unittest | | exit - 1 <nl> nosetests tests / python / train | | exit - 1 <nl> fi <nl> mmm a / src / io / image_augmenter . h <nl> ppp b / src / io / image_augmenter . 
h <nl> struct ImageAugmentParam : public dmlc : : Parameter < ImageAugmentParam > { <nl> class ImageAugmenter { <nl> public : <nl> / / contructor <nl> - ImageAugmenter ( void ) <nl> - : tmpres_ ( false ) { <nl> + ImageAugmenter ( void ) { <nl> # if MXNET_USE_OPENCV <nl> rotateM_ = cv : : Mat ( 2 , 3 , CV_32F ) ; <nl> # endif <nl> class ImageAugmenter { <nl> # endif <nl> <nl> private : <nl> - / / temp input space <nl> - mshadow : : TensorContainer < cpu , 3 > tmpres_ ; <nl> - / / mean image <nl> - mshadow : : TensorContainer < cpu , 3 > meanimg_ ; <nl> - / * ! \ brief temp space * / <nl> - mshadow : : TensorContainer < cpu , 3 > img_ ; <nl> # if MXNET_USE_OPENCV <nl> / / temporal space <nl> cv : : Mat temp_ ; <nl> / / rotation param <nl> cv : : Mat rotateM_ ; <nl> - / / whether the mean file is ready <nl> # endif <nl> - bool meanfile_ready_ ; <nl> / / parameters <nl> ImageAugmentParam param_ ; <nl> / * ! \ brief list of possible rotate angle * / <nl> | [ OSX ] Make OSX Travis work | apache/incubator-mxnet | f92c496d23c73a33cc537b876b9f42aa49d32056 | 2015-09-27T04:24:24Z |
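Two things happen in this commit: the Travis scripts stop relying on shell aliases (which do not reliably propagate into non-interactive script steps) and instead export NOSE3/PYTHON3 variables that each platform points at a different runner, and ImageAugmenter drops member buffers (tmpres_, meanimg_, img_, meanfile_ready_) that nothing used. The variable indirection is the reusable pattern; a hedged C++ rendering of it, illustrative only and not part of any MXNet build:

```cpp
// Pick the Python-3 test runner the way travis_script.sh now does: an
// environment override wins, otherwise fall back to a platform default.
#include <cstdlib>
#include <iostream>
#include <string>

std::string nose3_command() {
    if (const char* env = std::getenv("NOSE3"))  // exported by the script
        return env;
#ifdef __APPLE__
    return "python -m nose";  // macOS conda env has no nosetests3 binary
#else
    return "nosetests3";      // Linux default
#endif
}

int main() {
    std::cout << nose3_command() << " tests/python/unittest\n";
}
```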
mmm a / editor / filesystem_dock . cpp <nl> ppp b / editor / filesystem_dock . cpp <nl> void FileSystemDock : : _notification ( int p_what ) { <nl> <nl> if ( low_height_mode ) { <nl> <nl> - file_list_vb - > hide ( ) ; <nl> + tree - > hide ( ) ; <nl> tree - > set_v_size_flags ( SIZE_EXPAND_FILL ) ; <nl> button_tree - > show ( ) ; <nl> } else { <nl> void FileSystemDock : : _notification ( int p_what ) { <nl> button_favorite - > show ( ) ; <nl> _update_tree ( true ) ; <nl> } <nl> + tree - > ensure_cursor_is_visible ( ) ; <nl> <nl> if ( ! file_list_vb - > is_visible ( ) ) { <nl> file_list_vb - > show ( ) ; <nl> void FileSystemDock : : navigate_to_path ( const String & p_path ) { <nl> _update_tree ( true ) ; <nl> _update_files ( false ) ; <nl> } else { <nl> - if ( file_name . empty ( ) ) { <nl> - _go_to_tree ( ) ; <nl> - } else { <nl> - _go_to_file_list ( ) ; <nl> - } <nl> + _go_to_file_list ( ) ; <nl> } <nl> <nl> if ( ! file_name . empty ( ) ) { <nl> | Merge pull request from volzhs / fix - filesystem - dock | godotengine/godot | d510c33cd84a8cfc190cb1dce4e19f033704650e | 2017-12-21T21:25:04Z |
mmm a / . travis . yml <nl> ppp b / . travis . yml <nl> env : <nl> - TASK = cpp_test <nl> # run tests / python <nl> - TASK = python_test <nl> - # - TASK = r_test <nl> - - TASK = installation_packaged_test <nl> - - TASK = installation_source_test <nl> + - TASK = r_test <nl> # - TASK = julia JULIA_VER = 0 . 4 <nl> # - TASK = scala_test <nl> <nl> addons : <nl> before_install : <nl> - export NVCC_PREFIX = $ { HOME } <nl> - source dmlc - core / scripts / travis / travis_setup_env . sh <nl> + - export PYTHONPATH = $ { PYTHONPATH } : $ { PWD } / python <nl> - export MAVEN_SKIP_RC = true <nl> - export MAVEN_OPTS = " - Xmx512m - XX : MaxPermSize = 256m - XX : - UseGCOverheadLimit - XX : + CMSClassUnloadingEnabled - XX : + UseConcMarkSweepGC " <nl> <nl> mmm a / docs / get_started / install . md <nl> ppp b / docs / get_started / install . md <nl> If not already installed , [ download and install Xcode ] ( https : / / developer . apple . c <nl> < br / > <nl> <nl> * * Step 1 * * Install prerequisites - Homebrew , python development tools . <nl> - < ! - - Pipe to / dev / null in case Homebrew or package was already installed to prevent failure - - > <nl> + <nl> ` ` ` bash <nl> # Install Homebrew <nl> - $ / usr / bin / ruby - e " $ ( curl - fsSL https : / / raw . githubusercontent . com / Homebrew / install / master / install ) " < / dev / null <nl> + $ / usr / bin / ruby - e " $ ( curl - fsSL https : / / raw . githubusercontent . com / Homebrew / install / master / install ) " <nl> $ export PATH = / usr / local / bin : / usr / local / sbin : $ PATH <nl> <nl> # Install python development tools - python2 . 7 , pip , python - setuptools <nl> - $ brew list python & > / dev / null | | brew install python <nl> + $ brew install python <nl> ` ` ` <nl> <nl> * * Step 2 * * Install virtualenv for macOS . <nl> Install * MXNet * with OpenBLAS acceleration . <nl> < br / > <nl> <nl> * * Step 1 * * Install prerequisites - Homebrew , python development tools . <nl> - < ! - - Pipe to / dev / null in case Homebrew or package was already installed to prevent failure - - > <nl> + <nl> ` ` ` bash <nl> # Install Homebrew <nl> - $ / usr / bin / ruby - e " $ ( curl - fsSL https : / / raw . githubusercontent . com / Homebrew / install / master / install ) " < / dev / null <nl> + $ / usr / bin / ruby - e " $ ( curl - fsSL https : / / raw . githubusercontent . com / Homebrew / install / master / install ) " <nl> $ export PATH = / usr / local / bin : / usr / local / sbin : $ PATH <nl> <nl> # Install python development tools - python2 . 7 , pip , python - setuptools <nl> - $ brew list python & > / dev / null | | brew install python <nl> + $ brew install python <nl> ` ` ` <nl> <nl> * * Step 2 * * Install MXNet with OpenBLAS acceleration . <nl> mmm a / tests / jenkins / run_test_installation_docs . sh <nl> ppp b / tests / jenkins / run_test_installation_docs . sh <nl> function retrieve_closest_index ( ) { <nl> cur_num = $ { arr [ $ { i } ] } <nl> if [ [ $ { cur_num } - eq $ { number } | | $ { cur_num } - gt $ { number } ] ] <nl> then <nl> - echo " $ { i } " <nl> + echo $ { i } <nl> return <nl> fi <nl> done <nl> function retrieve_commands ( ) { <nl> fi <nl> done <nl> done <nl> - echo " $ { commands } " <nl> + echo $ { commands } <nl> } <nl> <nl> # Sorts array of numbers . <nl> function retrieve_commands ( ) { <nl> function sort ( ) { <nl> declare - a lineno_array = ( " $ { ! 
1 } " ) <nl> size = $ { # lineno_array [ @ ] } <nl> - for ( ( i = 1 ; i < size ; i + + ) ) <nl> + for ( ( i = 1 ; i < = $ ( ( $ size - 1 ) ) ; i + + ) ) <nl> do <nl> - temp = $ { lineno_array [ i ] } <nl> - j = $ ( ( i - 1 ) ) <nl> - while [ $ temp - lt $ { lineno_array [ j ] } ] <nl> - do <nl> - lineno_array [ j + 1 ] = $ { lineno_array [ j ] } <nl> - j = $ ( ( $ j - 1 ) ) <nl> - if [ $ j = = - 1 ] <nl> - then <nl> - break <nl> - fi <nl> - done <nl> - lineno_array [ j + 1 ] = $ temp <nl> + j = $ i <nl> + while ( ( $ { j } > 0 & & $ { lineno_array [ $ j - 1 ] } > $ { lineno_array [ $ j ] } ) ) ; do <nl> + x = $ { lineno_array [ $ j - 1 ] } <nl> + lineno_array [ $ j - 1 ] = $ { lineno_array [ $ j ] } <nl> + lineno_array [ $ j ] = $ x <nl> + j = $ j - 1 <nl> + done <nl> done <nl> printf " $ { lineno_array [ * ] } " <nl> } <nl> <nl> - if ( ( $ # < 2 ) ) ; then <nl> + if ( ( $ # < 1 ) ) ; then <nl> echo " " <nl> - echo " Usage : $ ( basename $ 0 ) FILE ENV " <nl> + echo " Usage : $ ( basename $ 0 ) FILE " <nl> echo " " <nl> exit 1 <nl> fi <nl> FILE = $ { 1 } <nl> - TASK = $ { 2 } <nl> <nl> # get all line numbers with " ` ` ` " signifying start or end of source section and put them in an array <nl> SOURCE_REGEX = " \ ` \ ` \ ` " <nl> PIP_LINENO_ALL = ( $ ( grep - n " < div class = \ " pip \ " > " " $ { FILE } " | cut - d : - f 1 ) ) <nl> DOCKER_LINENO_ALL = ( $ ( grep - n " < div class = \ " docker \ " > " " $ { FILE } " | cut - d : - f 1 ) ) <nl> BUILDFROMSOURCE_LINENO_ALL = ( $ ( grep - n " < div class = \ " build - from - source \ " > " " $ { FILE } " | cut - d : - f 1 ) ) <nl> <nl> - # validation instructions <nl> - PYTHON_GPU_VALIDATION = " import mxnet as mx ; a = mx . nd . ones ( ( 2 , 3 ) , mx . gpu ( ) ) ; b = a * 2 + 1 ; b . asnumpy ( ) " <nl> - PYTHON_CPU_VALIDATION = " import mxnet as mx ; a = mx . nd . ones ( ( 2 , 3 ) ) ; b = a * 2 + 1 ; b . asnumpy ( ) " <nl> - <nl> # Given two line numbers , collects instruction sets for installing via Virtualenv , Pip , Docker , and source within the <nl> # two lines assuming there is one of each . <nl> # <nl> function set_instruction_set ( ) { <nl> $ { sorted_indexes [ $ end_buildfromsource_command_index ] } ) <nl> } <nl> <nl> - if [ [ " $ { TASK } " = = " linux " ] ] <nl> - then <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # LINUX - PYTHON - CPU # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - echo <nl> - echo <nl> - echo " # # # Testing LINUX - PYTHON - CPU # # # " <nl> - echo <nl> - # range of all lines inside Linux - Python - CPU instructions <nl> - LINUX_PYTHON_CPU_START_LINENO = $ ( grep - n " START - Linux Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - LINUX_PYTHON_CPU_END_LINENO = $ ( grep - n " END - Linux Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - <nl> - set_instruction_set $ { LINUX_PYTHON_CPU_START_LINENO } $ { LINUX_PYTHON_CPU_END_LINENO } <nl> - <nl> - virtualenv_commands = " $ { virtualenv_commands } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Virtualenv # # # " <nl> - echo " $ { virtualenv_commands } " <nl> - echo <nl> - docker run - - rm ubuntu : 14 . 04 bash - c " $ { virtualenv_commands } " <nl> - <nl> - pip_commands = " $ { pip_commands } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Pip # # # " <nl> - echo " $ { pip_commands } " <nl> - echo <nl> - docker run - - rm ubuntu : 14 . 
04 bash - c " $ { pip_commands } " <nl> - <nl> - docker_img = $ ( echo " $ docker_commands " | sed ' s / . * docker pull \ ( . * \ ) / \ 1 / ' | sed ' s / ; . * / / ' ) <nl> - docker_commands = " $ { docker_commands } docker run $ { docker_img } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Docker # # # " <nl> - echo " $ { docker_commands } " <nl> - echo <nl> - eval " $ { docker_commands } " <nl> - <nl> - buildfromsource_commands = " $ { buildfromsource_commands } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Build From Source # # # " <nl> - echo " $ { buildfromsource_commands } " <nl> - echo <nl> - docker run - - rm ubuntu : 14 . 04 bash - c " $ { buildfromsource_commands } " <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # LINUX - PYTHON - GPU # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - <nl> - echo <nl> - echo <nl> - echo " # # # Testing LINUX - PYTHON - GPU # # # " <nl> - echo <nl> - # range of all lines inside Linux - Python - GPU instructions <nl> - LINUX_PYTHON_GPU_START_LINENO = $ ( grep - n " START - Linux Python GPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - LINUX_PYTHON_GPU_END_LINENO = $ ( grep - n " END - Linux Python GPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - <nl> - set_instruction_set $ { LINUX_PYTHON_GPU_START_LINENO } $ { LINUX_PYTHON_GPU_END_LINENO } <nl> - <nl> - virtualenv_commands = " $ { virtualenv_commands } python - c \ " $ { PYTHON_GPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Virtualenv # # # " <nl> - echo " $ { virtualenv_commands } " <nl> - echo <nl> - nvidia - docker run - - rm nvidia / cuda : 8 . 0 - cudnn5 - devel - ubuntu14 . 04 bash - c " $ { virtualenv_commands } " <nl> - <nl> - pip_commands = " $ { pip_commands } python - c \ " $ { PYTHON_GPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Pip # # # " <nl> - echo " $ { pip_commands } " <nl> - echo <nl> - nvidia - docker run - - rm nvidia / cuda : 8 . 0 - cudnn5 - devel - ubuntu14 . 04 bash - c " $ { pip_commands } " <nl> - <nl> - docker_img = $ ( echo " $ docker_commands " | sed ' s / . * docker pull \ ( . * \ ) / \ 1 / ' | sed ' s / ; . * / / ' ) <nl> - docker_commands = " $ { docker_commands } nvidia - docker run $ { docker_img } python - c \ " $ { PYTHON_GPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Docker # # # " <nl> - echo " $ { docker_commands } " <nl> - echo <nl> - eval " $ { docker_commands } " <nl> - <nl> - buildfromsource_commands = " $ { buildfromsource_commands } python - c \ " $ { PYTHON_GPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Build From Source # # # " <nl> - echo " $ { buildfromsource_commands } " <nl> - echo <nl> - nvidia - docker run - - rm nvidia / cuda : 8 . 0 - cudnn5 - devel - ubuntu14 . 04 bash - c " $ { buildfromsource_commands } " <nl> - <nl> - else <nl> - <nl> - # # # # # # # # # # # # # # # # # # # # # # # # # MACOS - PYTHON - CPU # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> - # Currently this section is invoked in . . / travis / run_test . sh so this test can run on MacOS . 
<nl> - echo <nl> - echo <nl> - echo " # # # Testing MACOS - PYTHON - CPU # # # " <nl> - echo <nl> - # range of all lines inside MacOS - Python - CPU instructions <nl> - MAC_PYTHON_CPU_START_LINENO = $ ( grep - n " START - MacOS Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - MAC_PYTHON_CPU_END_LINENO = $ ( grep - n " END - Mac OS Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> - <nl> - set_instruction_set $ { MAC_PYTHON_CPU_START_LINENO } $ { MAC_PYTHON_CPU_END_LINENO } <nl> - <nl> - if [ [ " $ { TASK } " = = " installation_packaged_test " ] ] <nl> - then <nl> - virtualenv_commands = " $ { virtualenv_commands } python - c \ " import sys ; print hasattr ( sys , ' real_prefix ' ) ; $ { PYTHON_CPU_VALIDATION } \ " ; deactivate ; " <nl> - echo <nl> - echo " # # # Testing Virtualenv # # # " <nl> - echo " $ { virtualenv_commands } " <nl> - echo <nl> - eval " $ { virtualenv_commands } " <nl> - <nl> - pip_commands = " $ { pip_commands } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Pip # # # " <nl> - echo " $ { pip_commands } " <nl> - echo <nl> - eval " $ { pip_commands } " <nl> - <nl> - exit <nl> - fi <nl> - <nl> - # # # COMMENTING THIS OUT FOR NOW AS TRAVIS DOES NOT SUPPORT DOCKER FOR MAC <nl> - # echo <nl> - # echo " # # # Testing Docker # # # " <nl> - # echo " $ { docker_commands } " <nl> - # echo <nl> - # eval $ { docker_commands } <nl> <nl> - if [ [ " $ { TASK } " = = " installation_source_test " ] ] <nl> - then <nl> - buildfromsource_commands = " $ { buildfromsource_commands } python - c \ " $ { PYTHON_CPU_VALIDATION } \ " " <nl> - echo <nl> - echo " # # # Testing Build From Source # # # " <nl> - echo " $ { buildfromsource_commands } " <nl> - echo <nl> - eval " $ { buildfromsource_commands } " <nl> - <nl> - exit <nl> - fi <nl> - fi <nl> + # # # # # # # # # # # # # # # # # # # # # # # # LINUX - PYTHON - CPU # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> + echo <nl> + echo <nl> + echo " # # # Testing LINUX - PYTHON - CPU # # # " <nl> + echo <nl> + # range of all lines inside Linux - Python - CPU instructions <nl> + LINUX_PYTHON_CPU_START_LINENO = $ ( grep - n " START - Linux Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> + LINUX_PYTHON_CPU_END_LINENO = $ ( grep - n " END - Linux Python CPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> + <nl> + set_instruction_set $ { LINUX_PYTHON_CPU_START_LINENO } $ { LINUX_PYTHON_CPU_END_LINENO } <nl> + <nl> + echo <nl> + echo " # # # Testing Virtualenv # # # " <nl> + echo " $ { virtualenv_commands } " <nl> + echo <nl> + docker run - - rm ubuntu : 14 . 04 bash - c " $ { virtualenv_commands } " <nl> + <nl> + echo <nl> + echo " # # # Testing Pip # # # " <nl> + echo " $ { pip_commands } " <nl> + echo <nl> + docker run - - rm ubuntu : 14 . 04 bash - c " $ { pip_commands } " <nl> + <nl> + echo <nl> + echo " # # # Testing Docker # # # " <nl> + echo " $ { docker_commands } " <nl> + echo <nl> + eval $ { docker_commands } <nl> + <nl> + echo <nl> + echo " # # # Testing Build From Source # # # " <nl> + echo " $ { buildfromsource_commands } " <nl> + echo <nl> + docker run - - rm ubuntu : 14 . 
04 bash - c " $ { buildfromsource_commands } " <nl> + <nl> + # # # # # # # # # # # # # # # # # # # # # # # # # LINUX - PYTHON - GPU # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> + <nl> + echo <nl> + echo <nl> + echo " # # # Testing LINUX - PYTHON - GPU # # # " <nl> + echo <nl> + # range of all lines inside Linux - Python - GPU instructions <nl> + LINUX_PYTHON_GPU_START_LINENO = $ ( grep - n " START - Linux Python GPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> + LINUX_PYTHON_GPU_END_LINENO = $ ( grep - n " END - Linux Python GPU Installation Instructions " " $ { FILE } " | cut - d : - f 1 ) <nl> + <nl> + set_instruction_set $ { LINUX_PYTHON_GPU_START_LINENO } $ { LINUX_PYTHON_GPU_END_LINENO } <nl> + <nl> + echo <nl> + echo " # # # Testing Virtualenv # # # " <nl> + echo " $ { virtualenv_commands } " <nl> + echo <nl> + nvidia - docker run - - rm nvidia / cuda : 7 . 5 - cudnn5 - devel bash - c " $ { virtualenv_commands } " <nl> + <nl> + echo <nl> + echo " # # # Testing Pip # # # " <nl> + echo " $ { pip_commands } " <nl> + echo <nl> + nvidia - docker run - - rm nvidia / cuda : 7 . 5 - cudnn5 - devel bash - c " $ { pip_commands } " <nl> + <nl> + echo <nl> + echo " # # # Testing Docker # # # " <nl> + echo " $ { docker_commands } " <nl> + echo <nl> + eval $ { docker_commands } <nl> + <nl> + echo <nl> + echo " # # # Testing Build From Source # # # " <nl> + echo " $ { buildfromsource_commands } " <nl> + echo <nl> + nvidia - docker run - - rm nvidia / cuda : 7 . 5 - cudnn5 - devel bash - c " $ { buildfromsource_commands } " <nl> mmm a / tests / travis / run_test . sh <nl> ppp b / tests / travis / run_test . sh <nl> then <nl> exit 0 <nl> fi <nl> <nl> - if [ [ $ { TASK } = = * " installation " * ] ] ; then <nl> - git remote add main https : / / github . com / dmlc / mxnet . git <nl> - git fetch main master <nl> - echo " File changes compared to origin / master : " <nl> - echo " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * " <nl> - git diff - - name - only remotes / main / master <nl> - echo " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * " <nl> - <nl> - if [ [ ! $ ( git diff - - name - only remotes / main / master | grep install . md ) ] ] ; then <nl> - echo " No changes to install . md . Skipping installation tasks . . . " <nl> - exit 0 <nl> - fi <nl> - . / tests / jenkins / run_test_installation_docs . sh docs / get_started / install . md $ { TASK } <nl> - exit $ ? <nl> - fi <nl> - <nl> if [ $ { TASK } = = " lint " ] ; then <nl> make lint | | exit - 1 <nl> echo " Check documentations of c + + code . . . " <nl> if [ $ { TASK } = = " r_test " ] ; then <nl> fi <nl> <nl> if [ $ { TASK } = = " python_test " ] ; then <nl> - export PYTHONPATH = $ { PYTHONPATH } : $ { PWD } / python <nl> make all | | exit - 1 <nl> # use cached dir for storing data <nl> rm - rf $ { PWD } / data <nl> mmm a / tests / travis / setup . sh <nl> ppp b / tests / travis / setup . sh <nl> then <nl> exit 0 <nl> fi <nl> <nl> - if [ [ $ { TRAVIS_OS_NAME } = = " osx " & & $ { TASK } ! = * " installation " * ] ] ; then <nl> + if [ $ { TRAVIS_OS_NAME } = = " osx " ] ; then <nl> brew update <nl> brew tap homebrew / science <nl> brew install opencv <nl> | Revert " Test install . md for macOS ( ) " ( ) | apache/incubator-mxnet | fa61571eaf6608d57f291465f0b34c4d5844367f | 2017-06-01T00:36:08Z |
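The restored script's retrieve_closest_index walks a sorted array of source-section line numbers and returns the index of the first entry at or past the target — that is how it finds which code fence follows a given <div> marker. In C++ terms the whole loop is one std::lower_bound call; a sketch for clarity, not code from the repository:

```cpp
// Equivalent of the shell function: first index whose value >= number,
// or -1 when the target lies past every recorded line number.
#include <algorithm>
#include <cassert>
#include <vector>

int retrieve_closest_index(const std::vector<int>& sorted, int number) {
    auto it = std::lower_bound(sorted.begin(), sorted.end(), number);
    if (it == sorted.end()) return -1;
    return static_cast<int>(it - sorted.begin());
}

int main() {
    std::vector<int> linenos = {10, 42, 97, 150};
    assert(retrieve_closest_index(linenos, 42) == 1);    // exact match
    assert(retrieve_closest_index(linenos, 43) == 2);    // next boundary
    assert(retrieve_closest_index(linenos, 999) == -1);  // past the end
}
```

The restored insertion sort in sort() plays the supporting role of ordering those indexes before start/end fences are paired up.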
mmm a / Telegram / PrepareWin . bat <nl> ppp b / Telegram / PrepareWin . bat <nl> <nl> @ echo OFF <nl> <nl> - set " AppVersion = 8002 " <nl> - set " AppVersionStrSmall = 0 . 8 . 2 " <nl> - set " AppVersionStr = 0 . 8 . 2 " <nl> - set " AppVersionStrFull = 0 . 8 . 2 . 0 " <nl> - set " DevChannel = 1 " <nl> + set " AppVersion = 8003 " <nl> + set " AppVersionStrSmall = 0 . 8 . 3 " <nl> + set " AppVersionStr = 0 . 8 . 3 " <nl> + set " AppVersionStrFull = 0 . 8 . 3 . 0 " <nl> + set " DevChannel = 0 " <nl> <nl> if % DevChannel % neq 0 goto preparedev <nl> <nl> mmm a / Telegram / SourceFiles / config . h <nl> ppp b / Telegram / SourceFiles / config . h <nl> Copyright ( c ) 2014 John Preston , https : / / desktop . telegram . org <nl> * / <nl> # pragma once <nl> <nl> - static const int32 AppVersion = 8002 ; <nl> - static const wchar_t * AppVersionStr = L " 0 . 8 . 2 " ; <nl> - static const bool DevChannel = true ; <nl> + static const int32 AppVersion = 8003 ; <nl> + static const wchar_t * AppVersionStr = L " 0 . 8 . 3 " ; <nl> + static const bool DevChannel = false ; <nl> <nl> static const wchar_t * AppNameOld = L " Telegram Win ( Unofficial ) " ; <nl> static const wchar_t * AppName = L " Telegram Desktop " ; <nl> mmm a / Telegram / Telegram . plist <nl> ppp b / Telegram / Telegram . plist <nl> <nl> < key > CFBundlePackageType < / key > <nl> < string > APPL < / string > <nl> < key > CFBundleShortVersionString < / key > <nl> - < string > 0 . 8 . 2 < / string > <nl> + < string > 0 . 8 . 3 < / string > <nl> < key > CFBundleSignature < / key > <nl> < string > ? ? ? ? < / string > <nl> < key > CFBundleURLTypes < / key > <nl> Binary files a / Telegram / Telegram . rc and b / Telegram / Telegram . rc differ <nl> mmm a / Telegram / Telegram . xcodeproj / project . pbxproj <nl> ppp b / Telegram / Telegram . xcodeproj / project . pbxproj <nl> <nl> buildSettings = { <nl> ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon ; <nl> COPY_PHASE_STRIP = NO ; <nl> - CURRENT_PROJECT_VERSION = 0 . 8 . 2 ; <nl> + CURRENT_PROJECT_VERSION = 0 . 8 . 3 ; <nl> DEBUG_INFORMATION_FORMAT = dwarf ; <nl> GCC_GENERATE_DEBUGGING_SYMBOLS = YES ; <nl> GCC_OPTIMIZATION_LEVEL = 0 ; <nl> <nl> buildSettings = { <nl> ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon ; <nl> COPY_PHASE_STRIP = YES ; <nl> - CURRENT_PROJECT_VERSION = 0 . 8 . 2 ; <nl> + CURRENT_PROJECT_VERSION = 0 . 8 . 3 ; <nl> GCC_GENERATE_DEBUGGING_SYMBOLS = NO ; <nl> GCC_OPTIMIZATION_LEVEL = fast ; <nl> GCC_PREFIX_HEADER = . / SourceFiles / stdafx . h ; <nl> <nl> CLANG_WARN__DUPLICATE_METHOD_MATCH = YES ; <nl> CODE_SIGN_IDENTITY = " " ; <nl> COPY_PHASE_STRIP = NO ; <nl> - CURRENT_PROJECT_VERSION = 0 . 8 . 2 ; <nl> + CURRENT_PROJECT_VERSION = 0 . 8 . 3 ; <nl> DEBUG_INFORMATION_FORMAT = " dwarf - with - dsym " ; <nl> DYLIB_COMPATIBILITY_VERSION = 0 . 8 ; <nl> - DYLIB_CURRENT_VERSION = 0 . 8 . 2 ; <nl> + DYLIB_CURRENT_VERSION = 0 . 8 . 3 ; <nl> ENABLE_STRICT_OBJC_MSGSEND = YES ; <nl> FRAMEWORK_SEARCH_PATHS = " " ; <nl> GCC_GENERATE_DEBUGGING_SYMBOLS = YES ; <nl> <nl> CLANG_WARN__DUPLICATE_METHOD_MATCH = YES ; <nl> CODE_SIGN_IDENTITY = " " ; <nl> COPY_PHASE_STRIP = NO ; <nl> - CURRENT_PROJECT_VERSION = 0 . 8 . 2 ; <nl> + CURRENT_PROJECT_VERSION = 0 . 8 . 3 ; <nl> DEBUG_INFORMATION_FORMAT = dwarf ; <nl> DYLIB_COMPATIBILITY_VERSION = 0 . 8 ; <nl> - DYLIB_CURRENT_VERSION = 0 . 8 . 2 ; <nl> + DYLIB_CURRENT_VERSION = 0 . 8 . 3 ; <nl> ENABLE_STRICT_OBJC_MSGSEND = YES ; <nl> FRAMEWORK_SEARCH_PATHS = " " ; <nl> GCC_GENERATE_DEBUGGING_SYMBOLS = YES ; <nl> mmm a / Telegram / Version . 
sh <nl> ppp b / Telegram / Version . sh <nl> <nl> - echo 8002 0 . 8 . 2 1 <nl> + echo 8003 0 . 8 . 3 0 <nl> # AppVersion AppVersionStr DevChannel <nl> | version 0 . 8 . 3 prepared | telegramdesktop/tdesktop | 8bc0410830188a42c4cdf45aceca1582779f4ab0 | 2015-04-08T14:22:06Z |
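An aside on the versioning convention visible in this commit: the integer AppVersion and the dotted AppVersionStr move in lockstep (8002/0.8.2 to 8003/0.8.3), and DevChannel flips from 1 to 0 to mark a stable rather than a dev-channel build. A minimal C++ sketch of the packing rule, inferred only from the value pairs in the diff (the helper name is hypothetical, not part of the codebase):

    #include <cstdio>

    // Inferred from the diff above: 0.8.2 -> 8002 and 0.8.3 -> 8003,
    // i.e. major * 1000000 + minor * 1000 + patch.
    constexpr int makeAppVersion(int major, int minor, int patch) {
        return major * 1000000 + minor * 1000 + patch;
    }

    int main() {
        std::printf("%d\n", makeAppVersion(0, 8, 3));  // 8003, matching AppVersion above
        return 0;
    }

Keeping the integer, the string, and the DevChannel flag consistent by hand across PrepareWin.bat, config.h, Telegram.plist, Version.sh and the Xcode project settings is exactly what this commit does.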
mmm a / src / d8 - posix . cc <nl> ppp b / src / d8 - posix . cc <nl> static Handle < Value > GetStdout ( int child_fd , <nl> int read_timeout , <nl> int total_timeout ) { <nl> Handle < String > accumulator = String : : Empty ( ) ; <nl> - const char * source = " function ( a , b ) { return a + b ; } " ; <nl> + const char * source = " ( function ( a , b ) { return a + b ; } ) " ; <nl> Handle < Value > cons_as_obj ( Script : : Compile ( String : : New ( source ) ) - > Run ( ) ) ; <nl> Handle < Function > cons_function ( Function : : Cast ( * cons_as_obj ) ) ; <nl> Handle < Value > cons_args [ 2 ] ; <nl> | A follow - up to r3009 : add parentheses around one more anonymous function . | v8/v8 | a7c0c6e5c453828e4b444e4220f090eda40ab91e | 2009-10-13T15:17:24Z |
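The one-line v8 fix above hinges on a JavaScript parsing rule: when a source string is compiled as a whole script, a leading function keyword starts a function declaration, which requires a name, so the bare anonymous form fails to compile (and even a named declaration yields no completion value for Run() to return). Wrapping the literal in parentheses forces a function expression, whose value is the function object itself. A sketch mirroring the calls in the diff (the historical V8 API exactly as shown there):

    // Broken: "function (a, b) { return a + b; }" at statement position
    // begins an (unnamed, hence invalid) function declaration.
    // Fixed: the parentheses make it an expression, so Run() evaluates to
    // the function object, which Function::Cast can then consume.
    const char* source = "(function (a, b) { return a + b; })";
    Handle<Value> cons_as_obj(Script::Compile(String::New(source))->Run());
    Handle<Function> cons_function(Function::Cast(*cons_as_obj));

Per the commit message, r3009 applied the same parenthesization to other anonymous functions; this follow-up catches one remaining occurrence in d8-posix.cc.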
mmm a / bench / formatter - bench . cpp <nl> ppp b / bench / formatter - bench . cpp <nl> void bench_formatter ( benchmark : : State & state , std : : string pattern ) <nl> std : : string logger_name = " logger - name " ; <nl> const char * text = " Hello . This is some message with length of 80 " ; <nl> <nl> - spdlog : : details : : log_msg msg ( & logger_name , spdlog : : level : : info , text ) ; <nl> + spdlog : : details : : log_msg msg ( spdlog : : source_loc { __FILE__ , __LINE__ } , & logger_name , spdlog : : level : : info , text ) ; <nl> / / formatter - > format ( msg , dest ) ; <nl> / / printf ( " % s \ n " , fmt : : to_string ( dest ) . c_str ( ) ) ; <nl> <nl> | Updated formatter - bench to include source location | gabime/spdlog | 216cd6935fb39f98abeeb77e9bf46d83557d002f | 2018-11-22T15:05:27Z |
mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> set ( CMAKE_EXPORT_COMPILE_COMMANDS " ON " ) <nl> set ( BUILD_DOXYGEN FALSE CACHE BOOL " Build doxygen documentation on every make " ) <nl> set ( BUILD_MONGO_DB_PLUGIN FALSE CACHE BOOL " Build mongo database plugin " ) <nl> <nl> - # set ( USE_PCH 1 ) <nl> - <nl> - if ( USE_PCH ) <nl> - include ( cotire ) <nl> - endif ( USE_PCH ) <nl> - <nl> # add defaults for openssl <nl> if ( " $ { OPENSSL_ROOT_DIR } " STREQUAL " " ) <nl> if ( NOT " $ ENV { OPENSSL_ROOT_DIR } " STREQUAL " " ) <nl> deleted file mode 100644 <nl> index ab611007dc . . 0000000000 <nl> mmm a / CMakeModules / cotire . cmake <nl> ppp / dev / null <nl> <nl> - # - cotire ( compile time reducer ) <nl> - # <nl> - # See the cotire manual for usage hints . <nl> - # <nl> - # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - # Copyright 2012 - 2016 Sascha Kratky <nl> - # <nl> - # Permission is hereby granted , free of charge , to any person <nl> - # obtaining a copy of this software and associated documentation <nl> - # files ( the " Software " ) , to deal in the Software without <nl> - # restriction , including without limitation the rights to use , <nl> - # copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> - # copies of the Software , and to permit persons to whom the <nl> - # Software is furnished to do so , subject to the following <nl> - # conditions : <nl> - # <nl> - # The above copyright notice and this permission notice shall be <nl> - # included in all copies or substantial portions of the Software . <nl> - # <nl> - # THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , <nl> - # EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES <nl> - # OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND <nl> - # NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT <nl> - # HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , <nl> - # WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING <nl> - # FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR <nl> - # OTHER DEALINGS IN THE SOFTWARE . <nl> - # = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - <nl> - if ( __COTIRE_INCLUDED ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( __COTIRE_INCLUDED TRUE ) <nl> - <nl> - # call cmake_minimum_required , but prevent modification of the CMake policy stack in include mode <nl> - # cmake_minimum_required also sets the policy version as a side effect , which we have to avoid <nl> - if ( NOT CMAKE_SCRIPT_MODE_FILE ) <nl> - cmake_policy ( PUSH ) <nl> - endif ( ) <nl> - cmake_minimum_required ( VERSION 2 . 8 . 12 ) <nl> - if ( NOT CMAKE_SCRIPT_MODE_FILE ) <nl> - cmake_policy ( POP ) <nl> - endif ( ) <nl> - <nl> - set ( COTIRE_CMAKE_MODULE_FILE " $ { CMAKE_CURRENT_LIST_FILE } " ) <nl> - set ( COTIRE_CMAKE_MODULE_VERSION " 1 . 7 . 
9 " ) <nl> - <nl> - # activate select policies <nl> - if ( POLICY CMP0025 ) <nl> - # Compiler id for Apple Clang is now AppleClang <nl> - cmake_policy ( SET CMP0025 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0026 ) <nl> - # disallow use of the LOCATION target property <nl> - cmake_policy ( SET CMP0026 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0038 ) <nl> - # targets may not link directly to themselves <nl> - cmake_policy ( SET CMP0038 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0039 ) <nl> - # utility targets may not have link dependencies <nl> - cmake_policy ( SET CMP0039 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0040 ) <nl> - # target in the TARGET signature of add_custom_command ( ) must exist <nl> - cmake_policy ( SET CMP0040 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0045 ) <nl> - # error on non - existent target in get_target_property <nl> - cmake_policy ( SET CMP0045 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0046 ) <nl> - # error on non - existent dependency in add_dependencies <nl> - cmake_policy ( SET CMP0046 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0049 ) <nl> - # do not expand variables in target source entries <nl> - cmake_policy ( SET CMP0049 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0050 ) <nl> - # disallow add_custom_command SOURCE signatures <nl> - cmake_policy ( SET CMP0050 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0051 ) <nl> - # include TARGET_OBJECTS expressions in a target ' s SOURCES property <nl> - cmake_policy ( SET CMP0051 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0053 ) <nl> - # simplify variable reference and escape sequence evaluation <nl> - cmake_policy ( SET CMP0053 NEW ) <nl> - endif ( ) <nl> - <nl> - if ( POLICY CMP0054 ) <nl> - # only interpret if ( ) arguments as variables or keywords when unquoted <nl> - cmake_policy ( SET CMP0054 NEW ) <nl> - endif ( ) <nl> - <nl> - include ( CMakeParseArguments ) <nl> - include ( ProcessorCount ) <nl> - <nl> - function ( cotire_get_configuration_types _configsVar ) <nl> - set ( _configs " " ) <nl> - if ( CMAKE_CONFIGURATION_TYPES ) <nl> - list ( APPEND _configs $ { CMAKE_CONFIGURATION_TYPES } ) <nl> - endif ( ) <nl> - if ( CMAKE_BUILD_TYPE ) <nl> - list ( APPEND _configs " $ { CMAKE_BUILD_TYPE } " ) <nl> - endif ( ) <nl> - if ( _configs ) <nl> - list ( REMOVE_DUPLICATES _configs ) <nl> - set ( $ { _configsVar } $ { _configs } PARENT_SCOPE ) <nl> - else ( ) <nl> - set ( $ { _configsVar } " None " PARENT_SCOPE ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_file_extension _sourceFile _extVar ) <nl> - # get_filename_component returns extension from first occurrence of . in file name <nl> - # this function computes the extension from last occurrence of . in file name <nl> - string ( FIND " $ { _sourceFile } " " . 
" _index REVERSE ) <nl> - if ( _index GREATER - 1 ) <nl> - math ( EXPR _index " $ { _index } + 1 " ) <nl> - string ( SUBSTRING " $ { _sourceFile } " $ { _index } - 1 _sourceExt ) <nl> - else ( ) <nl> - set ( _sourceExt " " ) <nl> - endif ( ) <nl> - set ( $ { _extVar } " $ { _sourceExt } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_check_is_path_relative_to _path _isRelativeVar ) <nl> - set ( $ { _isRelativeVar } FALSE ) <nl> - if ( IS_ABSOLUTE " $ { _path } " ) <nl> - foreach ( _dir $ { ARGN } ) <nl> - file ( RELATIVE_PATH _relPath " $ { _dir } " " $ { _path } " ) <nl> - if ( NOT _relPath OR ( NOT IS_ABSOLUTE " $ { _relPath } " AND NOT " $ { _relPath } " MATCHES " ^ \ \ . \ \ . " ) ) <nl> - set ( $ { _isRelativeVar } TRUE ) <nl> - break ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_filter_language_source_files _language _target _sourceFilesVar _excludedSourceFilesVar _cotiredSourceFilesVar ) <nl> - if ( CMAKE_ $ { _language } _SOURCE_FILE_EXTENSIONS ) <nl> - set ( _languageExtensions " $ { CMAKE_ $ { _language } _SOURCE_FILE_EXTENSIONS } " ) <nl> - else ( ) <nl> - set ( _languageExtensions " " ) <nl> - endif ( ) <nl> - if ( CMAKE_ $ { _language } _IGNORE_EXTENSIONS ) <nl> - set ( _ignoreExtensions " $ { CMAKE_ $ { _language } _IGNORE_EXTENSIONS } " ) <nl> - else ( ) <nl> - set ( _ignoreExtensions " " ) <nl> - endif ( ) <nl> - if ( COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS ) <nl> - set ( _excludeExtensions " $ { COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS } " ) <nl> - else ( ) <nl> - set ( _excludeExtensions " " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _languageExtensions ) <nl> - message ( STATUS " $ { _language } source file extensions : $ { _languageExtensions } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _ignoreExtensions ) <nl> - message ( STATUS " $ { _language } ignore extensions : $ { _ignoreExtensions } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _excludeExtensions ) <nl> - message ( STATUS " $ { _language } exclude extensions : $ { _excludeExtensions } " ) <nl> - endif ( ) <nl> - if ( CMAKE_VERSION VERSION_LESS " 3 . 1 . 0 " ) <nl> - set ( _allSourceFiles $ { ARGN } ) <nl> - else ( ) <nl> - # as of CMake 3 . 
1 target sources may contain generator expressions <nl> - # since we cannot obtain required property information about source files added <nl> - # through generator expressions at configure time , we filter them out <nl> - string ( GENEX_STRIP " $ { ARGN } " _allSourceFiles ) <nl> - endif ( ) <nl> - set ( _filteredSourceFiles " " ) <nl> - set ( _excludedSourceFiles " " ) <nl> - foreach ( _sourceFile $ { _allSourceFiles } ) <nl> - get_source_file_property ( _sourceIsHeaderOnly " $ { _sourceFile } " HEADER_FILE_ONLY ) <nl> - get_source_file_property ( _sourceIsExternal " $ { _sourceFile } " EXTERNAL_OBJECT ) <nl> - get_source_file_property ( _sourceIsSymbolic " $ { _sourceFile } " SYMBOLIC ) <nl> - if ( NOT _sourceIsHeaderOnly AND NOT _sourceIsExternal AND NOT _sourceIsSymbolic ) <nl> - cotire_get_source_file_extension ( " $ { _sourceFile } " _sourceExt ) <nl> - if ( _sourceExt ) <nl> - list ( FIND _ignoreExtensions " $ { _sourceExt } " _ignoreIndex ) <nl> - if ( _ignoreIndex LESS 0 ) <nl> - list ( FIND _excludeExtensions " $ { _sourceExt } " _excludeIndex ) <nl> - if ( _excludeIndex GREATER - 1 ) <nl> - list ( APPEND _excludedSourceFiles " $ { _sourceFile } " ) <nl> - else ( ) <nl> - list ( FIND _languageExtensions " $ { _sourceExt } " _sourceIndex ) <nl> - if ( _sourceIndex GREATER - 1 ) <nl> - # consider source file unless it is excluded explicitly <nl> - get_source_file_property ( _sourceIsExcluded " $ { _sourceFile } " COTIRE_EXCLUDED ) <nl> - if ( _sourceIsExcluded ) <nl> - list ( APPEND _excludedSourceFiles " $ { _sourceFile } " ) <nl> - else ( ) <nl> - list ( APPEND _filteredSourceFiles " $ { _sourceFile } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - get_source_file_property ( _sourceLanguage " $ { _sourceFile } " LANGUAGE ) <nl> - if ( " $ { _sourceLanguage } " STREQUAL " $ { _language } " ) <nl> - # add to excluded sources , if file is not ignored and has correct language without having the correct extension <nl> - list ( APPEND _excludedSourceFiles " $ { _sourceFile } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - # separate filtered source files from already cotired ones <nl> - # the COTIRE_TARGET property of a source file may be set while a target is being processed by cotire <nl> - set ( _sourceFiles " " ) <nl> - set ( _cotiredSourceFiles " " ) <nl> - foreach ( _sourceFile $ { _filteredSourceFiles } ) <nl> - get_source_file_property ( _sourceIsCotired " $ { _sourceFile } " COTIRE_TARGET ) <nl> - if ( _sourceIsCotired ) <nl> - list ( APPEND _cotiredSourceFiles " $ { _sourceFile } " ) <nl> - else ( ) <nl> - get_source_file_property ( _sourceCompileFlags " $ { _sourceFile } " COMPILE_FLAGS ) <nl> - if ( _sourceCompileFlags ) <nl> - # add to excluded sources , if file has custom compile flags <nl> - list ( APPEND _excludedSourceFiles " $ { _sourceFile } " ) <nl> - else ( ) <nl> - list ( APPEND _sourceFiles " $ { _sourceFile } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - if ( _sourceFiles ) <nl> - message ( STATUS " Filtered $ { _target } $ { _language } sources : $ { _sourceFiles } " ) <nl> - endif ( ) <nl> - if ( _excludedSourceFiles ) <nl> - message ( STATUS " Excluded $ { _target } $ { _language } sources : $ { _excludedSourceFiles } " ) <nl> - endif ( ) <nl> - if ( _cotiredSourceFiles ) <nl> - message ( STATUS " Cotired $ { _target } $ { _language } sources : $ { _cotiredSourceFiles } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { 
_sourceFilesVar } $ { _sourceFiles } PARENT_SCOPE ) <nl> - set ( $ { _excludedSourceFilesVar } $ { _excludedSourceFiles } PARENT_SCOPE ) <nl> - set ( $ { _cotiredSourceFilesVar } $ { _cotiredSourceFiles } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_objects_with_property_on _filteredObjectsVar _property _type ) <nl> - set ( _filteredObjects " " ) <nl> - foreach ( _object $ { ARGN } ) <nl> - get_property ( _isSet $ { _type } " $ { _object } " PROPERTY $ { _property } SET ) <nl> - if ( _isSet ) <nl> - get_property ( _propertyValue $ { _type } " $ { _object } " PROPERTY $ { _property } ) <nl> - if ( _propertyValue ) <nl> - list ( APPEND _filteredObjects " $ { _object } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _filteredObjectsVar } $ { _filteredObjects } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_objects_with_property_off _filteredObjectsVar _property _type ) <nl> - set ( _filteredObjects " " ) <nl> - foreach ( _object $ { ARGN } ) <nl> - get_property ( _isSet $ { _type } " $ { _object } " PROPERTY $ { _property } SET ) <nl> - if ( _isSet ) <nl> - get_property ( _propertyValue $ { _type } " $ { _object } " PROPERTY $ { _property } ) <nl> - if ( NOT _propertyValue ) <nl> - list ( APPEND _filteredObjects " $ { _object } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _filteredObjectsVar } $ { _filteredObjects } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_file_property_values _valuesVar _property ) <nl> - set ( _values " " ) <nl> - foreach ( _sourceFile $ { ARGN } ) <nl> - get_source_file_property ( _propertyValue " $ { _sourceFile } " $ { _property } ) <nl> - if ( _propertyValue ) <nl> - list ( APPEND _values " $ { _propertyValue } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _valuesVar } $ { _values } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_resolve_config_properites _configurations _propertiesVar ) <nl> - set ( _properties " " ) <nl> - foreach ( _property $ { ARGN } ) <nl> - if ( " $ { _property } " MATCHES " < CONFIG > " ) <nl> - foreach ( _config $ { _configurations } ) <nl> - string ( TOUPPER " $ { _config } " _upperConfig ) <nl> - string ( REPLACE " < CONFIG > " " $ { _upperConfig } " _configProperty " $ { _property } " ) <nl> - list ( APPEND _properties $ { _configProperty } ) <nl> - endforeach ( ) <nl> - else ( ) <nl> - list ( APPEND _properties $ { _property } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _propertiesVar } $ { _properties } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_copy_set_properites _configurations _type _source _target ) <nl> - cotire_resolve_config_properites ( " $ { _configurations } " _properties $ { ARGN } ) <nl> - foreach ( _property $ { _properties } ) <nl> - get_property ( _isSet $ { _type } $ { _source } PROPERTY $ { _property } SET ) <nl> - if ( _isSet ) <nl> - get_property ( _propertyValue $ { _type } $ { _source } PROPERTY $ { _property } ) <nl> - set_property ( $ { _type } $ { _target } PROPERTY $ { _property } " $ { _propertyValue } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_usage_requirements _target _targetRequirementsVar ) <nl> - set ( _targetRequirements " " ) <nl> - get_target_property ( _librariesToProcess $ { _target } LINK_LIBRARIES ) <nl> - while ( _librariesToProcess ) <nl> - # remove from head <nl> - list ( GET _librariesToProcess 0 
_library ) <nl> - list ( REMOVE_AT _librariesToProcess 0 ) <nl> - if ( TARGET $ { _library } ) <nl> - list ( FIND _targetRequirements $ { _library } _index ) <nl> - if ( _index LESS 0 ) <nl> - list ( APPEND _targetRequirements $ { _library } ) <nl> - # BFS traversal of transitive libraries <nl> - get_target_property ( _libraries $ { _library } INTERFACE_LINK_LIBRARIES ) <nl> - if ( _libraries ) <nl> - list ( APPEND _librariesToProcess $ { _libraries } ) <nl> - list ( REMOVE_DUPLICATES _librariesToProcess ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endwhile ( ) <nl> - set ( $ { _targetRequirementsVar } $ { _targetRequirements } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_filter_compile_flags _language _flagFilter _matchedOptionsVar _unmatchedOptionsVar ) <nl> - if ( WIN32 AND CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - set ( _flagPrefix " [ / - ] " ) <nl> - else ( ) <nl> - set ( _flagPrefix " - - ? " ) <nl> - endif ( ) <nl> - set ( _optionFlag " " ) <nl> - set ( _matchedOptions " " ) <nl> - set ( _unmatchedOptions " " ) <nl> - foreach ( _compileFlag $ { ARGN } ) <nl> - if ( _compileFlag ) <nl> - if ( _optionFlag AND NOT " $ { _compileFlag } " MATCHES " ^ $ { _flagPrefix } " ) <nl> - # option with separate argument <nl> - list ( APPEND _matchedOptions " $ { _compileFlag } " ) <nl> - set ( _optionFlag " " ) <nl> - elseif ( " $ { _compileFlag } " MATCHES " ^ ( $ { _flagPrefix } ) ( $ { _flagFilter } ) $ " ) <nl> - # remember option <nl> - set ( _optionFlag " $ { CMAKE_MATCH_2 } " ) <nl> - elseif ( " $ { _compileFlag } " MATCHES " ^ ( $ { _flagPrefix } ) ( $ { _flagFilter } ) ( . + ) $ " ) <nl> - # option with joined argument <nl> - list ( APPEND _matchedOptions " $ { CMAKE_MATCH_3 } " ) <nl> - set ( _optionFlag " " ) <nl> - else ( ) <nl> - # flush remembered option <nl> - if ( _optionFlag ) <nl> - list ( APPEND _matchedOptions " $ { _optionFlag } " ) <nl> - set ( _optionFlag " " ) <nl> - endif ( ) <nl> - # add to unfiltered options <nl> - list ( APPEND _unmatchedOptions " $ { _compileFlag } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _optionFlag ) <nl> - list ( APPEND _matchedOptions " $ { _optionFlag } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _matchedOptions ) <nl> - message ( STATUS " Filter $ { _flagFilter } matched : $ { _matchedOptions } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _unmatchedOptions ) <nl> - message ( STATUS " Filter $ { _flagFilter } unmatched : $ { _unmatchedOptions } " ) <nl> - endif ( ) <nl> - set ( $ { _matchedOptionsVar } $ { _matchedOptions } PARENT_SCOPE ) <nl> - set ( $ { _unmatchedOptionsVar } $ { _unmatchedOptions } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_is_target_supported _target _isSupportedVar ) <nl> - if ( NOT TARGET " $ { _target } " ) <nl> - set ( $ { _isSupportedVar } FALSE PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - get_target_property ( _imported $ { _target } IMPORTED ) <nl> - if ( _imported ) <nl> - set ( $ { _isSupportedVar } FALSE PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - get_target_property ( _targetType $ { _target } TYPE ) <nl> - if ( NOT _targetType MATCHES " EXECUTABLE | ( STATIC | SHARED | MODULE | OBJECT ) _LIBRARY " ) <nl> - set ( $ { _isSupportedVar } FALSE PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( $ { _isSupportedVar } TRUE PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_compile_flags _config _language _target 
_flagsVar ) <nl> - string ( TOUPPER " $ { _config } " _upperConfig ) <nl> - # collect options from CMake language variables <nl> - set ( _compileFlags " " ) <nl> - if ( CMAKE_ $ { _language } _FLAGS ) <nl> - set ( _compileFlags " $ { _compileFlags } $ { CMAKE_ $ { _language } _FLAGS } " ) <nl> - endif ( ) <nl> - if ( CMAKE_ $ { _language } _FLAGS_ $ { _upperConfig } ) <nl> - set ( _compileFlags " $ { _compileFlags } $ { CMAKE_ $ { _language } _FLAGS_ $ { _upperConfig } } " ) <nl> - endif ( ) <nl> - if ( _target ) <nl> - # add target compile flags <nl> - get_target_property ( _targetflags $ { _target } COMPILE_FLAGS ) <nl> - if ( _targetflags ) <nl> - set ( _compileFlags " $ { _compileFlags } $ { _targetflags } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( UNIX ) <nl> - separate_arguments ( _compileFlags UNIX_COMMAND " $ { _compileFlags } " ) <nl> - elseif ( WIN32 ) <nl> - separate_arguments ( _compileFlags WINDOWS_COMMAND " $ { _compileFlags } " ) <nl> - else ( ) <nl> - separate_arguments ( _compileFlags ) <nl> - endif ( ) <nl> - # target compile options <nl> - if ( _target ) <nl> - get_target_property ( _targetOptions $ { _target } COMPILE_OPTIONS ) <nl> - if ( _targetOptions ) <nl> - list ( APPEND _compileFlags $ { _targetOptions } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # interface compile options from linked library targets <nl> - if ( _target ) <nl> - set ( _linkedTargets " " ) <nl> - cotire_get_target_usage_requirements ( $ { _target } _linkedTargets ) <nl> - foreach ( _linkedTarget $ { _linkedTargets } ) <nl> - get_target_property ( _targetOptions $ { _linkedTarget } INTERFACE_COMPILE_OPTIONS ) <nl> - if ( _targetOptions ) <nl> - list ( APPEND _compileFlags $ { _targetOptions } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - # handle language standard properties <nl> - if ( CMAKE_ $ { _language } _STANDARD_DEFAULT ) <nl> - # used compiler supports language standard levels <nl> - if ( _target ) <nl> - get_target_property ( _targetLanguageStandard $ { _target } $ { _language } _STANDARD ) <nl> - if ( _targetLanguageStandard ) <nl> - set ( _type " EXTENSION " ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY $ { _language } _EXTENSIONS SET ) <nl> - if ( _isSet ) <nl> - get_target_property ( _targetUseLanguageExtensions $ { _target } $ { _language } _EXTENSIONS ) <nl> - if ( NOT _targetUseLanguageExtensions ) <nl> - set ( _type " STANDARD " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( CMAKE_ $ { _language } $ { _targetLanguageStandard } _ $ { _type } _COMPILE_OPTION ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } $ { _targetLanguageStandard } _ $ { _type } _COMPILE_OPTION } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # handle the POSITION_INDEPENDENT_CODE target property <nl> - if ( _target ) <nl> - get_target_property ( _targetPIC $ { _target } POSITION_INDEPENDENT_CODE ) <nl> - if ( _targetPIC ) <nl> - get_target_property ( _targetType $ { _target } TYPE ) <nl> - if ( _targetType STREQUAL " EXECUTABLE " AND CMAKE_ $ { _language } _COMPILE_OPTIONS_PIE ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _COMPILE_OPTIONS_PIE } " ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILE_OPTIONS_PIC ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _COMPILE_OPTIONS_PIC } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # handle visibility target properties <nl> - if ( _target ) <nl> - get_target_property ( _targetVisibility $ { _target } $ { _language } 
_VISIBILITY_PRESET ) <nl> - if ( _targetVisibility AND CMAKE_ $ { _language } _COMPILE_OPTIONS_VISIBILITY ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _COMPILE_OPTIONS_VISIBILITY } $ { _targetVisibility } " ) <nl> - endif ( ) <nl> - get_target_property ( _targetVisibilityInlines $ { _target } VISIBILITY_INLINES_HIDDEN ) <nl> - if ( _targetVisibilityInlines AND CMAKE_ $ { _language } _COMPILE_OPTIONS_VISIBILITY_INLINES_HIDDEN ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _COMPILE_OPTIONS_VISIBILITY_INLINES_HIDDEN } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # platform specific flags <nl> - if ( APPLE ) <nl> - get_target_property ( _architectures $ { _target } OSX_ARCHITECTURES_ $ { _upperConfig } ) <nl> - if ( NOT _architectures ) <nl> - get_target_property ( _architectures $ { _target } OSX_ARCHITECTURES ) <nl> - endif ( ) <nl> - if ( _architectures ) <nl> - foreach ( _arch $ { _architectures } ) <nl> - list ( APPEND _compileFlags " - arch " " $ { _arch } " ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - if ( CMAKE_OSX_SYSROOT ) <nl> - if ( CMAKE_ $ { _language } _SYSROOT_FLAG ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _SYSROOT_FLAG } " " $ { CMAKE_OSX_SYSROOT } " ) <nl> - else ( ) <nl> - list ( APPEND _compileFlags " - isysroot " " $ { CMAKE_OSX_SYSROOT } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( CMAKE_OSX_DEPLOYMENT_TARGET ) <nl> - if ( CMAKE_ $ { _language } _OSX_DEPLOYMENT_TARGET_FLAG ) <nl> - list ( APPEND _compileFlags " $ { CMAKE_ $ { _language } _OSX_DEPLOYMENT_TARGET_FLAG } $ { CMAKE_OSX_DEPLOYMENT_TARGET } " ) <nl> - else ( ) <nl> - list ( APPEND _compileFlags " - mmacosx - version - min = $ { CMAKE_OSX_DEPLOYMENT_TARGET } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _compileFlags ) <nl> - message ( STATUS " Target $ { _target } compile flags : $ { _compileFlags } " ) <nl> - endif ( ) <nl> - set ( $ { _flagsVar } $ { _compileFlags } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_include_directories _config _language _target _includeDirsVar _systemIncludeDirsVar ) <nl> - set ( _includeDirs " " ) <nl> - set ( _systemIncludeDirs " " ) <nl> - # default include dirs <nl> - if ( CMAKE_INCLUDE_CURRENT_DIR ) <nl> - list ( APPEND _includeDirs " $ { CMAKE_CURRENT_BINARY_DIR } " ) <nl> - list ( APPEND _includeDirs " $ { CMAKE_CURRENT_SOURCE_DIR } " ) <nl> - endif ( ) <nl> - set ( _targetFlags " " ) <nl> - cotire_get_target_compile_flags ( " $ { _config } " " $ { _language } " " $ { _target } " _targetFlags ) <nl> - # parse additional include directories from target compile flags <nl> - if ( CMAKE_INCLUDE_FLAG_ $ { _language } ) <nl> - string ( STRIP " $ { CMAKE_INCLUDE_FLAG_ $ { _language } } " _includeFlag ) <nl> - string ( REGEX REPLACE " ^ [ - / ] + " " " _includeFlag " $ { _includeFlag } " ) <nl> - if ( _includeFlag ) <nl> - set ( _dirs " " ) <nl> - cotire_filter_compile_flags ( " $ { _language } " " $ { _includeFlag } " _dirs _ignore $ { _targetFlags } ) <nl> - if ( _dirs ) <nl> - list ( APPEND _includeDirs $ { _dirs } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # parse additional system include directories from target compile flags <nl> - if ( CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } ) <nl> - string ( STRIP " $ { CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } } " _includeFlag ) <nl> - string ( REGEX REPLACE " ^ [ - / ] + " " " _includeFlag " $ { _includeFlag } " ) <nl> - if ( _includeFlag ) <nl> - set ( _dirs " " ) <nl> - 
cotire_filter_compile_flags ( " $ { _language } " " $ { _includeFlag } " _dirs _ignore $ { _targetFlags } ) <nl> - if ( _dirs ) <nl> - list ( APPEND _systemIncludeDirs $ { _dirs } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # target include directories <nl> - get_directory_property ( _dirs DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " INCLUDE_DIRECTORIES ) <nl> - if ( _target ) <nl> - get_target_property ( _targetDirs $ { _target } INCLUDE_DIRECTORIES ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _dirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - get_target_property ( _targetDirs $ { _target } INTERFACE_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _systemIncludeDirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # interface include directories from linked library targets <nl> - if ( _target ) <nl> - set ( _linkedTargets " " ) <nl> - cotire_get_target_usage_requirements ( $ { _target } _linkedTargets ) <nl> - foreach ( _linkedTarget $ { _linkedTargets } ) <nl> - get_target_property ( _linkedTargetType $ { _linkedTarget } TYPE ) <nl> - if ( CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE AND NOT CMAKE_VERSION VERSION_LESS " 3 . 4 . 0 " AND <nl> - _linkedTargetType MATCHES " ( STATIC | SHARED | MODULE | OBJECT ) _LIBRARY " ) <nl> - # CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE refers to CMAKE_CURRENT_BINARY_DIR and CMAKE_CURRENT_SOURCE_DIR <nl> - # at the time , when the target was created . These correspond to the target properties BINARY_DIR and SOURCE_DIR <nl> - # which are only available with CMake 3 . 4 or later . <nl> - get_target_property ( _targetDirs $ { _linkedTarget } BINARY_DIR ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _dirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - get_target_property ( _targetDirs $ { _linkedTarget } SOURCE_DIR ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _dirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - get_target_property ( _targetDirs $ { _linkedTarget } INTERFACE_INCLUDE_DIRECTORIES ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _dirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - get_target_property ( _targetDirs $ { _linkedTarget } INTERFACE_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - if ( _targetDirs ) <nl> - list ( APPEND _systemIncludeDirs $ { _targetDirs } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - if ( dirs ) <nl> - list ( REMOVE_DUPLICATES _dirs ) <nl> - endif ( ) <nl> - list ( LENGTH _includeDirs _projectInsertIndex ) <nl> - foreach ( _dir $ { _dirs } ) <nl> - if ( CMAKE_INCLUDE_DIRECTORIES_PROJECT_BEFORE ) <nl> - cotire_check_is_path_relative_to ( " $ { _dir } " _isRelative " $ { CMAKE_SOURCE_DIR } " " $ { CMAKE_BINARY_DIR } " ) <nl> - if ( _isRelative ) <nl> - list ( LENGTH _includeDirs _len ) <nl> - if ( _len EQUAL _projectInsertIndex ) <nl> - list ( APPEND _includeDirs " $ { _dir } " ) <nl> - else ( ) <nl> - list ( INSERT _includeDirs _projectInsertIndex " $ { _dir } " ) <nl> - endif ( ) <nl> - math ( EXPR _projectInsertIndex " $ { _projectInsertIndex } + 1 " ) <nl> - else ( ) <nl> - list ( APPEND _includeDirs " $ { _dir } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - list ( APPEND _includeDirs " $ { _dir } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - list ( REMOVE_DUPLICATES _includeDirs ) <nl> - list ( REMOVE_DUPLICATES _systemIncludeDirs ) <nl> - if ( CMAKE_ $ { _language } _IMPLICIT_INCLUDE_DIRECTORIES ) <nl> - list ( REMOVE_ITEM _includeDirs $ { CMAKE_ $ { _language } _IMPLICIT_INCLUDE_DIRECTORIES } ) <nl> - endif ( ) <nl> - if ( WIN32 ) <nl> - # 
convert Windows paths in include directories to CMake paths <nl> - if ( _includeDirs ) <nl> - set ( _paths " " ) <nl> - foreach ( _dir $ { _includeDirs } ) <nl> - file ( TO_CMAKE_PATH " $ { _dir } " _path ) <nl> - list ( APPEND _paths " $ { _path } " ) <nl> - endforeach ( ) <nl> - set ( _includeDirs $ { _paths } ) <nl> - endif ( ) <nl> - if ( _systemIncludeDirs ) <nl> - set ( _paths " " ) <nl> - foreach ( _dir $ { _systemIncludeDirs } ) <nl> - file ( TO_CMAKE_PATH " $ { _dir } " _path ) <nl> - list ( APPEND _paths " $ { _path } " ) <nl> - endforeach ( ) <nl> - set ( _systemIncludeDirs $ { _paths } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _includeDirs ) <nl> - message ( STATUS " Target $ { _target } include dirs : $ { _includeDirs } " ) <nl> - endif ( ) <nl> - set ( $ { _includeDirsVar } $ { _includeDirs } PARENT_SCOPE ) <nl> - if ( COTIRE_DEBUG AND _systemIncludeDirs ) <nl> - message ( STATUS " Target $ { _target } system include dirs : $ { _systemIncludeDirs } " ) <nl> - endif ( ) <nl> - set ( $ { _systemIncludeDirsVar } $ { _systemIncludeDirs } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_export_symbol _target _exportSymbolVar ) <nl> - set ( _exportSymbol " " ) <nl> - get_target_property ( _targetType $ { _target } TYPE ) <nl> - get_target_property ( _enableExports $ { _target } ENABLE_EXPORTS ) <nl> - if ( _targetType MATCHES " ( SHARED | MODULE ) _LIBRARY " OR <nl> - ( _targetType STREQUAL " EXECUTABLE " AND _enableExports ) ) <nl> - get_target_property ( _exportSymbol $ { _target } DEFINE_SYMBOL ) <nl> - if ( NOT _exportSymbol ) <nl> - set ( _exportSymbol " $ { _target } _EXPORTS " ) <nl> - endif ( ) <nl> - string ( MAKE_C_IDENTIFIER " $ { _exportSymbol } " _exportSymbol ) <nl> - endif ( ) <nl> - set ( $ { _exportSymbolVar } $ { _exportSymbol } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_compile_definitions _config _language _target _definitionsVar ) <nl> - string ( TOUPPER " $ { _config } " _upperConfig ) <nl> - set ( _configDefinitions " " ) <nl> - # CMAKE_INTDIR for multi - configuration build systems <nl> - if ( NOT " $ { CMAKE_CFG_INTDIR } " STREQUAL " . 
" ) <nl> - list ( APPEND _configDefinitions " CMAKE_INTDIR = \ " $ { _config } \ " " ) <nl> - endif ( ) <nl> - # target export define symbol <nl> - cotire_get_target_export_symbol ( " $ { _target } " _defineSymbol ) <nl> - if ( _defineSymbol ) <nl> - list ( APPEND _configDefinitions " $ { _defineSymbol } " ) <nl> - endif ( ) <nl> - # directory compile definitions <nl> - get_directory_property ( _definitions DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " COMPILE_DEFINITIONS ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - get_directory_property ( _definitions DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " COMPILE_DEFINITIONS_ $ { _upperConfig } ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - # target compile definitions <nl> - get_target_property ( _definitions $ { _target } COMPILE_DEFINITIONS ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - get_target_property ( _definitions $ { _target } COMPILE_DEFINITIONS_ $ { _upperConfig } ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - # interface compile definitions from linked library targets <nl> - set ( _linkedTargets " " ) <nl> - cotire_get_target_usage_requirements ( $ { _target } _linkedTargets ) <nl> - foreach ( _linkedTarget $ { _linkedTargets } ) <nl> - get_target_property ( _definitions $ { _linkedTarget } INTERFACE_COMPILE_DEFINITIONS ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - # parse additional compile definitions from target compile flags <nl> - # and don ' t look at directory compile definitions , which we already handled <nl> - set ( _targetFlags " " ) <nl> - cotire_get_target_compile_flags ( " $ { _config } " " $ { _language } " " $ { _target } " _targetFlags ) <nl> - cotire_filter_compile_flags ( " $ { _language } " " D " _definitions _ignore $ { _targetFlags } ) <nl> - if ( _definitions ) <nl> - list ( APPEND _configDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - list ( REMOVE_DUPLICATES _configDefinitions ) <nl> - if ( COTIRE_DEBUG AND _configDefinitions ) <nl> - message ( STATUS " Target $ { _target } compile definitions : $ { _configDefinitions } " ) <nl> - endif ( ) <nl> - set ( $ { _definitionsVar } $ { _configDefinitions } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_target_compiler_flags _config _language _target _compilerFlagsVar ) <nl> - # parse target compile flags omitting compile definitions and include directives <nl> - set ( _targetFlags " " ) <nl> - cotire_get_target_compile_flags ( " $ { _config } " " $ { _language } " " $ { _target } " _targetFlags ) <nl> - set ( _flagFilter " D " ) <nl> - if ( CMAKE_INCLUDE_FLAG_ $ { _language } ) <nl> - string ( STRIP " $ { CMAKE_INCLUDE_FLAG_ $ { _language } } " _includeFlag ) <nl> - string ( REGEX REPLACE " ^ [ - / ] + " " " _includeFlag " $ { _includeFlag } " ) <nl> - if ( _includeFlag ) <nl> - set ( _flagFilter " $ { _flagFilter } | $ { _includeFlag } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } ) <nl> - string ( STRIP " $ { CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } } " _includeFlag ) <nl> - string ( REGEX REPLACE " ^ [ - / ] + " " " _includeFlag " $ { _includeFlag } " ) <nl> - if ( _includeFlag ) <nl> - set ( _flagFilter " $ { _flagFilter } | $ 
{ _includeFlag } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _compilerFlags " " ) <nl> - cotire_filter_compile_flags ( " $ { _language } " " $ { _flagFilter } " _ignore _compilerFlags $ { _targetFlags } ) <nl> - if ( COTIRE_DEBUG AND _compilerFlags ) <nl> - message ( STATUS " Target $ { _target } compiler flags : $ { _compilerFlags } " ) <nl> - endif ( ) <nl> - set ( $ { _compilerFlagsVar } $ { _compilerFlags } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_sys_root_paths _pathsVar ) <nl> - if ( APPLE ) <nl> - if ( CMAKE_OSX_SYSROOT AND CMAKE_ $ { _language } _HAS_ISYSROOT ) <nl> - foreach ( _path IN LISTS $ { _pathsVar } ) <nl> - if ( IS_ABSOLUTE " $ { _path } " ) <nl> - get_filename_component ( _path " $ { CMAKE_OSX_SYSROOT } / $ { _path } " ABSOLUTE ) <nl> - if ( EXISTS " $ { _path } " ) <nl> - list ( APPEND $ { _pathsVar } " $ { _path } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _pathsVar } $ { $ { _pathsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_extra_properties _sourceFile _pattern _resultVar ) <nl> - set ( _extraProperties $ { ARGN } ) <nl> - set ( _result " " ) <nl> - if ( _extraProperties ) <nl> - list ( FIND _extraProperties " $ { _sourceFile } " _index ) <nl> - if ( _index GREATER - 1 ) <nl> - math ( EXPR _index " $ { _index } + 1 " ) <nl> - list ( LENGTH _extraProperties _len ) <nl> - math ( EXPR _len " $ { _len } - 1 " ) <nl> - foreach ( _index RANGE $ { _index } $ { _len } ) <nl> - list ( GET _extraProperties $ { _index } _value ) <nl> - if ( _value MATCHES " $ { _pattern } " ) <nl> - list ( APPEND _result " $ { _value } " ) <nl> - else ( ) <nl> - break ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _resultVar } $ { _result } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_compile_definitions _config _language _sourceFile _definitionsVar ) <nl> - set ( _compileDefinitions " " ) <nl> - if ( NOT CMAKE_SCRIPT_MODE_FILE ) <nl> - string ( TOUPPER " $ { _config } " _upperConfig ) <nl> - get_source_file_property ( _definitions " $ { _sourceFile } " COMPILE_DEFINITIONS ) <nl> - if ( _definitions ) <nl> - list ( APPEND _compileDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - get_source_file_property ( _definitions " $ { _sourceFile } " COMPILE_DEFINITIONS_ $ { _upperConfig } ) <nl> - if ( _definitions ) <nl> - list ( APPEND _compileDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - cotire_get_source_extra_properties ( " $ { _sourceFile } " " ^ [ a - zA - Z0 - 9_ ] + ( = . * ) ? 
$ " _definitions $ { ARGN } ) <nl> - if ( _definitions ) <nl> - list ( APPEND _compileDefinitions $ { _definitions } ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _compileDefinitions ) <nl> - message ( STATUS " Source $ { _sourceFile } compile definitions : $ { _compileDefinitions } " ) <nl> - endif ( ) <nl> - set ( $ { _definitionsVar } $ { _compileDefinitions } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_files_compile_definitions _config _language _definitionsVar ) <nl> - set ( _configDefinitions " " ) <nl> - foreach ( _sourceFile $ { ARGN } ) <nl> - cotire_get_source_compile_definitions ( " $ { _config } " " $ { _language } " " $ { _sourceFile } " _sourceDefinitions ) <nl> - if ( _sourceDefinitions ) <nl> - list ( APPEND _configDefinitions " $ { _sourceFile } " $ { _sourceDefinitions } " - " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _definitionsVar } $ { _configDefinitions } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_undefs _sourceFile _property _sourceUndefsVar ) <nl> - set ( _sourceUndefs " " ) <nl> - if ( NOT CMAKE_SCRIPT_MODE_FILE ) <nl> - get_source_file_property ( _undefs " $ { _sourceFile } " $ { _property } ) <nl> - if ( _undefs ) <nl> - list ( APPEND _sourceUndefs $ { _undefs } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - cotire_get_source_extra_properties ( " $ { _sourceFile } " " ^ [ a - zA - Z0 - 9_ ] + $ " _undefs $ { ARGN } ) <nl> - if ( _undefs ) <nl> - list ( APPEND _sourceUndefs $ { _undefs } ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _sourceUndefs ) <nl> - message ( STATUS " Source $ { _sourceFile } $ { _property } undefs : $ { _sourceUndefs } " ) <nl> - endif ( ) <nl> - set ( $ { _sourceUndefsVar } $ { _sourceUndefs } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_source_files_undefs _property _sourceUndefsVar ) <nl> - set ( _sourceUndefs " " ) <nl> - foreach ( _sourceFile $ { ARGN } ) <nl> - cotire_get_source_undefs ( " $ { _sourceFile } " $ { _property } _undefs ) <nl> - if ( _undefs ) <nl> - list ( APPEND _sourceUndefs " $ { _sourceFile } " $ { _undefs } " - " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _sourceUndefsVar } $ { _sourceUndefs } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_set_cmd_to_prologue _cmdVar ) <nl> - set ( $ { _cmdVar } " $ { CMAKE_COMMAND } " ) <nl> - if ( COTIRE_DEBUG ) <nl> - list ( APPEND $ { _cmdVar } " - - warn - uninitialized " ) <nl> - endif ( ) <nl> - list ( APPEND $ { _cmdVar } " - DCOTIRE_BUILD_TYPE : STRING = $ < CONFIGURATION > " ) <nl> - if ( COTIRE_VERBOSE ) <nl> - list ( APPEND $ { _cmdVar } " - DCOTIRE_VERBOSE : BOOL = ON " ) <nl> - elseif ( " $ { CMAKE_GENERATOR } " MATCHES " Makefiles " ) <nl> - list ( APPEND $ { _cmdVar } " - DCOTIRE_VERBOSE : BOOL = $ ( VERBOSE ) " ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_init_compile_cmd _cmdVar _language _compilerLauncher _compilerExe _compilerArg1 ) <nl> - if ( NOT _compilerLauncher ) <nl> - set ( _compilerLauncher $ { CMAKE_ $ { _language } _COMPILER_LAUNCHER } ) <nl> - endif ( ) <nl> - if ( NOT _compilerExe ) <nl> - set ( _compilerExe " $ { CMAKE_ $ { _language } _COMPILER } " ) <nl> - endif ( ) <nl> - if ( NOT _compilerArg1 ) <nl> - set ( _compilerArg1 $ { CMAKE_ $ { _language } _COMPILER_ARG1 } ) <nl> - endif ( ) <nl> - string ( STRIP " $ { _compilerArg1 } " _compilerArg1 ) <nl> - if ( " $ { CMAKE_GENERATOR } " MATCHES " Make | Ninja " ) <nl> - # compiler launcher is only supported for Makefile 
and Ninja <nl> - set ( $ { _cmdVar } $ { _compilerLauncher } " $ { _compilerExe } " $ { _compilerArg1 } PARENT_SCOPE ) <nl> - else ( ) <nl> - set ( $ { _cmdVar } " $ { _compilerExe } " $ { _compilerArg1 } PARENT_SCOPE ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_add_definitions_to_cmd _cmdVar _language ) <nl> - foreach ( _definition $ { ARGN } ) <nl> - if ( WIN32 AND CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - list ( APPEND $ { _cmdVar } " / D $ { _definition } " ) <nl> - else ( ) <nl> - list ( APPEND $ { _cmdVar } " - D $ { _definition } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_add_includes_to_cmd _cmdVar _language _includesVar _systemIncludesVar ) <nl> - set ( _includeDirs $ { $ { _includesVar } } $ { $ { _systemIncludesVar } } ) <nl> - if ( _includeDirs ) <nl> - list ( REMOVE_DUPLICATES _includeDirs ) <nl> - foreach ( _include $ { _includeDirs } ) <nl> - if ( WIN32 AND CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - file ( TO_NATIVE_PATH " $ { _include } " _include ) <nl> - list ( APPEND $ { _cmdVar } " $ { CMAKE_INCLUDE_FLAG_ $ { _language } } $ { CMAKE_INCLUDE_FLAG_ $ { _language } _SEP } $ { _include } " ) <nl> - else ( ) <nl> - set ( _index - 1 ) <nl> - if ( " $ { CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } } " MATCHES " . + " ) <nl> - list ( FIND $ { _systemIncludesVar } " $ { _include } " _index ) <nl> - endif ( ) <nl> - if ( _index GREATER - 1 ) <nl> - list ( APPEND $ { _cmdVar } " $ { CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } } $ { _include } " ) <nl> - else ( ) <nl> - list ( APPEND $ { _cmdVar } " $ { CMAKE_INCLUDE_FLAG_ $ { _language } } $ { CMAKE_INCLUDE_FLAG_ $ { _language } _SEP } $ { _include } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - set ( $ { _cmdVar } $ { $ { _cmdVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_frameworks_to_cmd _cmdVar _language _includesVar _systemIncludesVar ) <nl> - if ( APPLE ) <nl> - set ( _frameworkDirs " " ) <nl> - foreach ( _include $ { $ { _includesVar } } ) <nl> - if ( IS_ABSOLUTE " $ { _include } " AND _include MATCHES " \ \ . framework $ " ) <nl> - get_filename_component ( _frameworkDir " $ { _include } " DIRECTORY ) <nl> - list ( APPEND _frameworkDirs " $ { _frameworkDir } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( _systemFrameworkDirs " " ) <nl> - foreach ( _include $ { $ { _systemIncludesVar } } ) <nl> - if ( IS_ABSOLUTE " $ { _include } " AND _include MATCHES " \ \ . framework $ " ) <nl> - get_filename_component ( _frameworkDir " $ { _include } " DIRECTORY ) <nl> - list ( APPEND _systemFrameworkDirs " $ { _frameworkDir } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _systemFrameworkDirs ) <nl> - list ( APPEND _frameworkDirs $ { _systemFrameworkDirs } ) <nl> - endif ( ) <nl> - if ( _frameworkDirs ) <nl> - list ( REMOVE_DUPLICATES _frameworkDirs ) <nl> - foreach ( _frameworkDir $ { _frameworkDirs } ) <nl> - set ( _index - 1 ) <nl> - if ( " $ { CMAKE_ $ { _language } _SYSTEM_FRAMEWORK_SEARCH_FLAG } " MATCHES " . 
+ " ) <nl> - list ( FIND _systemFrameworkDirs " $ { _frameworkDir } " _index ) <nl> - endif ( ) <nl> - if ( _index GREATER - 1 ) <nl> - list ( APPEND $ { _cmdVar } " $ { CMAKE_ $ { _language } _SYSTEM_FRAMEWORK_SEARCH_FLAG } $ { _frameworkDir } " ) <nl> - else ( ) <nl> - list ( APPEND $ { _cmdVar } " $ { CMAKE_ $ { _language } _FRAMEWORK_SEARCH_FLAG } $ { _frameworkDir } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _cmdVar } $ { $ { _cmdVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_add_compile_flags_to_cmd _cmdVar ) <nl> - foreach ( _flag $ { ARGN } ) <nl> - list ( APPEND $ { _cmdVar } " $ { _flag } " ) <nl> - endforeach ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_check_file_up_to_date _fileIsUpToDateVar _file ) <nl> - if ( EXISTS " $ { _file } " ) <nl> - set ( _triggerFile " " ) <nl> - foreach ( _dependencyFile $ { ARGN } ) <nl> - if ( EXISTS " $ { _dependencyFile } " ) <nl> - # IS_NEWER_THAN returns TRUE if both files have the same timestamp <nl> - # thus we do the comparison in both directions to exclude ties <nl> - if ( " $ { _dependencyFile } " IS_NEWER_THAN " $ { _file } " AND <nl> - NOT " $ { _file } " IS_NEWER_THAN " $ { _dependencyFile } " ) <nl> - set ( _triggerFile " $ { _dependencyFile } " ) <nl> - break ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _triggerFile ) <nl> - if ( COTIRE_VERBOSE ) <nl> - get_filename_component ( _fileName " $ { _file } " NAME ) <nl> - message ( STATUS " $ { _fileName } update triggered by $ { _triggerFile } change . " ) <nl> - endif ( ) <nl> - set ( $ { _fileIsUpToDateVar } FALSE PARENT_SCOPE ) <nl> - else ( ) <nl> - if ( COTIRE_VERBOSE ) <nl> - get_filename_component ( _fileName " $ { _file } " NAME ) <nl> - message ( STATUS " $ { _fileName } is up - to - date . " ) <nl> - endif ( ) <nl> - set ( $ { _fileIsUpToDateVar } TRUE PARENT_SCOPE ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( COTIRE_VERBOSE ) <nl> - get_filename_component ( _fileName " $ { _file } " NAME ) <nl> - message ( STATUS " $ { _fileName } does not exist yet . " ) <nl> - endif ( ) <nl> - set ( $ { _fileIsUpToDateVar } FALSE PARENT_SCOPE ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_find_closest_relative_path _headerFile _includeDirs _relPathVar ) <nl> - set ( $ { _relPathVar } " " ) <nl> - foreach ( _includeDir $ { _includeDirs } ) <nl> - if ( IS_DIRECTORY " $ { _includeDir } " ) <nl> - file ( RELATIVE_PATH _relPath " $ { _includeDir } " " $ { _headerFile } " ) <nl> - if ( NOT IS_ABSOLUTE " $ { _relPath } " AND NOT " $ { _relPath } " MATCHES " ^ \ \ . \ \ . 
" ) <nl> - string ( LENGTH " $ { $ { _relPathVar } } " _closestLen ) <nl> - string ( LENGTH " $ { _relPath } " _relLen ) <nl> - if ( _closestLen EQUAL 0 OR _relLen LESS _closestLen ) <nl> - set ( $ { _relPathVar } " $ { _relPath } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - elseif ( " $ { _includeDir } " STREQUAL " $ { _headerFile } " ) <nl> - # if path matches exactly , return short non - empty string <nl> - set ( $ { _relPathVar } " 1 " ) <nl> - break ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_check_header_file_location _headerFile _insideIncludeDirs _outsideIncludeDirs _headerIsInside ) <nl> - # check header path against ignored and honored include directories <nl> - cotire_find_closest_relative_path ( " $ { _headerFile } " " $ { _insideIncludeDirs } " _insideRelPath ) <nl> - if ( _insideRelPath ) <nl> - # header is inside , but could be become outside if there is a shorter outside match <nl> - cotire_find_closest_relative_path ( " $ { _headerFile } " " $ { _outsideIncludeDirs } " _outsideRelPath ) <nl> - if ( _outsideRelPath ) <nl> - string ( LENGTH " $ { _insideRelPath } " _insideRelPathLen ) <nl> - string ( LENGTH " $ { _outsideRelPath } " _outsideRelPathLen ) <nl> - if ( _outsideRelPathLen LESS _insideRelPathLen ) <nl> - set ( $ { _headerIsInside } FALSE ) <nl> - else ( ) <nl> - set ( $ { _headerIsInside } TRUE ) <nl> - endif ( ) <nl> - else ( ) <nl> - set ( $ { _headerIsInside } TRUE ) <nl> - endif ( ) <nl> - else ( ) <nl> - # header is outside <nl> - set ( $ { _headerIsInside } FALSE ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_check_ignore_header_file_path _headerFile _headerIsIgnoredVar ) <nl> - if ( NOT EXISTS " $ { _headerFile } " ) <nl> - set ( $ { _headerIsIgnoredVar } TRUE ) <nl> - elseif ( IS_DIRECTORY " $ { _headerFile } " ) <nl> - set ( $ { _headerIsIgnoredVar } TRUE ) <nl> - elseif ( " $ { _headerFile } " MATCHES " \ \ . \ \ . | [ _ - ] fixed " AND " $ { _headerFile } " MATCHES " \ \ . h $ " ) <nl> - # heuristic : ignore C headers with embedded parent directory references or " - fixed " or " _fixed " in path <nl> - # these often stem from using GCC # include_next tricks , which may break the precompiled header compilation <nl> - # with the error message " error : no include path in which to search for header . h " <nl> - set ( $ { _headerIsIgnoredVar } TRUE ) <nl> - else ( ) <nl> - set ( $ { _headerIsIgnoredVar } FALSE ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_check_ignore_header_file_ext _headerFile _ignoreExtensionsVar _headerIsIgnoredVar ) <nl> - # check header file extension <nl> - cotire_get_source_file_extension ( " $ { _headerFile } " _headerFileExt ) <nl> - set ( $ { _headerIsIgnoredVar } FALSE ) <nl> - if ( _headerFileExt ) <nl> - list ( FIND $ { _ignoreExtensionsVar } " $ { _headerFileExt } " _index ) <nl> - if ( _index GREATER - 1 ) <nl> - set ( $ { _headerIsIgnoredVar } TRUE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_parse_line _line _headerFileVar _headerDepthVar ) <nl> - if ( MSVC ) <nl> - # cl . exe / showIncludes output looks different depending on the language pack used , e . g . 
: <nl> - # English : " Note : including file : C : \ directory \ file " <nl> - # German : " Hinweis : Einlesen der Datei : C : \ directory \ file " <nl> - # We use a very general regular expression , relying on the presence of the : characters <nl> - if ( _line MATCHES " ( + ) ( [ a - zA - Z ] : [ ^ : ] + ) $ " ) <nl> - # Visual Studio compiler output <nl> - string ( LENGTH " $ { CMAKE_MATCH_1 } " $ { _headerDepthVar } ) <nl> - get_filename_component ( $ { _headerFileVar } " $ { CMAKE_MATCH_2 } " ABSOLUTE ) <nl> - else ( ) <nl> - set ( $ { _headerFileVar } " " ) <nl> - set ( $ { _headerDepthVar } 0 ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( _line MATCHES " ^ ( \ \ . + ) ( . * ) $ " ) <nl> - # GCC like output <nl> - string ( LENGTH " $ { CMAKE_MATCH_1 } " $ { _headerDepthVar } ) <nl> - if ( IS_ABSOLUTE " $ { CMAKE_MATCH_2 } " ) <nl> - set ( $ { _headerFileVar } " $ { CMAKE_MATCH_2 } " ) <nl> - else ( ) <nl> - get_filename_component ( $ { _headerFileVar } " $ { CMAKE_MATCH_2 } " REALPATH ) <nl> - endif ( ) <nl> - else ( ) <nl> - set ( $ { _headerFileVar } " " ) <nl> - set ( $ { _headerDepthVar } 0 ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_parse_includes _language _scanOutput _ignoredIncludeDirs _honoredIncludeDirs _ignoredExtensions _selectedIncludesVar _unparsedLinesVar ) <nl> - if ( WIN32 ) <nl> - # prevent CMake macro invocation errors due to backslash characters in Windows paths <nl> - string ( REPLACE " \ \ " " / " _scanOutput " $ { _scanOutput } " ) <nl> - endif ( ) <nl> - # canonize slashes <nl> - string ( REPLACE " / / " " / " _scanOutput " $ { _scanOutput } " ) <nl> - # prevent semicolon from being interpreted as a line separator <nl> - string ( REPLACE " ; " " \ \ ; " _scanOutput " $ { _scanOutput } " ) <nl> - # then separate lines <nl> - string ( REGEX REPLACE " \ n " " ; " _scanOutput " $ { _scanOutput } " ) <nl> - list ( LENGTH _scanOutput _len ) <nl> - # remove duplicate lines to speed up parsing <nl> - list ( REMOVE_DUPLICATES _scanOutput ) <nl> - list ( LENGTH _scanOutput _uniqueLen ) <nl> - if ( COTIRE_VERBOSE OR COTIRE_DEBUG ) <nl> - message ( STATUS " Scanning $ { _uniqueLen } unique lines of $ { _len } for includes " ) <nl> - if ( _ignoredExtensions ) <nl> - message ( STATUS " Ignored extensions : $ { _ignoredExtensions } " ) <nl> - endif ( ) <nl> - if ( _ignoredIncludeDirs ) <nl> - message ( STATUS " Ignored paths : $ { _ignoredIncludeDirs } " ) <nl> - endif ( ) <nl> - if ( _honoredIncludeDirs ) <nl> - message ( STATUS " Included paths : $ { _honoredIncludeDirs } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - set ( _selectedIncludes " " ) <nl> - set ( _unparsedLines " " ) <nl> - # stack keeps track of inside / outside project status of processed header files <nl> - set ( _headerIsInsideStack " " ) <nl> - foreach ( _line IN LISTS _scanOutput ) <nl> - if ( _line ) <nl> - cotire_parse_line ( " $ { _line } " _headerFile _headerDepth ) <nl> - if ( _headerFile ) <nl> - cotire_check_header_file_location ( " $ { _headerFile } " " $ { _ignoredIncludeDirs } " " $ { _honoredIncludeDirs } " _headerIsInside ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _headerDepth } : $ { _headerFile } $ { _headerIsInside } " ) <nl> - endif ( ) <nl> - # update stack <nl> - list ( LENGTH _headerIsInsideStack _stackLen ) <nl> - if ( _headerDepth GREATER _stackLen ) <nl> - math ( EXPR _stackLen " $ { _stackLen } + 1 " ) <nl> - foreach ( _index RANGE $ { _stackLen } $ { _headerDepth } ) <nl> - list ( 
APPEND _headerIsInsideStack $ { _headerIsInside } ) <nl> - endforeach ( ) <nl> - else ( ) <nl> - foreach ( _index RANGE $ { _headerDepth } $ { _stackLen } ) <nl> - list ( REMOVE_AT _headerIsInsideStack - 1 ) <nl> - endforeach ( ) <nl> - list ( APPEND _headerIsInsideStack $ { _headerIsInside } ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _headerIsInsideStack } " ) <nl> - endif ( ) <nl> - # header is a candidate if it is outside project <nl> - if ( NOT _headerIsInside ) <nl> - # get parent header file ' s inside / outside status <nl> - if ( _headerDepth GREATER 1 ) <nl> - math ( EXPR _index " $ { _headerDepth } - 2 " ) <nl> - list ( GET _headerIsInsideStack $ { _index } _parentHeaderIsInside ) <nl> - else ( ) <nl> - set ( _parentHeaderIsInside TRUE ) <nl> - endif ( ) <nl> - # select header file if parent header file is inside project <nl> - # ( e . g . , a project header file that includes a standard header file ) <nl> - if ( _parentHeaderIsInside ) <nl> - cotire_check_ignore_header_file_path ( " $ { _headerFile } " _headerIsIgnored ) <nl> - if ( NOT _headerIsIgnored ) <nl> - cotire_check_ignore_header_file_ext ( " $ { _headerFile } " _ignoredExtensions _headerIsIgnored ) <nl> - if ( NOT _headerIsIgnored ) <nl> - list ( APPEND _selectedIncludes " $ { _headerFile } " ) <nl> - else ( ) <nl> - # fix header ' s inside status on stack , it is ignored by extension now <nl> - list ( REMOVE_AT _headerIsInsideStack - 1 ) <nl> - list ( APPEND _headerIsInsideStack TRUE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _headerFile } $ { _ignoredExtensions } $ { _headerIsIgnored } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( MSVC ) <nl> - # for cl . exe do not keep unparsed lines which solely consist of a source file name <nl> - string ( FIND " $ { _sourceFiles } " " $ { _line } " _index ) <nl> - if ( _index LESS 0 ) <nl> - list ( APPEND _unparsedLines " $ { _line } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - list ( APPEND _unparsedLines " $ { _line } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - list ( REMOVE_DUPLICATES _selectedIncludes ) <nl> - set ( $ { _selectedIncludesVar } $ { _selectedIncludes } PARENT_SCOPE ) <nl> - set ( $ { _unparsedLinesVar } $ { _unparsedLines } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_scan_includes _includesVar ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs COMPILER_ID COMPILER_EXECUTABLE COMPILER_ARG1 COMPILER_VERSION LANGUAGE UNPARSED_LINES SCAN_RESULT ) <nl> - set ( _multiValueArgs COMPILE_DEFINITIONS COMPILE_FLAGS INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES <nl> - IGNORE_PATH INCLUDE_PATH IGNORE_EXTENSIONS INCLUDE_PRIORITY_PATH COMPILER_LAUNCHER ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - set ( _sourceFiles $ { _option_UNPARSED_ARGUMENTS } ) <nl> - if ( NOT _option_LANGUAGE ) <nl> - set ( _option_LANGUAGE " CXX " ) <nl> - endif ( ) <nl> - if ( NOT _option_COMPILER_ID ) <nl> - set ( _option_COMPILER_ID " $ { CMAKE_ $ { _option_LANGUAGE } _ID } " ) <nl> - endif ( ) <nl> - if ( NOT _option_COMPILER_VERSION ) <nl> - set ( _option_COMPILER_VERSION " $ { CMAKE_ $ { _option_LANGUAGE } _COMPILER_VERSION } " ) <nl> - endif ( ) <nl> - cotire_init_compile_cmd ( _cmd " $ { _option_LANGUAGE } " " $ { _option_COMPILER_LAUNCHER } " " $ { _option_COMPILER_EXECUTABLE } " " $ { _option_COMPILER_ARG1 } " ) <nl> - 
cotire_add_definitions_to_cmd ( _cmd " $ { _option_LANGUAGE } " $ { _option_COMPILE_DEFINITIONS } ) <nl> - cotire_add_compile_flags_to_cmd ( _cmd $ { _option_COMPILE_FLAGS } ) <nl> - cotire_add_includes_to_cmd ( _cmd " $ { _option_LANGUAGE } " _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - cotire_add_frameworks_to_cmd ( _cmd " $ { _option_LANGUAGE } " _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - cotire_add_makedep_flags ( " $ { _option_LANGUAGE } " " $ { _option_COMPILER_ID } " " $ { _option_COMPILER_VERSION } " _cmd ) <nl> - # only consider existing source files for scanning <nl> - set ( _existingSourceFiles " " ) <nl> - foreach ( _sourceFile $ { _sourceFiles } ) <nl> - if ( EXISTS " $ { _sourceFile } " ) <nl> - list ( APPEND _existingSourceFiles " $ { _sourceFile } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( NOT _existingSourceFiles ) <nl> - set ( $ { _includesVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - list ( APPEND _cmd $ { _existingSourceFiles } ) <nl> - if ( COTIRE_VERBOSE ) <nl> - message ( STATUS " execute_process : $ { _cmd } " ) <nl> - endif ( ) <nl> - if ( _option_COMPILER_ID MATCHES " MSVC " ) <nl> - # cl . exe messes with the output streams unless the environment variable VS_UNICODE_OUTPUT is cleared <nl> - unset ( ENV { VS_UNICODE_OUTPUT } ) <nl> - endif ( ) <nl> - execute_process ( <nl> - COMMAND $ { _cmd } <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - RESULT_VARIABLE _result <nl> - OUTPUT_QUIET <nl> - ERROR_VARIABLE _output ) <nl> - if ( _result ) <nl> - message ( STATUS " Result $ { _result } scanning includes of $ { _existingSourceFiles } . " ) <nl> - endif ( ) <nl> - cotire_parse_includes ( <nl> - " $ { _option_LANGUAGE } " " $ { _output } " <nl> - " $ { _option_IGNORE_PATH } " " $ { _option_INCLUDE_PATH } " <nl> - " $ { _option_IGNORE_EXTENSIONS } " <nl> - _includes _unparsedLines <nl> - $ { _sourceFiles } ) <nl> - if ( _option_INCLUDE_PRIORITY_PATH ) <nl> - set ( _sortedIncludes " " ) <nl> - foreach ( _priorityPath $ { _option_INCLUDE_PRIORITY_PATH } ) <nl> - foreach ( _include $ { _includes } ) <nl> - string ( FIND $ { _include } $ { _priorityPath } _position ) <nl> - if ( _position GREATER - 1 ) <nl> - list ( APPEND _sortedIncludes $ { _include } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endforeach ( ) <nl> - if ( _sortedIncludes ) <nl> - list ( INSERT _includes 0 $ { _sortedIncludes } ) <nl> - list ( REMOVE_DUPLICATES _includes ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _includesVar } $ { _includes } PARENT_SCOPE ) <nl> - if ( _option_UNPARSED_LINES ) <nl> - set ( $ { _option_UNPARSED_LINES } $ { _unparsedLines } PARENT_SCOPE ) <nl> - endif ( ) <nl> - if ( _option_SCAN_RESULT ) <nl> - set ( $ { _option_SCAN_RESULT } $ { _result } PARENT_SCOPE ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_append_undefs _contentsVar ) <nl> - set ( _undefs $ { ARGN } ) <nl> - if ( _undefs ) <nl> - list ( REMOVE_DUPLICATES _undefs ) <nl> - foreach ( _definition $ { _undefs } ) <nl> - list ( APPEND $ { _contentsVar } " # undef $ { _definition } " ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_comment_str _language _commentText _commentVar ) <nl> - if ( " $ { _language } " STREQUAL " CMAKE " ) <nl> - set ( $ { _commentVar } " # $ { _commentText } " ) <nl> - else ( ) <nl> - set ( $ { _commentVar } " / * $ { _commentText } * / " ) <nl> - endif ( ) <nl> - endmacro ( ) <nl> - <nl> - function ( 
cotire_write_file _language _file _contents _force ) <nl> - get_filename_component ( _moduleName " $ { COTIRE_CMAKE_MODULE_FILE } " NAME ) <nl> - cotire_comment_str ( " $ { _language } " " $ { _moduleName } $ { COTIRE_CMAKE_MODULE_VERSION } generated file " _header1 ) <nl> - cotire_comment_str ( " $ { _language } " " $ { _file } " _header2 ) <nl> - set ( _contents " $ { _header1 } \ n $ { _header2 } \ n $ { _contents } " ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _contents } " ) <nl> - endif ( ) <nl> - if ( _force OR NOT EXISTS " $ { _file } " ) <nl> - file ( WRITE " $ { _file } " " $ { _contents } " ) <nl> - else ( ) <nl> - file ( READ " $ { _file } " _oldContents ) <nl> - if ( NOT " $ { _oldContents } " STREQUAL " $ { _contents } " ) <nl> - file ( WRITE " $ { _file } " " $ { _contents } " ) <nl> - else ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _file } unchanged " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_generate_unity_source _unityFile ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs LANGUAGE ) <nl> - set ( _multiValueArgs <nl> - DEPENDS SOURCES_COMPILE_DEFINITIONS <nl> - PRE_UNDEFS SOURCES_PRE_UNDEFS POST_UNDEFS SOURCES_POST_UNDEFS PROLOGUE EPILOGUE ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - if ( _option_DEPENDS ) <nl> - cotire_check_file_up_to_date ( _unityFileIsUpToDate " $ { _unityFile } " $ { _option_DEPENDS } ) <nl> - if ( _unityFileIsUpToDate ) <nl> - return ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _sourceFiles $ { _option_UNPARSED_ARGUMENTS } ) <nl> - if ( NOT _option_PRE_UNDEFS ) <nl> - set ( _option_PRE_UNDEFS " " ) <nl> - endif ( ) <nl> - if ( NOT _option_SOURCES_PRE_UNDEFS ) <nl> - set ( _option_SOURCES_PRE_UNDEFS " " ) <nl> - endif ( ) <nl> - if ( NOT _option_POST_UNDEFS ) <nl> - set ( _option_POST_UNDEFS " " ) <nl> - endif ( ) <nl> - if ( NOT _option_SOURCES_POST_UNDEFS ) <nl> - set ( _option_SOURCES_POST_UNDEFS " " ) <nl> - endif ( ) <nl> - set ( _contents " " ) <nl> - if ( _option_PROLOGUE ) <nl> - list ( APPEND _contents $ { _option_PROLOGUE } ) <nl> - endif ( ) <nl> - if ( _option_LANGUAGE AND _sourceFiles ) <nl> - if ( " $ { _option_LANGUAGE } " STREQUAL " CXX " ) <nl> - list ( APPEND _contents " # ifdef __cplusplus " ) <nl> - elseif ( " $ { _option_LANGUAGE } " STREQUAL " C " ) <nl> - list ( APPEND _contents " # ifndef __cplusplus " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _compileUndefinitions " " ) <nl> - foreach ( _sourceFile $ { _sourceFiles } ) <nl> - cotire_get_source_compile_definitions ( <nl> - " $ { _option_CONFIGURATION } " " $ { _option_LANGUAGE } " " $ { _sourceFile } " _compileDefinitions <nl> - $ { _option_SOURCES_COMPILE_DEFINITIONS } ) <nl> - cotire_get_source_undefs ( " $ { _sourceFile } " COTIRE_UNITY_SOURCE_PRE_UNDEFS _sourcePreUndefs $ { _option_SOURCES_PRE_UNDEFS } ) <nl> - cotire_get_source_undefs ( " $ { _sourceFile } " COTIRE_UNITY_SOURCE_POST_UNDEFS _sourcePostUndefs $ { _option_SOURCES_POST_UNDEFS } ) <nl> - if ( _option_PRE_UNDEFS ) <nl> - list ( APPEND _compileUndefinitions $ { _option_PRE_UNDEFS } ) <nl> - endif ( ) <nl> - if ( _sourcePreUndefs ) <nl> - list ( APPEND _compileUndefinitions $ { _sourcePreUndefs } ) <nl> - endif ( ) <nl> - if ( _compileUndefinitions ) <nl> - cotire_append_undefs ( _contents $ { _compileUndefinitions } ) <nl> - set ( _compileUndefinitions " " ) <nl> - endif ( ) <nl> - if ( _sourcePostUndefs ) <nl> - 
list ( APPEND _compileUndefinitions $ { _sourcePostUndefs } ) <nl> - endif ( ) <nl> - if ( _option_POST_UNDEFS ) <nl> - list ( APPEND _compileUndefinitions $ { _option_POST_UNDEFS } ) <nl> - endif ( ) <nl> - foreach ( _definition $ { _compileDefinitions } ) <nl> - if ( _definition MATCHES " ^ ( [ a - zA - Z0 - 9_ ] + ) = ( . + ) $ " ) <nl> - list ( APPEND _contents " # define $ { CMAKE_MATCH_1 } $ { CMAKE_MATCH_2 } " ) <nl> - list ( INSERT _compileUndefinitions 0 " $ { CMAKE_MATCH_1 } " ) <nl> - else ( ) <nl> - list ( APPEND _contents " # define $ { _definition } " ) <nl> - list ( INSERT _compileUndefinitions 0 " $ { _definition } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - # use absolute path as source file location <nl> - get_filename_component ( _sourceFileLocation " $ { _sourceFile } " ABSOLUTE ) <nl> - if ( WIN32 ) <nl> - file ( TO_NATIVE_PATH " $ { _sourceFileLocation } " _sourceFileLocation ) <nl> - endif ( ) <nl> - list ( APPEND _contents " # include \ " $ { _sourceFileLocation } \ " " ) <nl> - endforeach ( ) <nl> - if ( _compileUndefinitions ) <nl> - cotire_append_undefs ( _contents $ { _compileUndefinitions } ) <nl> - set ( _compileUndefinitions " " ) <nl> - endif ( ) <nl> - if ( _option_LANGUAGE AND _sourceFiles ) <nl> - list ( APPEND _contents " # endif " ) <nl> - endif ( ) <nl> - if ( _option_EPILOGUE ) <nl> - list ( APPEND _contents $ { _option_EPILOGUE } ) <nl> - endif ( ) <nl> - list ( APPEND _contents " " ) <nl> - string ( REPLACE " ; " " \ n " _contents " $ { _contents } " ) <nl> - if ( COTIRE_VERBOSE ) <nl> - message ( " $ { _contents } " ) <nl> - endif ( ) <nl> - cotire_write_file ( " $ { _option_LANGUAGE } " " $ { _unityFile } " " $ { _contents } " TRUE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_generate_prefix_header _prefixFile ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs LANGUAGE COMPILER_EXECUTABLE COMPILER_ARG1 COMPILER_ID COMPILER_VERSION ) <nl> - set ( _multiValueArgs DEPENDS COMPILE_DEFINITIONS COMPILE_FLAGS <nl> - INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES IGNORE_PATH INCLUDE_PATH <nl> - IGNORE_EXTENSIONS INCLUDE_PRIORITY_PATH COMPILER_LAUNCHER ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - if ( NOT _option_COMPILER_ID ) <nl> - set ( _option_COMPILER_ID " $ { CMAKE_ $ { _option_LANGUAGE } _ID } " ) <nl> - endif ( ) <nl> - if ( NOT _option_COMPILER_VERSION ) <nl> - set ( _option_COMPILER_VERSION " $ { CMAKE_ $ { _option_LANGUAGE } _COMPILER_VERSION } " ) <nl> - endif ( ) <nl> - if ( _option_DEPENDS ) <nl> - cotire_check_file_up_to_date ( _prefixFileIsUpToDate " $ { _prefixFile } " $ { _option_DEPENDS } ) <nl> - if ( _prefixFileIsUpToDate ) <nl> - # create empty log file <nl> - set ( _unparsedLinesFile " $ { _prefixFile } . 
log " ) <nl> - file ( WRITE " $ { _unparsedLinesFile } " " " ) <nl> - return ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _prologue " " ) <nl> - set ( _epilogue " " ) <nl> - if ( _option_COMPILER_ID MATCHES " Clang " ) <nl> - set ( _prologue " # pragma clang system_header " ) <nl> - elseif ( _option_COMPILER_ID MATCHES " GNU " ) <nl> - set ( _prologue " # pragma GCC system_header " ) <nl> - elseif ( _option_COMPILER_ID MATCHES " MSVC " ) <nl> - set ( _prologue " # pragma warning ( push , 0 ) " ) <nl> - set ( _epilogue " # pragma warning ( pop ) " ) <nl> - elseif ( _option_COMPILER_ID MATCHES " Intel " ) <nl> - # Intel compiler requires hdrstop pragma to stop generating PCH file <nl> - set ( _epilogue " # pragma hdrstop " ) <nl> - endif ( ) <nl> - set ( _sourceFiles $ { _option_UNPARSED_ARGUMENTS } ) <nl> - cotire_scan_includes ( _selectedHeaders $ { _sourceFiles } <nl> - LANGUAGE " $ { _option_LANGUAGE } " <nl> - COMPILER_LAUNCHER " $ { _option_COMPILER_LAUNCHER } " <nl> - COMPILER_EXECUTABLE " $ { _option_COMPILER_EXECUTABLE } " <nl> - COMPILER_ARG1 " $ { _option_COMPILER_ARG1 } " <nl> - COMPILER_ID " $ { _option_COMPILER_ID } " <nl> - COMPILER_VERSION " $ { _option_COMPILER_VERSION } " <nl> - COMPILE_DEFINITIONS $ { _option_COMPILE_DEFINITIONS } <nl> - COMPILE_FLAGS $ { _option_COMPILE_FLAGS } <nl> - INCLUDE_DIRECTORIES $ { _option_INCLUDE_DIRECTORIES } <nl> - SYSTEM_INCLUDE_DIRECTORIES $ { _option_SYSTEM_INCLUDE_DIRECTORIES } <nl> - IGNORE_PATH $ { _option_IGNORE_PATH } <nl> - INCLUDE_PATH $ { _option_INCLUDE_PATH } <nl> - IGNORE_EXTENSIONS $ { _option_IGNORE_EXTENSIONS } <nl> - INCLUDE_PRIORITY_PATH $ { _option_INCLUDE_PRIORITY_PATH } <nl> - UNPARSED_LINES _unparsedLines <nl> - SCAN_RESULT _scanResult ) <nl> - cotire_generate_unity_source ( " $ { _prefixFile } " <nl> - PROLOGUE $ { _prologue } EPILOGUE $ { _epilogue } LANGUAGE " $ { _option_LANGUAGE } " $ { _selectedHeaders } ) <nl> - set ( _unparsedLinesFile " $ { _prefixFile } . log " ) <nl> - if ( _unparsedLines ) <nl> - if ( COTIRE_VERBOSE OR _scanResult OR NOT _selectedHeaders ) <nl> - list ( LENGTH _unparsedLines _skippedLineCount ) <nl> - message ( STATUS " $ { _skippedLineCount } line ( s ) skipped , see $ { _unparsedLinesFile } " ) <nl> - endif ( ) <nl> - string ( REPLACE " ; " " \ n " _unparsedLines " $ { _unparsedLines } " ) <nl> - endif ( ) <nl> - file ( WRITE " $ { _unparsedLinesFile } " " $ { _unparsedLines } " ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_makedep_flags _language _compilerID _compilerVersion _flagsVar ) <nl> - set ( _flags $ { $ { _flagsVar } } ) <nl> - if ( _compilerID MATCHES " MSVC " ) <nl> - # cl . 
exe options used <nl> - # / nologo suppresses display of sign - on banner <nl> - # / TC treat all files named on the command line as C source files <nl> - # / TP treat all files named on the command line as C + + source files <nl> - # / EP preprocess to stdout without # line directives <nl> - # / showIncludes list include files <nl> - set ( _sourceFileTypeC " / TC " ) <nl> - set ( _sourceFileTypeCXX " / TP " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags / nologo " $ { _sourceFileType $ { _language } } " / EP / showIncludes ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " $ { _sourceFileType $ { _language } } / EP / showIncludes " ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " GNU " ) <nl> - # GCC options used <nl> - # - H print the name of each header file used <nl> - # - E invoke preprocessor <nl> - # - fdirectives - only do not expand macros , requires GCC > = 4 . 3 <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags - H - E ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 4 . 3 . 0 " ) <nl> - list ( APPEND _flags " - fdirectives - only " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - H - E " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 4 . 3 . 0 " ) <nl> - set ( _flags " $ { _flags } - fdirectives - only " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " Clang " ) <nl> - # Clang options used <nl> - # - H print the name of each header file used <nl> - # - E invoke preprocessor <nl> - # - fno - color - diagnostics do not print diagnostics in color <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags - H - E - fno - color - diagnostics ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - H - E - fno - color - diagnostics " ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " Intel " ) <nl> - if ( WIN32 ) <nl> - # Windows Intel options used <nl> - # / nologo do not display compiler version information <nl> - # / QH display the include file order <nl> - # / EP preprocess to stdout , omitting # line directives <nl> - # / TC process all source or unrecognized file types as C source files <nl> - # / TP process all source or unrecognized file types as C + + source files <nl> - set ( _sourceFileTypeC " / TC " ) <nl> - set ( _sourceFileTypeCXX " / TP " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags / nologo " $ { _sourceFileType $ { _language } } " / EP / QH ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " $ { _sourceFileType $ { _language } } / EP / QH " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # Linux / Mac OS X Intel options used <nl> - # - H print the name of each header file used <nl> - # - EP preprocess to stdout , omitting # line directives <nl> - # - Kc + + process all source or unrecognized file types as C + + source files <nl> - if ( _flags ) <nl> - # append to list <nl> - if ( " $ { _language } " STREQUAL " CXX " ) <nl> - list ( APPEND _flags - Kc + + ) <nl> - endif ( ) <nl> - list ( APPEND _flags - H - EP ) <nl> - else ( ) <nl> - # return as a flag string <nl> - if ( " $ { _language } " STREQUAL " CXX " ) <nl> - set ( _flags " - Kc + + " ) <nl> - endif ( ) <nl> - set ( _flags " $ { _flags } - H - EP " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - message ( FATAL_ERROR " cotire : unsupported $ { _language } compiler $ { _compilerID } version $ { _compilerVersion } . 
" ) <nl> - endif ( ) <nl> - set ( $ { _flagsVar } $ { _flags } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_pch_compilation_flags _language _compilerID _compilerVersion _prefixFile _pchFile _hostFile _flagsVar ) <nl> - set ( _flags $ { $ { _flagsVar } } ) <nl> - if ( _compilerID MATCHES " MSVC " ) <nl> - file ( TO_NATIVE_PATH " $ { _prefixFile } " _prefixFileNative ) <nl> - file ( TO_NATIVE_PATH " $ { _pchFile } " _pchFileNative ) <nl> - file ( TO_NATIVE_PATH " $ { _hostFile } " _hostFileNative ) <nl> - # cl . exe options used <nl> - # / Yc creates a precompiled header file <nl> - # / Fp specifies precompiled header binary file name <nl> - # / FI forces inclusion of file <nl> - # / TC treat all files named on the command line as C source files <nl> - # / TP treat all files named on the command line as C + + source files <nl> - # / Zs syntax check only <nl> - # / Zm precompiled header memory allocation scaling factor <nl> - set ( _sourceFileTypeC " / TC " ) <nl> - set ( _sourceFileTypeCXX " / TP " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags / nologo " $ { _sourceFileType $ { _language } } " <nl> - " / Yc $ { _prefixFileNative } " " / Fp $ { _pchFileNative } " " / FI $ { _prefixFileNative } " / Zs " $ { _hostFileNative } " ) <nl> - if ( COTIRE_PCH_MEMORY_SCALING_FACTOR ) <nl> - list ( APPEND _flags " / Zm $ { COTIRE_PCH_MEMORY_SCALING_FACTOR } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / Yc \ " $ { _prefixFileNative } \ " / Fp \ " $ { _pchFileNative } \ " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - if ( COTIRE_PCH_MEMORY_SCALING_FACTOR ) <nl> - set ( _flags " $ { _flags } / Zm $ { COTIRE_PCH_MEMORY_SCALING_FACTOR } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " GNU | Clang " ) <nl> - # GCC / Clang options used <nl> - # - x specify the source language <nl> - # - c compile but do not link <nl> - # - o place output in file <nl> - # note that we cannot use - w to suppress all warnings upon pre - compiling , because turning off a warning may <nl> - # alter compile flags as a side effect ( e . g . 
, - Wwrite - strings implies - fconst - strings ) <nl> - set ( _xLanguage_C " c - header " ) <nl> - set ( _xLanguage_CXX " c + + - header " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " - x " " $ { _xLanguage_ $ { _language } } " " - c " " $ { _prefixFile } " - o " $ { _pchFile } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - x $ { _xLanguage_ $ { _language } } - c \ " $ { _prefixFile } \ " - o \ " $ { _pchFile } \ " " ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " Intel " ) <nl> - if ( WIN32 ) <nl> - file ( TO_NATIVE_PATH " $ { _prefixFile } " _prefixFileNative ) <nl> - file ( TO_NATIVE_PATH " $ { _pchFile } " _pchFileNative ) <nl> - file ( TO_NATIVE_PATH " $ { _hostFile } " _hostFileNative ) <nl> - # Windows Intel options used <nl> - # / nologo do not display compiler version information <nl> - # / Yc create a precompiled header ( PCH ) file <nl> - # / Fp specify a path or file name for precompiled header files <nl> - # / FI tells the preprocessor to include a specified file name as the header file <nl> - # / TC process all source or unrecognized file types as C source files <nl> - # / TP process all source or unrecognized file types as C + + source files <nl> - # / Zs syntax check only <nl> - # / Wpch - messages enable diagnostics related to pre - compiled headers ( requires Intel XE 2013 Update 2 ) <nl> - set ( _sourceFileTypeC " / TC " ) <nl> - set ( _sourceFileTypeCXX " / TP " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags / nologo " $ { _sourceFileType $ { _language } } " <nl> - " / Yc " " / Fp $ { _pchFileNative } " " / FI $ { _prefixFileNative } " / Zs " $ { _hostFileNative } " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - list ( APPEND _flags " / Wpch - messages " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / Yc / Fp \ " $ { _pchFileNative } \ " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - set ( _flags " $ { _flags } / Wpch - messages " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - # Linux / Mac OS X Intel options used <nl> - # - pch - dir location for precompiled header files <nl> - # - pch - create name of the precompiled header ( PCH ) to create <nl> - # - Kc + + process all source or unrecognized file types as C + + source files <nl> - # - fsyntax - only check only for correct syntax <nl> - # - Wpch - messages enable diagnostics related to pre - compiled headers ( requires Intel XE 2013 Update 2 ) <nl> - get_filename_component ( _pchDir " $ { _pchFile } " DIRECTORY ) <nl> - get_filename_component ( _pchName " $ { _pchFile } " NAME ) <nl> - set ( _xLanguage_C " c - header " ) <nl> - set ( _xLanguage_CXX " c + + - header " ) <nl> - if ( _flags ) <nl> - # append to list <nl> - if ( " $ { _language } " STREQUAL " CXX " ) <nl> - list ( APPEND _flags - Kc + + ) <nl> - endif ( ) <nl> - list ( APPEND _flags " - include " " $ { _prefixFile } " " - pch - dir " " $ { _pchDir } " " - pch - create " " $ { _pchName } " " - fsyntax - only " " $ { _hostFile } " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 
0 " ) <nl> - list ( APPEND _flags " - Wpch - messages " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - include \ " $ { _prefixFile } \ " - pch - dir \ " $ { _pchDir } \ " - pch - create \ " $ { _pchName } \ " " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - set ( _flags " $ { _flags } - Wpch - messages " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - message ( FATAL_ERROR " cotire : unsupported $ { _language } compiler $ { _compilerID } version $ { _compilerVersion } . " ) <nl> - endif ( ) <nl> - set ( $ { _flagsVar } $ { _flags } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_prefix_pch_inclusion_flags _language _compilerID _compilerVersion _prefixFile _pchFile _flagsVar ) <nl> - set ( _flags $ { $ { _flagsVar } } ) <nl> - if ( _compilerID MATCHES " MSVC " ) <nl> - file ( TO_NATIVE_PATH " $ { _prefixFile } " _prefixFileNative ) <nl> - # cl . exe options used <nl> - # / Yu uses a precompiled header file during build <nl> - # / Fp specifies precompiled header binary file name <nl> - # / FI forces inclusion of file <nl> - # / Zm precompiled header memory allocation scaling factor <nl> - if ( _pchFile ) <nl> - file ( TO_NATIVE_PATH " $ { _pchFile } " _pchFileNative ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " / Yu $ { _prefixFileNative } " " / Fp $ { _pchFileNative } " " / FI $ { _prefixFileNative } " ) <nl> - if ( COTIRE_PCH_MEMORY_SCALING_FACTOR ) <nl> - list ( APPEND _flags " / Zm $ { COTIRE_PCH_MEMORY_SCALING_FACTOR } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / Yu \ " $ { _prefixFileNative } \ " / Fp \ " $ { _pchFileNative } \ " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - if ( COTIRE_PCH_MEMORY_SCALING_FACTOR ) <nl> - set ( _flags " $ { _flags } / Zm $ { COTIRE_PCH_MEMORY_SCALING_FACTOR } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - # no precompiled header , force inclusion of prefix header <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " / FI $ { _prefixFileNative } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " GNU " ) <nl> - # GCC options used <nl> - # - include process include file as the first line of the primary source file <nl> - # - Winvalid - pch warns if precompiled header is found but cannot be used <nl> - # note : ccache requires the - include flag to be used in order to process precompiled header correctly <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " - Winvalid - pch " " - include " " $ { _prefixFile } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - Winvalid - pch - include \ " $ { _prefixFile } \ " " ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " Clang " ) <nl> - # Clang options used <nl> - # - include process include file as the first line of the primary source file <nl> - # - include - pch include precompiled header file <nl> - # - Qunused - arguments don ' t emit warning for unused driver arguments <nl> - # note : ccache requires the - include flag to be used in order to process precompiled header correctly <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " - Qunused - arguments " " - include " " $ { _prefixFile } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( 
_flags " - Qunused - arguments - include \ " $ { _prefixFile } \ " " ) <nl> - endif ( ) <nl> - elseif ( _compilerID MATCHES " Intel " ) <nl> - if ( WIN32 ) <nl> - file ( TO_NATIVE_PATH " $ { _prefixFile } " _prefixFileNative ) <nl> - # Windows Intel options used <nl> - # / Yu use a precompiled header ( PCH ) file <nl> - # / Fp specify a path or file name for precompiled header files <nl> - # / FI tells the preprocessor to include a specified file name as the header file <nl> - # / Wpch - messages enable diagnostics related to pre - compiled headers ( requires Intel XE 2013 Update 2 ) <nl> - if ( _pchFile ) <nl> - file ( TO_NATIVE_PATH " $ { _pchFile } " _pchFileNative ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " / Yu " " / Fp $ { _pchFileNative } " " / FI $ { _prefixFileNative } " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - list ( APPEND _flags " / Wpch - messages " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / Yu / Fp \ " $ { _pchFileNative } \ " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - set ( _flags " $ { _flags } / Wpch - messages " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - # no precompiled header , force inclusion of prefix header <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " / FI $ { _prefixFileNative } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " / FI \ " $ { _prefixFileNative } \ " " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - # Linux / Mac OS X Intel options used <nl> - # - pch - dir location for precompiled header files <nl> - # - pch - use name of the precompiled header ( PCH ) to use <nl> - # - include process include file as the first line of the primary source file <nl> - # - Wpch - messages enable diagnostics related to pre - compiled headers ( requires Intel XE 2013 Update 2 ) <nl> - if ( _pchFile ) <nl> - get_filename_component ( _pchDir " $ { _pchFile } " DIRECTORY ) <nl> - get_filename_component ( _pchName " $ { _pchFile } " NAME ) <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " - include " " $ { _prefixFile } " " - pch - dir " " $ { _pchDir } " " - pch - use " " $ { _pchName } " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - list ( APPEND _flags " - Wpch - messages " ) <nl> - endif ( ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - include \ " $ { _prefixFile } \ " - pch - dir \ " $ { _pchDir } \ " - pch - use \ " $ { _pchName } \ " " ) <nl> - if ( NOT " $ { _compilerVersion } " VERSION_LESS " 13 . 1 . 0 " ) <nl> - set ( _flags " $ { _flags } - Wpch - messages " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - # no precompiled header , force inclusion of prefix header <nl> - if ( _flags ) <nl> - # append to list <nl> - list ( APPEND _flags " - include " " $ { _prefixFile } " ) <nl> - else ( ) <nl> - # return as a flag string <nl> - set ( _flags " - include \ " $ { _prefixFile } \ " " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - else ( ) <nl> - message ( FATAL_ERROR " cotire : unsupported $ { _language } compiler $ { _compilerID } version $ { _compilerVersion } . 
" ) <nl> - endif ( ) <nl> - set ( $ { _flagsVar } $ { _flags } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_precompile_prefix_header _prefixFile _pchFile _hostFile ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs COMPILER_EXECUTABLE COMPILER_ARG1 COMPILER_ID COMPILER_VERSION LANGUAGE ) <nl> - set ( _multiValueArgs COMPILE_DEFINITIONS COMPILE_FLAGS INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES SYS COMPILER_LAUNCHER ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - if ( NOT _option_LANGUAGE ) <nl> - set ( _option_LANGUAGE " CXX " ) <nl> - endif ( ) <nl> - if ( NOT _option_COMPILER_ID ) <nl> - set ( _option_COMPILER_ID " $ { CMAKE_ $ { _option_LANGUAGE } _ID } " ) <nl> - endif ( ) <nl> - if ( NOT _option_COMPILER_VERSION ) <nl> - set ( _option_COMPILER_VERSION " $ { CMAKE_ $ { _option_LANGUAGE } _COMPILER_VERSION } " ) <nl> - endif ( ) <nl> - cotire_init_compile_cmd ( _cmd " $ { _option_LANGUAGE } " " $ { _option_COMPILER_LAUNCHER } " " $ { _option_COMPILER_EXECUTABLE } " " $ { _option_COMPILER_ARG1 } " ) <nl> - cotire_add_definitions_to_cmd ( _cmd " $ { _option_LANGUAGE } " $ { _option_COMPILE_DEFINITIONS } ) <nl> - cotire_add_compile_flags_to_cmd ( _cmd $ { _option_COMPILE_FLAGS } ) <nl> - cotire_add_includes_to_cmd ( _cmd " $ { _option_LANGUAGE } " _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - cotire_add_frameworks_to_cmd ( _cmd " $ { _option_LANGUAGE } " _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES ) <nl> - cotire_add_pch_compilation_flags ( <nl> - " $ { _option_LANGUAGE } " " $ { _option_COMPILER_ID } " " $ { _option_COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " " $ { _hostFile } " _cmd ) <nl> - if ( COTIRE_VERBOSE ) <nl> - message ( STATUS " execute_process : $ { _cmd } " ) <nl> - endif ( ) <nl> - if ( _option_COMPILER_ID MATCHES " MSVC " ) <nl> - # cl . exe messes with the output streams unless the environment variable VS_UNICODE_OUTPUT is cleared <nl> - unset ( ENV { VS_UNICODE_OUTPUT } ) <nl> - endif ( ) <nl> - execute_process ( <nl> - COMMAND $ { _cmd } <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - RESULT_VARIABLE _result ) <nl> - if ( _result ) <nl> - message ( FATAL_ERROR " cotire : error $ { _result } precompiling $ { _prefixFile } . " ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_check_precompiled_header_support _language _target _msgVar ) <nl> - set ( _unsupportedCompiler <nl> - " Precompiled headers not supported for $ { _language } compiler $ { CMAKE_ $ { _language } _COMPILER_ID } " ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC " ) <nl> - # supported since Visual Studio C + + 6 . 0 <nl> - # and CMake does not support an earlier version <nl> - set ( $ { _msgVar } " " PARENT_SCOPE ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU " ) <nl> - # GCC PCH support requires version > = 3 . 4 <nl> - if ( " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " VERSION_LESS " 3 . 4 . 0 " ) <nl> - set ( $ { _msgVar } " $ { _unsupportedCompiler } version $ { CMAKE_ $ { _language } _COMPILER_VERSION } . 
" PARENT_SCOPE ) <nl> - else ( ) <nl> - set ( $ { _msgVar } " " PARENT_SCOPE ) <nl> - endif ( ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " Clang " ) <nl> - # all Clang versions have PCH support <nl> - set ( $ { _msgVar } " " PARENT_SCOPE ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " Intel " ) <nl> - # Intel PCH support requires version > = 8 . 0 . 0 <nl> - if ( " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " VERSION_LESS " 8 . 0 . 0 " ) <nl> - set ( $ { _msgVar } " $ { _unsupportedCompiler } version $ { CMAKE_ $ { _language } _COMPILER_VERSION } . " PARENT_SCOPE ) <nl> - else ( ) <nl> - set ( $ { _msgVar } " " PARENT_SCOPE ) <nl> - endif ( ) <nl> - else ( ) <nl> - set ( $ { _msgVar } " $ { _unsupportedCompiler } . " PARENT_SCOPE ) <nl> - endif ( ) <nl> - get_target_property ( _launcher $ { _target } $ { _language } _COMPILER_LAUNCHER ) <nl> - if ( CMAKE_ $ { _language } _COMPILER MATCHES " ccache " OR _launcher MATCHES " ccache " ) <nl> - if ( DEFINED ENV { CCACHE_SLOPPINESS } ) <nl> - if ( NOT " $ ENV { CCACHE_SLOPPINESS } " MATCHES " pch_defines " OR NOT " $ ENV { CCACHE_SLOPPINESS } " MATCHES " time_macros " ) <nl> - set ( $ { _msgVar } <nl> - " ccache requires the environment variable CCACHE_SLOPPINESS to be set to \ " pch_defines , time_macros \ " . " <nl> - PARENT_SCOPE ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( _launcher MATCHES " ccache " ) <nl> - get_filename_component ( _ccacheExe " $ { _launcher } " REALPATH ) <nl> - else ( ) <nl> - get_filename_component ( _ccacheExe " $ { CMAKE_ $ { _language } _COMPILER } " REALPATH ) <nl> - endif ( ) <nl> - execute_process ( <nl> - COMMAND " $ { _ccacheExe } " " - - print - config " <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_BINARY_DIR } " <nl> - RESULT_VARIABLE _result <nl> - OUTPUT_VARIABLE _ccacheConfig OUTPUT_STRIP_TRAILING_WHITESPACE <nl> - ERROR_QUIET ) <nl> - if ( _result OR NOT <nl> - _ccacheConfig MATCHES " sloppiness . * = . * time_macros " OR NOT <nl> - _ccacheConfig MATCHES " sloppiness . * = . * pch_defines " ) <nl> - set ( $ { _msgVar } <nl> - " ccache requires configuration setting \ " sloppiness \ " to be set to \ " pch_defines , time_macros \ " . " <nl> - PARENT_SCOPE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( APPLE ) <nl> - # PCH compilation not supported by GCC / Clang for multi - architecture builds ( e . g . , i386 , x86_64 ) <nl> - cotire_get_configuration_types ( _configs ) <nl> - foreach ( _config $ { _configs } ) <nl> - set ( _targetFlags " " ) <nl> - cotire_get_target_compile_flags ( " $ { _config } " " $ { _language } " " $ { _target } " _targetFlags ) <nl> - cotire_filter_compile_flags ( " $ { _language } " " arch " _architectures _ignore $ { _targetFlags } ) <nl> - list ( LENGTH _architectures _numberOfArchitectures ) <nl> - if ( _numberOfArchitectures GREATER 1 ) <nl> - string ( REPLACE " ; " " , " _architectureStr " $ { _architectures } " ) <nl> - set ( $ { _msgVar } <nl> - " Precompiled headers not supported on Darwin for multi - architecture builds ( $ { _architectureStr } ) . 
" <nl> - PARENT_SCOPE ) <nl> - break ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - macro ( cotire_get_intermediate_dir _cotireDir ) <nl> - # $ { CMAKE_CFG_INTDIR } may reference a build - time variable when using a generator which supports configuration types <nl> - get_filename_component ( $ { _cotireDir } " $ { CMAKE_CURRENT_BINARY_DIR } / $ { CMAKE_CFG_INTDIR } / $ { COTIRE_INTDIR } " ABSOLUTE ) <nl> - endmacro ( ) <nl> - <nl> - macro ( cotire_setup_file_extension_variables ) <nl> - set ( _unityFileExt_C " . c " ) <nl> - set ( _unityFileExt_CXX " . cxx " ) <nl> - set ( _prefixFileExt_C " . h " ) <nl> - set ( _prefixFileExt_CXX " . hxx " ) <nl> - set ( _prefixSourceFileExt_C " . c " ) <nl> - set ( _prefixSourceFileExt_CXX " . cxx " ) <nl> - endmacro ( ) <nl> - <nl> - function ( cotire_make_single_unity_source_file_path _language _target _unityFileVar ) <nl> - cotire_setup_file_extension_variables ( ) <nl> - if ( NOT DEFINED _unityFileExt_ $ { _language } ) <nl> - set ( $ { _unityFileVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( _unityFileBaseName " $ { _target } _ $ { _language } $ { COTIRE_UNITY_SOURCE_FILENAME_SUFFIX } " ) <nl> - set ( _unityFileName " $ { _unityFileBaseName } $ { _unityFileExt_ $ { _language } } " ) <nl> - cotire_get_intermediate_dir ( _baseDir ) <nl> - set ( _unityFile " $ { _baseDir } / $ { _unityFileName } " ) <nl> - set ( $ { _unityFileVar } " $ { _unityFile } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_make_unity_source_file_paths _language _target _maxIncludes _unityFilesVar ) <nl> - cotire_setup_file_extension_variables ( ) <nl> - if ( NOT DEFINED _unityFileExt_ $ { _language } ) <nl> - set ( $ { _unityFileVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( _unityFileBaseName " $ { _target } _ $ { _language } $ { COTIRE_UNITY_SOURCE_FILENAME_SUFFIX } " ) <nl> - cotire_get_intermediate_dir ( _baseDir ) <nl> - set ( _startIndex 0 ) <nl> - set ( _index 0 ) <nl> - set ( _unityFiles " " ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - foreach ( _sourceFile $ { _sourceFiles } ) <nl> - get_source_file_property ( _startNew " $ { _sourceFile } " COTIRE_START_NEW_UNITY_SOURCE ) <nl> - math ( EXPR _unityFileCount " $ { _index } - $ { _startIndex } " ) <nl> - if ( _startNew OR ( _maxIncludes GREATER 0 AND NOT _unityFileCount LESS _maxIncludes ) ) <nl> - if ( _index GREATER 0 ) <nl> - # start new unity file segment <nl> - math ( EXPR _endIndex " $ { _index } - 1 " ) <nl> - set ( _unityFileName " $ { _unityFileBaseName } _ $ { _startIndex } _ $ { _endIndex } $ { _unityFileExt_ $ { _language } } " ) <nl> - list ( APPEND _unityFiles " $ { _baseDir } / $ { _unityFileName } " ) <nl> - endif ( ) <nl> - set ( _startIndex $ { _index } ) <nl> - endif ( ) <nl> - math ( EXPR _index " $ { _index } + 1 " ) <nl> - endforeach ( ) <nl> - list ( LENGTH _sourceFiles _numberOfSources ) <nl> - if ( _startIndex EQUAL 0 ) <nl> - # there is only a single unity file <nl> - cotire_make_single_unity_source_file_path ( $ { _language } $ { _target } _unityFiles ) <nl> - elseif ( _startIndex LESS _numberOfSources ) <nl> - # end with final unity file segment <nl> - math ( EXPR _endIndex " $ { _index } - 1 " ) <nl> - set ( _unityFileName " $ { _unityFileBaseName } _ $ { _startIndex } _ $ { _endIndex } $ { _unityFileExt_ $ { _language } } " ) <nl> - list ( APPEND _unityFiles " $ { _baseDir } / $ { _unityFileName } " ) <nl> - endif ( ) <nl> - set ( $ { _unityFilesVar } $ { 
_unityFiles } PARENT_SCOPE ) <nl> - if ( COTIRE_DEBUG AND _unityFiles ) <nl> - message ( STATUS " unity files : $ { _unityFiles } " ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_unity_to_prefix_file_path _language _target _unityFile _prefixFileVar ) <nl> - cotire_setup_file_extension_variables ( ) <nl> - if ( NOT DEFINED _unityFileExt_ $ { _language } ) <nl> - set ( $ { _prefixFileVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( _unityFileBaseName " $ { _target } _ $ { _language } $ { COTIRE_UNITY_SOURCE_FILENAME_SUFFIX } " ) <nl> - set ( _prefixFileBaseName " $ { _target } _ $ { _language } $ { COTIRE_PREFIX_HEADER_FILENAME_SUFFIX } " ) <nl> - string ( REPLACE " $ { _unityFileBaseName } " " $ { _prefixFileBaseName } " _prefixFile " $ { _unityFile } " ) <nl> - string ( REGEX REPLACE " $ { _unityFileExt_ $ { _language } } $ " " $ { _prefixFileExt_ $ { _language } } " _prefixFile " $ { _prefixFile } " ) <nl> - set ( $ { _prefixFileVar } " $ { _prefixFile } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_prefix_header_to_source_file_path _language _prefixHeaderFile _prefixSourceFileVar ) <nl> - cotire_setup_file_extension_variables ( ) <nl> - if ( NOT DEFINED _prefixSourceFileExt_ $ { _language } ) <nl> - set ( $ { _prefixSourceFileVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - string ( REGEX REPLACE " $ { _prefixFileExt_ $ { _language } } $ " " $ { _prefixSourceFileExt_ $ { _language } } " _prefixSourceFile " $ { _prefixHeaderFile } " ) <nl> - set ( $ { _prefixSourceFileVar } " $ { _prefixSourceFile } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_make_prefix_file_name _language _target _prefixFileBaseNameVar _prefixFileNameVar ) <nl> - cotire_setup_file_extension_variables ( ) <nl> - if ( NOT _language ) <nl> - set ( _prefixFileBaseName " $ { _target } $ { COTIRE_PREFIX_HEADER_FILENAME_SUFFIX } " ) <nl> - set ( _prefixFileName " $ { _prefixFileBaseName } $ { _prefixFileExt_C } " ) <nl> - elseif ( DEFINED _prefixFileExt_ $ { _language } ) <nl> - set ( _prefixFileBaseName " $ { _target } _ $ { _language } $ { COTIRE_PREFIX_HEADER_FILENAME_SUFFIX } " ) <nl> - set ( _prefixFileName " $ { _prefixFileBaseName } $ { _prefixFileExt_ $ { _language } } " ) <nl> - else ( ) <nl> - set ( _prefixFileBaseName " " ) <nl> - set ( _prefixFileName " " ) <nl> - endif ( ) <nl> - set ( $ { _prefixFileBaseNameVar } " $ { _prefixFileBaseName } " PARENT_SCOPE ) <nl> - set ( $ { _prefixFileNameVar } " $ { _prefixFileName } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_make_prefix_file_path _language _target _prefixFileVar ) <nl> - cotire_make_prefix_file_name ( " $ { _language } " " $ { _target } " _prefixFileBaseName _prefixFileName ) <nl> - set ( $ { _prefixFileVar } " " PARENT_SCOPE ) <nl> - if ( _prefixFileName ) <nl> - if ( NOT _language ) <nl> - set ( _language " C " ) <nl> - endif ( ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU | Clang | Intel | MSVC " ) <nl> - cotire_get_intermediate_dir ( _baseDir ) <nl> - set ( $ { _prefixFileVar } " $ { _baseDir } / $ { _prefixFileName } " PARENT_SCOPE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_make_pch_file_path _language _target _pchFileVar ) <nl> - cotire_make_prefix_file_name ( " $ { _language } " " $ { _target } " _prefixFileBaseName _prefixFileName ) <nl> - set ( $ { _pchFileVar } " " PARENT_SCOPE ) <nl> - if ( _prefixFileBaseName AND _prefixFileName ) <nl> 
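A quick sketch of how the path helpers above compose may help (the target name "example" is hypothetical, and the sketch assumes cotire's default "_unity" / "_prefix" filename suffixes and "cotire" intermediate directory, which are defined elsewhere in this module and not shown in this hunk):
# Minimal usage sketch; target name "example" and the default suffixes are assumptions.
cotire_make_single_unity_source_file_path("CXX" "example" _unityFile)
# _unityFile  -> ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/cotire/example_CXX_unity.cxx
cotire_make_prefix_file_path("CXX" "example" _prefixFile)
# _prefixFile -> ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/cotire/example_CXX_prefix.hxx
cotire_make_pch_file_path("CXX" "example" _pchFile)
# _pchFile    -> the prefix header path plus a compiler-specific extension, following the
# branches below: GCC example_CXX_prefix.hxx.gch, Clang example_CXX_prefix.hxx.pch,
# MSVC example_CXX_prefix.pch, Intel example_CXX_prefix.pchi
The per-compiler extension choice mirrors where each toolchain expects to find a precompiled header next to its prefix header, as the branches that follow make explicit.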
- cotire_check_precompiled_header_support ( " $ { _language } " " $ { _target } " _msg ) <nl> - if ( NOT _msg ) <nl> - if ( XCODE ) <nl> - # For Xcode , we completely hand off the compilation of the prefix header to the IDE <nl> - return ( ) <nl> - endif ( ) <nl> - cotire_get_intermediate_dir ( _baseDir ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC " ) <nl> - # MSVC uses the extension . pch added to the prefix header base name <nl> - set ( $ { _pchFileVar } " $ { _baseDir } / $ { _prefixFileBaseName } . pch " PARENT_SCOPE ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " Clang " ) <nl> - # Clang looks for a precompiled header corresponding to the prefix header with the extension . pch appended <nl> - set ( $ { _pchFileVar } " $ { _baseDir } / $ { _prefixFileName } . pch " PARENT_SCOPE ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU " ) <nl> - # GCC looks for a precompiled header corresponding to the prefix header with the extension . gch appended <nl> - set ( $ { _pchFileVar } " $ { _baseDir } / $ { _prefixFileName } . gch " PARENT_SCOPE ) <nl> - elseif ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " Intel " ) <nl> - # Intel uses the extension . pchi added to the prefix header base name <nl> - set ( $ { _pchFileVar } " $ { _baseDir } / $ { _prefixFileBaseName } . pchi " PARENT_SCOPE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_select_unity_source_files _unityFile _sourcesVar ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - if ( _sourceFiles AND " $ { _unityFile } " MATCHES " $ { COTIRE_UNITY_SOURCE_FILENAME_SUFFIX } _ ( [ 0 - 9 ] + ) _ ( [ 0 - 9 ] + ) " ) <nl> - set ( _startIndex $ { CMAKE_MATCH_1 } ) <nl> - set ( _endIndex $ { CMAKE_MATCH_2 } ) <nl> - list ( LENGTH _sourceFiles _numberOfSources ) <nl> - if ( NOT _startIndex LESS _numberOfSources ) <nl> - math ( EXPR _startIndex " $ { _numberOfSources } - 1 " ) <nl> - endif ( ) <nl> - if ( NOT _endIndex LESS _numberOfSources ) <nl> - math ( EXPR _endIndex " $ { _numberOfSources } - 1 " ) <nl> - endif ( ) <nl> - set ( _files " " ) <nl> - foreach ( _index RANGE $ { _startIndex } $ { _endIndex } ) <nl> - list ( GET _sourceFiles $ { _index } _file ) <nl> - list ( APPEND _files " $ { _file } " ) <nl> - endforeach ( ) <nl> - else ( ) <nl> - set ( _files $ { _sourceFiles } ) <nl> - endif ( ) <nl> - set ( $ { _sourcesVar } $ { _files } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_unity_source_dependencies _language _target _dependencySourcesVar ) <nl> - set ( _dependencySources " " ) <nl> - # depend on target ' s generated source files <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - cotire_get_objects_with_property_on ( _generatedSources GENERATED SOURCE $ { _targetSourceFiles } ) <nl> - if ( _generatedSources ) <nl> - # but omit all generated source files that have the COTIRE_EXCLUDED property set to true <nl> - cotire_get_objects_with_property_on ( _excludedGeneratedSources COTIRE_EXCLUDED SOURCE $ { _generatedSources } ) <nl> - if ( _excludedGeneratedSources ) <nl> - list ( REMOVE_ITEM _generatedSources $ { _excludedGeneratedSources } ) <nl> - endif ( ) <nl> - # and omit all generated source files that have the COTIRE_DEPENDENCY property set to false explicitly <nl> - cotire_get_objects_with_property_off ( _excludedNonDependencySources COTIRE_DEPENDENCY SOURCE $ { _generatedSources } ) <nl> - if ( _excludedNonDependencySources ) <nl> - list ( REMOVE_ITEM _generatedSources 
$ { _excludedNonDependencySources } ) <nl> - endif ( ) <nl> - if ( _generatedSources ) <nl> - list ( APPEND _dependencySources $ { _generatedSources } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG AND _dependencySources ) <nl> - message ( STATUS " $ { _language } $ { _target } unity source dependencies : $ { _dependencySources } " ) <nl> - endif ( ) <nl> - set ( $ { _dependencySourcesVar } $ { _dependencySources } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_prefix_header_dependencies _language _target _dependencySourcesVar ) <nl> - set ( _dependencySources " " ) <nl> - # depend on target source files marked with custom COTIRE_DEPENDENCY property <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - cotire_get_objects_with_property_on ( _dependencySources COTIRE_DEPENDENCY SOURCE $ { _targetSourceFiles } ) <nl> - if ( COTIRE_DEBUG AND _dependencySources ) <nl> - message ( STATUS " $ { _language } $ { _target } prefix header dependencies : $ { _dependencySources } " ) <nl> - endif ( ) <nl> - set ( $ { _dependencySourcesVar } $ { _dependencySources } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_generate_target_script _language _configurations _target _targetScriptVar _targetConfigScriptVar ) <nl> - set ( _targetSources $ { ARGN } ) <nl> - cotire_get_prefix_header_dependencies ( $ { _language } $ { _target } COTIRE_TARGET_PREFIX_DEPENDS $ { _targetSources } ) <nl> - cotire_get_unity_source_dependencies ( $ { _language } $ { _target } COTIRE_TARGET_UNITY_DEPENDS $ { _targetSources } ) <nl> - # set up variables to be configured <nl> - set ( COTIRE_TARGET_LANGUAGE " $ { _language } " ) <nl> - get_target_property ( COTIRE_TARGET_IGNORE_PATH $ { _target } COTIRE_PREFIX_HEADER_IGNORE_PATH ) <nl> - cotire_add_sys_root_paths ( COTIRE_TARGET_IGNORE_PATH ) <nl> - get_target_property ( COTIRE_TARGET_INCLUDE_PATH $ { _target } COTIRE_PREFIX_HEADER_INCLUDE_PATH ) <nl> - cotire_add_sys_root_paths ( COTIRE_TARGET_INCLUDE_PATH ) <nl> - get_target_property ( COTIRE_TARGET_PRE_UNDEFS $ { _target } COTIRE_UNITY_SOURCE_PRE_UNDEFS ) <nl> - get_target_property ( COTIRE_TARGET_POST_UNDEFS $ { _target } COTIRE_UNITY_SOURCE_POST_UNDEFS ) <nl> - get_target_property ( COTIRE_TARGET_MAXIMUM_NUMBER_OF_INCLUDES $ { _target } COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES ) <nl> - get_target_property ( COTIRE_TARGET_INCLUDE_PRIORITY_PATH $ { _target } COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH ) <nl> - cotire_get_source_files_undefs ( COTIRE_UNITY_SOURCE_PRE_UNDEFS COTIRE_TARGET_SOURCES_PRE_UNDEFS $ { _targetSources } ) <nl> - cotire_get_source_files_undefs ( COTIRE_UNITY_SOURCE_POST_UNDEFS COTIRE_TARGET_SOURCES_POST_UNDEFS $ { _targetSources } ) <nl> - set ( COTIRE_TARGET_CONFIGURATION_TYPES " $ { _configurations } " ) <nl> - foreach ( _config $ { _configurations } ) <nl> - string ( TOUPPER " $ { _config } " _upperConfig ) <nl> - cotire_get_target_include_directories ( <nl> - " $ { _config } " " $ { _language } " " $ { _target } " COTIRE_TARGET_INCLUDE_DIRECTORIES_ $ { _upperConfig } COTIRE_TARGET_SYSTEM_INCLUDE_DIRECTORIES_ $ { _upperConfig } ) <nl> - cotire_get_target_compile_definitions ( <nl> - " $ { _config } " " $ { _language } " " $ { _target } " COTIRE_TARGET_COMPILE_DEFINITIONS_ $ { _upperConfig } ) <nl> - cotire_get_target_compiler_flags ( <nl> - " $ { _config } " " $ { _language } " " $ { _target } " COTIRE_TARGET_COMPILE_FLAGS_ $ { _upperConfig } ) <nl> - cotire_get_source_files_compile_definitions ( <nl> - " $ { 
_config } " " $ { _language } " COTIRE_TARGET_SOURCES_COMPILE_DEFINITIONS_ $ { _upperConfig } $ { _targetSources } ) <nl> - endforeach ( ) <nl> - get_target_property ( COTIRE_TARGET_ $ { _language } _COMPILER_LAUNCHER $ { _target } $ { _language } _COMPILER_LAUNCHER ) <nl> - # set up COTIRE_TARGET_SOURCES <nl> - set ( COTIRE_TARGET_SOURCES " " ) <nl> - foreach ( _sourceFile $ { _targetSources } ) <nl> - get_source_file_property ( _generated " $ { _sourceFile } " GENERATED ) <nl> - if ( _generated ) <nl> - # use absolute paths for generated files only , retrieving the LOCATION property is an expensive operation <nl> - get_source_file_property ( _sourceLocation " $ { _sourceFile } " LOCATION ) <nl> - list ( APPEND COTIRE_TARGET_SOURCES " $ { _sourceLocation } " ) <nl> - else ( ) <nl> - list ( APPEND COTIRE_TARGET_SOURCES " $ { _sourceFile } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - # copy variable definitions to cotire target script <nl> - get_cmake_property ( _vars VARIABLES ) <nl> - string ( REGEX MATCHALL " COTIRE_ [ A - Za - z0 - 9_ ] + " _matchVars " $ { _vars } " ) <nl> - # omit COTIRE_ * _INIT variables <nl> - string ( REGEX MATCHALL " COTIRE_ [ A - Za - z0 - 9_ ] + _INIT " _initVars " $ { _matchVars } " ) <nl> - if ( _initVars ) <nl> - list ( REMOVE_ITEM _matchVars $ { _initVars } ) <nl> - endif ( ) <nl> - # omit COTIRE_VERBOSE which is passed as a CMake define on command line <nl> - list ( REMOVE_ITEM _matchVars COTIRE_VERBOSE ) <nl> - set ( _contents " " ) <nl> - set ( _contentsHasGeneratorExpressions FALSE ) <nl> - foreach ( _var IN LISTS _matchVars ITEMS <nl> - XCODE MSVC CMAKE_GENERATOR CMAKE_BUILD_TYPE CMAKE_CONFIGURATION_TYPES <nl> - CMAKE_ $ { _language } _COMPILER_ID CMAKE_ $ { _language } _COMPILER_VERSION <nl> - CMAKE_ $ { _language } _COMPILER_LAUNCHER CMAKE_ $ { _language } _COMPILER CMAKE_ $ { _language } _COMPILER_ARG1 <nl> - CMAKE_INCLUDE_FLAG_ $ { _language } CMAKE_INCLUDE_FLAG_ $ { _language } _SEP <nl> - CMAKE_INCLUDE_SYSTEM_FLAG_ $ { _language } <nl> - CMAKE_ $ { _language } _FRAMEWORK_SEARCH_FLAG <nl> - CMAKE_ $ { _language } _SYSTEM_FRAMEWORK_SEARCH_FLAG <nl> - CMAKE_ $ { _language } _SOURCE_FILE_EXTENSIONS ) <nl> - if ( DEFINED $ { _var } ) <nl> - string ( REPLACE " \ " " " \ \ \ " " _value " $ { $ { _var } } " ) <nl> - set ( _contents " $ { _contents } set ( $ { _var } \ " $ { _value } \ " ) \ n " ) <nl> - if ( NOT _contentsHasGeneratorExpressions ) <nl> - if ( " $ { _value } " MATCHES " \ \ $ < . * > " ) <nl> - set ( _contentsHasGeneratorExpressions TRUE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - # generate target script file <nl> - get_filename_component ( _moduleName " $ { COTIRE_CMAKE_MODULE_FILE } " NAME ) <nl> - set ( _targetCotireScript " $ { CMAKE_CURRENT_BINARY_DIR } / $ { _target } _ $ { _language } _ $ { _moduleName } " ) <nl> - cotire_write_file ( " CMAKE " " $ { _targetCotireScript } " " $ { _contents } " FALSE ) <nl> - if ( _contentsHasGeneratorExpressions ) <nl> - # use file ( GENERATE . . . 
) to expand generator expressions in the target script at CMake generate - time <nl> - set ( _configNameOrNoneGeneratorExpression " $ < $ < CONFIG : > : None > $ < $ < NOT : $ < CONFIG : > > : $ < CONFIGURATION > > " ) <nl> - set ( _targetCotireConfigScript " $ { CMAKE_CURRENT_BINARY_DIR } / $ { _target } _ $ { _language } _ $ { _configNameOrNoneGeneratorExpression } _ $ { _moduleName } " ) <nl> - file ( GENERATE OUTPUT " $ { _targetCotireConfigScript } " INPUT " $ { _targetCotireScript } " ) <nl> - else ( ) <nl> - set ( _targetCotireConfigScript " $ { _targetCotireScript } " ) <nl> - endif ( ) <nl> - set ( $ { _targetScriptVar } " $ { _targetCotireScript } " PARENT_SCOPE ) <nl> - set ( $ { _targetConfigScriptVar } " $ { _targetCotireConfigScript } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_pch_file_compilation _language _target _targetScript _prefixFile _pchFile _hostFile ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - # for Visual Studio and Intel , we attach the precompiled header compilation to the host file <nl> - # the remaining files include the precompiled header , see cotire_setup_pch_file_inclusion <nl> - if ( _sourceFiles ) <nl> - set ( _flags " " ) <nl> - cotire_add_pch_compilation_flags ( <nl> - " $ { _language } " " $ { CMAKE_ $ { _language } _COMPILER_ID } " " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " " $ { _hostFile } " _flags ) <nl> - set_property ( SOURCE $ { _hostFile } APPEND_STRING PROPERTY COMPILE_FLAGS " $ { _flags } " ) <nl> - set_property ( SOURCE $ { _hostFile } APPEND PROPERTY OBJECT_OUTPUTS " $ { _pchFile } " ) <nl> - # make object file generated from host file depend on prefix header <nl> - set_property ( SOURCE $ { _hostFile } APPEND PROPERTY OBJECT_DEPENDS " $ { _prefixFile } " ) <nl> - # mark host file as cotired to prevent it from being used in another cotired target <nl> - set_property ( SOURCE $ { _hostFile } PROPERTY COTIRE_TARGET " $ { _target } " ) <nl> - endif ( ) <nl> - elseif ( " $ { CMAKE_GENERATOR } " MATCHES " Make | Ninja " ) <nl> - # for makefile based generator , we add a custom command to precompile the prefix header <nl> - if ( _targetScript ) <nl> - cotire_set_cmd_to_prologue ( _cmds ) <nl> - list ( APPEND _cmds - P " $ { COTIRE_CMAKE_MODULE_FILE } " " precompile " " $ { _targetScript } " " $ { _prefixFile } " " $ { _pchFile } " " $ { _hostFile } " ) <nl> - if ( MSVC_IDE ) <nl> - file ( TO_NATIVE_PATH " $ { _pchFile } " _pchFileLogPath ) <nl> - else ( ) <nl> - file ( RELATIVE_PATH _pchFileLogPath " $ { CMAKE_BINARY_DIR } " " $ { _pchFile } " ) <nl> - endif ( ) <nl> - # make precompiled header compilation depend on the actual compiler executable used to force <nl> - # re - compilation when the compiler executable is updated . This prevents " created by a different GCC executable " <nl> - # warnings when the precompiled header is included . 
<nl> - get_filename_component ( _realCompilerExe " $ { CMAKE_ $ { _language } _COMPILER } " ABSOLUTE ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add_custom_command : OUTPUT $ { _pchFile } $ { _cmds } DEPENDS $ { _prefixFile } $ { _realCompilerExe } IMPLICIT_DEPENDS $ { _language } $ { _prefixFile } " ) <nl> - endif ( ) <nl> - set_property ( SOURCE " $ { _pchFile } " PROPERTY GENERATED TRUE ) <nl> - add_custom_command ( <nl> - OUTPUT " $ { _pchFile } " <nl> - COMMAND $ { _cmds } <nl> - DEPENDS " $ { _prefixFile } " " $ { _realCompilerExe } " <nl> - IMPLICIT_DEPENDS $ { _language } " $ { _prefixFile } " <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - COMMENT " Building $ { _language } precompiled header $ { _pchFileLogPath } " <nl> - VERBATIM ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_pch_file_inclusion _language _target _wholeTarget _prefixFile _pchFile _hostFile ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - # for Visual Studio and Intel , we include the precompiled header in all but the host file <nl> - # the host file does the precompiled header compilation , see cotire_setup_pch_file_compilation <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - list ( LENGTH _sourceFiles _numberOfSourceFiles ) <nl> - if ( _numberOfSourceFiles GREATER 0 ) <nl> - # mark sources as cotired to prevent them from being used in another cotired target <nl> - set_source_files_properties ( $ { _sourceFiles } PROPERTIES COTIRE_TARGET " $ { _target } " ) <nl> - set ( _flags " " ) <nl> - cotire_add_prefix_pch_inclusion_flags ( <nl> - " $ { _language } " " $ { CMAKE_ $ { _language } _COMPILER_ID } " " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " _flags ) <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND_STRING PROPERTY COMPILE_FLAGS " $ { _flags } " ) <nl> - # make object files generated from source files depend on precompiled header <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND PROPERTY OBJECT_DEPENDS " $ { _pchFile } " ) <nl> - endif ( ) <nl> - elseif ( " $ { CMAKE_GENERATOR } " MATCHES " Make | Ninja " ) <nl> - set ( _sourceFiles $ { _hostFile } $ { ARGN } ) <nl> - if ( NOT _wholeTarget ) <nl> - # for makefile based generator , we force the inclusion of the prefix header for a subset <nl> - # of the source files , if this is a multi - language target or has excluded files <nl> - set ( _flags " " ) <nl> - cotire_add_prefix_pch_inclusion_flags ( <nl> - " $ { _language } " " $ { CMAKE_ $ { _language } _COMPILER_ID } " " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " _flags ) <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND_STRING PROPERTY COMPILE_FLAGS " $ { _flags } " ) <nl> - # mark sources as cotired to prevent them from being used in another cotired target <nl> - set_source_files_properties ( $ { _sourceFiles } PROPERTIES COTIRE_TARGET " $ { _target } " ) <nl> - endif ( ) <nl> - # make object files generated from source files depend on precompiled header <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND PROPERTY OBJECT_DEPENDS " $ { _pchFile } " ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_prefix_file_inclusion _language _target _prefixFile ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - # force the inclusion of the prefix header for the given source files <nl> - set ( _flags " " ) <nl> - set ( _pchFile " " ) <nl> - 
cotire_add_prefix_pch_inclusion_flags ( <nl> - " $ { _language } " " $ { CMAKE_ $ { _language } _COMPILER_ID } " " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " _flags ) <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND_STRING PROPERTY COMPILE_FLAGS " $ { _flags } " ) <nl> - # mark sources as cotired to prevent them from being used in another cotired target <nl> - set_source_files_properties ( $ { _sourceFiles } PROPERTIES COTIRE_TARGET " $ { _target } " ) <nl> - # make object files generated from source files depend on prefix header <nl> - set_property ( SOURCE $ { _sourceFiles } APPEND PROPERTY OBJECT_DEPENDS " $ { _prefixFile } " ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_get_first_set_property_value _propertyValueVar _type _object ) <nl> - set ( _properties $ { ARGN } ) <nl> - foreach ( _property $ { _properties } ) <nl> - get_property ( _propertyValue $ { _type } " $ { _object } " PROPERTY $ { _property } ) <nl> - if ( _propertyValue ) <nl> - set ( $ { _propertyValueVar } $ { _propertyValue } PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _propertyValueVar } " " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_combine_command _language _targetScript _joinedFile _cmdsVar ) <nl> - set ( _files $ { ARGN } ) <nl> - set ( _filesPaths " " ) <nl> - foreach ( _file $ { _files } ) <nl> - get_filename_component ( _filePath " $ { _file } " ABSOLUTE ) <nl> - list ( APPEND _filesPaths " $ { _filePath } " ) <nl> - endforeach ( ) <nl> - cotire_set_cmd_to_prologue ( _prefixCmd ) <nl> - list ( APPEND _prefixCmd - P " $ { COTIRE_CMAKE_MODULE_FILE } " " combine " ) <nl> - if ( _targetScript ) <nl> - list ( APPEND _prefixCmd " $ { _targetScript } " ) <nl> - endif ( ) <nl> - list ( APPEND _prefixCmd " $ { _joinedFile } " $ { _filesPaths } ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add_custom_command : OUTPUT $ { _joinedFile } COMMAND $ { _prefixCmd } DEPENDS $ { _files } " ) <nl> - endif ( ) <nl> - set_property ( SOURCE " $ { _joinedFile } " PROPERTY GENERATED TRUE ) <nl> - if ( MSVC_IDE ) <nl> - file ( TO_NATIVE_PATH " $ { _joinedFile } " _joinedFileLogPath ) <nl> - else ( ) <nl> - file ( RELATIVE_PATH _joinedFileLogPath " $ { CMAKE_BINARY_DIR } " " $ { _joinedFile } " ) <nl> - endif ( ) <nl> - get_filename_component ( _joinedFileBaseName " $ { _joinedFile } " NAME_WE ) <nl> - get_filename_component ( _joinedFileExt " $ { _joinedFile } " EXT ) <nl> - if ( _language AND _joinedFileBaseName MATCHES " $ { COTIRE_UNITY_SOURCE_FILENAME_SUFFIX } $ " ) <nl> - set ( _comment " Generating $ { _language } unity source $ { _joinedFileLogPath } " ) <nl> - elseif ( _language AND _joinedFileBaseName MATCHES " $ { COTIRE_PREFIX_HEADER_FILENAME_SUFFIX } $ " ) <nl> - if ( _joinedFileExt MATCHES " ^ \ \ . 
c " ) <nl> - set ( _comment " Generating $ { _language } prefix source $ { _joinedFileLogPath } " ) <nl> - else ( ) <nl> - set ( _comment " Generating $ { _language } prefix header $ { _joinedFileLogPath } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - set ( _comment " Generating $ { _joinedFileLogPath } " ) <nl> - endif ( ) <nl> - add_custom_command ( <nl> - OUTPUT " $ { _joinedFile } " <nl> - COMMAND $ { _prefixCmd } <nl> - DEPENDS $ { _files } <nl> - COMMENT " $ { _comment } " <nl> - WORKING_DIRECTORY " $ { CMAKE_BINARY_DIR } " <nl> - VERBATIM ) <nl> - list ( APPEND $ { _cmdsVar } COMMAND $ { _prefixCmd } ) <nl> - set ( $ { _cmdsVar } $ { $ { _cmdsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_target_pch_usage _languages _target _wholeTarget ) <nl> - if ( XCODE ) <nl> - # for Xcode , we attach a pre - build action to generate the unity sources and prefix headers <nl> - set ( _prefixFiles " " ) <nl> - foreach ( _language $ { _languages } ) <nl> - get_property ( _prefixFile TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER ) <nl> - if ( _prefixFile ) <nl> - list ( APPEND _prefixFiles " $ { _prefixFile } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( _cmds $ { ARGN } ) <nl> - list ( LENGTH _prefixFiles _numberOfPrefixFiles ) <nl> - if ( _numberOfPrefixFiles GREATER 1 ) <nl> - # we also generate a generic , single prefix header which includes all language specific prefix headers <nl> - set ( _language " " ) <nl> - set ( _targetScript " " ) <nl> - cotire_make_prefix_file_path ( " $ { _language } " $ { _target } _prefixHeader ) <nl> - cotire_setup_combine_command ( " $ { _language } " " $ { _targetScript } " " $ { _prefixHeader } " _cmds $ { _prefixFiles } ) <nl> - else ( ) <nl> - set ( _prefixHeader " $ { _prefixFiles } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add_custom_command : TARGET $ { _target } PRE_BUILD $ { _cmds } " ) <nl> - endif ( ) <nl> - # because CMake PRE_BUILD command does not support dependencies , <nl> - # we check dependencies explicity in cotire script mode when the pre - build action is run <nl> - add_custom_command ( <nl> - TARGET " $ { _target } " <nl> - PRE_BUILD $ { _cmds } <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - COMMENT " Updating target $ { _target } prefix headers " <nl> - VERBATIM ) <nl> - # make Xcode precompile the generated prefix header with ProcessPCH and ProcessPCH + + <nl> - set_target_properties ( $ { _target } PROPERTIES XCODE_ATTRIBUTE_GCC_PRECOMPILE_PREFIX_HEADER " YES " ) <nl> - set_target_properties ( $ { _target } PROPERTIES XCODE_ATTRIBUTE_GCC_PREFIX_HEADER " $ { _prefixHeader } " ) <nl> - elseif ( " $ { CMAKE_GENERATOR } " MATCHES " Make | Ninja " ) <nl> - # for makefile based generator , we force inclusion of the prefix header for all target source files <nl> - # if this is a single - language target without any excluded files <nl> - if ( _wholeTarget ) <nl> - set ( _language " $ { _languages } " ) <nl> - # for Visual Studio and Intel , precompiled header inclusion is always done on the source file level <nl> - # see cotire_setup_pch_file_inclusion <nl> - if ( NOT CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - get_property ( _prefixFile TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER ) <nl> - if ( _prefixFile ) <nl> - get_property ( _pchFile TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PRECOMPILED_HEADER ) <nl> - set ( _options COMPILE_OPTIONS ) <nl> - 
cotire_add_prefix_pch_inclusion_flags ( <nl> - " $ { _language } " " $ { CMAKE_ $ { _language } _COMPILER_ID } " " $ { CMAKE_ $ { _language } _COMPILER_VERSION } " <nl> - " $ { _prefixFile } " " $ { _pchFile } " _options ) <nl> - set_property ( TARGET $ { _target } APPEND PROPERTY $ { _options } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_unity_generation_commands _language _target _targetScript _targetConfigScript _unityFiles _cmdsVar ) <nl> - set ( _dependencySources " " ) <nl> - cotire_get_unity_source_dependencies ( $ { _language } $ { _target } _dependencySources $ { ARGN } ) <nl> - foreach ( _unityFile $ { _unityFiles } ) <nl> - set_property ( SOURCE " $ { _unityFile } " PROPERTY GENERATED TRUE ) <nl> - # set up compiled unity source dependencies via OBJECT_DEPENDS <nl> - # this ensures that missing source files are generated before the unity file is compiled <nl> - if ( COTIRE_DEBUG AND _dependencySources ) <nl> - message ( STATUS " $ { _unityFile } OBJECT_DEPENDS $ { _dependencySources } " ) <nl> - endif ( ) <nl> - if ( _dependencySources ) <nl> - # the OBJECT_DEPENDS property requires a list of full paths <nl> - set ( _objectDependsPaths " " ) <nl> - foreach ( _sourceFile $ { _dependencySources } ) <nl> - get_source_file_property ( _sourceLocation " $ { _sourceFile } " LOCATION ) <nl> - list ( APPEND _objectDependsPaths " $ { _sourceLocation } " ) <nl> - endforeach ( ) <nl> - set_property ( SOURCE " $ { _unityFile } " PROPERTY OBJECT_DEPENDS $ { _objectDependsPaths } ) <nl> - endif ( ) <nl> - if ( WIN32 AND CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - # unity file compilation results in a potentially huge object file , thus use / bigobj by default under MSVC and Windows Intel <nl> - set_property ( SOURCE " $ { _unityFile } " APPEND_STRING PROPERTY COMPILE_FLAGS " / bigobj " ) <nl> - endif ( ) <nl> - cotire_set_cmd_to_prologue ( _unityCmd ) <nl> - list ( APPEND _unityCmd - P " $ { COTIRE_CMAKE_MODULE_FILE } " " unity " " $ { _targetConfigScript } " " $ { _unityFile } " ) <nl> - if ( CMAKE_VERSION VERSION_LESS " 3 . 1 . 0 " ) <nl> - set ( _unityCmdDepends " $ { _targetScript } " ) <nl> - else ( ) <nl> - # CMake 3 . 1 . 
0 supports generator expressions in arguments to DEPENDS <nl> - set ( _unityCmdDepends " $ { _targetConfigScript } " ) <nl> - endif ( ) <nl> - if ( MSVC_IDE ) <nl> - file ( TO_NATIVE_PATH " $ { _unityFile } " _unityFileLogPath ) <nl> - else ( ) <nl> - file ( RELATIVE_PATH _unityFileLogPath " $ { CMAKE_BINARY_DIR } " " $ { _unityFile } " ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add_custom_command : OUTPUT $ { _unityFile } COMMAND $ { _unityCmd } DEPENDS $ { _unityCmdDepends } " ) <nl> - endif ( ) <nl> - add_custom_command ( <nl> - OUTPUT " $ { _unityFile } " <nl> - COMMAND $ { _unityCmd } <nl> - DEPENDS $ { _unityCmdDepends } <nl> - COMMENT " Generating $ { _language } unity source $ { _unityFileLogPath } " <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - VERBATIM ) <nl> - list ( APPEND $ { _cmdsVar } COMMAND $ { _unityCmd } ) <nl> - endforeach ( ) <nl> - set ( $ { _cmdsVar } $ { $ { _cmdsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_prefix_generation_command _language _target _targetScript _prefixFile _unityFiles _cmdsVar ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - set ( _dependencySources " " ) <nl> - cotire_get_prefix_header_dependencies ( $ { _language } $ { _target } _dependencySources $ { _sourceFiles } ) <nl> - cotire_set_cmd_to_prologue ( _prefixCmd ) <nl> - list ( APPEND _prefixCmd - P " $ { COTIRE_CMAKE_MODULE_FILE } " " prefix " " $ { _targetScript } " " $ { _prefixFile } " $ { _unityFiles } ) <nl> - set_property ( SOURCE " $ { _prefixFile } " PROPERTY GENERATED TRUE ) <nl> - # make prefix header generation depend on the actual compiler executable used to force <nl> - # re - generation when the compiler executable is updated . This prevents " file not found " <nl> - # errors for compiler version specific system header files . <nl> - get_filename_component ( _realCompilerExe " $ { CMAKE_ $ { _language } _COMPILER } " ABSOLUTE ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add_custom_command : OUTPUT $ { _prefixFile } COMMAND $ { _prefixCmd } DEPENDS $ { _unityFiles } $ { _dependencySources } $ { _realCompilerExe } " ) <nl> - endif ( ) <nl> - if ( MSVC_IDE ) <nl> - file ( TO_NATIVE_PATH " $ { _prefixFile } " _prefixFileLogPath ) <nl> - else ( ) <nl> - file ( RELATIVE_PATH _prefixFileLogPath " $ { CMAKE_BINARY_DIR } " " $ { _prefixFile } " ) <nl> - endif ( ) <nl> - get_filename_component ( _prefixFileExt " $ { _prefixFile } " EXT ) <nl> - if ( _prefixFileExt MATCHES " ^ \ \ . 
c " ) <nl> - set ( _comment " Generating $ { _language } prefix source $ { _prefixFileLogPath } " ) <nl> - else ( ) <nl> - set ( _comment " Generating $ { _language } prefix header $ { _prefixFileLogPath } " ) <nl> - endif ( ) <nl> - # prevent pre - processing errors upon generating the prefix header when a target ' s generated include file does not yet exist <nl> - # we do not add a file - level dependency for the target ' s generated files though , because we only want to depend on their existence <nl> - # thus we make the prefix header generation depend on a custom helper target which triggers the generation of the files <nl> - set ( _preTargetName " $ { _target } $ { COTIRE_PCH_TARGET_SUFFIX } _pre " ) <nl> - if ( TARGET $ { _preTargetName } ) <nl> - # custom helper target has already been generated while processing a different language <nl> - list ( APPEND _dependencySources $ { _preTargetName } ) <nl> - else ( ) <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - cotire_get_objects_with_property_on ( _generatedSources GENERATED SOURCE $ { _targetSourceFiles } ) <nl> - if ( _generatedSources ) <nl> - add_custom_target ( " $ { _preTargetName } " DEPENDS $ { _generatedSources } ) <nl> - cotire_init_target ( " $ { _preTargetName } " ) <nl> - list ( APPEND _dependencySources $ { _preTargetName } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - add_custom_command ( <nl> - OUTPUT " $ { _prefixFile } " " $ { _prefixFile } . log " <nl> - COMMAND $ { _prefixCmd } <nl> - DEPENDS $ { _unityFiles } $ { _dependencySources } " $ { _realCompilerExe } " <nl> - COMMENT " $ { _comment } " <nl> - WORKING_DIRECTORY " $ { CMAKE_CURRENT_SOURCE_DIR } " <nl> - VERBATIM ) <nl> - list ( APPEND $ { _cmdsVar } COMMAND $ { _prefixCmd } ) <nl> - set ( $ { _cmdsVar } $ { $ { _cmdsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_prefix_generation_from_unity_command _language _target _targetScript _prefixFile _unityFiles _cmdsVar ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU | Clang " ) <nl> - # GNU and Clang require indirect compilation of the prefix header to make them honor the system_header pragma <nl> - cotire_prefix_header_to_source_file_path ( $ { _language } " $ { _prefixFile } " _prefixSourceFile ) <nl> - else ( ) <nl> - set ( _prefixSourceFile " $ { _prefixFile } " ) <nl> - endif ( ) <nl> - cotire_setup_prefix_generation_command ( <nl> - $ { _language } $ { _target } " $ { _targetScript } " <nl> - " $ { _prefixSourceFile } " " $ { _unityFiles } " $ { _cmdsVar } $ { _sourceFiles } ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU | Clang " ) <nl> - # set up generation of a prefix source file which includes the prefix header <nl> - cotire_setup_combine_command ( $ { _language } " $ { _targetScript } " " $ { _prefixFile } " _cmds $ { _prefixSourceFile } ) <nl> - endif ( ) <nl> - set ( $ { _cmdsVar } $ { $ { _cmdsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_prefix_generation_from_provided_command _language _target _targetScript _prefixFile _cmdsVar ) <nl> - set ( _prefixHeaderFiles $ { ARGN } ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU | Clang " ) <nl> - # GNU and Clang require indirect compilation of the prefix header to make them honor the system_header pragma <nl> - cotire_prefix_header_to_source_file_path ( $ { _language } " $ { _prefixFile } " _prefixSourceFile ) <nl> - else ( ) <nl> - set ( _prefixSourceFile " $ { 
_prefixFile } " ) <nl> - endif ( ) <nl> - cotire_setup_combine_command ( $ { _language } " $ { _targetScript } " " $ { _prefixSourceFile } " _cmds $ { _prefixHeaderFiles } ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID MATCHES " GNU | Clang " ) <nl> - # set up generation of a prefix source file which includes the prefix header <nl> - cotire_setup_combine_command ( $ { _language } " $ { _targetScript } " " $ { _prefixFile } " _cmds $ { _prefixSourceFile } ) <nl> - endif ( ) <nl> - set ( $ { _cmdsVar } $ { $ { _cmdsVar } } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_init_cotire_target_properties _target ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER TRUE ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_ADD_UNITY_BUILD SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ADD_UNITY_BUILD TRUE ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_ADD_CLEAN SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ADD_CLEAN FALSE ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH " $ { CMAKE_SOURCE_DIR } " ) <nl> - cotire_check_is_path_relative_to ( " $ { CMAKE_BINARY_DIR } " _isRelative " $ { CMAKE_SOURCE_DIR } " ) <nl> - if ( NOT _isRelative ) <nl> - set_property ( TARGET $ { _target } APPEND PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH " $ { CMAKE_BINARY_DIR } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PATH SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PATH " " ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH " " ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_PRE_UNDEFS SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_PRE_UNDEFS " " ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_POST_UNDEFS SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_POST_UNDEFS " " ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_UNITY_LINK_LIBRARIES_INIT SET ) <nl> - if ( NOT _isSet ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_LINK_LIBRARIES_INIT " COPY_UNITY " ) <nl> - endif ( ) <nl> - get_property ( _isSet TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES SET ) <nl> - if ( NOT _isSet ) <nl> - if ( COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES " $ { COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES } " ) <nl> - else ( ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES " " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( 
cotire_make_target_message _target _languages _disableMsg _targetMsgVar ) <nl> - get_target_property ( _targetUsePCH $ { _target } COTIRE_ENABLE_PRECOMPILED_HEADER ) <nl> - get_target_property ( _targetAddSCU $ { _target } COTIRE_ADD_UNITY_BUILD ) <nl> - string ( REPLACE " ; " " " _languagesStr " $ { _languages } " ) <nl> - math ( EXPR _numberOfExcludedFiles " $ { ARGC } - 4 " ) <nl> - if ( _numberOfExcludedFiles EQUAL 0 ) <nl> - set ( _excludedStr " " ) <nl> - elseif ( COTIRE_VERBOSE OR _numberOfExcludedFiles LESS 4 ) <nl> - string ( REPLACE " ; " " , " _excludedStr " excluding $ { ARGN } " ) <nl> - else ( ) <nl> - set ( _excludedStr " excluding $ { _numberOfExcludedFiles } files " ) <nl> - endif ( ) <nl> - set ( _targetMsg " " ) <nl> - if ( NOT _languages ) <nl> - set ( _targetMsg " Target $ { _target } cannot be cotired . " ) <nl> - if ( _disableMsg ) <nl> - set ( _targetMsg " $ { _targetMsg } $ { _disableMsg } " ) <nl> - endif ( ) <nl> - elseif ( NOT _targetUsePCH AND NOT _targetAddSCU ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired without unity build and precompiled header . " ) <nl> - if ( _disableMsg ) <nl> - set ( _targetMsg " $ { _targetMsg } $ { _disableMsg } " ) <nl> - endif ( ) <nl> - elseif ( NOT _targetUsePCH ) <nl> - if ( _excludedStr ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired without precompiled header $ { _excludedStr } . " ) <nl> - else ( ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired without precompiled header . " ) <nl> - endif ( ) <nl> - if ( _disableMsg ) <nl> - set ( _targetMsg " $ { _targetMsg } $ { _disableMsg } " ) <nl> - endif ( ) <nl> - elseif ( NOT _targetAddSCU ) <nl> - if ( _excludedStr ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired without unity build $ { _excludedStr } . " ) <nl> - else ( ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired without unity build . " ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( _excludedStr ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired $ { _excludedStr } . " ) <nl> - else ( ) <nl> - set ( _targetMsg " $ { _languagesStr } target $ { _target } cotired . " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _targetMsgVar } " $ { _targetMsg } " PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_choose_target_languages _target _targetLanguagesVar _wholeTargetVar ) <nl> - set ( _languages $ { ARGN } ) <nl> - set ( _allSourceFiles " " ) <nl> - set ( _allExcludedSourceFiles " " ) <nl> - set ( _allCotiredSourceFiles " " ) <nl> - set ( _targetLanguages " " ) <nl> - set ( _pchEligibleTargetLanguages " " ) <nl> - get_target_property ( _targetType $ { _target } TYPE ) <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - get_target_property ( _targetUsePCH $ { _target } COTIRE_ENABLE_PRECOMPILED_HEADER ) <nl> - get_target_property ( _targetAddSCU $ { _target } COTIRE_ADD_UNITY_BUILD ) <nl> - set ( _disableMsg " " ) <nl> - foreach ( _language $ { _languages } ) <nl> - get_target_property ( _prefixHeader $ { _target } COTIRE_ $ { _language } _PREFIX_HEADER ) <nl> - get_target_property ( _unityBuildFile $ { _target } COTIRE_ $ { _language } _UNITY_SOURCE ) <nl> - if ( _prefixHeader OR _unityBuildFile ) <nl> - message ( STATUS " cotire : target $ { _target } has already been cotired . 
" ) <nl> - set ( $ { _targetLanguagesVar } " " PARENT_SCOPE ) <nl> - return ( ) <nl> - endif ( ) <nl> - if ( _targetUsePCH AND " $ { _language } " MATCHES " ^ C | CXX $ " AND DEFINED CMAKE_ $ { _language } _COMPILER_ID ) <nl> - if ( CMAKE_ $ { _language } _COMPILER_ID ) <nl> - cotire_check_precompiled_header_support ( " $ { _language } " " $ { _target } " _disableMsg ) <nl> - if ( _disableMsg ) <nl> - set ( _targetUsePCH FALSE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( _sourceFiles " " ) <nl> - set ( _excludedSources " " ) <nl> - set ( _cotiredSources " " ) <nl> - cotire_filter_language_source_files ( $ { _language } $ { _target } _sourceFiles _excludedSources _cotiredSources $ { _targetSourceFiles } ) <nl> - if ( _sourceFiles OR _excludedSources OR _cotiredSources ) <nl> - list ( APPEND _targetLanguages $ { _language } ) <nl> - endif ( ) <nl> - if ( _sourceFiles ) <nl> - list ( APPEND _allSourceFiles $ { _sourceFiles } ) <nl> - endif ( ) <nl> - list ( LENGTH _sourceFiles _numberOfSources ) <nl> - if ( NOT _numberOfSources LESS $ { COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES } ) <nl> - list ( APPEND _pchEligibleTargetLanguages $ { _language } ) <nl> - endif ( ) <nl> - if ( _excludedSources ) <nl> - list ( APPEND _allExcludedSourceFiles $ { _excludedSources } ) <nl> - endif ( ) <nl> - if ( _cotiredSources ) <nl> - list ( APPEND _allCotiredSourceFiles $ { _cotiredSources } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( _targetMsgLevel STATUS ) <nl> - if ( NOT _targetLanguages ) <nl> - string ( REPLACE " ; " " or " _languagesStr " $ { _languages } " ) <nl> - set ( _disableMsg " No $ { _languagesStr } source files . " ) <nl> - set ( _targetUsePCH FALSE ) <nl> - set ( _targetAddSCU FALSE ) <nl> - endif ( ) <nl> - if ( _targetUsePCH ) <nl> - if ( _allCotiredSourceFiles ) <nl> - cotire_get_source_file_property_values ( _cotireTargets COTIRE_TARGET $ { _allCotiredSourceFiles } ) <nl> - list ( REMOVE_DUPLICATES _cotireTargets ) <nl> - string ( REPLACE " ; " " , " _cotireTargetsStr " $ { _cotireTargets } " ) <nl> - set ( _disableMsg " Target sources already include a precompiled header for target ( s ) $ { _cotireTargets } . " ) <nl> - set ( _disableMsg " $ { _disableMsg } Set target property COTIRE_ENABLE_PRECOMPILED_HEADER to FALSE for targets $ { _target } , " ) <nl> - set ( _disableMsg " $ { _disableMsg } $ { _cotireTargetsStr } to get a workable build system . " ) <nl> - set ( _targetMsgLevel SEND_ERROR ) <nl> - set ( _targetUsePCH FALSE ) <nl> - elseif ( NOT _pchEligibleTargetLanguages ) <nl> - set ( _disableMsg " Too few applicable sources . " ) <nl> - set ( _targetUsePCH FALSE ) <nl> - elseif ( XCODE AND _allExcludedSourceFiles ) <nl> - # for Xcode , we cannot apply the precompiled header to individual sources , only to the whole target <nl> - set ( _disableMsg " Exclusion of source files not supported for generator Xcode . " ) <nl> - set ( _targetUsePCH FALSE ) <nl> - elseif ( XCODE AND " $ { _targetType } " STREQUAL " OBJECT_LIBRARY " ) <nl> - # for Xcode , we cannot apply the required PRE_BUILD action to generate the prefix header to an OBJECT_LIBRARY target <nl> - set ( _disableMsg " Required PRE_BUILD action not supported for OBJECT_LIBRARY targets for generator Xcode . 
" ) <nl> - set ( _targetUsePCH FALSE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER $ { _targetUsePCH } ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ADD_UNITY_BUILD $ { _targetAddSCU } ) <nl> - cotire_make_target_message ( $ { _target } " $ { _targetLanguages } " " $ { _disableMsg } " _targetMsg $ { _allExcludedSourceFiles } ) <nl> - if ( _targetMsg ) <nl> - if ( NOT DEFINED COTIREMSG_ $ { _target } ) <nl> - set ( COTIREMSG_ $ { _target } " " ) <nl> - endif ( ) <nl> - if ( COTIRE_VERBOSE OR NOT " $ { _targetMsgLevel } " STREQUAL " STATUS " OR <nl> - NOT " $ { COTIREMSG_ $ { _target } } " STREQUAL " $ { _targetMsg } " ) <nl> - # cache message to avoid redundant messages on re - configure <nl> - set ( COTIREMSG_ $ { _target } " $ { _targetMsg } " CACHE INTERNAL " $ { _target } cotire message . " ) <nl> - message ( $ { _targetMsgLevel } " $ { _targetMsg } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - list ( LENGTH _targetLanguages _numberOfLanguages ) <nl> - if ( _numberOfLanguages GREATER 1 OR _allExcludedSourceFiles ) <nl> - set ( $ { _wholeTargetVar } FALSE PARENT_SCOPE ) <nl> - else ( ) <nl> - set ( $ { _wholeTargetVar } TRUE PARENT_SCOPE ) <nl> - endif ( ) <nl> - set ( $ { _targetLanguagesVar } $ { _targetLanguages } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_compute_unity_max_number_of_includes _target _maxIncludesVar ) <nl> - set ( _sourceFiles $ { ARGN } ) <nl> - get_target_property ( _maxIncludes $ { _target } COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES ) <nl> - if ( _maxIncludes MATCHES " ( - j | - - parallel | - - jobs ) ? ( [ 0 - 9 ] * ) " ) <nl> - set ( _numberOfThreads " $ { CMAKE_MATCH_2 } " ) <nl> - if ( NOT _numberOfThreads ) <nl> - # use all available cores <nl> - ProcessorCount ( _numberOfThreads ) <nl> - endif ( ) <nl> - list ( LENGTH _sourceFiles _numberOfSources ) <nl> - math ( EXPR _maxIncludes " ( $ { _numberOfSources } + $ { _numberOfThreads } - 1 ) / $ { _numberOfThreads } " ) <nl> - elseif ( NOT _maxIncludes MATCHES " [ 0 - 9 ] + " ) <nl> - set ( _maxIncludes 0 ) <nl> - endif ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _target } unity source max includes : $ { _maxIncludes } " ) <nl> - endif ( ) <nl> - set ( $ { _maxIncludesVar } $ { _maxIncludes } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_process_target_language _language _configurations _target _wholeTarget _cmdsVar ) <nl> - set ( $ { _cmdsVar } " " PARENT_SCOPE ) <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - set ( _sourceFiles " " ) <nl> - set ( _excludedSources " " ) <nl> - set ( _cotiredSources " " ) <nl> - cotire_filter_language_source_files ( $ { _language } $ { _target } _sourceFiles _excludedSources _cotiredSources $ { _targetSourceFiles } ) <nl> - if ( NOT _sourceFiles AND NOT _cotiredSources ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( _cmds " " ) <nl> - # check for user provided unity source file list <nl> - get_property ( _unitySourceFiles TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _UNITY_SOURCE_INIT ) <nl> - if ( NOT _unitySourceFiles ) <nl> - set ( _unitySourceFiles $ { _sourceFiles } $ { _cotiredSources } ) <nl> - endif ( ) <nl> - cotire_generate_target_script ( <nl> - $ { _language } " $ { _configurations } " $ { _target } _targetScript _targetConfigScript $ { _unitySourceFiles } ) <nl> - # set up unity files for parallel compilation <nl> - cotire_compute_unity_max_number_of_includes ( $ { 
_target } _maxIncludes $ { _unitySourceFiles } ) <nl> - cotire_make_unity_source_file_paths ( $ { _language } $ { _target } $ { _maxIncludes } _unityFiles $ { _unitySourceFiles } ) <nl> - list ( LENGTH _unityFiles _numberOfUnityFiles ) <nl> - if ( _numberOfUnityFiles EQUAL 0 ) <nl> - return ( ) <nl> - elseif ( _numberOfUnityFiles GREATER 1 ) <nl> - cotire_setup_unity_generation_commands ( <nl> - $ { _language } $ { _target } " $ { _targetScript } " " $ { _targetConfigScript } " " $ { _unityFiles } " _cmds $ { _unitySourceFiles } ) <nl> - endif ( ) <nl> - # set up single unity file for prefix header generation <nl> - cotire_make_single_unity_source_file_path ( $ { _language } $ { _target } _unityFile ) <nl> - cotire_setup_unity_generation_commands ( <nl> - $ { _language } $ { _target } " $ { _targetScript } " " $ { _targetConfigScript } " " $ { _unityFile } " _cmds $ { _unitySourceFiles } ) <nl> - cotire_make_prefix_file_path ( $ { _language } $ { _target } _prefixFile ) <nl> - # set up prefix header <nl> - if ( _prefixFile ) <nl> - # check for user provided prefix header files <nl> - get_property ( _prefixHeaderFiles TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER_INIT ) <nl> - if ( _prefixHeaderFiles ) <nl> - cotire_setup_prefix_generation_from_provided_command ( <nl> - $ { _language } $ { _target } " $ { _targetConfigScript } " " $ { _prefixFile } " _cmds $ { _prefixHeaderFiles } ) <nl> - else ( ) <nl> - cotire_setup_prefix_generation_from_unity_command ( <nl> - $ { _language } $ { _target } " $ { _targetConfigScript } " " $ { _prefixFile } " " $ { _unityFile } " _cmds $ { _unitySourceFiles } ) <nl> - endif ( ) <nl> - # check if selected language has enough sources at all <nl> - list ( LENGTH _sourceFiles _numberOfSources ) <nl> - if ( _numberOfSources LESS $ { COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES } ) <nl> - set ( _targetUsePCH FALSE ) <nl> - else ( ) <nl> - get_target_property ( _targetUsePCH $ { _target } COTIRE_ENABLE_PRECOMPILED_HEADER ) <nl> - endif ( ) <nl> - if ( _targetUsePCH ) <nl> - cotire_make_pch_file_path ( $ { _language } $ { _target } _pchFile ) <nl> - if ( _pchFile ) <nl> - # first file in _sourceFiles is passed as the host file <nl> - cotire_setup_pch_file_compilation ( <nl> - $ { _language } $ { _target } " $ { _targetConfigScript } " " $ { _prefixFile } " " $ { _pchFile } " $ { _sourceFiles } ) <nl> - cotire_setup_pch_file_inclusion ( <nl> - $ { _language } $ { _target } $ { _wholeTarget } " $ { _prefixFile } " " $ { _pchFile } " $ { _sourceFiles } ) <nl> - endif ( ) <nl> - elseif ( _prefixHeaderFiles ) <nl> - # user provided prefix header must be included unconditionally <nl> - cotire_setup_prefix_file_inclusion ( $ { _language } $ { _target } " $ { _prefixFile } " $ { _sourceFiles } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # mark target as cotired for language <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _UNITY_SOURCE " $ { _unityFiles } " ) <nl> - if ( _prefixFile ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER " $ { _prefixFile } " ) <nl> - if ( _targetUsePCH AND _pchFile ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PRECOMPILED_HEADER " $ { _pchFile } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _cmdsVar } $ { _cmds } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_clean_target _target ) <nl> - set ( _cleanTargetName " $ { _target } $ { COTIRE_CLEAN_TARGET_SUFFIX } " ) <nl> - if ( NOT TARGET " 
$ { _cleanTargetName } " ) <nl> - cotire_set_cmd_to_prologue ( _cmds ) <nl> - get_filename_component ( _outputDir " $ { CMAKE_CURRENT_BINARY_DIR } / $ { CMAKE_CFG_INTDIR } " ABSOLUTE ) <nl> - list ( APPEND _cmds - P " $ { COTIRE_CMAKE_MODULE_FILE } " " cleanup " " $ { _outputDir } " " $ { COTIRE_INTDIR } " " $ { _target } " ) <nl> - add_custom_target ( $ { _cleanTargetName } <nl> - COMMAND $ { _cmds } <nl> - WORKING_DIRECTORY " $ { CMAKE_BINARY_DIR } " <nl> - COMMENT " Cleaning up target $ { _target } cotire generated files " <nl> - VERBATIM ) <nl> - cotire_init_target ( " $ { _cleanTargetName } " ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_pch_target _languages _configurations _target ) <nl> - if ( " $ { CMAKE_GENERATOR } " MATCHES " Make | Ninja " ) <nl> - # for makefile based generators , we add a custom target to trigger the generation of the cotire related files <nl> - set ( _dependsFiles " " ) <nl> - foreach ( _language $ { _languages } ) <nl> - set ( _props COTIRE_ $ { _language } _PREFIX_HEADER COTIRE_ $ { _language } _UNITY_SOURCE ) <nl> - if ( NOT CMAKE_ $ { _language } _COMPILER_ID MATCHES " MSVC | Intel " ) <nl> - # Visual Studio and Intel only create precompiled header as a side effect <nl> - list ( INSERT _props 0 COTIRE_ $ { _language } _PRECOMPILED_HEADER ) <nl> - endif ( ) <nl> - cotire_get_first_set_property_value ( _dependsFile TARGET $ { _target } $ { _props } ) <nl> - if ( _dependsFile ) <nl> - list ( APPEND _dependsFiles " $ { _dependsFile } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _dependsFiles ) <nl> - set ( _pchTargetName " $ { _target } $ { COTIRE_PCH_TARGET_SUFFIX } " ) <nl> - add_custom_target ( " $ { _pchTargetName } " DEPENDS $ { _dependsFiles } ) <nl> - cotire_init_target ( " $ { _pchTargetName } " ) <nl> - cotire_add_to_pch_all_target ( $ { _pchTargetName } ) <nl> - endif ( ) <nl> - else ( ) <nl> - # for other generators , we add the " clean all " target to clean up the precompiled header <nl> - cotire_setup_clean_all_target ( ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_filter_object_libraries _target _objectLibrariesVar ) <nl> - set ( _objectLibraries " " ) <nl> - foreach ( _source $ { ARGN } ) <nl> - if ( _source MATCHES " ^ \ \ $ < TARGET_OBJECTS : . 
+ > $ " ) <nl> - list ( APPEND _objectLibraries " $ { _source } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - set ( $ { _objectLibrariesVar } $ { _objectLibraries } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_collect_unity_target_sources _target _languages _unityTargetSourcesVar ) <nl> - get_target_property ( _targetSourceFiles $ { _target } SOURCES ) <nl> - set ( _unityTargetSources $ { _targetSourceFiles } ) <nl> - foreach ( _language $ { _languages } ) <nl> - get_property ( _unityFiles TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _UNITY_SOURCE ) <nl> - if ( _unityFiles ) <nl> - # remove source files that are included in the unity source <nl> - set ( _sourceFiles " " ) <nl> - set ( _excludedSources " " ) <nl> - set ( _cotiredSources " " ) <nl> - cotire_filter_language_source_files ( $ { _language } $ { _target } _sourceFiles _excludedSources _cotiredSources $ { _targetSourceFiles } ) <nl> - if ( _sourceFiles OR _cotiredSources ) <nl> - list ( REMOVE_ITEM _unityTargetSources $ { _sourceFiles } $ { _cotiredSources } ) <nl> - endif ( ) <nl> - # add unity source files instead <nl> - list ( APPEND _unityTargetSources $ { _unityFiles } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - get_target_property ( _linkLibrariesStrategy $ { _target } COTIRE_UNITY_LINK_LIBRARIES_INIT ) <nl> - if ( " $ { _linkLibrariesStrategy } " MATCHES " ^ COPY_UNITY $ " ) <nl> - cotire_filter_object_libraries ( $ { _target } _objectLibraries $ { _targetSourceFiles } ) <nl> - if ( _objectLibraries ) <nl> - cotire_map_libraries ( " $ { _linkLibrariesStrategy } " _unityObjectLibraries $ { _objectLibraries } ) <nl> - list ( REMOVE_ITEM _unityTargetSources $ { _objectLibraries } ) <nl> - list ( APPEND _unityTargetSources $ { _unityObjectLibraries } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( $ { _unityTargetSourcesVar } $ { _unityTargetSources } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_unity_target_pch_usage _languages _target ) <nl> - foreach ( _language $ { _languages } ) <nl> - get_property ( _unityFiles TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _UNITY_SOURCE ) <nl> - if ( _unityFiles ) <nl> - get_property ( _userPrefixFile TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER_INIT ) <nl> - get_property ( _prefixFile TARGET $ { _target } PROPERTY COTIRE_ $ { _language } _PREFIX_HEADER ) <nl> - if ( _userPrefixFile AND _prefixFile ) <nl> - # user provided prefix header must be included unconditionally by unity sources <nl> - cotire_setup_prefix_file_inclusion ( $ { _language } $ { _target } " $ { _prefixFile } " $ { _unityFiles } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_unity_build_target _languages _configurations _target ) <nl> - get_target_property ( _unityTargetName $ { _target } COTIRE_UNITY_TARGET_NAME ) <nl> - if ( NOT _unityTargetName ) <nl> - set ( _unityTargetName " $ { _target } $ { COTIRE_UNITY_BUILD_TARGET_SUFFIX } " ) <nl> - endif ( ) <nl> - # determine unity target sub type <nl> - get_target_property ( _targetType $ { _target } TYPE ) <nl> - if ( " $ { _targetType } " STREQUAL " EXECUTABLE " ) <nl> - set ( _unityTargetSubType " " ) <nl> - elseif ( _targetType MATCHES " ( STATIC | SHARED | MODULE | OBJECT ) _LIBRARY " ) <nl> - set ( _unityTargetSubType " $ { CMAKE_MATCH_1 } " ) <nl> - else ( ) <nl> - message ( WARNING " cotire : target $ { _target } has unknown target type $ { _targetType } . 
" ) <nl> - return ( ) <nl> - endif ( ) <nl> - # determine unity target sources <nl> - set ( _unityTargetSources " " ) <nl> - cotire_collect_unity_target_sources ( $ { _target } " $ { _languages } " _unityTargetSources ) <nl> - # handle automatic Qt processing <nl> - get_target_property ( _targetAutoMoc $ { _target } AUTOMOC ) <nl> - get_target_property ( _targetAutoUic $ { _target } AUTOUIC ) <nl> - get_target_property ( _targetAutoRcc $ { _target } AUTORCC ) <nl> - if ( _targetAutoMoc OR _targetAutoUic OR _targetAutoRcc ) <nl> - # if the original target sources are subject to CMake ' s automatic Qt processing , <nl> - # also include implicitly generated < targetname > _automoc . cpp file <nl> - list ( APPEND _unityTargetSources " $ { _target } _automoc . cpp " ) <nl> - set_property ( SOURCE " $ { _target } _automoc . cpp " PROPERTY GENERATED TRUE ) <nl> - endif ( ) <nl> - # prevent AUTOMOC , AUTOUIC and AUTORCC properties from being set when the unity target is created <nl> - set ( CMAKE_AUTOMOC OFF ) <nl> - set ( CMAKE_AUTOUIC OFF ) <nl> - set ( CMAKE_AUTORCC OFF ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " add target $ { _targetType } $ { _unityTargetName } $ { _unityTargetSubType } EXCLUDE_FROM_ALL $ { _unityTargetSources } " ) <nl> - endif ( ) <nl> - # generate unity target <nl> - if ( " $ { _targetType } " STREQUAL " EXECUTABLE " ) <nl> - add_executable ( $ { _unityTargetName } $ { _unityTargetSubType } EXCLUDE_FROM_ALL $ { _unityTargetSources } ) <nl> - else ( ) <nl> - add_library ( $ { _unityTargetName } $ { _unityTargetSubType } EXCLUDE_FROM_ALL $ { _unityTargetSources } ) <nl> - endif ( ) <nl> - if ( " $ { CMAKE_GENERATOR } " MATCHES " Visual Studio " ) <nl> - # depend on original target ' s automoc target , if it exists <nl> - if ( TARGET $ { _target } _automoc ) <nl> - add_dependencies ( $ { _unityTargetName } $ { _target } _automoc ) <nl> - endif ( ) <nl> - else ( ) <nl> - if ( _targetAutoMoc OR _targetAutoUic OR _targetAutoRcc ) <nl> - # depend on the original target ' s implicity generated < targetname > _automoc target <nl> - add_dependencies ( $ { _unityTargetName } $ { _target } _automoc ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # copy output location properties <nl> - set ( _outputDirProperties <nl> - ARCHIVE_OUTPUT_DIRECTORY ARCHIVE_OUTPUT_DIRECTORY_ < CONFIG > <nl> - LIBRARY_OUTPUT_DIRECTORY LIBRARY_OUTPUT_DIRECTORY_ < CONFIG > <nl> - RUNTIME_OUTPUT_DIRECTORY RUNTIME_OUTPUT_DIRECTORY_ < CONFIG > ) <nl> - if ( COTIRE_UNITY_OUTPUT_DIRECTORY ) <nl> - set ( _setDefaultOutputDir TRUE ) <nl> - if ( IS_ABSOLUTE " $ { COTIRE_UNITY_OUTPUT_DIRECTORY } " ) <nl> - set ( _outputDir " $ { COTIRE_UNITY_OUTPUT_DIRECTORY } " ) <nl> - else ( ) <nl> - # append relative COTIRE_UNITY_OUTPUT_DIRECTORY to target ' s actual output directory <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } $ { _outputDirProperties } ) <nl> - cotire_resolve_config_properites ( " $ { _configurations } " _properties $ { _outputDirProperties } ) <nl> - foreach ( _property $ { _properties } ) <nl> - get_property ( _outputDir TARGET $ { _target } PROPERTY $ { _property } ) <nl> - if ( _outputDir ) <nl> - get_filename_component ( _outputDir " $ { _outputDir } / $ { COTIRE_UNITY_OUTPUT_DIRECTORY } " ABSOLUTE ) <nl> - set_property ( TARGET $ { _unityTargetName } PROPERTY $ { _property } " $ { _outputDir } " ) <nl> - set ( _setDefaultOutputDir FALSE ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _setDefaultOutputDir ) <nl> - get_filename_component 
( _outputDir " $ { CMAKE_CURRENT_BINARY_DIR } / $ { COTIRE_UNITY_OUTPUT_DIRECTORY } " ABSOLUTE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( _setDefaultOutputDir ) <nl> - set_target_properties ( $ { _unityTargetName } PROPERTIES <nl> - ARCHIVE_OUTPUT_DIRECTORY " $ { _outputDir } " <nl> - LIBRARY_OUTPUT_DIRECTORY " $ { _outputDir } " <nl> - RUNTIME_OUTPUT_DIRECTORY " $ { _outputDir } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - $ { _outputDirProperties } ) <nl> - endif ( ) <nl> - # copy output name <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - ARCHIVE_OUTPUT_NAME ARCHIVE_OUTPUT_NAME_ < CONFIG > <nl> - LIBRARY_OUTPUT_NAME LIBRARY_OUTPUT_NAME_ < CONFIG > <nl> - OUTPUT_NAME OUTPUT_NAME_ < CONFIG > <nl> - RUNTIME_OUTPUT_NAME RUNTIME_OUTPUT_NAME_ < CONFIG > <nl> - PREFIX < CONFIG > _POSTFIX SUFFIX <nl> - IMPORT_PREFIX IMPORT_SUFFIX ) <nl> - # copy compile stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - COMPILE_DEFINITIONS COMPILE_DEFINITIONS_ < CONFIG > <nl> - COMPILE_FLAGS COMPILE_OPTIONS <nl> - Fortran_FORMAT Fortran_MODULE_DIRECTORY <nl> - INCLUDE_DIRECTORIES <nl> - INTERPROCEDURAL_OPTIMIZATION INTERPROCEDURAL_OPTIMIZATION_ < CONFIG > <nl> - POSITION_INDEPENDENT_CODE <nl> - C_COMPILER_LAUNCHER CXX_COMPILER_LAUNCHER <nl> - C_INCLUDE_WHAT_YOU_USE CXX_INCLUDE_WHAT_YOU_USE <nl> - C_VISIBILITY_PRESET CXX_VISIBILITY_PRESET VISIBILITY_INLINES_HIDDEN <nl> - C_CLANG_TIDY CXX_CLANG_TIDY ) <nl> - # copy compile features <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - C_EXTENSIONS C_STANDARD C_STANDARD_REQUIRED <nl> - CXX_EXTENSIONS CXX_STANDARD CXX_STANDARD_REQUIRED <nl> - COMPILE_FEATURES ) <nl> - # copy interface stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - COMPATIBLE_INTERFACE_BOOL COMPATIBLE_INTERFACE_NUMBER_MAX COMPATIBLE_INTERFACE_NUMBER_MIN <nl> - COMPATIBLE_INTERFACE_STRING <nl> - INTERFACE_COMPILE_DEFINITIONS INTERFACE_COMPILE_FEATURES INTERFACE_COMPILE_OPTIONS <nl> - INTERFACE_INCLUDE_DIRECTORIES INTERFACE_SOURCES <nl> - INTERFACE_POSITION_INDEPENDENT_CODE INTERFACE_SYSTEM_INCLUDE_DIRECTORIES <nl> - INTERFACE_AUTOUIC_OPTIONS NO_SYSTEM_FROM_IMPORTED ) <nl> - # copy link stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - BUILD_WITH_INSTALL_RPATH INSTALL_RPATH INSTALL_RPATH_USE_LINK_PATH SKIP_BUILD_RPATH <nl> - LINKER_LANGUAGE LINK_DEPENDS LINK_DEPENDS_NO_SHARED <nl> - LINK_FLAGS LINK_FLAGS_ < CONFIG > <nl> - LINK_INTERFACE_LIBRARIES LINK_INTERFACE_LIBRARIES_ < CONFIG > <nl> - LINK_INTERFACE_MULTIPLICITY LINK_INTERFACE_MULTIPLICITY_ < CONFIG > <nl> - LINK_SEARCH_START_STATIC LINK_SEARCH_END_STATIC <nl> - STATIC_LIBRARY_FLAGS STATIC_LIBRARY_FLAGS_ < CONFIG > <nl> - NO_SONAME SOVERSION VERSION <nl> - LINK_WHAT_YOU_USE ) <nl> - # copy cmake stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - IMPLICIT_DEPENDS_INCLUDE_TRANSFORM RULE_LAUNCH_COMPILE RULE_LAUNCH_CUSTOM RULE_LAUNCH_LINK ) <nl> - # copy Apple platform specific stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - BUNDLE BUNDLE_EXTENSION FRAMEWORK FRAMEWORK_VERSION 
INSTALL_NAME_DIR <nl> - MACOSX_BUNDLE MACOSX_BUNDLE_INFO_PLIST MACOSX_FRAMEWORK_INFO_PLIST MACOSX_RPATH <nl> - OSX_ARCHITECTURES OSX_ARCHITECTURES_ < CONFIG > PRIVATE_HEADER PUBLIC_HEADER RESOURCE XCTEST <nl> - IOS_INSTALL_COMBINED ) <nl> - # copy Windows platform specific stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - GNUtoMS <nl> - COMPILE_PDB_NAME COMPILE_PDB_NAME_ < CONFIG > <nl> - COMPILE_PDB_OUTPUT_DIRECTORY COMPILE_PDB_OUTPUT_DIRECTORY_ < CONFIG > <nl> - PDB_NAME PDB_NAME_ < CONFIG > PDB_OUTPUT_DIRECTORY PDB_OUTPUT_DIRECTORY_ < CONFIG > <nl> - VS_DESKTOP_EXTENSIONS_VERSION VS_DOTNET_REFERENCES VS_DOTNET_TARGET_FRAMEWORK_VERSION <nl> - VS_GLOBAL_KEYWORD VS_GLOBAL_PROJECT_TYPES VS_GLOBAL_ROOTNAMESPACE <nl> - VS_IOT_EXTENSIONS_VERSION VS_IOT_STARTUP_TASK <nl> - VS_KEYWORD VS_MOBILE_EXTENSIONS_VERSION <nl> - VS_SCC_AUXPATH VS_SCC_LOCALPATH VS_SCC_PROJECTNAME VS_SCC_PROVIDER <nl> - VS_WINDOWS_TARGET_PLATFORM_MIN_VERSION <nl> - VS_WINRT_COMPONENT VS_WINRT_EXTENSIONS VS_WINRT_REFERENCES <nl> - WIN32_EXECUTABLE WINDOWS_EXPORT_ALL_SYMBOLS <nl> - DEPLOYMENT_REMOTE_DIRECTORY VS_CONFIGURATION_TYPE <nl> - VS_SDK_REFERENCES ) <nl> - # copy Android platform specific stuff <nl> - cotire_copy_set_properites ( " $ { _configurations } " TARGET $ { _target } $ { _unityTargetName } <nl> - ANDROID_API ANDROID_API_MIN ANDROID_GUI <nl> - ANDROID_ANT_ADDITIONAL_OPTIONS ANDROID_ARCH ANDROID_ASSETS_DIRECTORIES <nl> - ANDROID_JAR_DEPENDENCIES ANDROID_JAR_DIRECTORIES ANDROID_JAVA_SOURCE_DIR <nl> - ANDROID_NATIVE_LIB_DEPENDENCIES ANDROID_NATIVE_LIB_DIRECTORIES <nl> - ANDROID_PROCESS_MAX ANDROID_PROGUARD ANDROID_PROGUARD_CONFIG_PATH <nl> - ANDROID_SECURE_PROPS_PATH ANDROID_SKIP_ANT_STEP ANDROID_STL_TYPE ) <nl> - # use output name from original target <nl> - get_target_property ( _targetOutputName $ { _unityTargetName } OUTPUT_NAME ) <nl> - if ( NOT _targetOutputName ) <nl> - set_property ( TARGET $ { _unityTargetName } PROPERTY OUTPUT_NAME " $ { _target } " ) <nl> - endif ( ) <nl> - # use export symbol from original target <nl> - cotire_get_target_export_symbol ( " $ { _target } " _defineSymbol ) <nl> - if ( _defineSymbol ) <nl> - set_property ( TARGET $ { _unityTargetName } PROPERTY DEFINE_SYMBOL " $ { _defineSymbol } " ) <nl> - if ( " $ { _targetType } " STREQUAL " EXECUTABLE " ) <nl> - set_property ( TARGET $ { _unityTargetName } PROPERTY ENABLE_EXPORTS TRUE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - cotire_init_target ( $ { _unityTargetName } ) <nl> - cotire_add_to_unity_all_target ( $ { _unityTargetName } ) <nl> - set_property ( TARGET $ { _target } PROPERTY COTIRE_UNITY_TARGET_NAME " $ { _unityTargetName } " ) <nl> - endfunction ( cotire_setup_unity_build_target ) <nl> - <nl> - function ( cotire_target _target ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs " " ) <nl> - set ( _multiValueArgs LANGUAGES CONFIGURATIONS ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - if ( NOT _option_LANGUAGES ) <nl> - get_property ( _option_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES ) <nl> - endif ( ) <nl> - if ( NOT _option_CONFIGURATIONS ) <nl> - cotire_get_configuration_types ( _option_CONFIGURATIONS ) <nl> - endif ( ) <nl> - # check if cotire can be applied to target at all <nl> - cotire_is_target_supported ( $ { _target } _isSupported ) <nl> - if ( NOT _isSupported ) <nl> - get_target_property ( _imported $ { _target } IMPORTED ) <nl> - get_target_property ( 
_targetType $ { _target } TYPE ) <nl> - if ( _imported ) <nl> - message ( WARNING " cotire : imported $ { _targetType } target $ { _target } cannot be cotired . " ) <nl> - else ( ) <nl> - message ( STATUS " cotire : $ { _targetType } target $ { _target } cannot be cotired . " ) <nl> - endif ( ) <nl> - return ( ) <nl> - endif ( ) <nl> - # resolve alias <nl> - get_target_property ( _aliasName $ { _target } ALIASED_TARGET ) <nl> - if ( _aliasName ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { _target } is an alias . Applying cotire to aliased target $ { _aliasName } instead . " ) <nl> - endif ( ) <nl> - set ( _target $ { _aliasName } ) <nl> - endif ( ) <nl> - # check if target needs to be cotired for build type <nl> - # when using configuration types , the test is performed at build time <nl> - cotire_init_cotire_target_properties ( $ { _target } ) <nl> - if ( NOT CMAKE_CONFIGURATION_TYPES ) <nl> - if ( CMAKE_BUILD_TYPE ) <nl> - list ( FIND _option_CONFIGURATIONS " $ { CMAKE_BUILD_TYPE } " _index ) <nl> - else ( ) <nl> - list ( FIND _option_CONFIGURATIONS " None " _index ) <nl> - endif ( ) <nl> - if ( _index EQUAL - 1 ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " CMAKE_BUILD_TYPE = $ { CMAKE_BUILD_TYPE } not cotired ( $ { _option_CONFIGURATIONS } ) " ) <nl> - endif ( ) <nl> - return ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - # when not using configuration types , immediately create cotire intermediate dir <nl> - if ( NOT CMAKE_CONFIGURATION_TYPES ) <nl> - cotire_get_intermediate_dir ( _baseDir ) <nl> - file ( MAKE_DIRECTORY " $ { _baseDir } " ) <nl> - endif ( ) <nl> - # choose languages that apply to the target <nl> - cotire_choose_target_languages ( " $ { _target } " _targetLanguages _wholeTarget $ { _option_LANGUAGES } ) <nl> - if ( NOT _targetLanguages ) <nl> - return ( ) <nl> - endif ( ) <nl> - set ( _cmds " " ) <nl> - foreach ( _language $ { _targetLanguages } ) <nl> - cotire_process_target_language ( " $ { _language } " " $ { _option_CONFIGURATIONS } " $ { _target } $ { _wholeTarget } _cmd ) <nl> - if ( _cmd ) <nl> - list ( APPEND _cmds $ { _cmd } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - get_target_property ( _targetAddSCU $ { _target } COTIRE_ADD_UNITY_BUILD ) <nl> - if ( _targetAddSCU ) <nl> - cotire_setup_unity_build_target ( " $ { _targetLanguages } " " $ { _option_CONFIGURATIONS } " $ { _target } ) <nl> - endif ( ) <nl> - get_target_property ( _targetUsePCH $ { _target } COTIRE_ENABLE_PRECOMPILED_HEADER ) <nl> - if ( _targetUsePCH ) <nl> - cotire_setup_target_pch_usage ( " $ { _targetLanguages } " $ { _target } $ { _wholeTarget } $ { _cmds } ) <nl> - cotire_setup_pch_target ( " $ { _targetLanguages } " " $ { _option_CONFIGURATIONS } " $ { _target } ) <nl> - if ( _targetAddSCU ) <nl> - cotire_setup_unity_target_pch_usage ( " $ { _targetLanguages } " $ { _target } ) <nl> - endif ( ) <nl> - endif ( ) <nl> - get_target_property ( _targetAddCleanTarget $ { _target } COTIRE_ADD_CLEAN ) <nl> - if ( _targetAddCleanTarget ) <nl> - cotire_setup_clean_target ( $ { _target } ) <nl> - endif ( ) <nl> - endfunction ( cotire_target ) <nl> - <nl> - function ( cotire_map_libraries _strategy _mappedLibrariesVar ) <nl> - set ( _mappedLibraries " " ) <nl> - foreach ( _library $ { ARGN } ) <nl> - if ( _library MATCHES " ^ \ \ $ < LINK_ONLY : ( . + ) > $ " ) <nl> - set ( _libraryName " $ { CMAKE_MATCH_1 } " ) <nl> - set ( _linkOnly TRUE ) <nl> - set ( _objectLibrary FALSE ) <nl> - elseif ( _library MATCHES " ^ \ \ $ < TARGET_OBJECTS : ( . 
+ ) > $ " ) <nl> - set ( _libraryName " $ { CMAKE_MATCH_1 } " ) <nl> - set ( _linkOnly FALSE ) <nl> - set ( _objectLibrary TRUE ) <nl> - else ( ) <nl> - set ( _libraryName " $ { _library } " ) <nl> - set ( _linkOnly FALSE ) <nl> - set ( _objectLibrary FALSE ) <nl> - endif ( ) <nl> - if ( " $ { _strategy } " MATCHES " COPY_UNITY " ) <nl> - cotire_is_target_supported ( $ { _libraryName } _isSupported ) <nl> - if ( _isSupported ) <nl> - # use target ' s corresponding unity target , if available <nl> - get_target_property ( _libraryUnityTargetName $ { _libraryName } COTIRE_UNITY_TARGET_NAME ) <nl> - if ( TARGET " $ { _libraryUnityTargetName } " ) <nl> - if ( _linkOnly ) <nl> - list ( APPEND _mappedLibraries " $ < LINK_ONLY : $ { _libraryUnityTargetName } > " ) <nl> - elseif ( _objectLibrary ) <nl> - list ( APPEND _mappedLibraries " $ < TARGET_OBJECTS : $ { _libraryUnityTargetName } > " ) <nl> - else ( ) <nl> - list ( APPEND _mappedLibraries " $ { _libraryUnityTargetName } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - list ( APPEND _mappedLibraries " $ { _library } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - list ( APPEND _mappedLibraries " $ { _library } " ) <nl> - endif ( ) <nl> - else ( ) <nl> - list ( APPEND _mappedLibraries " $ { _library } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - list ( REMOVE_DUPLICATES _mappedLibraries ) <nl> - set ( $ { _mappedLibrariesVar } $ { _mappedLibraries } PARENT_SCOPE ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_target_link_libraries _target ) <nl> - cotire_is_target_supported ( $ { _target } _isSupported ) <nl> - if ( NOT _isSupported ) <nl> - return ( ) <nl> - endif ( ) <nl> - get_target_property ( _unityTargetName $ { _target } COTIRE_UNITY_TARGET_NAME ) <nl> - if ( TARGET " $ { _unityTargetName } " ) <nl> - get_target_property ( _linkLibrariesStrategy $ { _target } COTIRE_UNITY_LINK_LIBRARIES_INIT ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " unity target $ { _unityTargetName } link strategy : $ { _linkLibrariesStrategy } " ) <nl> - endif ( ) <nl> - if ( " $ { _linkLibrariesStrategy } " MATCHES " ^ ( COPY | COPY_UNITY ) $ " ) <nl> - get_target_property ( _linkLibraries $ { _target } LINK_LIBRARIES ) <nl> - if ( _linkLibraries ) <nl> - cotire_map_libraries ( " $ { _linkLibrariesStrategy } " _unityLinkLibraries $ { _linkLibraries } ) <nl> - set_target_properties ( $ { _unityTargetName } PROPERTIES LINK_LIBRARIES " $ { _unityLinkLibraries } " ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " unity target $ { _unityTargetName } link libraries : $ { _unityLinkLibraries } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - get_target_property ( _interfaceLinkLibraries $ { _target } INTERFACE_LINK_LIBRARIES ) <nl> - if ( _interfaceLinkLibraries ) <nl> - cotire_map_libraries ( " $ { _linkLibrariesStrategy } " _unityLinkInterfaceLibraries $ { _interfaceLinkLibraries } ) <nl> - set_target_properties ( $ { _unityTargetName } PROPERTIES INTERFACE_LINK_LIBRARIES " $ { _unityLinkInterfaceLibraries } " ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " unity target $ { _unityTargetName } interface link libraries : $ { _unityLinkInterfaceLibraries } " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endif ( ) <nl> - endfunction ( cotire_target_link_libraries ) <nl> - <nl> - function ( cotire_cleanup _binaryDir _cotireIntermediateDirName _targetName ) <nl> - if ( _targetName ) <nl> - file ( GLOB_RECURSE _cotireFiles " $ { _binaryDir } / $ { _targetName } * . 
* " ) <nl> - else ( ) <nl> - file ( GLOB_RECURSE _cotireFiles " $ { _binaryDir } / * . * " ) <nl> - endif ( ) <nl> - # filter files in intermediate directory <nl> - set ( _filesToRemove " " ) <nl> - foreach ( _file $ { _cotireFiles } ) <nl> - get_filename_component ( _dir " $ { _file } " DIRECTORY ) <nl> - get_filename_component ( _dirName " $ { _dir } " NAME ) <nl> - if ( " $ { _dirName } " STREQUAL " $ { _cotireIntermediateDirName } " ) <nl> - list ( APPEND _filesToRemove " $ { _file } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - if ( _filesToRemove ) <nl> - if ( COTIRE_VERBOSE ) <nl> - message ( STATUS " cleaning up $ { _filesToRemove } " ) <nl> - endif ( ) <nl> - file ( REMOVE $ { _filesToRemove } ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_init_target _targetName ) <nl> - if ( COTIRE_TARGETS_FOLDER ) <nl> - set_target_properties ( $ { _targetName } PROPERTIES FOLDER " $ { COTIRE_TARGETS_FOLDER } " ) <nl> - endif ( ) <nl> - set_target_properties ( $ { _targetName } PROPERTIES EXCLUDE_FROM_ALL TRUE ) <nl> - if ( MSVC_IDE ) <nl> - set_target_properties ( $ { _targetName } PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD TRUE ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_to_pch_all_target _pchTargetName ) <nl> - set ( _targetName " $ { COTIRE_PCH_ALL_TARGET_NAME } " ) <nl> - if ( NOT TARGET " $ { _targetName } " ) <nl> - add_custom_target ( " $ { _targetName } " <nl> - WORKING_DIRECTORY " $ { CMAKE_BINARY_DIR } " <nl> - VERBATIM ) <nl> - cotire_init_target ( " $ { _targetName } " ) <nl> - endif ( ) <nl> - cotire_setup_clean_all_target ( ) <nl> - add_dependencies ( $ { _targetName } $ { _pchTargetName } ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_add_to_unity_all_target _unityTargetName ) <nl> - set ( _targetName " $ { COTIRE_UNITY_BUILD_ALL_TARGET_NAME } " ) <nl> - if ( NOT TARGET " $ { _targetName } " ) <nl> - add_custom_target ( " $ { _targetName } " <nl> - WORKING_DIRECTORY " $ { CMAKE_BINARY_DIR } " <nl> - VERBATIM ) <nl> - cotire_init_target ( " $ { _targetName } " ) <nl> - endif ( ) <nl> - cotire_setup_clean_all_target ( ) <nl> - add_dependencies ( $ { _targetName } $ { _unityTargetName } ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire_setup_clean_all_target ) <nl> - set ( _targetName " $ { COTIRE_CLEAN_ALL_TARGET_NAME } " ) <nl> - if ( NOT TARGET " $ { _targetName } " ) <nl> - cotire_set_cmd_to_prologue ( _cmds ) <nl> - list ( APPEND _cmds - P " $ { COTIRE_CMAKE_MODULE_FILE } " " cleanup " " $ { CMAKE_BINARY_DIR } " " $ { COTIRE_INTDIR } " ) <nl> - add_custom_target ( $ { _targetName } <nl> - COMMAND $ { _cmds } <nl> - WORKING_DIRECTORY " $ { CMAKE_BINARY_DIR } " <nl> - COMMENT " Cleaning up all cotire generated files " <nl> - VERBATIM ) <nl> - cotire_init_target ( " $ { _targetName } " ) <nl> - endif ( ) <nl> - endfunction ( ) <nl> - <nl> - function ( cotire ) <nl> - set ( _options " " ) <nl> - set ( _oneValueArgs " " ) <nl> - set ( _multiValueArgs LANGUAGES CONFIGURATIONS ) <nl> - cmake_parse_arguments ( _option " $ { _options } " " $ { _oneValueArgs } " " $ { _multiValueArgs } " $ { ARGN } ) <nl> - set ( _targets $ { _option_UNPARSED_ARGUMENTS } ) <nl> - foreach ( _target $ { _targets } ) <nl> - if ( TARGET $ { _target } ) <nl> - cotire_target ( $ { _target } LANGUAGES $ { _option_LANGUAGES } CONFIGURATIONS $ { _option_CONFIGURATIONS } ) <nl> - else ( ) <nl> - message ( WARNING " cotire : $ { _target } is not a target . 
" ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - foreach ( _target $ { _targets } ) <nl> - if ( TARGET $ { _target } ) <nl> - cotire_target_link_libraries ( $ { _target } ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - endfunction ( ) <nl> - <nl> - if ( CMAKE_SCRIPT_MODE_FILE ) <nl> - <nl> - # cotire is being run in script mode <nl> - # locate - P on command args <nl> - set ( COTIRE_ARGC - 1 ) <nl> - foreach ( _index RANGE $ { CMAKE_ARGC } ) <nl> - if ( COTIRE_ARGC GREATER - 1 ) <nl> - set ( COTIRE_ARGV $ { COTIRE_ARGC } " $ { CMAKE_ARGV $ { _index } } " ) <nl> - math ( EXPR COTIRE_ARGC " $ { COTIRE_ARGC } + 1 " ) <nl> - elseif ( " $ { CMAKE_ARGV $ { _index } } " STREQUAL " - P " ) <nl> - set ( COTIRE_ARGC 0 ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - <nl> - # include target script if available <nl> - if ( " $ { COTIRE_ARGV2 } " MATCHES " \ \ . cmake $ " ) <nl> - # the included target scripts sets up additional variables relating to the target ( e . g . , COTIRE_TARGET_SOURCES ) <nl> - include ( " $ { COTIRE_ARGV2 } " ) <nl> - endif ( ) <nl> - <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " $ { COTIRE_ARGV0 } $ { COTIRE_ARGV1 } $ { COTIRE_ARGV2 } $ { COTIRE_ARGV3 } $ { COTIRE_ARGV4 } $ { COTIRE_ARGV5 } " ) <nl> - endif ( ) <nl> - <nl> - if ( NOT COTIRE_BUILD_TYPE ) <nl> - set ( COTIRE_BUILD_TYPE " None " ) <nl> - endif ( ) <nl> - string ( TOUPPER " $ { COTIRE_BUILD_TYPE } " _upperConfig ) <nl> - set ( _includeDirs $ { COTIRE_TARGET_INCLUDE_DIRECTORIES_ $ { _upperConfig } } ) <nl> - set ( _systemIncludeDirs $ { COTIRE_TARGET_SYSTEM_INCLUDE_DIRECTORIES_ $ { _upperConfig } } ) <nl> - set ( _compileDefinitions $ { COTIRE_TARGET_COMPILE_DEFINITIONS_ $ { _upperConfig } } ) <nl> - set ( _compileFlags $ { COTIRE_TARGET_COMPILE_FLAGS_ $ { _upperConfig } } ) <nl> - # check if target has been cotired for actual build type COTIRE_BUILD_TYPE <nl> - list ( FIND COTIRE_TARGET_CONFIGURATION_TYPES " $ { COTIRE_BUILD_TYPE } " _index ) <nl> - if ( _index GREATER - 1 ) <nl> - set ( _sources $ { COTIRE_TARGET_SOURCES } ) <nl> - set ( _sourcesDefinitions $ { COTIRE_TARGET_SOURCES_COMPILE_DEFINITIONS_ $ { _upperConfig } } ) <nl> - else ( ) <nl> - if ( COTIRE_DEBUG ) <nl> - message ( STATUS " COTIRE_BUILD_TYPE = $ { COTIRE_BUILD_TYPE } not cotired ( $ { COTIRE_TARGET_CONFIGURATION_TYPES } ) " ) <nl> - endif ( ) <nl> - set ( _sources " " ) <nl> - set ( _sourcesDefinitions " " ) <nl> - endif ( ) <nl> - set ( _targetPreUndefs $ { COTIRE_TARGET_PRE_UNDEFS } ) <nl> - set ( _targetPostUndefs $ { COTIRE_TARGET_POST_UNDEFS } ) <nl> - set ( _sourcesPreUndefs $ { COTIRE_TARGET_SOURCES_PRE_UNDEFS } ) <nl> - set ( _sourcesPostUndefs $ { COTIRE_TARGET_SOURCES_POST_UNDEFS } ) <nl> - <nl> - if ( " $ { COTIRE_ARGV1 } " STREQUAL " unity " ) <nl> - <nl> - if ( XCODE ) <nl> - # executing pre - build action under Xcode , check dependency on target script <nl> - set ( _dependsOption DEPENDS " $ { COTIRE_ARGV2 } " ) <nl> - else ( ) <nl> - # executing custom command , no need to re - check for dependencies <nl> - set ( _dependsOption " " ) <nl> - endif ( ) <nl> - <nl> - cotire_select_unity_source_files ( " $ { COTIRE_ARGV3 } " _sources $ { _sources } ) <nl> - <nl> - cotire_generate_unity_source ( <nl> - " $ { COTIRE_ARGV3 } " $ { _sources } <nl> - LANGUAGE " $ { COTIRE_TARGET_LANGUAGE } " <nl> - SOURCES_COMPILE_DEFINITIONS $ { _sourcesDefinitions } <nl> - PRE_UNDEFS $ { _targetPreUndefs } <nl> - POST_UNDEFS $ { _targetPostUndefs } <nl> - SOURCES_PRE_UNDEFS $ { _sourcesPreUndefs } <nl> - SOURCES_POST_UNDEFS $ { 
_sourcesPostUndefs } <nl> - $ { _dependsOption } ) <nl> - <nl> - elseif ( " $ { COTIRE_ARGV1 } " STREQUAL " prefix " ) <nl> - <nl> - if ( XCODE ) <nl> - # executing pre - build action under Xcode , check dependency on unity file and prefix dependencies <nl> - set ( _dependsOption DEPENDS " $ { COTIRE_ARGV4 } " $ { COTIRE_TARGET_PREFIX_DEPENDS } ) <nl> - else ( ) <nl> - # executing custom command , no need to re - check for dependencies <nl> - set ( _dependsOption " " ) <nl> - endif ( ) <nl> - <nl> - set ( _files " " ) <nl> - foreach ( _index RANGE 4 $ { COTIRE_ARGC } ) <nl> - if ( COTIRE_ARGV $ { _index } ) <nl> - list ( APPEND _files " $ { COTIRE_ARGV $ { _index } } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - <nl> - cotire_generate_prefix_header ( <nl> - " $ { COTIRE_ARGV3 } " $ { _files } <nl> - COMPILER_LAUNCHER " $ { COTIRE_TARGET_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_LAUNCHER } " <nl> - COMPILER_EXECUTABLE " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER } " <nl> - COMPILER_ARG1 $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_ARG1 } <nl> - COMPILER_ID " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_ID } " <nl> - COMPILER_VERSION " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_VERSION } " <nl> - LANGUAGE " $ { COTIRE_TARGET_LANGUAGE } " <nl> - IGNORE_PATH " $ { COTIRE_TARGET_IGNORE_PATH } ; $ { COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH } " <nl> - INCLUDE_PATH $ { COTIRE_TARGET_INCLUDE_PATH } <nl> - IGNORE_EXTENSIONS " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _SOURCE_FILE_EXTENSIONS } ; $ { COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS } " <nl> - INCLUDE_PRIORITY_PATH $ { COTIRE_TARGET_INCLUDE_PRIORITY_PATH } <nl> - INCLUDE_DIRECTORIES $ { _includeDirs } <nl> - SYSTEM_INCLUDE_DIRECTORIES $ { _systemIncludeDirs } <nl> - COMPILE_DEFINITIONS $ { _compileDefinitions } <nl> - COMPILE_FLAGS $ { _compileFlags } <nl> - $ { _dependsOption } ) <nl> - <nl> - elseif ( " $ { COTIRE_ARGV1 } " STREQUAL " precompile " ) <nl> - <nl> - set ( _files " " ) <nl> - foreach ( _index RANGE 5 $ { COTIRE_ARGC } ) <nl> - if ( COTIRE_ARGV $ { _index } ) <nl> - list ( APPEND _files " $ { COTIRE_ARGV $ { _index } } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - <nl> - cotire_precompile_prefix_header ( <nl> - " $ { COTIRE_ARGV3 } " " $ { COTIRE_ARGV4 } " " $ { COTIRE_ARGV5 } " <nl> - COMPILER_LAUNCHER " $ { COTIRE_TARGET_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_LAUNCHER } " <nl> - COMPILER_EXECUTABLE " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER } " <nl> - COMPILER_ARG1 $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_ARG1 } <nl> - COMPILER_ID " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_ID } " <nl> - COMPILER_VERSION " $ { CMAKE_ $ { COTIRE_TARGET_LANGUAGE } _COMPILER_VERSION } " <nl> - LANGUAGE " $ { COTIRE_TARGET_LANGUAGE } " <nl> - INCLUDE_DIRECTORIES $ { _includeDirs } <nl> - SYSTEM_INCLUDE_DIRECTORIES $ { _systemIncludeDirs } <nl> - COMPILE_DEFINITIONS $ { _compileDefinitions } <nl> - COMPILE_FLAGS $ { _compileFlags } ) <nl> - <nl> - elseif ( " $ { COTIRE_ARGV1 } " STREQUAL " combine " ) <nl> - <nl> - if ( COTIRE_TARGET_LANGUAGE ) <nl> - set ( _combinedFile " $ { COTIRE_ARGV3 } " ) <nl> - set ( _startIndex 4 ) <nl> - else ( ) <nl> - set ( _combinedFile " $ { COTIRE_ARGV2 } " ) <nl> - set ( _startIndex 3 ) <nl> - endif ( ) <nl> - set ( _files " " ) <nl> - foreach ( _index RANGE $ { _startIndex } $ { COTIRE_ARGC } ) <nl> - if ( COTIRE_ARGV $ { _index } ) <nl> - list ( APPEND _files " $ { COTIRE_ARGV $ { _index } } " ) <nl> - endif ( ) <nl> - endforeach ( ) <nl> - <nl> - 
if ( XCODE ) <nl> - # executing pre - build action under Xcode , check dependency on files to be combined <nl> - set ( _dependsOption DEPENDS $ { _files } ) <nl> - else ( ) <nl> - # executing custom command , no need to re - check for dependencies <nl> - set ( _dependsOption " " ) <nl> - endif ( ) <nl> - <nl> - if ( COTIRE_TARGET_LANGUAGE ) <nl> - cotire_generate_unity_source ( <nl> - " $ { _combinedFile } " $ { _files } <nl> - LANGUAGE " $ { COTIRE_TARGET_LANGUAGE } " <nl> - $ { _dependsOption } ) <nl> - else ( ) <nl> - cotire_generate_unity_source ( " $ { _combinedFile } " $ { _files } $ { _dependsOption } ) <nl> - endif ( ) <nl> - <nl> - elseif ( " $ { COTIRE_ARGV1 } " STREQUAL " cleanup " ) <nl> - <nl> - cotire_cleanup ( " $ { COTIRE_ARGV2 } " " $ { COTIRE_ARGV3 } " " $ { COTIRE_ARGV4 } " ) <nl> - <nl> - else ( ) <nl> - message ( FATAL_ERROR " cotire : unknown command \ " $ { COTIRE_ARGV1 } \ " . " ) <nl> - endif ( ) <nl> - <nl> - else ( ) <nl> - <nl> - # cotire is being run in include mode <nl> - # set up all variable and property definitions <nl> - <nl> - if ( NOT DEFINED COTIRE_DEBUG_INIT ) <nl> - if ( DEFINED COTIRE_DEBUG ) <nl> - set ( COTIRE_DEBUG_INIT $ { COTIRE_DEBUG } ) <nl> - else ( ) <nl> - set ( COTIRE_DEBUG_INIT FALSE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - option ( COTIRE_DEBUG " Enable cotire debugging output ? " $ { COTIRE_DEBUG_INIT } ) <nl> - <nl> - if ( NOT DEFINED COTIRE_VERBOSE_INIT ) <nl> - if ( DEFINED COTIRE_VERBOSE ) <nl> - set ( COTIRE_VERBOSE_INIT $ { COTIRE_VERBOSE } ) <nl> - else ( ) <nl> - set ( COTIRE_VERBOSE_INIT FALSE ) <nl> - endif ( ) <nl> - endif ( ) <nl> - option ( COTIRE_VERBOSE " Enable cotire verbose output ? " $ { COTIRE_VERBOSE_INIT } ) <nl> - <nl> - set ( COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS " inc ; inl ; ipp " CACHE STRING <nl> - " Ignore headers with the listed file extensions from the generated prefix header . " ) <nl> - <nl> - set ( COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH " " CACHE STRING <nl> - " Ignore headers from these directories when generating the prefix header . " ) <nl> - <nl> - set ( COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS " m ; mm " CACHE STRING <nl> - " Ignore sources with the listed file extensions from the generated unity source . " ) <nl> - <nl> - set ( COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES " 3 " CACHE STRING <nl> - " Minimum number of sources in target required to enable use of precompiled header . " ) <nl> - <nl> - if ( NOT DEFINED COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT ) <nl> - if ( DEFINED COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES ) <nl> - set ( COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT $ { COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES } ) <nl> - elseif ( " $ { CMAKE_GENERATOR } " MATCHES " JOM | Ninja | Visual Studio " ) <nl> - # enable parallelization for generators that run multiple jobs by default <nl> - set ( COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT " - j " ) <nl> - else ( ) <nl> - set ( COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT " 0 " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - set ( COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES " $ { COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT } " CACHE STRING <nl> - " Maximum number of source files to include in a single unity source file . 
" ) <nl> - <nl> - if ( NOT COTIRE_PREFIX_HEADER_FILENAME_SUFFIX ) <nl> - set ( COTIRE_PREFIX_HEADER_FILENAME_SUFFIX " _prefix " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_UNITY_SOURCE_FILENAME_SUFFIX ) <nl> - set ( COTIRE_UNITY_SOURCE_FILENAME_SUFFIX " _unity " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_INTDIR ) <nl> - set ( COTIRE_INTDIR " cotire " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_PCH_ALL_TARGET_NAME ) <nl> - set ( COTIRE_PCH_ALL_TARGET_NAME " all_pch " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_UNITY_BUILD_ALL_TARGET_NAME ) <nl> - set ( COTIRE_UNITY_BUILD_ALL_TARGET_NAME " all_unity " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_CLEAN_ALL_TARGET_NAME ) <nl> - set ( COTIRE_CLEAN_ALL_TARGET_NAME " clean_cotire " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_CLEAN_TARGET_SUFFIX ) <nl> - set ( COTIRE_CLEAN_TARGET_SUFFIX " _clean_cotire " ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_PCH_TARGET_SUFFIX ) <nl> - set ( COTIRE_PCH_TARGET_SUFFIX " _pch " ) <nl> - endif ( ) <nl> - if ( MSVC ) <nl> - # MSVC default PCH memory scaling factor of 100 percent ( 75 MB ) is too small for template heavy C + + code <nl> - # use a bigger default factor of 170 percent ( 128 MB ) <nl> - if ( NOT DEFINED COTIRE_PCH_MEMORY_SCALING_FACTOR ) <nl> - set ( COTIRE_PCH_MEMORY_SCALING_FACTOR " 170 " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - if ( NOT COTIRE_UNITY_BUILD_TARGET_SUFFIX ) <nl> - set ( COTIRE_UNITY_BUILD_TARGET_SUFFIX " _unity " ) <nl> - endif ( ) <nl> - if ( NOT DEFINED COTIRE_TARGETS_FOLDER ) <nl> - set ( COTIRE_TARGETS_FOLDER " cotire " ) <nl> - endif ( ) <nl> - if ( NOT DEFINED COTIRE_UNITY_OUTPUT_DIRECTORY ) <nl> - if ( " $ { CMAKE_GENERATOR } " MATCHES " Ninja " ) <nl> - # generated Ninja build files do not work if the unity target produces the same output file as the cotired target <nl> - set ( COTIRE_UNITY_OUTPUT_DIRECTORY " unity " ) <nl> - else ( ) <nl> - set ( COTIRE_UNITY_OUTPUT_DIRECTORY " " ) <nl> - endif ( ) <nl> - endif ( ) <nl> - <nl> - # define cotire cache variables <nl> - <nl> - define_property ( <nl> - CACHED_VARIABLE PROPERTY " COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH " <nl> - BRIEF_DOCS " Ignore headers from these directories when generating the prefix header . " <nl> - FULL_DOCS <nl> - " The variable can be set to a semicolon separated list of include directories . " <nl> - " If a header file is found in one of these directories or sub - directories , it will be excluded from the generated prefix header . " <nl> - " If not defined , defaults to empty list . " <nl> - ) <nl> - <nl> - define_property ( <nl> - CACHED_VARIABLE PROPERTY " COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS " <nl> - BRIEF_DOCS " Ignore includes with the listed file extensions from the generated prefix header . " <nl> - FULL_DOCS <nl> - " The variable can be set to a semicolon separated list of file extensions . " <nl> - " If a header file extension matches one in the list , it will be excluded from the generated prefix header . " <nl> - " Includes with an extension in CMAKE_ < LANG > _SOURCE_FILE_EXTENSIONS are always ignored . " <nl> - " If not defined , defaults to inc ; inl ; ipp . " <nl> - ) <nl> - <nl> - define_property ( <nl> - CACHED_VARIABLE PROPERTY " COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS " <nl> - BRIEF_DOCS " Exclude sources with the listed file extensions from the generated unity source . " <nl> - FULL_DOCS <nl> - " The variable can be set to a semicolon separated list of file extensions . 
" <nl> - " If a source file extension matches one in the list , it will be excluded from the generated unity source file . " <nl> - " Source files with an extension in CMAKE_ < LANG > _IGNORE_EXTENSIONS are always excluded . " <nl> - " If not defined , defaults to m ; mm . " <nl> - ) <nl> - <nl> - define_property ( <nl> - CACHED_VARIABLE PROPERTY " COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES " <nl> - BRIEF_DOCS " Minimum number of sources in target required to enable use of precompiled header . " <nl> - FULL_DOCS <nl> - " The variable can be set to an integer > 0 . " <nl> - " If a target contains less than that number of source files , cotire will not enable the use of the precompiled header for the target . " <nl> - " If not defined , defaults to 3 . " <nl> - ) <nl> - <nl> - define_property ( <nl> - CACHED_VARIABLE PROPERTY " COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES " <nl> - BRIEF_DOCS " Maximum number of source files to include in a single unity source file . " <nl> - FULL_DOCS <nl> - " This may be set to an integer > = 0 . " <nl> - " If 0 , cotire will only create a single unity source file . " <nl> - " If a target contains more than that number of source files , cotire will create multiple unity source files for it . " <nl> - " Can be set to \ " - j \ " to optimize the count of unity source files for the number of available processor cores . " <nl> - " Can be set to \ " - j jobs \ " to optimize the number of unity source files for the given number of simultaneous jobs . " <nl> - " Is used to initialize the target property COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES . " <nl> - " Defaults to \ " - j \ " for the generators Visual Studio , JOM or Ninja . Defaults to 0 otherwise . " <nl> - ) <nl> - <nl> - # define cotire directory properties <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_ENABLE_PRECOMPILED_HEADER " <nl> - BRIEF_DOCS " Modify build command of cotired targets added in this directory to make use of the generated precompiled header . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_ENABLE_PRECOMPILED_HEADER . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_ADD_UNITY_BUILD " <nl> - BRIEF_DOCS " Add a new target that performs a unity build for cotired targets added in this directory . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_ADD_UNITY_BUILD . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_ADD_CLEAN " <nl> - BRIEF_DOCS " Add a new target that cleans all cotire generated files for cotired targets added in this directory . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_ADD_CLEAN . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_PREFIX_HEADER_IGNORE_PATH " <nl> - BRIEF_DOCS " Ignore headers from these directories when generating the prefix header . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_PREFIX_HEADER_IGNORE_PATH . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_PREFIX_HEADER_INCLUDE_PATH " <nl> - BRIEF_DOCS " Honor headers from these directories when generating the prefix header . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_PREFIX_HEADER_INCLUDE_PATH . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH " <nl> - BRIEF_DOCS " Header paths matching one of these directories are put at the top of the prefix header . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH . 
" <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_UNITY_SOURCE_PRE_UNDEFS " <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file before the inclusion of each source file . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_UNITY_SOURCE_PRE_UNDEFS . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_UNITY_SOURCE_POST_UNDEFS " <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file after the inclusion of each source file . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_UNITY_SOURCE_POST_UNDEFS . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES " <nl> - BRIEF_DOCS " Maximum number of source files to include in a single unity source file . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES . " <nl> - ) <nl> - <nl> - define_property ( <nl> - DIRECTORY PROPERTY " COTIRE_UNITY_LINK_LIBRARIES_INIT " <nl> - BRIEF_DOCS " Define strategy for setting up the unity target ' s link libraries . " <nl> - FULL_DOCS <nl> - " See target property COTIRE_UNITY_LINK_LIBRARIES_INIT . " <nl> - ) <nl> - <nl> - # define cotire target properties <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ENABLE_PRECOMPILED_HEADER " INHERITED <nl> - BRIEF_DOCS " Modify this target ' s build command to make use of the generated precompiled header . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , cotire will modify the build command to make use of the generated precompiled header . " <nl> - " Irrespective of the value of this property , cotire will setup custom commands to generate the unity source and prefix header for the target . " <nl> - " For makefile based generators cotire will also set up a custom target to manually invoke the generation of the precompiled header . " <nl> - " The target name will be set to this target ' s name with the suffix _pch appended . " <nl> - " Inherited from directory . " <nl> - " Defaults to TRUE . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ADD_UNITY_BUILD " INHERITED <nl> - BRIEF_DOCS " Add a new target that performs a unity build for this target . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , cotire creates a new target of the same type that uses the generated unity source file instead of the target sources . " <nl> - " Most of the relevant target properties will be copied from this target to the new unity build target . " <nl> - " Target dependencies and linked libraries have to be manually set up for the new unity build target . " <nl> - " The unity target name will be set to this target ' s name with the suffix _unity appended . " <nl> - " Inherited from directory . " <nl> - " Defaults to TRUE . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ADD_CLEAN " INHERITED <nl> - BRIEF_DOCS " Add a new target that cleans all cotire generated files for this target . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , cotire creates a new target that clean all files ( unity source , prefix header , precompiled header ) . " <nl> - " The clean target name will be set to this target ' s name with the suffix _clean_cotire appended . " <nl> - " Inherited from directory . " <nl> - " Defaults to FALSE . 
" <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_PREFIX_HEADER_IGNORE_PATH " INHERITED <nl> - BRIEF_DOCS " Ignore headers from these directories when generating the prefix header . " <nl> - FULL_DOCS <nl> - " The property can be set to a list of directories . " <nl> - " If a header file is found in one of these directories or sub - directories , it will be excluded from the generated prefix header . " <nl> - " Inherited from directory . " <nl> - " If not set , this property is initialized to \ $ { CMAKE_SOURCE_DIR } ; \ $ { CMAKE_BINARY_DIR } . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_PREFIX_HEADER_INCLUDE_PATH " INHERITED <nl> - BRIEF_DOCS " Honor headers from these directories when generating the prefix header . " <nl> - FULL_DOCS <nl> - " The property can be set to a list of directories . " <nl> - " If a header file is found in one of these directories or sub - directories , it will be included in the generated prefix header . " <nl> - " If a header file is both selected by COTIRE_PREFIX_HEADER_IGNORE_PATH and COTIRE_PREFIX_HEADER_INCLUDE_PATH , " <nl> - " the option which yields the closer relative path match wins . " <nl> - " Inherited from directory . " <nl> - " If not set , this property is initialized to the empty list . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_PREFIX_HEADER_INCLUDE_PRIORITY_PATH " INHERITED <nl> - BRIEF_DOCS " Header paths matching one of these directories are put at the top of prefix header . " <nl> - FULL_DOCS <nl> - " The property can be set to a list of directories . " <nl> - " Header file paths matching one of these directories will be inserted at the beginning of the generated prefix header . " <nl> - " Header files are sorted according to the order of the directories in the property . " <nl> - " If not set , this property is initialized to the empty list . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_UNITY_SOURCE_PRE_UNDEFS " INHERITED <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file before the inclusion of each target source file . " <nl> - FULL_DOCS <nl> - " This may be set to a semicolon - separated list of preprocessor symbols . " <nl> - " cotire will add corresponding # undef directives to the generated unit source file before each target source file . " <nl> - " Inherited from directory . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_UNITY_SOURCE_POST_UNDEFS " INHERITED <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file after the inclusion of each target source file . " <nl> - FULL_DOCS <nl> - " This may be set to a semicolon - separated list of preprocessor symbols . " <nl> - " cotire will add corresponding # undef directives to the generated unit source file after each target source file . " <nl> - " Inherited from directory . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES " INHERITED <nl> - BRIEF_DOCS " Maximum number of source files to include in a single unity source file . " <nl> - FULL_DOCS <nl> - " This may be set to an integer > 0 . " <nl> - " If a target contains more than that number of source files , cotire will create multiple unity build files for it . " <nl> - " If not set , cotire will only create a single unity source file . " <nl> - " Inherited from directory . 
" <nl> - " Defaults to empty . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ < LANG > _UNITY_SOURCE_INIT " <nl> - BRIEF_DOCS " User provided unity source file to be used instead of the automatically generated one . " <nl> - FULL_DOCS <nl> - " If set , cotire will only add the given file ( s ) to the generated unity source file . " <nl> - " If not set , cotire will add all the target source files to the generated unity source file . " <nl> - " The property can be set to a user provided unity source file . " <nl> - " Defaults to empty . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ < LANG > _PREFIX_HEADER_INIT " <nl> - BRIEF_DOCS " User provided prefix header file to be used instead of the automatically generated one . " <nl> - FULL_DOCS <nl> - " If set , cotire will add the given header file ( s ) to the generated prefix header file . " <nl> - " If not set , cotire will generate a prefix header by tracking the header files included by the unity source file . " <nl> - " The property can be set to a user provided prefix header file ( e . g . , stdafx . h ) . " <nl> - " Defaults to empty . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_UNITY_LINK_LIBRARIES_INIT " INHERITED <nl> - BRIEF_DOCS " Define strategy for setting up unity target ' s link libraries . " <nl> - FULL_DOCS <nl> - " If this property is empty or set to NONE , the generated unity target ' s link libraries have to be set up manually . " <nl> - " If this property is set to COPY , the unity target ' s link libraries will be copied from this target . " <nl> - " If this property is set to COPY_UNITY , the unity target ' s link libraries will be copied from this target with considering existing unity targets . " <nl> - " Inherited from directory . " <nl> - " Defaults to empty . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ < LANG > _UNITY_SOURCE " <nl> - BRIEF_DOCS " Read - only property . The generated < LANG > unity source file ( s ) . " <nl> - FULL_DOCS <nl> - " cotire sets this property to the path of the generated < LANG > single computation unit source file for the target . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ < LANG > _PREFIX_HEADER " <nl> - BRIEF_DOCS " Read - only property . The generated < LANG > prefix header file . " <nl> - FULL_DOCS <nl> - " cotire sets this property to the full path of the generated < LANG > language prefix header for the target . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_ < LANG > _PRECOMPILED_HEADER " <nl> - BRIEF_DOCS " Read - only property . The generated < LANG > precompiled header file . " <nl> - FULL_DOCS <nl> - " cotire sets this property to the full path of the generated < LANG > language precompiled header binary for the target . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - TARGET PROPERTY " COTIRE_UNITY_TARGET_NAME " <nl> - BRIEF_DOCS " The name of the generated unity build target corresponding to this target . " <nl> - FULL_DOCS <nl> - " This property can be set to the desired name of the unity target that will be created by cotire . " <nl> - " If not set , the unity target name will be set to this target ' s name with the suffix _unity appended . " <nl> - " After this target has been processed by cotire , the property is set to the actual name of the generated unity target . 
" <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - # define cotire source properties <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_EXCLUDED " <nl> - BRIEF_DOCS " Do not modify source file ' s build command . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , the source file ' s build command will not be modified to make use of the precompiled header . " <nl> - " The source file will also be excluded from the generated unity source file . " <nl> - " Source files that have their COMPILE_FLAGS property set will be excluded by default . " <nl> - " Defaults to FALSE . " <nl> - ) <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_DEPENDENCY " <nl> - BRIEF_DOCS " Add this source file to dependencies of the automatically generated prefix header file . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , the source file is added to dependencies of the generated prefix header file . " <nl> - " If the file is modified , cotire will re - generate the prefix header source upon build . " <nl> - " Defaults to FALSE . " <nl> - ) <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_UNITY_SOURCE_PRE_UNDEFS " <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file before the inclusion of this source file . " <nl> - FULL_DOCS <nl> - " This may be set to a semicolon - separated list of preprocessor symbols . " <nl> - " cotire will add corresponding # undef directives to the generated unit source file before this file is included . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_UNITY_SOURCE_POST_UNDEFS " <nl> - BRIEF_DOCS " Preprocessor undefs to place in the generated unity source file after the inclusion of this source file . " <nl> - FULL_DOCS <nl> - " This may be set to a semicolon - separated list of preprocessor symbols . " <nl> - " cotire will add corresponding # undef directives to the generated unit source file after this file is included . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_START_NEW_UNITY_SOURCE " <nl> - BRIEF_DOCS " Start a new unity source file which includes this source file as the first one . " <nl> - FULL_DOCS <nl> - " If this property is set to TRUE , cotire will complete the current unity file and start a new one . " <nl> - " The new unity source file will include this source file as the first one . " <nl> - " This property essentially works as a separator for unity source files . " <nl> - " Defaults to FALSE . " <nl> - ) <nl> - <nl> - define_property ( <nl> - SOURCE PROPERTY " COTIRE_TARGET " <nl> - BRIEF_DOCS " Read - only property . Mark this source file as cotired for the given target . " <nl> - FULL_DOCS <nl> - " cotire sets this property to the name of target , that the source file ' s build command has been altered for . " <nl> - " Defaults to empty string . " <nl> - ) <nl> - <nl> - message ( STATUS " cotire $ { COTIRE_CMAKE_MODULE_VERSION } loaded . " ) <nl> - <nl> - endif ( ) <nl> | Merge pull request from EOSIO / remove_cotire | EOSIO/eos | 5e1fc53c964a6ebda005415b6622377f1d6cf797 | 2019-01-10T19:32:24Z |
mmm a / benchmark / README . md <nl> ppp b / benchmark / README . md <nl> gpu_device = - 1 <nl> <nl> Kirin 970 ( Cortex - A73 2 . 4GHz x 4 + Cortex - A53 1 . 8GHz x 4 ) <nl> ` ` ` <nl> - HWBKL : / data / local / tmp / ncnn $ . / benchncnn 8 4 2 <nl> + HWEML : / data / local / tmp / ncnnbench $ . / benchncnn 8 4 2 - 1 1 <nl> + [ 0 Mali - G72 ] queueC = 0 [ 2 ] queueG = 0 [ 2 ] queueT = 0 [ 2 ] <nl> + [ 0 Mali - G72 ] buglssc = 0 bugsbn1 = 0 buglbia = 0 bugihfa = 1 <nl> + [ 0 Mali - G72 ] fp16p = 1 fp16s = 0 fp16a = 1 int8s = 0 int8a = 0 <nl> loop_count = 8 <nl> num_threads = 4 <nl> powersave = 2 <nl> gpu_device = - 1 <nl> - squeezenet min = 22 . 55 max = 27 . 76 avg = 25 . 71 <nl> - squeezenet - int8 min = 18 . 46 max = 24 . 04 avg = 19 . 83 <nl> - mobilenet min = 32 . 52 max = 39 . 48 avg = 34 . 29 <nl> - mobilenet - int8 min = 21 . 65 max = 27 . 64 avg = 22 . 62 <nl> - mobilenet_v2 min = 29 . 93 max = 32 . 77 avg = 31 . 87 <nl> - shufflenet min = 15 . 40 max = 19 . 51 avg = 17 . 56 <nl> - mnasnet min = 25 . 10 max = 29 . 34 avg = 27 . 56 <nl> - proxylessnasnet min = 33 . 08 max = 35 . 05 avg = 33 . 63 <nl> - googlenet min = 81 . 98 max = 95 . 30 avg = 89 . 31 <nl> - googlenet - int8 min = 71 . 39 max = 76 . 15 avg = 73 . 74 <nl> - resnet18 min = 78 . 78 max = 87 . 98 avg = 86 . 15 <nl> - resnet18 - int8 min = 66 . 45 max = 79 . 07 avg = 70 . 57 <nl> - alexnet min = 139 . 34 max = 139 . 66 avg = 139 . 48 <nl> - vgg16 min = 427 . 03 max = 430 . 85 avg = 428 . 96 <nl> - resnet50 min = 343 . 06 max = 353 . 42 avg = 346 . 09 <nl> - resnet50 - int8 min = 146 . 54 max = 150 . 83 avg = 148 . 85 <nl> - squeezenet - ssd min = 57 . 13 max = 57 . 87 avg = 57 . 58 <nl> - squeezenet - ssd - int8 min = 56 . 35 max = 58 . 03 avg = 57 . 10 <nl> - mobilenet - ssd min = 69 . 72 max = 75 . 62 avg = 72 . 84 <nl> - mobilenet - ssd - int8 min = 43 . 79 max = 49 . 95 avg = 44 . 73 <nl> - mobilenet - yolo min = 179 . 57 max = 187 . 39 avg = 184 . 98 <nl> - mobilenet - yolov3 min = 164 . 52 max = 182 . 49 avg = 174 . 72 <nl> + cooling_down = 1 <nl> + squeezenet min = 24 . 38 max = 28 . 03 avg = 25 . 83 <nl> + squeezenet_int8 min = 21 . 79 max = 24 . 80 avg = 22 . 60 <nl> + mobilenet min = 34 . 09 max = 36 . 88 avg = 35 . 93 <nl> + mobilenet_int8 min = 52 . 62 max = 61 . 70 avg = 55 . 38 <nl> + mobilenet_v2 min = 23 . 71 max = 25 . 70 avg = 24 . 49 <nl> + mobilenet_v3 min = 20 . 66 max = 25 . 68 avg = 23 . 07 <nl> + shufflenet min = 17 . 89 max = 19 . 91 avg = 18 . 53 <nl> + shufflenet_v2 min = 13 . 73 max = 16 . 54 avg = 15 . 37 <nl> + mnasnet min = 24 . 36 max = 27 . 14 avg = 25 . 58 <nl> + proxylessnasnet min = 27 . 19 max = 29 . 70 avg = 28 . 59 <nl> + efficientnet_b0 min = 49 . 31 max = 50 . 26 avg = 49 . 70 <nl> + regnety_400m min = 42 . 54 max = 51 . 22 avg = 46 . 71 <nl> + blazeface min = 5 . 49 max = 7 . 67 avg = 6 . 27 <nl> + googlenet min = 72 . 67 max = 81 . 22 avg = 75 . 92 <nl> + googlenet_int8 min = 67 . 60 max = 74 . 50 avg = 71 . 21 <nl> + resnet18 min = 69 . 32 max = 81 . 59 avg = 73 . 45 <nl> + resnet18_int8 min = 60 . 92 max = 68 . 11 avg = 64 . 18 <nl> + alexnet min = 60 . 90 max = 79 . 28 avg = 66 . 72 <nl> + vgg16 min = 337 . 01 max = 378 . 89 avg = 352 . 37 <nl> + vgg16_int8 min = 465 . 88 max = 505 . 19 avg = 489 . 76 <nl> + resnet50 min = 207 . 75 max = 220 . 74 avg = 214 . 42 <nl> + resnet50_int8 min = 165 . 67 max = 183 . 80 avg = 171 . 27 <nl> + squeezenet_ssd min = 72 . 77 max = 84 . 45 avg = 79 . 09 <nl> + squeezenet_ssd_int8 min = 75 . 37 max = 86 . 58 avg = 78 . 
70 <nl> + mobilenet_ssd min = 88 . 88 max = 96 . 43 avg = 92 . 02 <nl> + mobilenet_ssd_int8 min = 89 . 04 max = 101 . 35 avg = 92 . 23 <nl> + mobilenet_yolo min = 189 . 73 max = 206 . 55 avg = 193 . 64 <nl> + mobilenetv2_yolov3 min = 99 . 08 max = 111 . 64 avg = 104 . 23 <nl> + <nl> + HWEML : / data / local / tmp / ncnnbench $ . / benchncnn 8 1 2 - 1 1 <nl> + [ 0 Mali - G72 ] queueC = 0 [ 2 ] queueG = 0 [ 2 ] queueT = 0 [ 2 ] <nl> + [ 0 Mali - G72 ] buglssc = 0 bugsbn1 = 0 buglbia = 0 bugihfa = 1 <nl> + [ 0 Mali - G72 ] fp16p = 1 fp16s = 0 fp16a = 1 int8s = 0 int8a = 0 <nl> + loop_count = 8 <nl> + num_threads = 1 <nl> + powersave = 2 <nl> + gpu_device = - 1 <nl> + cooling_down = 1 <nl> + squeezenet min = 73 . 47 max = 81 . 39 avg = 76 . 06 <nl> + squeezenet_int8 min = 62 . 63 max = 73 . 66 avg = 66 . 52 <nl> + mobilenet min = 103 . 85 max = 112 . 83 avg = 108 . 98 <nl> + mobilenet_int8 min = 152 . 27 max = 161 . 26 avg = 157 . 17 <nl> + mobilenet_v2 min = 70 . 53 max = 87 . 26 avg = 76 . 67 <nl> + mobilenet_v3 min = 59 . 87 max = 68 . 59 avg = 63 . 08 <nl> + shufflenet min = 36 . 69 max = 41 . 45 avg = 39 . 24 <nl> + shufflenet_v2 min = 33 . 97 max = 37 . 84 avg = 35 . 03 <nl> + mnasnet min = 69 . 24 max = 79 . 73 avg = 74 . 20 <nl> + proxylessnasnet min = 78 . 63 max = 88 . 57 avg = 81 . 83 <nl> + efficientnet_b0 min = 147 . 45 max = 159 . 07 avg = 152 . 09 <nl> + regnety_400m min = 90 . 83 max = 98 . 51 avg = 93 . 82 <nl> + blazeface min = 10 . 05 max = 11 . 59 avg = 10 . 78 <nl> + googlenet min = 240 . 26 max = 277 . 71 avg = 259 . 61 <nl> + googlenet_int8 min = 214 . 64 max = 233 . 56 avg = 225 . 01 <nl> + resnet18 min = 245 . 62 max = 268 . 49 avg = 260 . 37 <nl> + resnet18_int8 min = 184 . 85 max = 194 . 91 avg = 190 . 60 <nl> + alexnet min = 202 . 52 max = 241 . 12 avg = 211 . 51 <nl> + vgg16 min = 1632 . 98 max = 1769 . 05 avg = 1710 . 89 <nl> + vgg16_int8 min = 1237 . 01 max = 1316 . 40 avg = 1273 . 44 <nl> + resnet50 min = 558 . 41 max = 601 . 59 avg = 581 . 26 <nl> + resnet50_int8 min = 425 . 26 max = 445 . 19 avg = 436 . 22 <nl> + squeezenet_ssd min = 228 . 50 max = 255 . 89 avg = 244 . 63 <nl> + squeezenet_ssd_int8 min = 166 . 97 max = 193 . 77 avg = 180 . 22 <nl> + mobilenet_ssd min = 226 . 54 max = 246 . 62 avg = 235 . 75 <nl> + mobilenet_ssd_int8 min = 231 . 35 max = 249 . 63 avg = 241 . 29 <nl> + mobilenet_yolo min = 469 . 71 max = 508 . 79 avg = 497 . 50 <nl> + mobilenetv2_yolov3 min = 242 . 88 max = 265 . 30 avg = 254 . 68 <nl> + <nl> + HWEML : / data / local / tmp / ncnnbench $ . / benchncnn 4 1 2 0 1 <nl> + [ 0 Mali - G72 ] queueC = 0 [ 2 ] queueG = 0 [ 2 ] queueT = 0 [ 2 ] <nl> + [ 0 Mali - G72 ] buglssc = 0 bugsbn1 = 0 buglbia = 0 bugihfa = 1 <nl> + [ 0 Mali - G72 ] fp16p = 1 fp16s = 0 fp16a = 1 int8s = 0 int8a = 0 <nl> + loop_count = 4 <nl> + num_threads = 1 <nl> + powersave = 2 <nl> + gpu_device = 0 <nl> + cooling_down = 1 <nl> + squeezenet min = 24 . 54 max = 25 . 75 avg = 25 . 16 <nl> + mobilenet min = 22 . 03 max = 29 . 61 avg = 27 . 31 <nl> + mobilenet_v2 min = 20 . 15 max = 28 . 05 avg = 25 . 35 <nl> + mobilenet_v3 min = 34 . 26 max = 37 . 49 avg = 35 . 51 <nl> + shufflenet min = 26 . 29 max = 27 . 68 avg = 26 . 86 <nl> + shufflenet_v2 min = 29 . 60 max = 32 . 08 avg = 31 . 27 <nl> + mnasnet min = 25 . 85 max = 29 . 38 avg = 27 . 98 <nl> + proxylessnasnet min = 23 . 64 max = 30 . 09 avg = 26 . 36 <nl> + efficientnet_b0 min = 52 . 55 max = 58 . 51 avg = 55 . 56 <nl> + regnety_400m min = 37 . 81 max = 43 . 22 avg = 40 . 30 <nl> + blazeface min = 9 . 
14 max = 10 . 93 avg = 10 . 08 <nl> + googlenet min = 60 . 19 max = 62 . 84 avg = 61 . 51 <nl> + resnet18 min = 50 . 42 max = 52 . 93 avg = 51 . 70 <nl> + alexnet min = 195 . 34 max = 196 . 98 avg = 196 . 14 <nl> + vgg16 min = 725 . 88 max = 751 . 20 avg = 739 . 99 <nl> + resnet50 min = 124 . 47 max = 125 . 93 avg = 125 . 02 <nl> + squeezenet_ssd min = 91 . 79 max = 97 . 04 avg = 93 . 56 <nl> + mobilenet_ssd min = 51 . 81 max = 59 . 31 avg = 54 . 09 <nl> + mobilenet_yolo min = 124 . 67 max = 127 . 62 avg = 126 . 53 <nl> + mobilenetv2_yolov3 min = 53 . 11 max = 54 . 81 avg = 54 . 11 <nl> ` ` ` <nl> <nl> Qualcomm MSM8998 Snapdragon 835 ( Kyro 2 . 45GHz x 4 + Kyro 1 . 9GHz x 4 + Adreno 540 ) <nl> | update kirin970 benchmark | Tencent/ncnn | 27b7b0d9d53b3727fef86bcabc6b9f1a14f97c9f | 2020-06-12T12:11:19Z |
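The min/max/avg columns in these tables come from timing repeated forward passes under the given thread and power settings; a minimal sketch of such a measurement loop (run_once is a placeholder workload, not ncnn's API):

```cpp
#include <algorithm>
#include <chrono>
#include <cstdio>

// Placeholder workload standing in for one network inference pass.
static volatile long sink;
static void run_once() {
    long s = 0;
    for (long i = 0; i < 1000000; i++) s += i;
    sink = s;
}

// Reports min/max/avg over loop_count runs, like the tables above.
static void bench(const char* name, int loop_count) {
    double mn = 1e30, mx = 0.0, total = 0.0;
    for (int i = 0; i < loop_count; i++) {
        auto t0 = std::chrono::steady_clock::now();
        run_once();
        auto t1 = std::chrono::steady_clock::now();
        double ms = std::chrono::duration<double, std::milli>(t1 - t0).count();
        mn = std::min(mn, ms);
        mx = std::max(mx, ms);
        total += ms;
    }
    printf("%24s  min = %7.2f  max = %7.2f  avg = %7.2f\n",
           name, mn, mx, total / loop_count);
}

int main() {
    bench("placeholder", 8);  // loop_count = 8, as in the runs above
    return 0;
}
```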
mmm a / js / common / modules / graph . js <nl> ppp b / js / common / modules / graph . js <nl> Edge . prototype . getPropertyKeys = function ( ) { <nl> <nl> Edge . prototype . setProperty = function ( name , value ) { <nl> var shallow = shallowCopy ( this . _properties ) ; <nl> + shallow [ ' $ id ' ] = this . _properties . $ id ; <nl> + shallow [ ' $ label ' ] = this . _properties . $ label ; <nl> shallow [ name ] = value ; <nl> <nl> / / TODO use " update " if this becomes available <nl> Vertex . prototype . properties = function ( ) { <nl> <nl> Vertex . prototype . setProperty = function ( name , value ) { <nl> var shallow = shallowCopy ( this . _properties ) ; <nl> + shallow [ ' $ id ' ] = this . _properties . $ id ; <nl> shallow [ name ] = value ; <nl> <nl> / / TODO use " update " if this becomes available <nl> | fix lost id | arangodb/arangodb | 7a80c3c5ee53eaede181d85ab604dfbb8d7916d7 | 2012-04-18T08:53:03Z |
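The fix restores the reserved $id/$label attributes that the shallow-copy helper drops before the updated document is saved back. A hedged C++ sketch of the same pattern (the map-based Properties type and the $-prefix rule are assumptions standing in for the JS objects):

```cpp
#include <map>
#include <string>

using Properties = std::map<std::string, std::string>;

// The copy helper skips reserved ("$"-prefixed) attributes, so they
// must be restored explicitly before writing the new property, which
// is the behavior of the fix above. Assumes $id/$label exist on input.
Properties setProperty(const Properties& props,
                       const std::string& name, const std::string& value) {
    Properties shallow;
    for (const auto& kv : props)
        if (kv.first.empty() || kv.first[0] != '$')  // shallowCopy's effect
            shallow.insert(kv);
    shallow["$id"] = props.at("$id");        // restore system attributes
    shallow["$label"] = props.at("$label");  // (edges only; vertices keep $id)
    shallow[name] = value;
    return shallow;
}

int main() {
    Properties edge{{"$id", "e/1"}, {"$label", "knows"}, {"w", "1"}};
    Properties updated = setProperty(edge, "w", "2");
    return updated.at("$id") == "e/1" ? 0 : 1;
}
```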
mmm a / ports / grpc / portfile . cmake <nl> ppp b / ports / grpc / portfile . cmake <nl> SET ( VCPKG_POLICY_EMPTY_PACKAGE enabled ) <nl> file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / include ) <nl> <nl> vcpkg_copy_pdbs ( ) <nl> + # <nl> \ No newline at end of file <nl> | [ grpc ] Change portfile to trigger build | microsoft/vcpkg | 224300c2387cb959bbbec5623f5fa017142d3733 | 2018-12-19T06:30:26Z |
mmm a / mars / comm / http . cc <nl> ppp b / mars / comm / http . cc <nl> bool HeaderFields : : IsConnectionClose ( ) const { <nl> return false ; <nl> } <nl> <nl> - int64_t HeaderFields : : ContentLength ( ) const { <nl> + uint64_t HeaderFields : : ContentLength ( ) const { <nl> const char * strContentLength = HeaderField ( HeaderFields : : KStringContentLength ) ; <nl> - int64_t contentLength = 0 ; <nl> + uint64_t contentLength = 0 ; <nl> <nl> if ( strContentLength ) { <nl> - contentLength = strtoll ( strContentLength , NULL , 10 ) ; <nl> + contentLength = strtoull ( strContentLength , NULL , 10 ) ; <nl> } <nl> <nl> return contentLength ; <nl> bool HeaderFields : : Range ( long & _start , long & _end ) const { <nl> return true ; <nl> } <nl> <nl> - bool HeaderFields : : ContentRange ( int64_t * start , int64_t * end , int64_t * total ) const { <nl> + bool HeaderFields : : ContentRange ( uint64_t * start , uint64_t * end , uint64_t * total ) const { <nl> / / Content - Range : bytes 0 - 102400 / 102399 <nl> <nl> * start = 0 ; <nl> bool HeaderFields : : ContentRange ( int64_t * start , int64_t * end , int64_t * total ) co <nl> <nl> if ( std : : string : : npos ! = range_start ) { <nl> std : : string startstr = bytes . substr ( 0 , range_start ) ; <nl> - * start = strtoll ( startstr . c_str ( ) , NULL , 10 ) ; <nl> + * start = strtoull ( startstr . c_str ( ) , NULL , 10 ) ; <nl> <nl> size_t range_end = bytes . find ( " / " , range_start + 1 ) ; <nl> <nl> if ( range_end ! = std : : string : : npos ) { <nl> std : : string endstr = bytes . substr ( range_start + 1 , range_end - range_start - 1 ) ; <nl> - * end = strtoll ( endstr . c_str ( ) , NULL , 10 ) ; <nl> + * end = strtoull ( endstr . c_str ( ) , NULL , 10 ) ; <nl> <nl> <nl> std : : string totalstr = bytes . substr ( range_end + 1 ) ; <nl> - * total = strtoll ( totalstr . c_str ( ) , NULL , 10 ) ; <nl> + * total = strtoull ( totalstr . c_str ( ) , NULL , 10 ) ; <nl> <nl> return true ; <nl> } <nl> mmm a / mars / comm / http . h <nl> ppp b / mars / comm / http . h <nl> class HeaderFields { <nl> <nl> bool IsTransferEncodingChunked ( ) const ; <nl> bool IsConnectionClose ( ) const ; <nl> - int64_t ContentLength ( ) const ; <nl> + uint64_t ContentLength ( ) const ; <nl> <nl> bool Range ( long & _start , long & _end ) const ; <nl> - bool ContentRange ( int64_t * start , int64_t * end , int64_t * total ) const ; <nl> + bool ContentRange ( uint64_t * start , uint64_t * end , uint64_t * total ) const ; <nl> <nl> const std : : string ToString ( ) const ; <nl> <nl> | change length type to uint64_t | Tencent/mars | 1199b3b65cb8017ec2ab99ff370788dbb9a152cc | 2019-07-23T12:37:28Z |
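The move from int64_t/strtoll to uint64_t/strtoull fits HTTP semantics: a Content-Length is never negative, and strtoll saturates at INT64_MAX on out-of-range input while strtoull keeps the full 64-bit range. A small demonstration:

```cpp
#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

int main() {
    // A value above INT64_MAX, representable only as uint64_t.
    const char* huge = "18446744073709551615";  // UINT64_MAX

    int64_t  s = strtoll(huge, nullptr, 10);   // clamps to INT64_MAX, sets errno
    uint64_t u = strtoull(huge, nullptr, 10);  // preserved exactly

    printf("strtoll : %" PRId64 "\n", s);   // 9223372036854775807
    printf("strtoull: %" PRIu64 "\n", u);   // 18446744073709551615
    return 0;
}
```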
mmm a / api / baseapi . cpp <nl> ppp b / api / baseapi . cpp <nl> static void AddBoxToTSV ( const PageIterator * it , <nl> it - > BoundingBox ( level , & left , & top , & right , & bottom ) ; <nl> hocr_str - > add_str_int ( " \ t " , left ) ; <nl> hocr_str - > add_str_int ( " \ t " , top ) ; <nl> - hocr_str - > add_str_int ( " \ t " , right - left + 1 ) ; <nl> - hocr_str - > add_str_int ( " \ t " , bottom - top + 1 ) ; <nl> + hocr_str - > add_str_int ( " \ t " , right - left ) ; <nl> + hocr_str - > add_str_int ( " \ t " , bottom - top ) ; <nl> } <nl> <nl> <nl> mmm a / api / renderer . cpp <nl> ppp b / api / renderer . cpp <nl> bool TessHOcrRenderer : : AddImageHandler ( TessBaseAPI * api ) { <nl> } <nl> <nl> / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * HOcr Text Renderer interface implementation <nl> + * TSV Text Renderer interface implementation <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> TessTsvRenderer : : TessTsvRenderer ( const char * outputbase ) <nl> : TessResultRenderer ( outputbase , " tsv " ) { <nl> | Merge pull request from StefRe / tsv - fix | tesseract-ocr/tesseract | 828f8528a8c03b89c9f88b62f23e5c7e740d5419 | 2016-06-27T07:09:12Z |
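Whether a bounding-box width needs the +1 depends on the edge convention; the fix implies BoundingBox returns exclusive right/bottom edges (one past the last pixel), in which case right - left is already the width. A sketch of the two conventions:

```cpp
#include <cassert>

struct Box { int left, top, right, bottom; };

// Exclusive convention: right/bottom are one past the last pixel.
int width_exclusive(const Box& b) { return b.right - b.left; }
// Inclusive convention: right/bottom name the last pixel itself.
int width_inclusive(const Box& b) { return b.right - b.left + 1; }

int main() {
    // Both describe pixel columns 10..19 (ten pixels wide).
    Box exclusive{10, 0, 20, 0};
    Box inclusive{10, 0, 19, 0};
    assert(width_exclusive(exclusive) == 10);
    assert(width_inclusive(inclusive) == 10);
    return 0;
}
```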
mmm a / src / app / ui / main_window . cpp <nl> ppp b / src / app / ui / main_window . cpp <nl> void MainWindow : : onActiveViewChange ( ) <nl> configureWorkspaceLayout ( ) ; <nl> } <nl> <nl> + bool MainWindow : : onIsModified ( Tabs * tabs , TabView * tabView ) <nl> + { <nl> + if ( DocumentView * docView = dynamic_cast < DocumentView * > ( tabView ) ) { <nl> + Document * document = docView - > getDocument ( ) ; <nl> + return document - > isModified ( ) ; <nl> + } <nl> + else { <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> void MainWindow : : onSelectTab ( Tabs * tabs , TabView * tabView ) <nl> { <nl> if ( ! tabView ) <nl> void MainWindow : : onMouseOverTab ( Tabs * tabs , TabView * tabView ) <nl> } <nl> } <nl> <nl> - bool MainWindow : : onIsModified ( Tabs * tabs , TabView * tabView ) <nl> + void MainWindow : : onFloatingTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) <nl> { <nl> - if ( DocumentView * docView = dynamic_cast < DocumentView * > ( tabView ) ) { <nl> - Document * document = docView - > getDocument ( ) ; <nl> - return document - > isModified ( ) ; <nl> - } <nl> - else { <nl> - return false ; <nl> - } <nl> + m_workspace - > setDropViewPreview ( pos ) ; <nl> + } <nl> + <nl> + DropTabResult MainWindow : : onDropTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) <nl> + { <nl> + m_workspace - > removeDropViewPreview ( pos ) ; <nl> + m_workspace - > dropViewAt ( pos , dynamic_cast < WorkspaceView * > ( tabView ) ) ; <nl> + return DropTabResult : : IGNORE ; <nl> } <nl> <nl> void MainWindow : : configureWorkspaceLayout ( ) <nl> mmm a / src / app / ui / main_window . h <nl> ppp b / src / app / ui / main_window . h <nl> namespace app { <nl> void popTimeline ( ) ; <nl> <nl> / / TabsDelegate implementation . <nl> + bool onIsModified ( Tabs * tabs , TabView * tabView ) override ; <nl> void onSelectTab ( Tabs * tabs , TabView * tabView ) override ; <nl> void onCloseTab ( Tabs * tabs , TabView * tabView ) override ; <nl> void onContextMenuTab ( Tabs * tabs , TabView * tabView ) override ; <nl> void onMouseOverTab ( Tabs * tabs , TabView * tabView ) override ; <nl> - bool onIsModified ( Tabs * tabs , TabView * tabView ) override ; <nl> + void onFloatingTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) override ; <nl> + DropTabResult onDropTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) override ; <nl> <nl> protected : <nl> bool onProcessMessage ( ui : : Message * msg ) override ; <nl> mmm a / src / app / ui / tabs . cpp <nl> ppp b / src / app / ui / tabs . cpp <nl> bool Tabs : : onProcessMessage ( Message * msg ) <nl> createFloatingTab ( m_selected ) ; <nl> <nl> m_floatingOverlay - > moveOverlay ( mousePos - m_dragOffset ) ; <nl> + <nl> + if ( m_delegate ) <nl> + m_delegate - > onFloatingTab ( this , m_selected - > view , mousePos ) ; <nl> } <nl> else { <nl> justDocked = m_floatingTab ; <nl> bool Tabs : : onProcessMessage ( Message * msg ) <nl> <nl> releaseMouse ( ) ; <nl> <nl> - if ( m_isDragging ) <nl> + if ( m_isDragging ) { <nl> + if ( m_delegate ) <nl> + m_delegate - > onDropTab ( this , m_selected - > view , <nl> + mouseMsg - > position ( ) ) ; <nl> + <nl> stopDrag ( ) ; <nl> + } <nl> <nl> if ( m_clickedCloseButton ) { <nl> m_clickedCloseButton = false ; <nl> mmm a / src / app / ui / tabs . h <nl> ppp b / src / app / ui / tabs . 
h <nl> namespace app { <nl> virtual TabIcon getTabIcon ( ) = 0 ; <nl> } ; <nl> <nl> + enum class DropTabResult { <nl> + IGNORE , <nl> + DOCKED_IN_OTHER_PLACE , <nl> + } ; <nl> + <nl> / / Interface used to control notifications from the Tabs widget . <nl> class TabsDelegate { <nl> public : <nl> + <nl> virtual ~ TabsDelegate ( ) { } <nl> <nl> + / / Returns true if the tab represent a modified document . <nl> + virtual bool onIsModified ( Tabs * tabs , TabView * tabView ) = 0 ; <nl> + <nl> / / Called when the user selected the tab with the left mouse button . <nl> virtual void onSelectTab ( Tabs * tabs , TabView * tabView ) = 0 ; <nl> <nl> namespace app { <nl> / / mouse just leave all tabs ) <nl> virtual void onMouseOverTab ( Tabs * tabs , TabView * tabView ) = 0 ; <nl> <nl> - virtual bool onIsModified ( Tabs * tabs , TabView * tabView ) = 0 ; <nl> + / / Called when the user is dragging a tab outside the Tabs bar . <nl> + virtual void onFloatingTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) = 0 ; <nl> + virtual DropTabResult onDropTab ( Tabs * tabs , TabView * tabView , const gfx : : Point & pos ) = 0 ; <nl> } ; <nl> <nl> / / Tabs control . Used to show opened documents . <nl> mmm a / src / app / ui / workspace . cpp <nl> ppp b / src / app / ui / workspace . cpp <nl> Workspace : : Workspace ( ) <nl> : Widget ( kGenericWidget ) <nl> , m_tabsBar ( nullptr ) <nl> , m_activeView ( nullptr ) <nl> + , m_dropPreview ( false ) <nl> { <nl> SkinTheme * theme = static_cast < SkinTheme * > ( getTheme ( ) ) ; <nl> setBgColor ( theme - > colors . workspace ( ) ) ; <nl> void Workspace : : onPaint ( PaintEvent & ev ) <nl> ev . getGraphics ( ) - > fillRect ( getBgColor ( ) , getClientBounds ( ) ) ; <nl> } <nl> <nl> + void Workspace : : onResize ( ui : : ResizeEvent & ev ) <nl> + { <nl> + setBoundsQuietly ( ev . getBounds ( ) ) ; <nl> + <nl> + gfx : : Rect cpos = getChildrenBounds ( ) ; <nl> + <nl> + / / Preview to drop tabs in workspace <nl> + if ( m_dropPreview & & cpos . contains ( m_dropPos ) ) { <nl> + int left = ABS ( cpos . x - m_dropPos . x ) ; <nl> + int top = ABS ( cpos . y - m_dropPos . y ) ; <nl> + int right = ABS ( cpos . x + cpos . w - m_dropPos . x ) ; <nl> + int bottom = ABS ( cpos . y + cpos . h - m_dropPos . y ) ; <nl> + int threshold = 32 * guiscale ( ) ; <nl> + if ( threshold > cpos . w / 2 ) threshold = cpos . w / 2 ; <nl> + if ( threshold > cpos . h / 2 ) threshold = cpos . h / 2 ; <nl> + <nl> + if ( left < threshold & & left < right & & left < top & & left < bottom ) { <nl> + cpos . x + = threshold ; <nl> + cpos . w - = threshold ; <nl> + } <nl> + else if ( top < threshold & & top < left & & top < right & & top < bottom ) { <nl> + cpos . y + = threshold ; <nl> + cpos . h - = threshold ; <nl> + } <nl> + else if ( right < threshold & & right < left & & right < top & & right < bottom ) { <nl> + cpos . w - = threshold ; <nl> + } <nl> + else if ( bottom < threshold & & bottom < left & & bottom < top & & bottom < right ) { <nl> + cpos . 
h - = threshold ; <nl> + } <nl> + } <nl> + <nl> + for ( Widget * child : getChildren ( ) ) <nl> + child - > setBounds ( cpos ) ; <nl> + } <nl> + <nl> + void Workspace : : setDropViewPreview ( const gfx : : Point & pos ) <nl> + { <nl> + m_dropPos = pos ; <nl> + m_dropPreview = true ; <nl> + <nl> + layout ( ) ; <nl> + } <nl> + <nl> + void Workspace : : removeDropViewPreview ( const gfx : : Point & pos ) <nl> + { <nl> + m_dropPreview = false ; <nl> + layout ( ) ; <nl> + } <nl> + <nl> + void Workspace : : dropViewAt ( const gfx : : Point & pos , WorkspaceView * view ) <nl> + { <nl> + } <nl> + <nl> } / / namespace app <nl> mmm a / src / app / ui / workspace . h <nl> ppp b / src / app / ui / workspace . h <nl> namespace app { <nl> WorkspaceView * activeView ( ) ; <nl> void setActiveView ( WorkspaceView * view ) ; <nl> <nl> + / / Drop views into workspace <nl> + void setDropViewPreview ( const gfx : : Point & pos ) ; <nl> + void removeDropViewPreview ( const gfx : : Point & pos ) ; <nl> + void dropViewAt ( const gfx : : Point & pos , WorkspaceView * view ) ; <nl> + <nl> Signal0 < void > ActiveViewChanged ; <nl> <nl> protected : <nl> void onPaint ( ui : : PaintEvent & ev ) override ; <nl> + void onResize ( ui : : ResizeEvent & ev ) override ; <nl> <nl> private : <nl> Tabs * m_tabsBar ; <nl> WorkspaceViews m_views ; <nl> WorkspaceView * m_activeView ; <nl> + bool m_dropPreview ; <nl> + gfx : : Point m_dropPos ; <nl> } ; <nl> <nl> } / / namespace app <nl> | Add feedback to Workspace to drop tabs on it | aseprite/aseprite | 2cfef9e2507074c665b2beff788b2e4628ffa6f1 | 2015-03-27T21:14:00Z |
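The onResize code above reserves a strip along whichever edge the dragged tab is nearest, within a scaled threshold; the same nearest-edge test isolated as a sketch (Rect and Side are stand-ins for the gfx/ui types):

```cpp
#include <algorithm>
#include <cstdlib>

struct Rect { int x, y, w, h; };
enum class Side { None, Left, Top, Right, Bottom };

// Returns which edge strip (if any) the pointer is docking toward:
// it must be within `threshold` of that edge and strictly closer to
// it than to the other three, mirroring the preview logic above.
Side dropSide(const Rect& r, int px, int py, int threshold) {
    int left   = std::abs(px - r.x);
    int top    = std::abs(py - r.y);
    int right  = std::abs(r.x + r.w - px);
    int bottom = std::abs(r.y + r.h - py);
    threshold = std::min(threshold, std::min(r.w / 2, r.h / 2));
    if (left < threshold && left < right && left < top && left < bottom)
        return Side::Left;
    if (top < threshold && top < left && top < right && top < bottom)
        return Side::Top;
    if (right < threshold && right < left && right < top && right < bottom)
        return Side::Right;
    if (bottom < threshold && bottom < left && bottom < top && bottom < right)
        return Side::Bottom;
    return Side::None;
}

int main() {
    Rect r{0, 0, 200, 100};
    return dropSide(r, 5, 50, 32) == Side::Left ? 0 : 1;
}
```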
mmm a / test / SILGen / partial_apply_super . swift <nl> ppp b / test / SILGen / partial_apply_super . swift <nl> <nl> / / RUN : % target - swift - frontend - use - native - super - method - emit - silgen % s | FileCheck % s <nl> <nl> - class B { <nl> - func foo ( ) { } <nl> - func bar ( ) ( ) { } <nl> - } <nl> - <nl> - class D : B { <nl> - override func foo ( ) { } <nl> - override func bar ( ) ( ) { } <nl> - <nl> - / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super1D7getFoos <nl> - / / CHECK : function_ref @ _TFC19partial_apply_super1D3foo <nl> - / / CHECK : super_method % 0 : $ D , # B . foo ! 1 : B - > ( ) - > ( ) <nl> - func getFoos ( ) - > ( ( ) - > ( ) , ( ) - > ( ) ) { <nl> - return ( self . foo , super . foo ) <nl> - } <nl> - <nl> - / / CHECK - LABEL : sil shared @ _TFC19partial_apply_super1D3fooFT_T_ <nl> - / / CHECK : class_method % 0 : $ D , # D . foo ! 1 <nl> - <nl> - / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super1D6getBar <nl> - / / CHECK : function_ref @ _TFC19partial_apply_super1D3bar <nl> - func getBar ( ) - > ( ( ) - > ( ) - > ( ) ) { <nl> - return self . bar <nl> - } <nl> - <nl> - / / CHECK - LABEL : sil shared @ _TFC19partial_apply_super1D3bar <nl> - / / CHECK : function_ref @ _TFC19partial_apply_super1D3bar <nl> - } <nl> - <nl> func doFoo ( f : ( ) - > ( ) ) { <nl> f ( ) <nl> } <nl> <nl> class Base { <nl> - func foo ( ) { } <nl> + func method ( ) { } <nl> + func bar ( ) ( ) { } <nl> + class func classMethod ( ) { } <nl> } <nl> <nl> class Derived : Base { <nl> - / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super7Derived3foofT_T_ <nl> + / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super7Derived6methodfT_T_ <nl> / / CHECK : [ [ DOFOO : % [ 0 - 9 ] + ] ] = function_ref @ _TF19partial_apply_super5doFooFFT_T_T_ <nl> / / CHECK : [ [ CASTED_SELF : % [ 0 - 9 ] + ] ] = upcast % 0 : $ Derived to $ Base <nl> - / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ Derived , # Base . foo ! 1 <nl> + / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ Derived , # Base . method ! 1 <nl> / / CHECK : [ [ PARTIAL_APPLY : % [ 0 - 9 ] + ] ] = partial_apply [ [ SUPER_METHOD ] ] ( [ [ CASTED_SELF ] ] ) <nl> / / CHECK : apply [ [ DOFOO ] ] ( [ [ PARTIAL_APPLY ] ] ) <nl> - override func foo ( ) { <nl> - doFoo ( super . foo ) <nl> + override func method ( ) { <nl> + doFoo ( super . method ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil hidden @ _TZFC19partial_apply_super7Derived11classMethodfT_T_ <nl> + / / CHECK : [ [ DOFOO : % [ 0 - 9 ] + ] ] = function_ref @ _TF19partial_apply_super5doFooFFT_T_T_ <nl> + / / CHECK : [ [ CASTED_SELF : % [ 0 - 9 ] + ] ] = upcast % 0 : $ @ thick Derived . Type to $ @ thick Base . Type <nl> + / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ @ thick Derived . Type , # Base . classMethod ! 1 <nl> + / / CHECK : [ [ PARTIAL_APPLY : % [ 0 - 9 ] + ] ] = partial_apply % 4 ( % 3 ) : $ @ convention ( thin ) ( @ thick Base . Type ) - > ( ) <nl> + override class func classMethod ( ) { <nl> + doFoo ( super . classMethod ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super7Derived10getMethodsfT_TFT_T_FT_T__ <nl> + / / CHECK : function_ref @ _TFC19partial_apply_super7Derived6method <nl> + / / CHECK : super_method % 0 : $ Derived , # Base . method ! 1 : Base - > ( ) - > ( ) <nl> + func getMethods ( ) - > ( ( ) - > ( ) , ( ) - > ( ) ) { <nl> + return ( self . method , super . 
method ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil shared @ _TFC19partial_apply_super7Derived6methodFT_T_ <nl> + / / CHECK : class_method % 0 : $ Derived , # Derived . method ! 1 <nl> + <nl> + / / CHECK - LABEL : sil hidden @ _TFC19partial_apply_super7Derived6getBar <nl> + / / CHECK : function_ref @ _TFC19partial_apply_super4Base3barFT_FT_T_ <nl> + func getBar ( ) - > ( ( ) - > ( ) - > ( ) ) { <nl> + return self . bar <nl> } <nl> } <nl> <nl> / / Test partial application of super with local types <nl> let c = { <nl> class Base { <nl> - func foo ( ) { <nl> - print ( " c . A . foo ( ) " ) <nl> - } <nl> + func method ( ) { } <nl> + class func classMethod ( ) { } <nl> } <nl> class Derived : Base { <nl> - / / CHECK - LABEL : sil shared @ _TFCF19partial_apply_superU_FT_T_L_7Derived3foofT_T_ <nl> + / / CHECK - LABEL : sil shared @ _TFCF19partial_apply_superU_FT_T_L_7Derived6methodfT_T_ <nl> / / CHECK : [ [ DOFOO : % [ 0 - 9 ] + ] ] = function_ref @ _TF19partial_apply_super5doFooFFT_T_T_ <nl> / / CHECK : [ [ CASTED_SELF : % [ 0 - 9 ] + ] ] = upcast % 0 : $ Derived to $ Base <nl> - / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ Derived , # < anonymous function > Base . foo ! 1 <nl> + / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ Derived , # < anonymous function > Base . method ! 1 <nl> / / CHECK : [ [ PARTIAL_APPLY : % [ 0 - 9 ] + ] ] = partial_apply [ [ SUPER_METHOD ] ] ( [ [ CASTED_SELF ] ] ) <nl> / / CHECK : apply [ [ DOFOO ] ] ( [ [ PARTIAL_APPLY ] ] ) <nl> - override func foo ( ) { <nl> - print ( " c . B . foo ( ) " ) <nl> - doFoo ( super . foo ) <nl> + override func method ( ) { <nl> + doFoo ( super . method ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil shared @ _TZFCF19partial_apply_superU_FT_T_L_7Derived11classMethodfT_T_ <nl> + / / CHECK : [ [ DOFOO : % [ 0 - 9 ] + ] ] = function_ref @ _TF19partial_apply_super5doFooFFT_T_T_ <nl> + / / CHECK : [ [ CASTED_SELF : % [ 0 - 9 ] + ] ] = upcast % 0 : $ @ thick Derived . Type to $ @ thick Base . Type <nl> + / / CHECK : [ [ SUPER_METHOD : % [ 0 - 9 ] + ] ] = super_method % 0 : $ @ thick Derived . Type , # < anonymous function > Base . classMethod ! 1 <nl> + / / CHECK : [ [ PARTIAL_APPLY : % [ 0 - 9 ] + ] ] = partial_apply % 4 ( % 3 ) : $ @ convention ( thin ) ( @ thick Base . Type ) - > ( ) <nl> + / / CHECK : apply [ [ DOFOO ] ] ( [ [ PARTIAL_APPLY ] ] ) <nl> + override class func classMethod ( ) { <nl> + doFoo ( super . classMethod ) <nl> } <nl> } <nl> } <nl> | test / SILGen / partial_apply_super . swift : add partially applied class method case | apple/swift | cc25f4a99227c8408b1a345075865a473a23acba | 2015-12-08T22:17:44Z |
mmm a / xbmc / FileItem . cpp <nl> ppp b / xbmc / FileItem . cpp <nl> const CStdString & CFileItem : : GetMimeType ( bool lookup / * = true * / ) const <nl> <nl> if ( m_bIsFolder ) <nl> m_ref = " x - directory / normal " ; <nl> + else if ( m_pvrChannelInfoTag ) <nl> + m_ref = m_pvrChannelInfoTag - > InputFormat ( ) ; <nl> else if ( m_strPath . Left ( 8 ) . Equals ( " shout : / / " ) <nl> | | m_strPath . Left ( 7 ) . Equals ( " http : / / " ) <nl> | | m_strPath . Left ( 8 ) . Equals ( " https : / / " ) ) <nl> | Merge pull request from BtbN / pvr_channel_mimetype | xbmc/xbmc | 14dc4c8ac40471a88d004f6ad85038fc22516e4d | 2012-10-02T08:18:01Z |
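The one-line patch above makes PVR channel items report the channel's input format as their mime type before any URL-prefix heuristics run. A rough sketch of the resulting lookup order, with invented types rather than XBMC's real API:

    #include <string>

    struct PVRChannel { std::string inputFormat; };

    struct Item {
        bool isFolder = false;
        const PVRChannel* pvrChannel = nullptr; // set only for PVR channel items
        std::string path;
    };

    // Illustrative only: folder check first, then the newly added channel
    // branch, then the old scheme-based heuristics.
    std::string getMimeType(const Item& item) {
        if (item.isFolder)
            return "x-directory/normal";
        if (item.pvrChannel)                    // the added branch
            return item.pvrChannel->inputFormat;
        if (item.path.rfind("shout://", 0) == 0 ||
            item.path.rfind("http://", 0) == 0)
            return "";  // XBMC resolves these via an online lookup; elided here
        return "";
    }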
mmm a / emscripten . py <nl> ppp b / emscripten . py <nl> def asmjs_mangle ( name ) : <nl> elif line . startswith ( ' ( func ' ) : <nl> parts = line . split ( ) <nl> func_name = parts [ 1 ] [ 1 : ] <nl> - metadata [ ' implementedFunctions ' ] . append ( func_name ) <nl> + metadata [ ' implementedFunctions ' ] . append ( ' _ ' + func_name ) <nl> elif line . startswith ( ' ( export ' ) : <nl> parts = line . split ( ) <nl> export_name = parts [ 1 ] [ 1 : - 1 ] <nl> mmm a / tests / test_core . py <nl> ppp b / tests / test_core . py <nl> def test_set_align ( self ) : <nl> <nl> self . do_run_in_out_file_test ( ' tests ' , ' core ' , ' test_set_align ' ) <nl> <nl> - @ no_wasm_backend ( ' printf is incorrectly handling float values ' ) <nl> def test_emscripten_api ( self ) : <nl> check = ' ' ' <nl> def process ( filename ) : <nl> def get_freetype ( self ) : <nl> return self . get_library ( ' freetype ' , <nl> os . path . join ( ' objs ' , ' . libs ' , ' libfreetype . a ' ) ) <nl> <nl> - @ no_wasm_backend ( ) <nl> def test_freetype ( self ) : <nl> if WINDOWS : return self . skip ( ' test_freetype uses a . / configure script to build and therefore currently only runs on Linux and OS X . ' ) <nl> assert ' asm2g ' in test_modes <nl> def test ( ) : <nl> assert old . count ( ' tempBigInt ' ) > new . count ( ' tempBigInt ' ) <nl> <nl> @ sync <nl> - @ no_wasm_backend ( ) <nl> def test_poppler ( self ) : <nl> if WINDOWS : return self . skip ( ' test_poppler depends on freetype , which uses a . / configure script to build and therefore currently only runs on Linux and OS X . ' ) <nl> <nl> | Add leading underscore to implemented functions in wasm_backend ( ) | emscripten-core/emscripten | 8c9a3fdb88cb88a2c788bd5d7ba7befecf394ca3 | 2017-03-14T21:42:43Z |
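The fix above prefixes wasm-backend function names with '_' so they match the asm.js-style names the rest of emscripten.py already expects. A small C++ sketch of that normalization step; the `.wast` line format assumed here ("(func $name ...") is a simplification:

    #include <string>
    #include <vector>

    // Collect implemented functions under their JS-visible names,
    // i.e. with the leading underscore added (the one-character fix).
    std::vector<std::string> implementedFunctions(const std::vector<std::string>& lines) {
        std::vector<std::string> out;
        for (const std::string& line : lines) {
            if (line.rfind("(func ", 0) == 0) {
                std::string::size_type end = line.find(' ', 6);
                // token after "(func " looks like "$name"; drop the '$'
                std::string name = line.substr(
                    6, end == std::string::npos ? std::string::npos : end - 6);
                out.push_back("_" + name.substr(1));
            }
        }
        return out;
    }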
mmm a / lib / SILPasses / Devirtualizer . cpp <nl> ppp b / lib / SILPasses / Devirtualizer . cpp <nl> static bool optimizeProtocolMethod ( ApplyInst * AI , ProtocolMethodInst * PMI ) { <nl> <nl> / / / Return the final class decl using metadata . <nl> static ClassDecl * getClassFromClassMetadata ( ClassMethodInst * CMI ) { <nl> + SILDeclRef Member = CMI - > getMember ( ) ; <nl> + FuncDecl * FD = Member . getFuncDecl ( ) ; <nl> + SILType ClassType = CMI - > getOperand ( ) . stripCasts ( ) . getType ( ) ; <nl> + ClassDecl * CD = ClassType . getClassOrBoundGenericClass ( ) ; <nl> + <nl> + / / Only handle valid non - dynamic non - overridden members . <nl> + if ( ! CD | | ! FD | | FD - > isInvalid ( ) | | FD - > isDynamic ( ) | | FD - > isOverridden ( ) ) <nl> + return nullptr ; <nl> + <nl> + / / Only consider ' private ' members . <nl> + if ( ! FD - > hasAccessibility ( ) | | <nl> + FD - > getAccessibility ( ) ! = Accessibility : : Private ) <nl> + return nullptr ; <nl> + <nl> + / / Search the class hierarchy for the class that has the member . <nl> + while ( CD ) { <nl> + for ( Decl * M : CD - > getMembers ( ) ) <nl> + if ( M = = FD ) { <nl> + DEBUG ( llvm : : dbgs ( ) < < " Devirtualizing member " < < FD - > getName ( ) < < <nl> + " in class " < < CD - > getName ( ) < < " \ n " ) ; <nl> + return CD ; <nl> + } <nl> + <nl> + CD = CD - > getSuperclass ( ) - > getClassOrBoundGenericClass ( ) ; <nl> + } <nl> + <nl> return nullptr ; <nl> } <nl> <nl> new file mode 100644 <nl> index 000000000000 . . 732a9c9b5671 <nl> mmm / dev / null <nl> ppp b / test / SILPasses / devirt_access . sil <nl> <nl> + / / RUN : % sil - opt % s - inline - devirtualize - verify | FileCheck % s <nl> + sil_stage canonical <nl> + <nl> + import Builtin <nl> + import Swift <nl> + import SwiftShims <nl> + <nl> + class K { <nl> + func ping ( ) - > Int <nl> + private func pong ( ) - > Int <nl> + @ objc deinit <nl> + init ( ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : _TFC4test1K4pingfS0_FT_Si <nl> + / / CHECK : function_ref @ _TFC4test1K4pongfS0_FT_Si <nl> + / / CHECK - NEXT : apply <nl> + / / CHECK - NEXT : return <nl> + sil @ _TFC4test1K4pingfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned K ) - > Int { <nl> + bb0 ( % 0 : $ K ) : <nl> + % 1 = class_method % 0 : $ K , # K . pong ! 1 : K - > ( ) - > Int , $ @ cc ( method ) @ thin ( @ owned K ) - > Int / / user : % 2 <nl> + % 2 = apply % 1 ( % 0 ) : $ @ cc ( method ) @ thin ( @ owned K ) - > Int / / user : % 3 <nl> + return % 2 : $ Int / / id : % 3 <nl> + } <nl> + <nl> + sil @ _TFC5test21K4pongfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned K ) - > Int <nl> + sil @ _TFC4test1K4pongfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned K ) - > Int <nl> + sil @ _TFC4test1Kd : $ @ cc ( method ) @ thin ( @ owned K ) - > @ owned Builtin . NativeObject <nl> + sil @ _TFC4test1KD : $ @ cc ( method ) @ thin ( @ owned K ) - > ( ) <nl> + sil @ _TFC4test1KcfMS0_FT_S0_ : $ @ cc ( method ) @ thin ( @ owned K ) - > @ owned K <nl> + sil @ _TFC4test1KCfMS0_FT_S0_ : $ @ thin ( @ thick K . Type ) - > @ owned K <nl> + <nl> + sil_vtable K { <nl> + # K . ping ! 1 : _TFC4test1K4pingfS0_FT_Si / / test . K . ping ( test . K ) ( ) - > Swift . Int <nl> + # K . pong ! 1 : _TFC4test1K4pongfS0_FT_Si / / test . K . pong ( test . K ) ( ) - > Swift . Int <nl> + # K . init ! initializer . 1 : _TFC4test1KcfMS0_FT_S0_ / / test . K . init ( test . K . Type ) ( ) - > test . 
K <nl> + } <nl> + <nl> + <nl> + class X <nl> + { <nl> + private func ping ( ) - > Int <nl> + @ objc deinit <nl> + init ( ) <nl> + } <nl> + <nl> + class Y : X <nl> + { <nl> + @ objc deinit <nl> + override init ( ) <nl> + } <nl> + <nl> + class A <nl> + { <nl> + private func ping ( ) - > Int <nl> + @ objc deinit <nl> + init ( ) <nl> + } <nl> + <nl> + class B : A <nl> + { <nl> + override func ping ( ) - > Int <nl> + @ objc deinit <nl> + override init ( ) <nl> + } <nl> + <nl> + sil @ _TFC14devirt_access21X4pingfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned X ) - > Int <nl> + sil public_external [ transparent ] @ _TFSi33_convertFromBuiltinIntegerLiteralfMSiFBi2048_Si : $ @ thin ( Builtin . Int2048 , @ thin Int . Type ) - > Int <nl> + sil @ _TFC14devirt_access21Xd : $ @ cc ( method ) @ thin ( @ owned X ) - > @ owned Builtin . NativeObject <nl> + sil @ _TFC14devirt_access21XD : $ @ cc ( method ) @ thin ( @ owned X ) - > ( ) <nl> + sil @ _TFC14devirt_access21XcfMS0_FT_S0_ : $ @ cc ( method ) @ thin ( @ owned X ) - > @ owned X <nl> + sil @ _TFC14devirt_access21XCfMS0_FT_S0_ : $ @ thin ( @ thick X . Type ) - > @ owned X <nl> + sil @ _TFC14devirt_access21Yd : $ @ cc ( method ) @ thin ( @ owned Y ) - > @ owned Builtin . NativeObject <nl> + sil @ _TFC14devirt_access21YD : $ @ cc ( method ) @ thin ( @ owned Y ) - > ( ) <nl> + sil @ _TFC14devirt_access21YcfMS0_FT_S0_ : $ @ cc ( method ) @ thin ( @ owned Y ) - > @ owned Y <nl> + sil @ _TFC14devirt_access21YCfMS0_FT_S0_ : $ @ thin ( @ thick Y . Type ) - > @ owned Y <nl> + sil @ _TFC14devirt_access21A4pingfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned A ) - > Int <nl> + sil @ _TFC14devirt_access21Ad : $ @ cc ( method ) @ thin ( @ owned A ) - > @ owned Builtin . NativeObject <nl> + sil @ _TFC14devirt_access21AD : $ @ cc ( method ) @ thin ( @ owned A ) - > ( ) <nl> + sil @ _TFC14devirt_access21AcfMS0_FT_S0_ : $ @ cc ( method ) @ thin ( @ owned A ) - > @ owned A <nl> + sil @ _TFC14devirt_access21ACfMS0_FT_S0_ : $ @ thin ( @ thick A . Type ) - > @ owned A <nl> + sil @ _TFC14devirt_access21B4pingfS0_FT_Si : $ @ cc ( method ) @ thin ( @ owned B ) - > Int <nl> + sil @ _TFC14devirt_access21Bd : $ @ cc ( method ) @ thin ( @ owned B ) - > @ owned Builtin . NativeObject <nl> + sil @ _TFC14devirt_access21BD : $ @ cc ( method ) @ thin ( @ owned B ) - > ( ) <nl> + sil @ _TFC14devirt_access21BcfMS0_FT_S0_ : $ @ cc ( method ) @ thin ( @ owned B ) - > @ owned B <nl> + sil @ _TFC14devirt_access21BCfMS0_FT_S0_ : $ @ thin ( @ thick B . Type ) - > @ owned B <nl> + <nl> + / / CHECK - LABEL : sil @ Case1 <nl> + / / CHECK : function_ref @ _TFC14devirt_access21X4pingfS0_FT_Si <nl> + / / CHECK - NEXT : apply <nl> + / / CHECK : return <nl> + sil @ Case1 : $ @ thin ( @ owned X ) - > Int { <nl> + bb0 ( % 0 : $ X ) : <nl> + debug_value % 0 : $ X / / let a / / id : % 1 <nl> + strong_retain % 0 : $ X / / id : % 2 <nl> + % 3 = class_method % 0 : $ X , # X . ping ! 
1 : X - > ( ) - > Int , $ @ cc ( method ) @ thin ( @ owned X ) - > Int / / user : % 4 <nl> + % 4 = apply % 3 ( % 0 ) : $ @ cc ( method ) @ thin ( @ owned X ) - > Int / / user : % 6 <nl> + strong_release % 0 : $ X / / id : % 5 <nl> + return % 4 : $ Int / / id : % 6 <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ Case2 <nl> + / / CHECK : function_ref @ _TFC14devirt_access21X4pingfS0_FT_Si <nl> + / / CHECK - NEXT : apply <nl> + / / CHECK : return <nl> + sil @ Case2 : $ @ thin ( @ owned Y ) - > Int { <nl> + bb0 ( % 0 : $ Y ) : <nl> + debug_value % 0 : $ Y / / let a / / id : % 1 <nl> + strong_retain % 0 : $ Y / / id : % 2 <nl> + % 3 = upcast % 0 : $ Y to $ X / / users : % 4 , % 5 <nl> + % 4 = class_method % 3 : $ X , # X . ping ! 1 : X - > ( ) - > Int , $ @ cc ( method ) @ thin ( @ owned X ) - > Int / / user : % 5 <nl> + % 5 = apply % 4 ( % 3 ) : $ @ cc ( method ) @ thin ( @ owned X ) - > Int / / user : % 7 <nl> + strong_release % 0 : $ Y / / id : % 6 <nl> + return % 5 : $ Int / / id : % 7 <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ Case3 <nl> + / / CHECK : class_method <nl> + / / CHECK : return <nl> + sil @ Case3 : $ @ thin ( @ owned A ) - > Int { <nl> + bb0 ( % 0 : $ A ) : <nl> + debug_value % 0 : $ A / / let a / / id : % 1 <nl> + strong_retain % 0 : $ A / / id : % 2 <nl> + % 3 = class_method % 0 : $ A , # A . ping ! 1 : A - > ( ) - > Int , $ @ cc ( method ) @ thin ( @ owned A ) - > Int / / user : % 4 <nl> + % 4 = apply % 3 ( % 0 ) : $ @ cc ( method ) @ thin ( @ owned A ) - > Int / / user : % 6 <nl> + strong_release % 0 : $ A / / id : % 5 <nl> + return % 4 : $ Int / / id : % 6 <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil @ Case4 <nl> + / / CHECK : class_method <nl> + / / CHECK : return <nl> + sil @ Case4 : $ @ thin ( @ owned B ) - > Int { <nl> + bb0 ( % 0 : $ B ) : <nl> + debug_value % 0 : $ B / / let a / / id : % 1 <nl> + strong_retain % 0 : $ B / / id : % 2 <nl> + % 3 = class_method % 0 : $ B , # B . ping ! 1 : B - > ( ) - > Int , $ @ cc ( method ) @ thin ( @ owned B ) - > Int / / user : % 4 <nl> + % 4 = apply % 3 ( % 0 ) : $ @ cc ( method ) @ thin ( @ owned B ) - > Int / / user : % 6 <nl> + strong_release % 0 : $ B / / id : % 5 <nl> + return % 4 : $ Int / / id : % 6 <nl> + } <nl> + <nl> + sil_vtable X { <nl> + # X . ping ! 1 : _TFC14devirt_access21X4pingfS0_FT_Si / / devirt_access2 . X . ping ( devirt_access2 . X ) ( ) - > Swift . Int <nl> + # X . init ! initializer . 1 : _TFC14devirt_access21XcfMS0_FT_S0_ / / devirt_access2 . X . init ( devirt_access2 . X . Type ) ( ) - > devirt_access2 . X <nl> + } <nl> + <nl> + sil_vtable Y { <nl> + # X . ping ! 1 : _TFC14devirt_access21X4pingfS0_FT_Si / / devirt_access2 . X . ping ( devirt_access2 . X ) ( ) - > Swift . Int <nl> + # X . init ! initializer . 1 : _TFC14devirt_access21YcfMS0_FT_S0_ / / devirt_access2 . Y . init ( devirt_access2 . Y . Type ) ( ) - > devirt_access2 . Y <nl> + } <nl> + <nl> + sil_vtable A { <nl> + # A . ping ! 1 : _TFC14devirt_access21A4pingfS0_FT_Si / / devirt_access2 . A . ping ( devirt_access2 . A ) ( ) - > Swift . Int <nl> + # A . init ! initializer . 1 : _TFC14devirt_access21AcfMS0_FT_S0_ / / devirt_access2 . A . init ( devirt_access2 . A . Type ) ( ) - > devirt_access2 . A <nl> + } <nl> + <nl> + sil_vtable B { <nl> + # A . ping ! 1 : _TFC14devirt_access21B4pingfS0_FT_Si / / devirt_access2 . B . ping ( devirt_access2 . B ) ( ) - > Swift . Int <nl> + # A . init ! initializer . 1 : _TFC14devirt_access21BcfMS0_FT_S0_ / / devirt_access2 . B . init ( devirt_access2 . B . Type ) ( ) - > devirt_access2 . 
B <nl> + } <nl> | Devirtualize class methods based on access control . | apple/swift | c66f460266f0765107778221f6aa9a8d8a4ccf5f | 2014-08-04T23:58:30Z |
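The pass above only devirtualizes a class_method when the member is private, non-dynamic, and never overridden, and it locates the owning class by walking up the superclass chain. That hierarchy walk can be sketched in a few lines of C++; the decl types below are stand-ins, not Swift's real AST classes:

    #include <vector>

    struct Decl {};
    struct FuncDecl : Decl {};
    struct ClassDecl {
        std::vector<const Decl*> members;
        const ClassDecl* superclass = nullptr;
    };

    // Walk from the static type of the receiver up through its
    // superclasses until we find the class that declares the member.
    const ClassDecl* findDeclaringClass(const ClassDecl* cd, const FuncDecl* fd) {
        for (; cd != nullptr; cd = cd->superclass)
            for (const Decl* m : cd->members)
                if (m == fd)
                    return cd; // safe devirtualization target
        return nullptr;
    }

Because the member is private and never overridden, the declaring class's vtable entry is the only possible implementation, which is what makes replacing the indirect class_method call with a direct function_ref sound.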
mmm a / fdbbackup / FileDecoder . actor . cpp <nl> ppp b / fdbbackup / FileDecoder . actor . cpp <nl> <nl> # include " flow / serialize . h " <nl> # include " flow / actorcompiler . h " / / has to be last include <nl> <nl> + # define SevDecodeInfo SevVerbose <nl> + <nl> extern bool g_crashOnError ; <nl> <nl> namespace file_converter { <nl> std : : vector < MutationRef > decode_value ( const StringRef & value ) { <nl> reader . consume < uint64_t > ( ) ; / / Consume the includeVersion <nl> uint32_t val_length = reader . consume < uint32_t > ( ) ; <nl> if ( val_length ! = value . size ( ) - sizeof ( uint64_t ) - sizeof ( uint32_t ) ) { <nl> - TraceEvent ( " ValueError " ) <nl> + TraceEvent ( SevError , " ValueError " ) <nl> . detail ( " ValueLen " , val_length ) <nl> . detail ( " ValueSize " , value . size ( ) ) <nl> . detail ( " Value " , printable ( value ) ) ; <nl> - ASSERT ( false ) ; <nl> } <nl> <nl> std : : vector < MutationRef > mutations ; <nl> struct DecodeProgress { <nl> std : : pair < Version , int32_t > version_part = decode_key ( StringRef ( k , kLen ) ) ; <nl> uint32_t vLen = reader . consumeNetworkUInt32 ( ) ; <nl> const uint8_t * v = reader . consume ( vLen ) ; <nl> - TraceEvent ( " Block " ) <nl> + TraceEvent ( SevDecodeInfo , " Block " ) <nl> . detail ( " KeySize " , kLen ) <nl> . detail ( " valueSize " , vLen ) <nl> . detail ( " Offset " , reader . rptr - buf . begin ( ) ) <nl> | Address review comments on trace events | apple/foundationdb | 7831bec2b07e4826cbe8ff6eac6eaa29b577ec94 | 2020-03-24T17:54:12Z |
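The patch above does two things: it downgrades a hard ASSERT into a SevError trace so decoding continues past a malformed record, and it routes per-block logging through a compile-time severity alias. A compact sketch of that pattern with an invented logging API, not FoundationDB's real TraceEvent:

    #include <cstdio>

    enum class Severity { Verbose = 0, Info = 10, Error = 20 };

    // Compile-time alias, mirroring `#define SevDecodeInfo SevVerbose`:
    // per-block decode logging stays in the code but is filtered by default.
    constexpr Severity SevDecodeInfo = Severity::Verbose;

    void trace(Severity sev, const char* event, long got, long expected) {
        if (sev >= Severity::Error)  // stand-in for the real log filter
            std::fprintf(stderr, "%s: got=%ld expected=%ld\n", event, got, expected);
    }

    void checkValueLength(long valLen, long valueSize) {
        trace(SevDecodeInfo, "Block", valLen, valueSize); // verbose, usually muted
        if (valLen != valueSize - 12) { // 12 = sizeof(uint64_t) + sizeof(uint32_t)
            trace(Severity::Error, "ValueError", valLen, valueSize);
            // Unlike before, no ASSERT(false): the decoder keeps going
            // instead of crashing the whole tool on one bad record.
        }
    }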
mmm a / tensorflow / python / keras / activations . py <nl> ppp b / tensorflow / python / keras / activations . py <nl> def linear ( x ) : <nl> <nl> Returns : <nl> The linear activation : ` x ` . <nl> + <nl> + Note : <nl> + Often used as the activation for the last layer of regression networks . <nl> " " " <nl> return x <nl> <nl> | Merge pull request from lufol : lukas - fix - 1 | tensorflow/tensorflow | fa74e37990207a0b17a024522b6a263ea997f0f9 | 2019-06-19T21:59:29Z |
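The docstring addition only records that the identity activation is the usual choice for a regression output layer. In C++ terms the function being documented is trivially:

    // Identity ("linear") activation: returns its input unchanged.
    // Typical for the last layer of a regression network, where raw
    // real-valued outputs are wanted rather than squashed ones.
    template <typename T>
    T linear(T x) { return x; }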
mmm a / tools / sil - opt / SILOpt . cpp <nl> ppp b / tools / sil - opt / SILOpt . cpp <nl> int main ( int argc , char * * argv ) { <nl> SILOpts . EnableSILOwnership = EnableSILOwnershipOpt ; <nl> SILOpts . AssumeUnqualifiedOwnershipWhenParsing = <nl> AssumeUnqualifiedOwnershipWhenParsing ; <nl> - SILOpts . EnableGuaranteedNormalArguments = <nl> + SILOpts . EnableGuaranteedNormalArguments | = <nl> EnableGuaranteedNormalArguments ; <nl> <nl> SILOpts . VerifyExclusivity = VerifyExclusivity ; <nl> | Merge remote - tracking branch ' origin / master ' into master - next | apple/swift | 729f6ba8db25aad24858b034452d578c6c210c6b | 2018-03-10T10:09:01Z |
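The whole change above is `=` becoming `|=`: a boolean option that may already have been enabled elsewhere must not be reset just because the sil-opt command-line flag defaults to false. A minimal illustration of the difference, with simplified names:

    struct SILOptions { bool enableGuaranteedNormalArguments = false; };

    void applyFlags(SILOptions& opts, bool cliFlag) {
        // '|=' only ever turns the option on; with plain '=', running the
        // tool without the flag would silently disable a mode that the
        // language options had already enabled.
        opts.enableGuaranteedNormalArguments |= cliFlag;
    }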
new file mode 100644 <nl> index 000000000000 . . f1a1dacd7ad4 <nl> mmm / dev / null <nl> ppp b / jstests / core / elemmatch_or_pushdown . js <nl> <nl> + / * * <nl> + * Tests that an $ elemMatch - $ or query is evaluated correctly . Designed to reproduce SERVER - 33005 . <nl> + * / <nl> + ( function ( ) { <nl> + " use strict " ; <nl> + <nl> + const coll = db . elemmatch_or_pushdown ; <nl> + coll . drop ( ) ; <nl> + <nl> + assert . writeOK ( coll . insert ( { _id : 0 , a : 1 , b : [ { c : 4 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 1 , a : 2 , b : [ { c : 4 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 2 , a : 2 , b : [ { c : 5 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 3 , a : 1 , b : [ { c : 5 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 4 , a : 1 , b : [ { c : 6 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 5 , a : 1 , b : [ { c : 7 } ] } ) ) ; <nl> + assert . commandWorked ( coll . createIndex ( { a : 1 , " b . c " : 1 } ) ) ; <nl> + <nl> + assert . eq ( coll . find ( { a : 1 , b : { $ elemMatch : { $ or : [ { c : 4 } , { c : 5 } ] } } } ) . sort ( { _id : 1 } ) . toArray ( ) , <nl> + [ { _id : 0 , a : 1 , b : [ { c : 4 } ] } , { _id : 3 , a : 1 , b : [ { c : 5 } ] } ] ) ; <nl> + assert . eq ( coll . find ( { a : 1 , $ or : [ { a : 2 } , { b : { $ elemMatch : { $ or : [ { c : 4 } , { c : 5 } ] } } } ] } ) <nl> + . sort ( { _id : 1 } ) <nl> + . toArray ( ) , <nl> + [ { _id : 0 , a : 1 , b : [ { c : 4 } ] } , { _id : 3 , a : 1 , b : [ { c : 5 } ] } ] ) ; <nl> + <nl> + coll . drop ( ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 0 , a : 5 , b : [ { c : [ { f : 8 } ] , d : 6 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 1 , a : 4 , b : [ { c : [ { f : 8 } ] , d : 6 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 2 , a : 5 , b : [ { c : [ { f : 8 } ] , d : 7 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 3 , a : 4 , b : [ { c : [ { f : 9 } ] , d : 6 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 4 , a : 5 , b : [ { c : [ { f : 8 } ] , e : 7 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 5 , a : 4 , b : [ { c : [ { f : 8 } ] , e : 7 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 6 , a : 5 , b : [ { c : [ { f : 8 } ] , e : 8 } ] } ) ) ; <nl> + assert . writeOK ( coll . insert ( { _id : 7 , a : 5 , b : [ { c : [ { f : 9 } ] , e : 7 } ] } ) ) ; <nl> + assert . commandWorked ( coll . createIndex ( { " b . d " : 1 , " b . c . f " : 1 } ) ) ; <nl> + assert . commandWorked ( coll . createIndex ( { " b . e " : 1 , " b . c . f " : 1 } ) ) ; <nl> + <nl> + assert . eq ( coll . find ( { a : 5 , b : { $ elemMatch : { c : { $ elemMatch : { f : 8 } } , $ or : [ { d : 6 } , { e : 7 } ] } } } ) <nl> + . sort ( { _id : 1 } ) <nl> + . toArray ( ) , <nl> + [ { _id : 0 , a : 5 , b : [ { c : [ { f : 8 } ] , d : 6 } ] } , { _id : 4 , a : 5 , b : [ { c : [ { f : 8 } ] , e : 7 } ] } ] ) ; <nl> + } ( ) ) ; <nl> mmm a / src / mongo / db / query / plan_enumerator . cpp <nl> ppp b / src / mongo / db / query / plan_enumerator . cpp <nl> bool PlanEnumerator : : prepMemo ( MatchExpression * node , PrepMemoContext context ) { <nl> <nl> / / Extend the path through the indexed ORs of each outside predicate . <nl> auto childContextCopy = childContext ; <nl> - for ( auto & pred : childContextCopy . outsidePreds ) { <nl> - pred . second . push_back ( i ) ; <nl> + for ( auto it = childContextCopy . outsidePreds . begin ( ) ; <nl> + it ! 
= childContextCopy . outsidePreds . end ( ) ; ) { <nl> + / / If the route has already traversed through an $ elemMatch object , then we cannot <nl> + / / push down through this OR . Here we remove such routes from our context object . <nl> + / / <nl> + / / For example , suppose we have index { a : 1 , " b . c " : 1 } and the following query : <nl> + / / <nl> + / / { a : 1 , b : { $ elemMatch : { $ or : [ { c : 2 } , { c : 3 } ] } } } <nl> + / / <nl> + / / It is not correct to push the ' a ' predicate down such that it is a sibling of <nl> + / / either of the predicates on ' c ' , since this would change the predicate ' s meaning <nl> + / / from a = = 1 to " b . a " = = 1 . <nl> + if ( it - > second . traversedThroughElemMatchObj ) { <nl> + it = childContextCopy . outsidePreds . erase ( it ) ; <nl> + } else { <nl> + it - > second . route . push_back ( i ) ; <nl> + + + it ; <nl> + } <nl> } <nl> <nl> if ( ! prepMemo ( node - > getChild ( i ) , childContextCopy ) ) { <nl> return false ; <nl> bool PlanEnumerator : : prepMemo ( MatchExpression * node , PrepMemoContext context ) { <nl> <nl> if ( MatchExpression : : ELEM_MATCH_OBJECT = = node - > matchType ( ) ) { <nl> childContext . elemMatchExpr = node ; <nl> + markTraversedThroughElemMatchObj ( & childContext ) ; <nl> } <nl> <nl> / / For an OR to be indexed , all its children must be indexed . <nl> bool PlanEnumerator : : prepMemo ( MatchExpression * node , PrepMemoContext context ) { <nl> / / ( e . g . an OR which contains a TEXT child ) . <nl> vector < MemoID > mandatorySubnodes ; <nl> <nl> - / / A list of predicates contained in the subtree rooted at ' node ' <nl> - / / obtained by traversing deeply through $ and and $ elemMatch children . <nl> - vector < MatchExpression * > indexedPreds ; <nl> + / / A list of predicates contained in the subtree rooted at ' node ' obtained by traversing <nl> + / / deeply through $ and and $ elemMatch children . <nl> + std : : vector < MatchExpression * > indexedPreds ; <nl> <nl> - / / Partition the childen into the children that aren ' t predicates which may or may <nl> - / / not be indexed ( ' subnodes ' ) , children that aren ' t predicates which must use the <nl> - / / index ( ' mandatorySubnodes ' ) . and children that are predicates ( ' indexedPreds ' ) . <nl> + / / Partition the children into the children that aren ' t predicates which may or may not be <nl> + / / indexed ( ' subnodes ' ) , children that aren ' t predicates which must use the index <nl> + / / ( ' mandatorySubnodes ' ) , and children that are predicates ( ' indexedPreds ' ) . <nl> / / <nl> - / / We have to get the subnodes with mandatory assignments rather than adding the <nl> - / / mandatory preds to ' indexedPreds ' . Adding the mandatory preds directly to <nl> - / / ' indexedPreds ' would lead to problems such as pulling a predicate beneath an OR <nl> - / / into a set joined by an AND . <nl> + / / We have to get the subnodes with mandatory assignments rather than adding the mandatory <nl> + / / preds to ' indexedPreds ' . Adding the mandatory preds directly to ' indexedPreds ' would lead <nl> + / / to problems such as pulling a predicate beneath an OR into a set joined by an AND . <nl> getIndexedPreds ( node , childContext , & indexedPreds ) ; <nl> / / Pass in the indexed predicates as outside predicates when prepping the subnodes . <nl> auto childContextCopy = childContext ; <nl> for ( auto pred : indexedPreds ) { <nl> - childContextCopy . outsidePreds [ pred ] = std : : deque < size_t > ( ) ; <nl> + childContextCopy . 
outsidePreds [ pred ] = OutsidePredRoute { } ; <nl> } <nl> if ( ! prepSubNodes ( node , childContextCopy , & subnodes , & mandatorySubnodes ) ) { <nl> return false ; <nl> bool PlanEnumerator : : enumerateMandatoryIndex ( const IndexToPredMap & idxToFirst , <nl> / / Assign any predicates on the non - leading index fields to ' indexAssign ' that <nl> / / don ' t violate the intersecting or compounding rules for multikey indexes . <nl> / / We do not currently try to assign outside predicates to mandatory indexes . <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > outsidePreds { } ; <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > outsidePreds { } ; <nl> assignMultikeySafePredicates ( compIt - > second , outsidePreds , & indexAssign ) ; <nl> } <nl> } else { <nl> / / Assign any predicates on the leading index field to ' indexAssign ' that don ' t <nl> / / violate the intersecting rules for multikey indexes . <nl> / / We do not currently try to assign outside predicates to mandatory indexes . <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > outsidePreds { } ; <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > outsidePreds { } ; <nl> assignMultikeySafePredicates ( predsOverLeadingField , outsidePreds , & indexAssign ) ; <nl> <nl> / / Assign the mandatory predicate to ' thisIndex ' . Due to how keys are generated for <nl> bool PlanEnumerator : : enumerateMandatoryIndex ( const IndexToPredMap & idxToFirst , <nl> } <nl> <nl> void PlanEnumerator : : assignPredicate ( <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> MatchExpression * pred , <nl> size_t position , <nl> OneIndexAssignment * indexAssignment ) { <nl> if ( outsidePreds . find ( pred ) ! = outsidePreds . end ( ) ) { <nl> OrPushdownTag : : Destination dest ; <nl> - dest . route = outsidePreds . at ( pred ) ; <nl> + dest . route = outsidePreds . at ( pred ) . route ; <nl> <nl> / / This method should only be called if we can combine bounds . <nl> const bool canCombineBounds = true ; <nl> void PlanEnumerator : : assignPredicate ( <nl> } <nl> } <nl> <nl> + void PlanEnumerator : : markTraversedThroughElemMatchObj ( PrepMemoContext * context ) { <nl> + invariant ( context ) ; <nl> + for ( auto & & pred : context - > outsidePreds ) { <nl> + auto relevantTag = static_cast < RelevantTag * > ( pred . first - > getTag ( ) ) ; <nl> + / / Only indexed predicates should ever be considered as outside predicates eligible for <nl> + / / pushdown . <nl> + invariant ( relevantTag ) ; <nl> + <nl> + / / Check whether the current $ elemMatch through which we are traversing is the same as the <nl> + / / outside predicate ' s $ elemMatch context . If so , then that outside predicate hasn ' t <nl> + / / actually traversed through an $ elemMatch ( it has simply been promoted by <nl> + / / getIndexedPreds ( ) into the set of AND - related indexed predicates ) . If not , then the OR <nl> + / / pushdown route descends through an $ elemMatch object node , and must be marked as such . <nl> + if ( relevantTag - > elemMatchExpr ! = context - > elemMatchExpr ) { <nl> + pred . second . 
traversedThroughElemMatchObj = true ; <nl> + } <nl> + } <nl> + } <nl> + <nl> void PlanEnumerator : : enumerateOneIndex ( <nl> IndexToPredMap idxToFirst , <nl> IndexToPredMap idxToNotFirst , <nl> const vector < MemoID > & subnodes , <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> AndAssignment * andAssignment ) { <nl> / / Each choice in the ' andAssignment ' will consist of a single subnode to index ( an OR or array <nl> / / operator ) or a OneIndexAssignment . When creating a OneIndexAssignment , we ensure that at <nl> void PlanEnumerator : : enumerateAndIntersect ( const IndexToPredMap & idxToFirst , <nl> <nl> void PlanEnumerator : : getIndexedPreds ( MatchExpression * node , <nl> PrepMemoContext context , <nl> - vector < MatchExpression * > * indexedPreds ) { <nl> + std : : vector < MatchExpression * > * indexedPreds ) { <nl> if ( Indexability : : nodeCanUseIndexOnOwnField ( node ) ) { <nl> RelevantTag * rt = static_cast < RelevantTag * > ( node - > getTag ( ) ) ; <nl> if ( context . elemMatchExpr ) { <nl> bool PlanEnumerator : : prepSubNodes ( MatchExpression * node , <nl> PrepMemoContext childContext ; <nl> childContext . elemMatchExpr = child ; <nl> childContext . outsidePreds = context . outsidePreds ; <nl> + markTraversedThroughElemMatchObj ( & childContext ) ; <nl> prepSubNodes ( child , childContext , subnodesOut , mandatorySubnodes ) ; <nl> } else if ( MatchExpression : : AND = = child - > matchType ( ) ) { <nl> prepSubNodes ( child , context , subnodesOut , mandatorySubnodes ) ; <nl> void PlanEnumerator : : getMultikeyCompoundablePreds ( const vector < MatchExpression * > <nl> <nl> void PlanEnumerator : : assignMultikeySafePredicates ( <nl> const std : : vector < MatchExpression * > & couldAssign , <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> OneIndexAssignment * indexAssignment ) { <nl> invariant ( indexAssignment ) ; <nl> invariant ( indexAssignment - > preds . size ( ) = = indexAssignment - > positions . size ( ) ) ; <nl> mmm a / src / mongo / db / query / plan_enumerator . h <nl> ppp b / src / mongo / db / query / plan_enumerator . h <nl> class PlanEnumerator { <nl> / / The position of a field in a possibly compound index . <nl> typedef size_t IndexPosition ; <nl> <nl> + / * * <nl> + * Represents the route that an outside predicate has taken during the PlanEnumerator ' s <nl> + * recursive descent of the match expression tree . <nl> + * / <nl> + struct OutsidePredRoute { <nl> + / * * <nl> + * Whether or not the route has traversed through an $ elemMatch object node . This is needed <nl> + * because it is not correct to push down a predicate through an $ elemMatch object . <nl> + * / <nl> + bool traversedThroughElemMatchObj = false ; <nl> + <nl> + / * * <nl> + * The route of the outside predicate . This starts at the indexed OR sibling of the <nl> + * predicate . Each value in ' route ' is the index of a child in an indexed OR . <nl> + * <nl> + * For example , if the MatchExpression tree is : <nl> + * AND <nl> + * / \ <nl> + * { a : 5 } OR <nl> + * / \ <nl> + * AND { e : 9 } <nl> + * / \ <nl> + * { b : 6 } OR <nl> + * / \ <nl> + * { c : 7 } { d : 8 } <nl> + * <nl> + * and the predicate is { a : 5 } , then the route will be { 0 , 1 } when the recursive descent <nl> + * reaches { d : 8 } . 
<nl> + * / <nl> + std : : deque < size_t > route ; <nl> + } ; <nl> + <nl> struct PrepMemoContext { <nl> PrepMemoContext ( ) : elemMatchExpr ( NULL ) { } <nl> MatchExpression * elemMatchExpr ; <nl> <nl> / / Maps from indexable predicates that can be pushed into the current node to the route <nl> / / through ORs that they have taken to get to this node . <nl> - unordered_map < MatchExpression * , std : : deque < size_t > > outsidePreds ; <nl> + unordered_map < MatchExpression * , OutsidePredRoute > outsidePreds ; <nl> } ; <nl> <nl> / * * <nl> class PlanEnumerator { <nl> * / <nl> void assignMultikeySafePredicates ( <nl> const std : : vector < MatchExpression * > & couldAssign , <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> OneIndexAssignment * indexAssignment ) ; <nl> <nl> / * * <nl> class PlanEnumerator { <nl> void enumerateOneIndex ( IndexToPredMap idxToFirst , <nl> IndexToPredMap idxToNotFirst , <nl> const std : : vector < MemoID > & subnodes , <nl> - const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> AndAssignment * andAssignment ) ; <nl> <nl> / * * <nl> class PlanEnumerator { <nl> * ' outsidePreds ' . ' pred ' must be able to use the index and be multikey - safe to add to <nl> * ' indexAssignment ' . <nl> * / <nl> - void assignPredicate ( const unordered_map < MatchExpression * , std : : deque < size_t > > & outsidePreds , <nl> + void assignPredicate ( const unordered_map < MatchExpression * , OutsidePredRoute > & outsidePreds , <nl> MatchExpression * pred , <nl> size_t position , <nl> OneIndexAssignment * indexAssignment ) ; <nl> <nl> + / * * <nl> + * Sets a flag on all outside pred routes that descend through an $ elemMatch object node . <nl> + * / <nl> + void markTraversedThroughElemMatchObj ( PrepMemoContext * context ) ; <nl> + <nl> / * * <nl> * Return the memo entry for ' node ' . Does some sanity checking to ensure that a memo entry <nl> * actually exists . <nl> mmm a / src / mongo / db / query / query_planner_array_test . cpp <nl> ppp b / src / mongo / db / query / query_planner_array_test . cpp <nl> TEST_F ( QueryPlannerTest , ContainedOrPathLevelMultikeyCannotCompoundTrailingOutsi <nl> assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> } <nl> <nl> + TEST_F ( QueryPlannerTest , ContainedOrCannotPushdownThroughElemMatchObj ) { <nl> + addIndex ( BSON ( " a " < < 1 < < " b . c " < < 1 ) ) ; <nl> + <nl> + runQuery ( fromjson ( " { a : 1 , b : { $ elemMatch : { $ or : [ { c : 2 } , { c : 3 } ] } } } " ) ) ; <nl> + <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { b : { $ elemMatch : { $ or : [ { c : 2 } , { c : 3 } ] } } } , node : " <nl> + " { ixscan : { filter : null , pattern : { a : 1 , ' b . c ' : 1 } , " <nl> + " bounds : { a : [ [ 1 , 1 , true , true ] ] , ' b . c ' : [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + TEST_F ( QueryPlannerTest , ContainedOrCannotPushdownThroughElemMatchObjWithMultikeyPaths ) { <nl> + MultikeyPaths multikeyPaths { { } , { 0U } } ; <nl> + addIndex ( BSON ( " a " < < 1 < < " b . 
c " < < 1 ) , multikeyPaths ) ; <nl> + <nl> + runQuery ( fromjson ( " { a : 1 , b : { $ elemMatch : { $ or : [ { c : 2 } , { c : 3 } ] } } } " ) ) ; <nl> + <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { b : { $ elemMatch : { $ or : [ { c : 2 } , { c : 3 } ] } } } , node : " <nl> + " { ixscan : { filter : null , pattern : { a : 1 , ' b . c ' : 1 } , " <nl> + " bounds : { a : [ [ 1 , 1 , true , true ] ] , ' b . c ' : [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + TEST_F ( QueryPlannerTest , ContainedOrCannotPushdownThroughOrElemMatchObjOrPattern ) { <nl> + addIndex ( BSON ( " a " < < 1 < < " b . c " < < 1 ) ) ; <nl> + <nl> + runQuery ( fromjson ( " { a : 1 , $ or : [ { a : 2 } , { b : { $ elemMatch : { $ or : [ { c : 3 } , { c : 4 } ] } } } ] } " ) ) ; <nl> + <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { $ or : [ { a : 2 } , { b : { $ elemMatch : { $ or : [ { c : 3 } , { c : 4 } ] } } } ] } , node : " <nl> + " { ixscan : { filter : null , pattern : { a : 1 , ' b . c ' : 1 } , " <nl> + " bounds : { a : [ [ 1 , 1 , true , true ] ] , ' b . c ' : [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + TEST_F ( QueryPlannerTest , ContainedOrCannotPushdownThroughOrElemMatchObjOrPatternWithMultikeyPaths ) { <nl> + MultikeyPaths multikeyPaths { { } , { 0U } } ; <nl> + addIndex ( BSON ( " a " < < 1 < < " b . c " < < 1 ) , multikeyPaths ) ; <nl> + <nl> + runQuery ( fromjson ( " { a : 1 , $ or : [ { a : 2 } , { b : { $ elemMatch : { $ or : [ { c : 3 } , { c : 4 } ] } } } ] } " ) ) ; <nl> + <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { $ or : [ { a : 2 } , { b : { $ elemMatch : { $ or : [ { c : 3 } , { c : 4 } ] } } } ] } , node : " <nl> + " { ixscan : { filter : null , pattern : { a : 1 , ' b . c ' : 1 } , " <nl> + " bounds : { a : [ [ 1 , 1 , true , true ] ] , ' b . c ' : [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + / / TODO SERVER - 30145 : Fixing this ticket should allow us to generate tight bounds on " b . c . f " below . <nl> + TEST_F ( QueryPlannerTest , ContainedOrInAndInNestedElemMatch ) { <nl> + addIndex ( BSON ( " b . d " < < 1 < < " b . c . f " < < 1 ) ) ; <nl> + addIndex ( BSON ( " b . e " < < 1 < < " b . c . f " < < 1 ) ) ; <nl> + <nl> + runQuery ( <nl> + fromjson ( " { $ and : [ { a : 5 } , { b : { $ elemMatch : { $ and : [ " <nl> + " { c : { $ elemMatch : { f : 5 } } } , { $ or : [ { d : 6 } , { e : 7 } ] } ] } } } ] } " ) ) ; <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { $ and : [ { a : 5 } , { b : { $ elemMatch : { $ and : [ { c : { $ elemMatch : { f : 5 } } } , " <nl> + " { $ or : [ { d : 6 } , { e : 7 } ] } ] } } } ] } , " <nl> + " node : { or : { nodes : [ " <nl> + " { ixscan : { pattern : { ' b . d ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . d ' : [ [ 6 , 6 , true , true ] ] , ' b . c . f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } , " <nl> + " { ixscan : { pattern : { ' b . e ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . e ' : [ [ 7 , 7 , true , true ] ] , ' b . c . 
f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } " <nl> + " ] } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + / / TODO SERVER - 30145 : Fixing this ticket should allow us to generate tight bounds on " b . c . f " below . <nl> + TEST_F ( QueryPlannerTest , ContainedOrInAndInNestedElemMatchWithMultikeyPaths ) { <nl> + MultikeyPaths multikeyPaths { { 0U } , { 0U , 1U } } ; <nl> + addIndex ( BSON ( " b . d " < < 1 < < " b . c . f " < < 1 ) , multikeyPaths ) ; <nl> + addIndex ( BSON ( " b . e " < < 1 < < " b . c . f " < < 1 ) , multikeyPaths ) ; <nl> + <nl> + runQuery ( <nl> + fromjson ( " { $ and : [ { a : 5 } , { b : { $ elemMatch : { $ and : [ " <nl> + " { c : { $ elemMatch : { f : 5 } } } , { $ or : [ { d : 6 } , { e : 7 } ] } ] } } } ] } " ) ) ; <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { $ and : [ { a : 5 } , { b : { $ elemMatch : { $ and : [ { c : { $ elemMatch : { f : 5 } } } , " <nl> + " { $ or : [ { d : 6 } , { e : 7 } ] } ] } } } ] } , " <nl> + " node : { or : { nodes : [ " <nl> + " { ixscan : { pattern : { ' b . d ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . d ' : [ [ 6 , 6 , true , true ] ] , ' b . c . f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } , " <nl> + " { ixscan : { pattern : { ' b . e ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . e ' : [ [ 7 , 7 , true , true ] ] , ' b . c . f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } " <nl> + " ] } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + / / TODO SERVER - 30145 : Fixing this ticket should allow us to generate tight bounds on " b . c . f " below . <nl> + TEST_F ( QueryPlannerTest , ContainedOrInNestedElemMatchWithMultikeyPaths ) { <nl> + MultikeyPaths multikeyPaths { { 0U } , { 0U , 1U } } ; <nl> + addIndex ( BSON ( " b . d " < < 1 < < " b . c . f " < < 1 ) , multikeyPaths ) ; <nl> + addIndex ( BSON ( " b . e " < < 1 < < " b . c . f " < < 1 ) , multikeyPaths ) ; <nl> + <nl> + runQuery ( fromjson ( " { b : { $ elemMatch : { c : { $ elemMatch : { f : 5 } } , $ or : [ { d : 6 } , { e : 7 } ] } } } " ) ) ; <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { b : { $ elemMatch : { c : { $ elemMatch : { f : 5 } } , $ or : [ { d : 6 } , { e : 7 } ] } } } , " <nl> + " node : { or : { nodes : [ " <nl> + " { ixscan : { pattern : { ' b . d ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . d ' : [ [ 6 , 6 , true , true ] ] , ' b . c . f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } , " <nl> + " { ixscan : { pattern : { ' b . e ' : 1 , ' b . c . f ' : 1 } , bounds : { ' b . e ' : [ [ 7 , 7 , true , true ] ] , ' b . c . f ' : " <nl> + " [ [ ' MinKey ' , ' MaxKey ' , true , true ] ] } } } " <nl> + " ] } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> + TEST_F ( QueryPlannerTest , ContainedOrMoveElemMatchToNestedElemMatchObject ) { <nl> + addIndex ( BSON ( " b . c . d " < < 1 < < " a . f " < < 1 ) , MultikeyPaths { { 0U , 1U } , { 0U } } ) ; <nl> + addIndex ( BSON ( " e " < < 1 < < " a . 
f " < < 1 ) , MultikeyPaths { { } , { 0U } } ) ; <nl> + <nl> + runQuery ( fromjson ( <nl> + " { a : { $ elemMatch : { f : 5 } } , $ or : [ { b : { $ elemMatch : { c : { $ elemMatch : { d : 6 } } } } } , { e : 7 } ] } " ) ) ; <nl> + assertNumSolutions ( 2U ) ; <nl> + assertSolutionExists ( <nl> + " { fetch : { filter : { a : { $ elemMatch : { f : 5 } } } , node : { or : { nodes : [ " <nl> + " { fetch : { filter : { b : { $ elemMatch : { c : { $ elemMatch : { d : 6 } } } } } , node : { ixscan : { pattern : " <nl> + " { ' b . c . d ' : 1 , ' a . f ' : 1 } , bounds : { ' b . c . d ' : [ [ 6 , 6 , true , true ] ] , ' a . f ' : [ [ 5 , 5 , true , " <nl> + " true ] ] } } } } } , " <nl> + " { ixscan : { pattern : { e : 1 , ' a . f ' : 1 } , bounds : { e : [ [ 7 , 7 , true , true ] ] , ' a . f ' : [ [ 5 , 5 , " <nl> + " true , true ] ] } } } ] } } } } " ) ; <nl> + assertSolutionExists ( " { cscan : { dir : 1 } } } } " ) ; <nl> + } <nl> + <nl> TEST_F ( QueryPlannerTest , TypeArrayUsingTypeCodeMustFetchAndFilter ) { <nl> addIndex ( BSON ( " a " < < 1 ) ) ; <nl> runQuery ( fromjson ( " { a : { $ type : 4 } } " ) ) ; <nl> | SERVER - 33005 Fix planner to avoid incorrect OR pushdown through $ elemMatch . | mongodb/mongo | 17b4094c4d781ffd486b27869f46eea706e490af | 2018-02-09T22:04:04Z |
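The core of the SERVER-33005 fix is to track, for each outside predicate, both the OR-route it has taken and whether that route ever crossed an $elemMatch object; routes that did are dropped instead of being pushed further down. Simplified from the patch, the data structure and the pruning loop reduce to roughly this, where `childIndex` stands in for the enumerator's loop variable:

    #include <cstddef>
    #include <deque>
    #include <map>

    struct MatchExpression;  // opaque here

    struct OutsidePredRoute {
        bool traversedThroughElemMatchObj = false;
        std::deque<std::size_t> route;  // child indices through indexed ORs
    };

    void descendIntoOrChild(std::map<MatchExpression*, OutsidePredRoute>& preds,
                            std::size_t childIndex) {
        for (auto it = preds.begin(); it != preds.end();) {
            if (it->second.traversedThroughElemMatchObj) {
                // Pushing e.g. {a: 1} beneath {b: {$elemMatch: {$or: ...}}}
                // would change its meaning from a == 1 to "b.a" == 1, so
                // this predicate must not be pushed into the OR at all.
                it = preds.erase(it);
            } else {
                it->second.route.push_back(childIndex);
                ++it;
            }
        }
    }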
mmm a / CHANGELOG <nl> ppp b / CHANGELOG <nl> cocos2d - x - 3 . 5beta0 Feb . 27 2015 <nl> [ NEW ] C + + : add Romanian languange support <nl> <nl> [ FIX ] Audio : audio can not resume if it is interrupt , and back from background <nl> + [ FIX ] Cocos Studio UI : setCameraMask does not work for the Cocos Studio UI <nl> [ FIX ] C + + : compiling error when using CC_USE_CULLING <nl> [ FIX ] Label : texture size of string has unexpected padding on iOS 7 and upper version <nl> [ FIX ] HttpClient : if the request data is started by a null character , it does not fill http body <nl> | Merge pull request from super626 / v3 | cocos2d/cocos2d-x | 1a54c302c993488b97db1c36a3556c6e4ea103ae | 2015-02-27T03:55:35Z |
mmm a / fdbserver / DataDistribution . actor . cpp <nl> ppp b / fdbserver / DataDistribution . actor . cpp <nl> struct DDTeamCollection : ReferenceCounted < DDTeamCollection > { <nl> + + num ; <nl> } <nl> } <nl> - if ( ret = = false ) { <nl> + if ( num ! = mt - > serverTeams . size ( ) ) { <nl> + ret = false ; <nl> TraceEvent ( SevError , " ServerTeamNumberOnMachineIncorrect " ) <nl> . detail ( " MachineTeam " , mt - > getMachineIDsStr ( ) ) <nl> . detail ( " ServerTeamsSize " , mt - > serverTeams . size ( ) ) <nl> struct DDTeamCollection : ReferenceCounted < DDTeamCollection > { <nl> } <nl> <nl> / / Find the machine team with the least number of server teams <nl> - int getMachineTeamWithLeastProcessTeams ( Reference < TCMachineTeamInfo > & ret ) { <nl> + std : : pair < Reference < TCMachineTeamInfo > , int > getMachineTeamWithLeastProcessTeams ( ) { <nl> + Reference < TCMachineTeamInfo > retMT ; <nl> int minNumProcessTeams = std : : numeric_limits < int > : : max ( ) ; <nl> <nl> for ( auto & mt : machineTeams ) { <nl> - ASSERT ( isServerTeamNumberCorrect ( mt ) ) ; <nl> + if ( EXPENSIVE_VALIDATION ) { <nl> + ASSERT ( isServerTeamNumberCorrect ( mt ) ) ; <nl> + } <nl> if ( mt - > serverTeams . size ( ) < minNumProcessTeams ) { <nl> minNumProcessTeams = mt - > serverTeams . size ( ) ; <nl> - ret = mt ; <nl> + retMT = mt ; <nl> } <nl> } <nl> <nl> - return minNumProcessTeams ; <nl> + return std : : pair < Reference < TCMachineTeamInfo > , int > ( retMT , minNumProcessTeams ) ; <nl> } <nl> <nl> int getHealthyMachineTeamCount ( ) { <nl> struct DDTeamCollection : ReferenceCounted < DDTeamCollection > { <nl> . detail ( " DesiredTeamsPerServer " , SERVER_KNOBS - > DESIRED_TEAMS_PER_SERVER ) ; <nl> <nl> if ( teamsToBuild > 0 ) { <nl> - std : : set < UID > desiredServerSet ; <nl> - for ( auto i = self - > server_info . begin ( ) ; i ! = self - > server_info . end ( ) ; + + i ) { <nl> - if ( ! self - > server_status . get ( i - > first ) . isUnhealthy ( ) ) { <nl> - desiredServerSet . insert ( i - > second - > id ) ; <nl> - } <nl> - } <nl> - <nl> - vector < UID > desiredServerVector ( desiredServerSet . begin ( ) , desiredServerSet . end ( ) ) ; <nl> - <nl> state vector < std : : vector < UID > > builtTeams ; <nl> <nl> / / addTeamsBestOf ( ) will not add more teams than needed . <nl> ACTOR Future < Void > teamRemover ( DDTeamCollection * self ) { <nl> <nl> if ( totalMTCount > desiredMachineTeams ) { <nl> / / Pick the machine team with the least number of server teams and mark it undesired <nl> - state Reference < TCMachineTeamInfo > mt ; <nl> - state int minNumProcessTeams = self - > getMachineTeamWithLeastProcessTeams ( mt ) ; <nl> + state std : : pair < Reference < TCMachineTeamInfo > , int > foundMTInfo = self - > getMachineTeamWithLeastProcessTeams ( ) ; <nl> + state Reference < TCMachineTeamInfo > mt = foundMTInfo . first ; <nl> + state int minNumProcessTeams = foundMTInfo . second ; <nl> ASSERT ( mt . isValid ( ) ) ; <nl> <nl> / / Pick one process team , and mark it as a bad team <nl> mmm a / fdbserver / tester . actor . cpp <nl> ppp b / fdbserver / tester . actor . cpp <nl> ACTOR Future < DistributedTestResults > runWorkload ( Database cx , std : : vector < Test <nl> } <nl> } <nl> <nl> - printf ( " success : % d failure : % d \ n " , success , failure ) ; <nl> + / / printf ( " success : % d failure : % d \ n " , success , failure ) ; <nl> <nl> if ( spec . 
phases & TestWorkload : : METRICS ) { <nl> state std : : vector < Future < vector < PerfMetric > > > metricTasks ; <nl> | TeamRemover : Resolve minor comments from code review | apple/foundationdb | 0ac7014142780ca8b28859b5f70b650e7805d1a0 | 2019-02-21T21:18:11Z |
mmm a / js / server / modules / @ arangodb / foxx / manager . js <nl> ppp b / js / server / modules / @ arangodb / foxx / manager . js <nl> const RE_NOT_FQPATH = / ^ [ ^ \ / ] / ; <nl> const RE_NOT_EMPTY = / . / ; <nl> <nl> const manifestSchema = { <nl> - assets : ( <nl> + / / Metadata <nl> + name : joi . string ( ) . regex ( / ^ [ - _a - z ] [ - _a - z0 - 9 ] * $ / i ) . required ( ) , <nl> + version : joi . string ( ) . required ( ) , <nl> + engines : ( <nl> joi . object ( ) . optional ( ) <nl> . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> - . pattern ( RE_NOT_EMPTY , ( <nl> - joi . object ( ) . required ( ) <nl> - . keys ( { <nl> - files : ( <nl> - joi . array ( ) . required ( ) <nl> - . items ( joi . string ( ) . required ( ) ) <nl> - ) , <nl> - contentType : joi . string ( ) . optional ( ) <nl> - } ) <nl> - ) ) <nl> + . pattern ( RE_NOT_EMPTY , joi . string ( ) . required ( ) ) <nl> ) , <nl> + license : joi . string ( ) . optional ( ) , <nl> + description : joi . string ( ) . allow ( ' ' ) . default ( ' ' ) , <nl> + keywords : joi . array ( ) . optional ( ) , <nl> + thumbnail : joi . string ( ) . optional ( ) , <nl> author : joi . string ( ) . allow ( ' ' ) . default ( ' ' ) , <nl> + contributors : joi . array ( ) . optional ( ) , <nl> + repository : ( <nl> + joi . object ( ) . optional ( ) <nl> + . keys ( { <nl> + type : joi . string ( ) . required ( ) , <nl> + url : joi . string ( ) . required ( ) <nl> + } ) <nl> + ) , <nl> + <nl> + / / Config <nl> + lib : joi . string ( ) . default ( ' . ' ) , <nl> + isSystem : joi . boolean ( ) . default ( false ) , <nl> + rootElement : joi . boolean ( ) . default ( false ) , <nl> configuration : ( <nl> joi . object ( ) . optional ( ) <nl> . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> const manifestSchema = { <nl> } ) <nl> ) ) <nl> ) , <nl> - contributors : joi . array ( ) . optional ( ) , <nl> + dependencies : ( <nl> + joi . object ( ) . optional ( ) <nl> + . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> + . pattern ( RE_NOT_EMPTY , joi . alternatives ( ) . try ( <nl> + joi . string ( ) . required ( ) , <nl> + joi . object ( ) . required ( ) <nl> + . keys ( { <nl> + name : joi . string ( ) . default ( ' * ' ) , <nl> + version : joi . string ( ) . default ( ' * ' ) , <nl> + required : joi . boolean ( ) . default ( true ) <nl> + } ) <nl> + ) ) <nl> + ) , <nl> + <nl> + / / Routing <nl> + defaultDocument : joi . string ( ) . allow ( ' ' ) . allow ( null ) . default ( ' index . html ' ) , <nl> controllers : joi . alternatives ( ) . try ( <nl> joi . string ( ) . optional ( ) , <nl> ( <nl> const manifestSchema = { <nl> . pattern ( RE_FQPATH , joi . string ( ) . required ( ) ) <nl> ) <nl> ) , <nl> - defaultDocument : joi . string ( ) . allow ( ' ' ) . allow ( null ) . default ( ' index . html ' ) , <nl> - dependencies : ( <nl> + assets : ( <nl> joi . object ( ) . optional ( ) <nl> . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> - . pattern ( RE_NOT_EMPTY , joi . alternatives ( ) . try ( <nl> - joi . string ( ) . required ( ) , <nl> + . pattern ( RE_NOT_EMPTY , ( <nl> joi . object ( ) . required ( ) <nl> . keys ( { <nl> - name : joi . string ( ) . default ( ' * ' ) , <nl> - version : joi . string ( ) . default ( ' * ' ) , <nl> - required : joi . boolean ( ) . default ( true ) <nl> + files : ( <nl> + joi . array ( ) . required ( ) <nl> + . items ( joi . string ( ) . required ( ) ) <nl> + ) , <nl> + contentType : joi . string ( ) . optional ( ) <nl> } ) <nl> ) ) <nl> ) , <nl> - description : joi . string ( ) . allow ( ' ' ) . 
default ( ' ' ) , <nl> - engines : ( <nl> + files : ( <nl> joi . object ( ) . optional ( ) <nl> . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> - . pattern ( RE_NOT_EMPTY , joi . string ( ) . required ( ) ) <nl> + . pattern ( RE_NOT_EMPTY , joi . alternatives ( ) . try ( joi . string ( ) . required ( ) , joi . object ( ) . required ( ) ) ) <nl> ) , <nl> + <nl> + / / Scripts <nl> exports : joi . alternatives ( ) . try ( <nl> joi . string ( ) . optional ( ) , <nl> ( <nl> const manifestSchema = { <nl> . pattern ( RE_NOT_EMPTY , joi . string ( ) . required ( ) ) <nl> ) <nl> ) , <nl> - files : ( <nl> - joi . object ( ) . optional ( ) <nl> - . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> - . pattern ( RE_NOT_EMPTY , joi . alternatives ( ) . try ( joi . string ( ) . required ( ) , joi . object ( ) . required ( ) ) ) <nl> - ) , <nl> - isSystem : joi . boolean ( ) . default ( false ) , <nl> - keywords : joi . array ( ) . optional ( ) , <nl> - lib : joi . string ( ) . default ( ' . ' ) , <nl> - license : joi . string ( ) . optional ( ) , <nl> - name : joi . string ( ) . regex ( / ^ [ - _a - z ] [ - _a - z0 - 9 ] * $ / i ) . required ( ) , <nl> - repository : ( <nl> - joi . object ( ) . optional ( ) <nl> - . keys ( { <nl> - type : joi . string ( ) . required ( ) , <nl> - url : joi . string ( ) . required ( ) <nl> - } ) <nl> - ) , <nl> scripts : ( <nl> joi . object ( ) . optional ( ) <nl> . pattern ( RE_EMPTY , joi . forbidden ( ) ) <nl> const manifestSchema = { <nl> . default ( Array , ' empty test files array ' ) <nl> ) <nl> ) <nl> - ) , <nl> - thumbnail : joi . string ( ) . optional ( ) , <nl> - version : joi . string ( ) . required ( ) , <nl> - rootElement : joi . boolean ( ) . default ( false ) <nl> + ) <nl> } ; <nl> <nl> <nl> | Tidy up manifest props | arangodb/arangodb | 5f91c5399c63357af893eda39d6756817e81f644 | 2016-04-18T16:00:19Z |
mmm a / include / swift / AST / Attr . def <nl> ppp b / include / swift / AST / Attr . def <nl> DECL_ATTR ( available , Available , <nl> 1 ) <nl> CONTEXTUAL_SIMPLE_DECL_ATTR ( final , Final , <nl> OnClass | OnFunc | OnAccessor | OnVar | OnSubscript | <nl> - DeclModifier , <nl> + DeclModifier | ABIBreakingToAdd | ABIBreakingToRemove | <nl> + APIBreakingToAdd , <nl> 2 ) <nl> DECL_ATTR ( objc , ObjC , <nl> OnAbstractFunction | OnClass | OnProtocol | OnExtension | OnVar | <nl> - OnSubscript | OnEnum | OnEnumElement , <nl> + OnSubscript | OnEnum | OnEnumElement | ABIBreakingToAdd | ABIBreakingToRemove , <nl> 3 ) <nl> CONTEXTUAL_SIMPLE_DECL_ATTR ( required , Required , <nl> OnConstructor | <nl> DECL_ATTR ( _semantics , Semantics , <nl> 21 ) <nl> CONTEXTUAL_SIMPLE_DECL_ATTR ( dynamic , Dynamic , <nl> OnFunc | OnAccessor | OnVar | OnSubscript | OnConstructor | <nl> - DeclModifier , <nl> + DeclModifier | ABIBreakingToAdd | ABIBreakingToRemove , <nl> 22 ) <nl> CONTEXTUAL_SIMPLE_DECL_ATTR ( infix , Infix , <nl> OnFunc | OnOperator | <nl> SIMPLE_DECL_ATTR ( nonobjc , NonObjC , <nl> 30 ) <nl> SIMPLE_DECL_ATTR ( _fixed_layout , FixedLayout , <nl> OnVar | OnClass | OnStruct | <nl> - UserInaccessible , <nl> + UserInaccessible | ABIBreakingToAdd | ABIBreakingToRemove , <nl> 31 ) <nl> SIMPLE_DECL_ATTR ( inlinable , Inlinable , <nl> OnVar | OnSubscript | OnAbstractFunction , <nl> SIMPLE_DECL_ATTR ( _weakLinked , WeakLinked , <nl> OnSubscript | OnConstructor | OnEnumElement | OnExtension | UserInaccessible , <nl> 75 ) <nl> SIMPLE_DECL_ATTR ( frozen , Frozen , <nl> - OnEnum | OnStruct , <nl> + OnEnum | OnStruct | ABIBreakingToAdd | ABIBreakingToRemove | APIBreakingToRemove , <nl> 76 ) <nl> DECL_ATTR_ALIAS ( _frozen , Frozen ) <nl> SIMPLE_DECL_ATTR ( _forbidSerializingReference , ForbidSerializingReference , <nl> mmm a / include / swift / AST / Attr . h <nl> ppp b / include / swift / AST / Attr . h <nl> class DeclAttribute : public AttributeBase { <nl> <nl> / / / Whether client code cannot use the attribute . <nl> UserInaccessible = 1ull < < ( unsigned ( DeclKindIndex : : Last_Decl ) + 7 ) , <nl> + <nl> + / / / Whether adding this attribute can break API <nl> + APIBreakingToAdd = 1ull < < ( unsigned ( DeclKindIndex : : Last_Decl ) + 8 ) , <nl> + <nl> + / / / Whether removing this attribute can break API <nl> + APIBreakingToRemove = 1ull < < ( unsigned ( DeclKindIndex : : Last_Decl ) + 9 ) , <nl> + <nl> + / / / Whether adding this attribute can break ABI <nl> + ABIBreakingToAdd = 1ull < < ( unsigned ( DeclKindIndex : : Last_Decl ) + 10 ) , <nl> + <nl> + / / / Whether removing this attribute can break ABI <nl> + ABIBreakingToRemove = 1ull < < ( unsigned ( DeclKindIndex : : Last_Decl ) + 11 ) , <nl> } ; <nl> <nl> LLVM_READNONE <nl> class DeclAttribute : public AttributeBase { <nl> return getOptions ( DK ) & UserInaccessible ; <nl> } <nl> <nl> + static bool isAddingBreakingABI ( DeclAttrKind DK ) { <nl> + return getOptions ( DK ) & ABIBreakingToAdd ; <nl> + } <nl> + <nl> + static bool isAddingBreakingAPI ( DeclAttrKind DK ) { <nl> + return getOptions ( DK ) & APIBreakingToAdd ; <nl> + } <nl> + <nl> + static bool isRemovingBreakingABI ( DeclAttrKind DK ) { <nl> + return getOptions ( DK ) & ABIBreakingToRemove ; <nl> + } <nl> + static bool isRemovingBreakingAPI ( DeclAttrKind DK ) { <nl> + return getOptions ( DK ) & APIBreakingToRemove ; <nl> + } <nl> + <nl> bool isDeclModifier ( ) const { <nl> return isDeclModifier ( getKind ( ) ) ; <nl> } <nl> mmm a / test / api - digester / Outputs / Cake . 
txt <nl> ppp b / test / api - digester / Outputs / Cake . txt <nl> cake : Func ownershipChange ( _ : _ : ) has parameter 1 changing from Shared to Owned <nl> cake : TypeAlias TChangesFromIntToString . T has underlying type change from Swift . Int to Swift . String <nl> <nl> / * Decl Attribute changes * / <nl> + cake : Enum IceKind is now without @ frozen <nl> cake : Func C1 . foo1 ( ) is now not static <nl> cake : Func FinalFuncContainer . NewFinalFunc ( ) is now with final <nl> - cake : Func FinalFuncContainer . NoLongerFinalFunc ( ) is now without final <nl> cake : Func HasMutatingMethodClone . foo ( ) has self access kind changing from Mutating to NonMutating <nl> cake : Func S1 . foo1 ( ) has self access kind changing from NonMutating to Mutating <nl> cake : Func S1 . foo3 ( ) is now static <nl> mmm a / tools / swift - api - digester / ModuleAnalyzerNodes . cpp <nl> ppp b / tools / swift - api - digester / ModuleAnalyzerNodes . cpp <nl> namespace fs = llvm : : sys : : fs ; <nl> namespace path = llvm : : sys : : path ; <nl> <nl> namespace { <nl> - static StringRef getAttrName ( DeclAttrKind Kind ) { <nl> - switch ( Kind ) { <nl> - # define DECL_ATTR ( NAME , CLASS , . . . ) \ <nl> - case DAK_ # # CLASS : \ <nl> - return DeclAttribute : : isDeclModifier ( DAK_ # # CLASS ) ? # NAME : " @ " # NAME ; <nl> - # include " swift / AST / Attr . def " <nl> - case DAK_Count : <nl> - llvm_unreachable ( " unrecognized attribute kind . " ) ; <nl> - } <nl> - llvm_unreachable ( " covered switch " ) ; <nl> - } <nl> - <nl> static PrintOptions getTypePrintOpts ( CheckerOptions CheckerOpts ) { <nl> PrintOptions Opts ; <nl> Opts . SynthesizeSugarOnTypes = true ; <nl> static PrintOptions getTypePrintOpts ( CheckerOptions CheckerOpts ) { <nl> } <nl> return Opts ; <nl> } <nl> - <nl> } / / End of anonymous namespace . <nl> <nl> struct swift : : ide : : api : : SDKNodeInitInfo { <nl> struct swift : : ide : : api : : SDKNodeInitInfo { <nl> SDKNode * createSDKNode ( SDKNodeKind Kind ) ; <nl> } ; <nl> <nl> - SDKContext : : SDKContext ( CheckerOptions Opts ) : Diags ( SourceMgr ) , Opts ( Opts ) { <nl> - # define ADD ( NAME ) BreakingAttrs . push_back ( { DeclAttrKind : : DAK_ # # NAME , \ <nl> - getAttrName ( DeclAttrKind : : DAK_ # # NAME ) } ) ; <nl> - / / Add attributes that both break ABI and API . <nl> - ADD ( Final ) <nl> - if ( checkingABI ( ) ) { <nl> - / / Add ABI - breaking - specific attributes . <nl> - ADD ( ObjC ) <nl> - ADD ( FixedLayout ) <nl> - ADD ( Frozen ) <nl> - ADD ( Dynamic ) <nl> - } <nl> - # undef ADD <nl> - } <nl> + SDKContext : : SDKContext ( CheckerOptions Opts ) : Diags ( SourceMgr ) , Opts ( Opts ) { } <nl> <nl> void SDKNodeRoot : : registerDescendant ( SDKNode * D ) { <nl> / / Operator doesn ' t have usr <nl> mmm a / tools / swift - api - digester / swift - api - digester . cpp <nl> ppp b / tools / swift - api - digester / swift - api - digester . cpp <nl> void swift : : ide : : api : : SDKNodeDeclFunction : : diagnose ( SDKNode * Right ) { <nl> } <nl> } <nl> <nl> + static StringRef getAttrName ( DeclAttrKind Kind ) { <nl> + switch ( Kind ) { <nl> + # define DECL_ATTR ( NAME , CLASS , . . . ) \ <nl> + case DAK_ # # CLASS : \ <nl> + return DeclAttribute : : isDeclModifier ( DAK_ # # CLASS ) ? # NAME : " @ " # NAME ; <nl> + # include " swift / AST / Attr . def " <nl> + case DAK_Count : <nl> + llvm_unreachable ( " unrecognized attribute kind . 
" ) ; <nl> + } <nl> + llvm_unreachable ( " covered switch " ) ; <nl> + } <nl> + <nl> + static bool shouldDiagnoseAddingAttribute ( SDKNodeDecl * D , DeclAttrKind Kind ) { <nl> + return true ; <nl> + } <nl> + <nl> + static bool shouldDiagnoseRemovingAttribute ( SDKNodeDecl * D , DeclAttrKind Kind ) { <nl> + return true ; <nl> + } <nl> + <nl> void swift : : ide : : api : : SDKNodeDecl : : diagnose ( SDKNode * Right ) { <nl> SDKNode : : diagnose ( Right ) ; <nl> auto * RD = dyn_cast < SDKNodeDecl > ( Right ) ; <nl> void swift : : ide : : api : : SDKNodeDecl : : diagnose ( SDKNode * Right ) { <nl> } <nl> } <nl> <nl> - / / Check if some attributes with ABI / API - impact have been added / removed . <nl> - for ( auto & Info : Ctx . getBreakingAttributeInfo ( ) ) { <nl> - if ( hasDeclAttribute ( Info . Kind ) ! = RD - > hasDeclAttribute ( Info . Kind ) ) { <nl> - auto Desc = hasDeclAttribute ( Info . Kind ) ? <nl> - Ctx . buffer ( ( llvm : : Twine ( " without " ) + Info . Content ) . str ( ) ) : <nl> - Ctx . buffer ( ( llvm : : Twine ( " with " ) + Info . Content ) . str ( ) ) ; <nl> - emitDiag ( diag : : decl_new_attr , Desc ) ; <nl> + / / Diagnose removing attributes . <nl> + for ( auto Kind : getDeclAttributes ( ) ) { <nl> + if ( ! RD - > hasDeclAttribute ( Kind ) ) { <nl> + if ( ( Ctx . checkingABI ( ) ? DeclAttribute : : isRemovingBreakingABI ( Kind ) : <nl> + DeclAttribute : : isRemovingBreakingAPI ( Kind ) ) & & <nl> + shouldDiagnoseRemovingAttribute ( this , Kind ) ) { <nl> + emitDiag ( diag : : decl_new_attr , <nl> + Ctx . buffer ( ( llvm : : Twine ( " without " ) + getAttrName ( Kind ) ) . str ( ) ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Diagnose adding attributes . <nl> + for ( auto Kind : RD - > getDeclAttributes ( ) ) { <nl> + if ( ! hasDeclAttribute ( Kind ) ) { <nl> + if ( ( Ctx . checkingABI ( ) ? DeclAttribute : : isAddingBreakingABI ( Kind ) : <nl> + DeclAttribute : : isAddingBreakingAPI ( Kind ) ) & & <nl> + shouldDiagnoseAddingAttribute ( this , Kind ) ) { <nl> + emitDiag ( diag : : decl_new_attr , <nl> + Ctx . buffer ( ( llvm : : Twine ( " with " ) + getAttrName ( Kind ) ) . str ( ) ) ) ; <nl> + } <nl> } <nl> } <nl> <nl> | AST : promote API / ABI impact bit of decl attributes to AST , NFC | apple/swift | 4189b6c99b0279f927b2391cdc4ed3c9906ee61f | 2019-09-12T00:58:36Z |
mmm a / src / php / docker / alpine / Dockerfile <nl> ppp b / src / php / docker / alpine / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / src / php / docker / grpc - ext / Dockerfile <nl> ppp b / src / php / docker / grpc - ext / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / src / php / docker / grpc - src / Dockerfile <nl> ppp b / src / php / docker / grpc - src / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init & & \ <nl> - make & & make install <nl> + COPY . . <nl> <nl> + RUN make & & make install <nl> <nl> - WORKDIR / github / grpc / src / php / ext / grpc <nl> <nl> - COPY src / php / ext / grpc / * . c . / <nl> - COPY src / php / ext / grpc / * . h . / <nl> - COPY src / php / ext / grpc / config . m4 . / <nl> + WORKDIR / github / grpc / src / php / ext / grpc <nl> <nl> RUN phpize & & \ <nl> . / configure - - enable - tests & & \ <nl> mmm a / src / php / docker / php - future / Dockerfile <nl> ppp b / src / php / docker / php - future / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / src / php / docker / php - src / Dockerfile <nl> ppp b / src / php / docker / php - src / Dockerfile <nl> RUN git checkout php - 7 . 2 . 22 & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / src / php / docker / php - zts / Dockerfile <nl> ppp b / src / php / docker / php - zts / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / src / php / docker / php5 / Dockerfile <nl> ppp b / src / php / docker / php5 / Dockerfile <nl> RUN wget https : / / phar . phpunit . de / phpunit - 5 . 7 . 27 . phar & & \ <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & \ <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . 
<nl> <nl> RUN pear package & & \ <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> mmm a / templates / src / php / docker / grpc - src / Dockerfile . template <nl> ppp b / templates / src / php / docker / grpc - src / Dockerfile . template <nl> <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & $ { ' \ \ ' } <nl> - git submodule update - - init & & $ { ' \ \ ' } <nl> - make & & make install <nl> + COPY . . <nl> <nl> + RUN make & & make install <nl> <nl> - WORKDIR / github / grpc / src / php / ext / grpc <nl> <nl> - COPY src / php / ext / grpc / * . c . / <nl> - COPY src / php / ext / grpc / * . h . / <nl> - COPY src / php / ext / grpc / config . m4 . / <nl> + WORKDIR / github / grpc / src / php / ext / grpc <nl> <nl> RUN phpize & & $ { ' \ \ ' } <nl> . / configure - - enable - tests & & $ { ' \ \ ' } <nl> mmm a / templates / src / php / docker / pecl_ext_build_src . include <nl> ppp b / templates / src / php / docker / pecl_ext_build_src . include <nl> <nl> WORKDIR / github / grpc <nl> <nl> - RUN git clone https : / / github . com / grpc / grpc . & & $ { ' \ \ ' } <nl> - git submodule update - - init <nl> - <nl> - COPY src / . / src <nl> + COPY . . <nl> <nl> RUN pear package & & $ { ' \ \ ' } <nl> find . - name grpc - * . tgz | xargs - I { } pecl install { } <nl> | PHP : update dockerfiles | grpc/grpc | 0579539b07a11468a894ff8fffed1595d9647035 | 2019-11-05T21:11:27Z |
mmm a / tensorflow / contrib / nccl / kernels / nccl_manager . cc <nl> ppp b / tensorflow / contrib / nccl / kernels / nccl_manager . cc <nl> ncclDataType_t ToNcclType ( DataType t ) { <nl> struct NcclManager : : Participant { <nl> Participant ( const Tensor * in_t , Tensor * out_t , EventMgr * event_mgr , <nl> perftools : : gputools : : Stream * tensor_stream , <nl> - perftools : : gputools : : StreamExecutor * executor , <nl> + perftools : : gputools : : StreamExecutor * executor , int gpu_device_id , <nl> NcclManager : : DoneCallback done_callback ) <nl> : in_t ( in_t ) , <nl> out_t ( out_t ) , <nl> event_mgr ( event_mgr ) , <nl> tensor_stream ( tensor_stream ) , <nl> executor ( executor ) , <nl> + gpu_device_id ( gpu_device_id ) , <nl> done_callback ( std : : move ( done_callback ) ) { <nl> DCHECK ( executor ! = nullptr ) ; <nl> DCHECK ( event_mgr ! = nullptr ) ; <nl> struct NcclManager : : Participant { <nl> <nl> / / Matches the executor in CommunicatorMember : : stream . Expected to be live for <nl> / / process lifetime . <nl> - perftools : : gputools : : StreamExecutor * executor = nullptr ; <nl> + perftools : : gputools : : StreamExecutor * const executor = nullptr ; <nl> + <nl> + const int gpu_device_id ; <nl> <nl> NcclManager : : DoneCallback done_callback ; <nl> <nl> NcclManager : : Communicator * NcclManager : : GetCommunicator ( <nl> / / Note that this is done under the lock ; performance is not expected to <nl> / / matter as this happens a very small number of times . <nl> std : : vector < CommunicatorMember > members ( num_devices ) ; <nl> + std : : vector < int > devices ( num_devices ) ; <nl> for ( int i = 0 ; i < num_devices ; + + i ) { <nl> auto * executor = collective - > participants [ i ] - > executor ; <nl> <nl> NcclManager : : Communicator * NcclManager : : GetCommunicator ( <nl> } <nl> <nl> members [ i ] . nccl_stream = nccl_stream ; <nl> + devices [ i ] = collective - > participants [ i ] - > gpu_device_id ; <nl> } <nl> <nl> - / / Call ncclCommInitRank for each member . <nl> - ncclUniqueId id ; <nl> - CHECK_EQ ( ncclSuccess , ncclGetUniqueId ( & id ) ) ; <nl> - std : : unique_ptr < thread : : ThreadPool > pool ( <nl> - new thread : : ThreadPool ( env , " ncclCommInitRank " , num_devices ) ) ; <nl> - std : : vector < ncclResult_t > results ( num_devices ) ; <nl> + std : : vector < ncclComm_t > nccl_comms ( num_devices ) ; <nl> + auto result = ncclCommInitAll ( nccl_comms . data ( ) , num_devices , devices . data ( ) ) ; <nl> + CHECK_EQ ( result , ncclSuccess ) ; <nl> for ( int rank = 0 ; rank < num_devices ; + + rank ) { <nl> - CommunicatorMember * member = & members [ rank ] ; <nl> - ncclResult_t * result = & results [ rank ] ; <nl> - pool - > Schedule ( [ member , num_devices , result , rank , & id ] ( ) { <nl> - ScopedActivateExecutorContext scoped_context ( <nl> - member - > nccl_stream - > executor ) ; <nl> - LOG ( INFO ) < < " Calling ncclCommInitRank for rank " < < rank ; <nl> - * result = ncclCommInitRank ( & member - > nccl_comm , num_devices , id , rank ) ; <nl> - LOG ( INFO ) < < " Done calling ncclCommInitRank for rank " < < rank < < " : " <nl> - < < * result ; <nl> - } ) ; <nl> - } <nl> - <nl> - pool . reset ( ) ; / / wait for completion . <nl> - for ( int i = 0 ; i < num_devices ; + + i ) { <nl> - CHECK_EQ ( results [ i ] , ncclSuccess ) ; <nl> + members [ rank ] . nccl_comm = nccl_comms [ rank ] ; <nl> } <nl> communicators_ . emplace_back ( new Communicator ( std : : move ( members ) ) ) ; <nl> return communicators_ . back ( ) . 
get ( ) ; <nl> NcclManager : : Communicator * NcclManager : : GetCommunicator ( <nl> void NcclManager : : AddToAllReduce ( int num_devices , const string & key , <nl> ncclRedOp_t reduction_op , <nl> perftools : : gputools : : StreamExecutor * executor , <nl> - EventMgr * event_mgr , <nl> + int gpu_device_id , EventMgr * event_mgr , <nl> perftools : : gputools : : Stream * tensor_stream , <nl> const Tensor * in_t , Tensor * out_t , <nl> const DoneCallback & done_callback ) { <nl> - std : : unique_ptr < Participant > participant ( new Participant ( <nl> - in_t , out_t , event_mgr , tensor_stream , executor , done_callback ) ) ; <nl> + std : : unique_ptr < Participant > participant ( <nl> + new Participant ( in_t , out_t , event_mgr , tensor_stream , executor , <nl> + gpu_device_id , done_callback ) ) ; <nl> AddParticipant ( num_devices , key , std : : move ( participant ) , in_t - > dtype ( ) , <nl> kAllReduce , reduction_op ) ; <nl> } <nl> <nl> void NcclManager : : AddBroadcastSend ( <nl> int num_devices , const string & key , <nl> - perftools : : gputools : : StreamExecutor * executor , EventMgr * event_mgr , <nl> - perftools : : gputools : : Stream * tensor_stream , const Tensor * in_t , <nl> - DoneCallback done_callback ) { <nl> + perftools : : gputools : : StreamExecutor * executor , int gpu_device_id , <nl> + EventMgr * event_mgr , perftools : : gputools : : Stream * tensor_stream , <nl> + const Tensor * in_t , DoneCallback done_callback ) { <nl> std : : unique_ptr < Participant > participant ( <nl> new Participant ( in_t , nullptr / * out_t * / , event_mgr , tensor_stream , <nl> - executor , done_callback ) ) ; <nl> + executor , gpu_device_id , done_callback ) ) ; <nl> participant - > root = true ; <nl> AddParticipant ( num_devices , key , std : : move ( participant ) , in_t - > dtype ( ) , <nl> kBroadcast , ncclSum / * unused * / ) ; <nl> void NcclManager : : AddBroadcastSend ( <nl> <nl> void NcclManager : : AddBroadcastRecv ( <nl> int num_devices , const string & key , <nl> - perftools : : gputools : : StreamExecutor * executor , EventMgr * event_mgr , <nl> - perftools : : gputools : : Stream * tensor_stream , Tensor * out_t , <nl> - DoneCallback done_callback ) { <nl> + perftools : : gputools : : StreamExecutor * executor , int gpu_device_id , <nl> + EventMgr * event_mgr , perftools : : gputools : : Stream * tensor_stream , <nl> + Tensor * out_t , DoneCallback done_callback ) { <nl> std : : unique_ptr < Participant > participant ( <nl> new Participant ( nullptr / * in_t * / , out_t , event_mgr , tensor_stream , <nl> - executor , done_callback ) ) ; <nl> + executor , gpu_device_id , done_callback ) ) ; <nl> AddParticipant ( num_devices , key , std : : move ( participant ) , out_t - > dtype ( ) , <nl> kBroadcast , ncclSum / * unused * / ) ; <nl> } <nl> void NcclManager : : AddParticipant ( int num_devices , const string & key , <nl> } <nl> Collective * collective = collective_ptr . get ( ) ; <nl> DCHECK_EQ ( collective - > type , collective_type ) ; <nl> - DCHECK_EQ ( collective - > participants . size ( ) , num_devices ) ; <nl> + DCHECK_LT ( collective - > participants . size ( ) , num_devices ) ; <nl> collective - > participants . emplace_back ( std : : move ( participant ) ) ; <nl> + + collective - > available_participants ; <nl> <nl> mmm a / tensorflow / contrib / nccl / kernels / nccl_manager . h <nl> ppp b / tensorflow / contrib / nccl / kernels / nccl_manager . 
h <nl> class NcclManager { <nl> void AddToAllReduce ( int num_devices , const string & key , <nl> ncclRedOp_t reduction_op , <nl> perftools : : gputools : : StreamExecutor * executor , <nl> - EventMgr * event_mgr , <nl> + int gpu_device_id , EventMgr * event_mgr , <nl> perftools : : gputools : : Stream * tensor_stream , <nl> const Tensor * in_t , Tensor * out_t , <nl> const DoneCallback & done_callback ) ; <nl> class NcclManager { <nl> / / to all receivers . <nl> void AddBroadcastSend ( int num_devices , const string & key , <nl> perftools : : gputools : : StreamExecutor * executor , <nl> - EventMgr * event_mgr , <nl> + int gpu_device_id , EventMgr * event_mgr , <nl> perftools : : gputools : : Stream * tensor_stream , <nl> const Tensor * in_t , DoneCallback done_callback ) ; <nl> void AddBroadcastRecv ( int num_devices , const string & key , <nl> perftools : : gputools : : StreamExecutor * executor , <nl> - EventMgr * event_mgr , <nl> + int gpu_device_id , EventMgr * event_mgr , <nl> perftools : : gputools : : Stream * tensor_stream , <nl> Tensor * out_t , DoneCallback done_callback ) ; <nl> <nl> mmm a / tensorflow / contrib / nccl / kernels / nccl_manager_test . cc <nl> ppp b / tensorflow / contrib / nccl / kernels / nccl_manager_test . cc <nl> TEST_F ( NcclManagerTest , BasicSumReduction ) { <nl> auto * event_mgr = device - > tensorflow_gpu_device_info ( ) - > event_mgr ; <nl> auto * stream = device - > tensorflow_gpu_device_info ( ) - > stream ; <nl> NcclManager : : instance ( ) - > AddToAllReduce ( <nl> - num_ranks , " allreduce " , reduction_op , device - > executor ( ) , event_mgr , <nl> - stream , & test_case - > ins [ device_num ] , & test_case - > outs [ device_num ] , <nl> - CreateDoneCallback ( test_case . get ( ) ) ) ; <nl> + num_ranks , " allreduce " , reduction_op , device - > executor ( ) , <nl> + device - > gpu_id ( ) , event_mgr , stream , & test_case - > ins [ device_num ] , <nl> + & test_case - > outs [ device_num ] , CreateDoneCallback ( test_case . get ( ) ) ) ; <nl> } <nl> <nl> LOG ( ERROR ) < < " Verifying results " ; <nl> TEST_F ( NcclManagerTest , MultipleCallers ) { <nl> TestCase * test_case = test_cases [ test_num ] . get ( ) ; <nl> NcclManager : : instance ( ) - > AddToAllReduce ( <nl> num_ranks , strings : : StrCat ( " allreduce " , test_num ) , ncclSum , <nl> - device - > executor ( ) , event_mgr , stream , & test_case - > ins [ device_num ] , <nl> - & test_case - > outs [ device_num ] , CreateDoneCallback ( test_case ) ) ; <nl> + device - > executor ( ) , device - > gpu_id ( ) , event_mgr , stream , <nl> + & test_case - > ins [ device_num ] , & test_case - > outs [ device_num ] , <nl> + CreateDoneCallback ( test_case ) ) ; <nl> } ; <nl> pool - > Schedule ( fn ) ; <nl> } <nl> mmm a / tensorflow / contrib / nccl / kernels / nccl_ops . cc <nl> ppp b / tensorflow / contrib / nccl / kernels / nccl_ops . 
cc <nl> class NcclAllReduceOpKernel : public NcclAsyncOpBase { <nl> } ; <nl> <nl> auto * compute_stream = c - > op_device_context ( ) - > stream ( ) ; <nl> - EventMgr * event_mgr = c - > device ( ) - > tensorflow_gpu_device_info ( ) - > event_mgr ; <nl> + auto * gpu_info = c - > device ( ) - > tensorflow_gpu_device_info ( ) ; <nl> NcclManager : : instance ( ) - > AddToAllReduce ( <nl> num_devices ( ) , GetCollectiveKey ( c ) , reduction_op_ , <nl> - compute_stream - > parent ( ) , event_mgr , compute_stream , in_t , out_t , <nl> - actual_done ) ; <nl> + compute_stream - > parent ( ) , gpu_info - > gpu_id , gpu_info - > event_mgr , <nl> + compute_stream , in_t , out_t , actual_done ) ; <nl> } <nl> <nl> private : <nl> class NcclBroadcastSendKernel : public NcclAsyncOpBase { <nl> } ; <nl> <nl> auto * compute_stream = c - > op_device_context ( ) - > stream ( ) ; <nl> - EventMgr * event_mgr = c - > device ( ) - > tensorflow_gpu_device_info ( ) - > event_mgr ; <nl> + auto * gpu_info = c - > device ( ) - > tensorflow_gpu_device_info ( ) ; <nl> NcclManager : : instance ( ) - > AddBroadcastSend ( <nl> - num_devices ( ) , GetCollectiveKey ( c ) , compute_stream - > parent ( ) , event_mgr , <nl> - compute_stream , & c - > input ( 0 ) , std : : move ( actual_done ) ) ; <nl> + num_devices ( ) , GetCollectiveKey ( c ) , compute_stream - > parent ( ) , <nl> + gpu_info - > gpu_id , gpu_info - > event_mgr , compute_stream , & c - > input ( 0 ) , <nl> + std : : move ( actual_done ) ) ; <nl> } <nl> } ; <nl> REGISTER_KERNEL_BUILDER ( Name ( " NcclBroadcastSend " ) . Device ( DEVICE_GPU ) , <nl> class NcclBroadcastRecvKernel : public NcclAsyncOpBase { <nl> } ; <nl> <nl> auto * compute_stream = c - > op_device_context ( ) - > stream ( ) ; <nl> - EventMgr * event_mgr = c - > device ( ) - > tensorflow_gpu_device_info ( ) - > event_mgr ; <nl> + auto * gpu_info = c - > device ( ) - > tensorflow_gpu_device_info ( ) ; <nl> NcclManager : : instance ( ) - > AddBroadcastRecv ( <nl> - num_devices ( ) , GetCollectiveKey ( c ) , compute_stream - > parent ( ) , event_mgr , <nl> - compute_stream , out_t , std : : move ( actual_done ) ) ; <nl> + num_devices ( ) , GetCollectiveKey ( c ) , compute_stream - > parent ( ) , <nl> + gpu_info - > gpu_id , gpu_info - > event_mgr , compute_stream , out_t , <nl> + std : : move ( actual_done ) ) ; <nl> } <nl> } ; <nl> REGISTER_KERNEL_BUILDER ( <nl> mmm a / tensorflow / core / common_runtime / gpu / gpu_device . cc <nl> ppp b / tensorflow / core / common_runtime / gpu / gpu_device . cc <nl> Status BaseGPUDevice : : Init ( const SessionOptions & options ) { <nl> gpu_device_info_ - > stream = streams_ [ 0 ] . compute ; <nl> gpu_device_info_ - > default_context = device_contexts_ [ 0 ] ; <nl> gpu_device_info_ - > event_mgr = em_ . get ( ) ; <nl> + gpu_device_info_ - > gpu_id = gpu_id_ ; <nl> set_tensorflow_gpu_device_info ( gpu_device_info_ ) ; <nl> <nl> return Status : : OK ( ) ; <nl> mmm a / tensorflow / core / framework / device_base . h <nl> ppp b / tensorflow / core / framework / device_base . h <nl> class DeviceBase { <nl> perftools : : gputools : : Stream * stream = nullptr ; <nl> DeviceContext * default_context = nullptr ; <nl> EventMgr * event_mgr = nullptr ; <nl> + int gpu_id = - 1 ; <nl> } ; <nl> <nl> / / Does not take ownership . <nl> | Change nccl_manager to use ncclCommInitAll . | tensorflow/tensorflow | f3405c2d73196e409041d52bbf30748b2a64493b | 2017-02-22T02:31:12Z |
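
The NCCL change above collapses N worker threads each calling ncclCommInitRank with a shared ncclUniqueId into a single ncclCommInitAll over the participants' device ids (now plumbed through as gpu_device_id). A minimal sketch of the single-process initialization shape, assuming an NCCL installation and at least two visible GPUs:

    #include <nccl.h>
    #include <cstdio>
    #include <vector>

    int main() {
        // Device ids of the participants (assumes GPUs 0 and 1 exist).
        std::vector<int> devices = {0, 1};
        std::vector<ncclComm_t> comms(devices.size());

        // One call creates the whole clique of communicators, replacing the
        // per-rank ncclCommInitRank calls on a thread pool.
        ncclResult_t r = ncclCommInitAll(comms.data(),
                                         static_cast<int>(devices.size()),
                                         devices.data());
        if (r != ncclSuccess) {
            std::fprintf(stderr, "ncclCommInitAll: %s\n", ncclGetErrorString(r));
            return 1;
        }
        for (ncclComm_t c : comms) ncclCommDestroy(c);
        return 0;
    }

ncclCommInitAll is only valid when one process owns all ranks, which holds for this single-process manager; multi-process setups still need ncclCommInitRank.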
mmm a / src / mongo / db / query / sbe_stage_builder_filter . cpp <nl> ppp b / src / mongo / db / query / sbe_stage_builder_filter . cpp <nl> struct EvalExpr { <nl> EvalExpr ( sbe : : value : : SlotId s ) : expr ( sbe : : makeE < sbe : : EVariable > ( s ) ) , slot ( s ) { } <nl> <nl> EvalExpr & operator = ( EvalExpr & & e ) { <nl> + if ( this = = & e ) { <nl> + return * this ; <nl> + } <nl> + <nl> expr = std : : move ( e . expr ) ; <nl> slot = e . slot ; <nl> e . slot = boost : : none ; <nl> | SERVER - 50783 Coverity analysis defect 116260 : Unsafe assignment operator | mongodb/mongo | 5b46992843fd3643395bce807d269d751f10763d | 2020-09-28T16:35:22Z |
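
The MongoDB fix above is the classic self-move-assignment guard: without the "this == &e" check, the trailing "e.slot = boost::none" would clear the object's own slot whenever source and destination alias. A self-contained sketch of the hazard and the guard, with std types standing in for SBE's expression and slot types:

    #include <cassert>
    #include <memory>
    #include <optional>
    #include <utility>

    struct EvalExpr {
        std::unique_ptr<int> expr;   // stand-in for the SBE expression
        std::optional<int> slot;     // stand-in for the slot id

        EvalExpr() = default;
        EvalExpr(EvalExpr&&) = default;

        EvalExpr& operator=(EvalExpr&& e) {
            if (this == &e)          // the added guard: without it, the
                return *this;        // 'e.slot = nullopt' below clears our slot
            expr = std::move(e.expr);
            slot = e.slot;
            e.slot = std::nullopt;
            return *this;
        }
    };

    int main() {
        EvalExpr a;
        a.slot = 7;
        EvalExpr& alias = a;         // aliasing hides the self-move from -Wself-move
        a = std::move(alias);
        assert(a.slot.has_value());  // survives only because of the guard
    }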
mmm a / tensorflow / core / framework / device_base . h <nl> ppp b / tensorflow / core / framework / device_base . h <nl> class DeviceBase { <nl> virtual Allocator * GetScopedAllocator ( AllocatorAttributes attr , <nl> int64 step_id ) { <nl> LOG ( FATAL ) < < " Device does not implement GetScopedAllocator ( ) " ; <nl> + return nullptr ; <nl> } <nl> <nl> virtual ScopedAllocatorMgr * GetScopedAllocatorMgr ( ) const { return nullptr ; } <nl> | Silence compilation warning in allocator . h | tensorflow/tensorflow | 1b732d528ba120a6da32100f9991a353128114e8 | 2018-03-25T09:28:40Z |
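
The one-line TensorFlow fix above quiets a "control reaches end of non-void function" warning: LOG(FATAL) never returns, but unless the compiler can prove that, a non-void function whose body ends at the macro looks like it can fall off the end. A small sketch reproducing the situation, with a plain function standing in for LOG(FATAL):

    #include <cstdio>
    #include <cstdlib>

    // Stand-in for LOG(FATAL). Deliberately not marked [[noreturn]], like a
    // macro whose termination the compiler cannot see through.
    void logFatal(const char* msg) {
        std::fprintf(stderr, "%s\n", msg);
        std::abort();
    }

    struct Allocator {};

    struct DeviceBase {
        virtual ~DeviceBase() = default;
        virtual Allocator* GetScopedAllocator() {
            logFatal("Device does not implement GetScopedAllocator()");
            return nullptr;   // the added line: silences -Wreturn-type on the
                              // unreachable fall-through path
        }
    };

    int main() {
        DeviceBase d;
        (void)d;
    }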
mmm a / cocos / ui / UIScrollView . cpp <nl> ppp b / cocos / ui / UIScrollView . cpp <nl> const Vec2 SCROLLDIR_UP = Vec2 ( 0 . 0f , 1 . 0f ) ; <nl> const Vec2 SCROLLDIR_DOWN = Vec2 ( 0 . 0f , - 1 . 0f ) ; <nl> const Vec2 SCROLLDIR_LEFT = Vec2 ( - 1 . 0f , 0 . 0f ) ; <nl> const Vec2 SCROLLDIR_RIGHT = Vec2 ( 1 . 0f , 0 . 0f ) ; <nl> - <nl> + <nl> IMPLEMENT_CLASS_GUI_INFO ( ScrollView ) <nl> <nl> ScrollView : : ScrollView ( ) : <nl> ScrollView * ScrollView : : create ( ) <nl> CC_SAFE_DELETE ( widget ) ; <nl> return nullptr ; <nl> } <nl> - <nl> + <nl> void ScrollView : : onEnter ( ) <nl> { <nl> # if CC_ENABLE_SCRIPT_BINDING <nl> void ScrollView : : onEnter ( ) <nl> return ; <nl> } <nl> # endif <nl> - <nl> + <nl> Layout : : onEnter ( ) ; <nl> scheduleUpdate ( ) ; <nl> } <nl> const Size & ScrollView : : getInnerContainerSize ( ) const <nl> { <nl> return _innerContainer - > getContentSize ( ) ; <nl> } <nl> - <nl> + <nl> void ScrollView : : addChild ( Node * child ) <nl> { <nl> ScrollView : : addChild ( child , child - > getLocalZOrder ( ) , child - > getTag ( ) ) ; <nl> } <nl> - <nl> + <nl> void ScrollView : : addChild ( Node * child , int localZOrder ) <nl> { <nl> ScrollView : : addChild ( child , localZOrder , child - > getTag ( ) ) ; <nl> void ScrollView : : addChild ( Node * child , int zOrder , int tag ) <nl> { <nl> _innerContainer - > addChild ( child , zOrder , tag ) ; <nl> } <nl> - <nl> + <nl> void ScrollView : : addChild ( Node * child , int zOrder , const std : : string & name ) <nl> { <nl> _innerContainer - > addChild ( child , zOrder , name ) ; <nl> void ScrollView : : removeAllChildren ( ) <nl> { <nl> removeAllChildrenWithCleanup ( true ) ; <nl> } <nl> - <nl> + <nl> void ScrollView : : removeAllChildrenWithCleanup ( bool cleanup ) <nl> { <nl> _innerContainer - > removeAllChildrenWithCleanup ( cleanup ) ; <nl> ssize_t ScrollView : : getChildrenCount ( ) const <nl> { <nl> return _innerContainer - > getChildrenCount ( ) ; <nl> } <nl> - <nl> + <nl> Node * ScrollView : : getChildByTag ( int tag ) const <nl> { <nl> return _innerContainer - > getChildByTag ( tag ) ; <nl> } <nl> - <nl> + <nl> Node * ScrollView : : getChildByName ( const std : : string & name ) const <nl> { <nl> return _innerContainer - > getChildByName ( name ) ; <nl> } <nl> - <nl> + <nl> void ScrollView : : moveChildren ( float offsetX , float offsetY ) <nl> { <nl> _moveChildPoint = _innerContainer - > getPosition ( ) + Vec2 ( offsetX , offsetY ) ; <nl> void ScrollView : : startAutoScrollChildrenWithOriginalSpeed ( const Vec2 & dir , float <nl> _autoScrollAcceleration = acceleration ; <nl> } <nl> <nl> - void ScrollView : : startAutoScrollChildrenWithDestination ( const Vec2 & des , float time , bool attenuated ) <nl> + void ScrollView : : startAutoScrollChildrenWithDestination ( const Vec2 & des , float second , bool attenuated ) <nl> { <nl> _needCheckAutoScrollDestination = false ; <nl> _autoScrollDestination = des ; <nl> void ScrollView : : startAutoScrollChildrenWithDestination ( const Vec2 & des , float t <nl> float acceleration = - 1000 . 0f ; <nl> if ( attenuated ) <nl> { <nl> - acceleration = ( - ( 2 * dis . getLength ( ) ) ) / ( time * time ) ; <nl> - orSpeed = 2 * dis . getLength ( ) / time ; <nl> + acceleration = ( - ( 2 * dis . getLength ( ) ) ) / ( second * second ) ; <nl> + orSpeed = 2 * dis . getLength ( ) / second ; <nl> } <nl> else <nl> { <nl> _needCheckAutoScrollDestination = true ; <nl> - orSpeed = dis . getLength ( ) / time ; <nl> + orSpeed = dis . 
getLength ( ) / second ; <nl> } <nl> startAutoScrollChildrenWithOriginalSpeed ( dir , orSpeed , attenuated , acceleration ) ; <nl> } <nl> bool ScrollView : : checkCustomScrollDestination ( float * touchOffsetX , float * touchO <nl> } <nl> return scrollenabled ; <nl> } <nl> - <nl> + <nl> bool ScrollView : : scrollChildrenVertical ( float touchOffsetX , float touchOffsetY ) <nl> { <nl> float realOffset = touchOffsetY ; <nl> bool ScrollView : : scrollChildrenVertical ( float touchOffsetX , float touchOffsetY ) <nl> moveChildren ( 0 . 0f , realOffset ) ; <nl> return scrollEnabled ; <nl> } <nl> - <nl> + <nl> bool ScrollView : : scrollChildrenHorizontal ( float touchOffsetX , float touchOffestY ) <nl> { <nl> bool scrollenabled = true ; <nl> bool ScrollView : : scrollChildrenHorizontal ( float touchOffsetX , float touchOffestY <nl> moveChildren ( realOffset , 0 . 0f ) ; <nl> return scrollenabled ; <nl> } <nl> - <nl> + <nl> bool ScrollView : : scrollChildrenBoth ( float touchOffsetX , float touchOffsetY ) <nl> { <nl> bool scrollenabled = true ; <nl> bool ScrollView : : scrollChildren ( float touchOffsetX , float touchOffsetY ) <nl> return scrollenabled ; <nl> } <nl> <nl> - void ScrollView : : scrollToBottom ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToBottom ( float second , bool attenuated ) <nl> { <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( _innerContainer - > getPosition ( ) . x , 0 . 0f ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( _innerContainer - > getPosition ( ) . x , 0 . 0f ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToTop ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToTop ( float second , bool attenuated ) <nl> { <nl> startAutoScrollChildrenWithDestination ( Vec2 ( _innerContainer - > getPosition ( ) . x , <nl> - _contentSize . height - _innerContainer - > getContentSize ( ) . height ) , time , attenuated ) ; <nl> + _contentSize . height - _innerContainer - > getContentSize ( ) . height ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToLeft ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToLeft ( float second , bool attenuated ) <nl> { <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( 0 . 0f , _innerContainer - > getPosition ( ) . y ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( 0 . 0f , _innerContainer - > getPosition ( ) . y ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToRight ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToRight ( float second , bool attenuated ) <nl> { <nl> startAutoScrollChildrenWithDestination ( Vec2 ( _contentSize . width - _innerContainer - > getContentSize ( ) . width , <nl> - _innerContainer - > getPosition ( ) . y ) , time , attenuated ) ; <nl> + _innerContainer - > getPosition ( ) . y ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToTopLeft ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToTopLeft ( float second , bool attenuated ) <nl> { <nl> if ( _direction ! = Direction : : BOTH ) <nl> { <nl> CCLOG ( " Scroll diretion is not both ! " ) ; <nl> return ; <nl> } <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( 0 . 0f , _contentSize . height - _innerContainer - > getContentSize ( ) . height ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( 0 . 0f , _contentSize . height - _innerContainer - > getContentSize ( ) . 
height ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToTopRight ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToTopRight ( float second , bool attenuated ) <nl> { <nl> if ( _direction ! = Direction : : BOTH ) <nl> { <nl> void ScrollView : : scrollToTopRight ( float time , bool attenuated ) <nl> return ; <nl> } <nl> startAutoScrollChildrenWithDestination ( Vec2 ( _contentSize . width - _innerContainer - > getContentSize ( ) . width , <nl> - _contentSize . height - _innerContainer - > getContentSize ( ) . height ) , time , attenuated ) ; <nl> + _contentSize . height - _innerContainer - > getContentSize ( ) . height ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToBottomLeft ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToBottomLeft ( float second , bool attenuated ) <nl> { <nl> if ( _direction ! = Direction : : BOTH ) <nl> { <nl> CCLOG ( " Scroll diretion is not both ! " ) ; <nl> return ; <nl> } <nl> - startAutoScrollChildrenWithDestination ( Vec2 : : ZERO , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 : : ZERO , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToBottomRight ( float time , bool attenuated ) <nl> + void ScrollView : : scrollToBottomRight ( float second , bool attenuated ) <nl> { <nl> if ( _direction ! = Direction : : BOTH ) <nl> { <nl> CCLOG ( " Scroll diretion is not both ! " ) ; <nl> return ; <nl> } <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( _contentSize . width - _innerContainer - > getContentSize ( ) . width , 0 . 0f ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( _contentSize . width - _innerContainer - > getContentSize ( ) . width , 0 . 0f ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToPercentVertical ( float percent , float time , bool attenuated ) <nl> + void ScrollView : : scrollToPercentVertical ( float percent , float second , bool attenuated ) <nl> { <nl> float minY = _contentSize . height - _innerContainer - > getContentSize ( ) . height ; <nl> float h = - minY ; <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( _innerContainer - > getPosition ( ) . x , minY + percent * h / 100 . 0f ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( _innerContainer - > getPosition ( ) . x , minY + percent * h / 100 . 0f ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToPercentHorizontal ( float percent , float time , bool attenuated ) <nl> + void ScrollView : : scrollToPercentHorizontal ( float percent , float second , bool attenuated ) <nl> { <nl> float w = _innerContainer - > getContentSize ( ) . width - _contentSize . width ; <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( - ( percent * w / 100 . 0f ) , _innerContainer - > getPosition ( ) . y ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( - ( percent * w / 100 . 0f ) , _innerContainer - > getPosition ( ) . y ) , second , attenuated ) ; <nl> } <nl> <nl> - void ScrollView : : scrollToPercentBothDirection ( const Vec2 & percent , float time , bool attenuated ) <nl> + void ScrollView : : scrollToPercentBothDirection ( const Vec2 & percent , float second , bool attenuated ) <nl> { <nl> if ( _direction ! = Direction : : BOTH ) <nl> { <nl> void ScrollView : : scrollToPercentBothDirection ( const Vec2 & percent , float time , b <nl> float minY = _contentSize . height - _innerContainer - > getContentSize ( ) . 
height ; <nl> float h = - minY ; <nl> float w = _innerContainer - > getContentSize ( ) . width - _contentSize . width ; <nl> - startAutoScrollChildrenWithDestination ( Vec2 ( - ( percent . x * w / 100 . 0f ) , minY + percent . y * h / 100 . 0f ) , time , attenuated ) ; <nl> + startAutoScrollChildrenWithDestination ( Vec2 ( - ( percent . x * w / 100 . 0f ) , minY + percent . y * h / 100 . 0f ) , second , attenuated ) ; <nl> } <nl> <nl> void ScrollView : : jumpToBottom ( ) <nl> void ScrollView : : handleReleaseLogic ( Touch * touch ) <nl> { <nl> endRecordSlidAction ( ) ; <nl> _bePressed = false ; <nl> - } <nl> + } <nl> <nl> bool ScrollView : : onTouchBegan ( Touch * touch , Event * unusedEvent ) <nl> { <nl> void ScrollView : : interceptTouchEvent ( Widget : : TouchEventType event , Widget * sende <nl> } <nl> } <nl> break ; <nl> - <nl> + <nl> case TouchEventType : : CANCELED : <nl> case TouchEventType : : ENDED : <nl> { <nl> void ScrollView : : addEventListenerScrollView ( Ref * target , SEL_ScrollViewEvent sel <nl> _scrollViewEventListener = target ; <nl> _scrollViewEventSelector = selector ; <nl> } <nl> - <nl> + <nl> void ScrollView : : addEventListener ( const ccScrollViewCallback & callback ) <nl> { <nl> _eventCallback = callback ; <nl> void ScrollView : : copySpecialProperties ( Widget * widget ) <nl> _ccEventCallback = scrollView - > _ccEventCallback ; <nl> } <nl> } <nl> - <nl> + <nl> Widget * ScrollView : : findNextFocusedWidget ( cocos2d : : ui : : Widget : : FocusDirection direction , cocos2d : : ui : : Widget * current ) <nl> { <nl> if ( this - > getLayoutType ( ) = = Layout : : Type : : VERTICAL <nl> mmm a / cocos / ui / UIScrollView . h <nl> ppp b / cocos / ui / UIScrollView . h <nl> typedef void ( Ref : : * SEL_ScrollViewEvent ) ( Ref * , ScrollviewEventType ) ; <nl> * / <nl> class CC_GUI_DLL ScrollView : public Layout <nl> { <nl> - <nl> + <nl> DECLARE_CLASS_GUI_INFO <nl> - <nl> + <nl> public : <nl> / * * <nl> * ScrollView scroll direction type . <nl> class CC_GUI_DLL ScrollView : public Layout <nl> HORIZONTAL , <nl> BOTH <nl> } ; <nl> - <nl> + <nl> / * * <nl> * Scrollview scroll event type . <nl> * / <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * A callback which would be called when a ScrollView is scrolling . <nl> * / <nl> typedef std : : function < void ( Ref * , EventType ) > ccScrollViewCallback ; <nl> - <nl> + <nl> / * * <nl> * Default constructor <nl> * @ js ctor <nl> * @ lua new <nl> * / <nl> ScrollView ( ) ; <nl> - <nl> + <nl> / * * <nl> * Default destructor <nl> * @ js NA <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ return A ScrollView instance . <nl> * / <nl> static ScrollView * create ( ) ; <nl> - <nl> + <nl> / * * <nl> * Changes scroll direction of scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ param dir Scroll direction enum . <nl> * / <nl> virtual void setDirection ( Direction dir ) ; <nl> - <nl> + <nl> / * * <nl> * Query scroll direction of scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ return Scrollview scroll direction . <nl> * / <nl> Direction getDirection ( ) const ; <nl> - <nl> + <nl> / * * <nl> * Get inner container of scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ return Inner container pointer . <nl> * / <nl> Layout * getInnerContainer ( ) const ; <nl> - <nl> + <nl> / * * <nl> * Scroll inner container to bottom boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . 
<nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToBottom ( float time , bool attenuated ) ; <nl> - <nl> + void scrollToBottom ( float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to top boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToTop ( float time , bool attenuated ) ; <nl> - <nl> + void scrollToTop ( float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to left boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToLeft ( float time , bool attenuated ) ; <nl> - <nl> + void scrollToLeft ( float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to right boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> void scrollToRight ( float time , bool attenuated ) ; <nl> - <nl> + <nl> / * * <nl> * Scroll inner container to top and left boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToTopLeft ( float time , bool attenuated ) ; <nl> - <nl> + void scrollToTopLeft ( float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to top and right boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> void scrollToTopRight ( float time , bool attenuated ) ; <nl> - <nl> + <nl> / * * <nl> * Scroll inner container to bottom and left boundary of scrollview . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToBottomLeft ( float time , bool attenuated ) ; <nl> - <nl> + void scrollToBottomLeft ( float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to bottom and right boundary of scrollview . <nl> - * @ param time Time in seconds <nl> + * @ param second Time in seconds <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> void scrollToBottomRight ( float time , bool attenuated ) ; <nl> - <nl> + <nl> / * * <nl> * Scroll inner container to vertical percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToPercentVertical ( float percent , float time , bool attenuated ) ; <nl> - <nl> + void scrollToPercentVertical ( float percent , float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to horizontal percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . 
<nl> * / <nl> - void scrollToPercentHorizontal ( float percent , float time , bool attenuated ) ; <nl> - <nl> + void scrollToPercentHorizontal ( float percent , float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Scroll inner container to both direction percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> - * @ param time Time in seconds . <nl> + * @ param second Time in seconds . <nl> * @ param attenuated Whether scroll speed attenuate or not . <nl> * / <nl> - void scrollToPercentBothDirection ( const Vec2 & percent , float time , bool attenuated ) ; <nl> - <nl> + void scrollToPercentBothDirection ( const Vec2 & percent , float second , bool attenuated ) ; <nl> + <nl> / * * <nl> * Move inner container to bottom boundary of scrollview . <nl> * / <nl> void jumpToBottom ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to top boundary of scrollview . <nl> * / <nl> void jumpToTop ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to left boundary of scrollview . <nl> * / <nl> void jumpToLeft ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to right boundary of scrollview . <nl> * / <nl> void jumpToRight ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to top and left boundary of scrollview . <nl> * / <nl> void jumpToTopLeft ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to top and right boundary of scrollview . <nl> * / <nl> void jumpToTopRight ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to bottom and left boundary of scrollview . <nl> * / <nl> void jumpToBottomLeft ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to bottom and right boundary of scrollview . <nl> * / <nl> void jumpToBottomRight ( ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to vertical percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> * / <nl> void jumpToPercentVertical ( float percent ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to horizontal percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> * / <nl> void jumpToPercentHorizontal ( float percent ) ; <nl> - <nl> + <nl> / * * <nl> * Move inner container to both direction percent position of scrollview . <nl> * @ param percent A value between 0 and 100 . <nl> * / <nl> void jumpToPercentBothDirection ( const Vec2 & percent ) ; <nl> - <nl> + <nl> / * * <nl> * Change inner container size of scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ param size Inner container size . <nl> * / <nl> void setInnerContainerSize ( const Size & size ) ; <nl> - <nl> + <nl> / * * <nl> * Get inner container size of scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ return The inner container size . <nl> * / <nl> const Size & getInnerContainerSize ( ) const ; <nl> - <nl> + <nl> / * * <nl> * Add callback function which will be called when scrollview event triggered . <nl> * @ deprecated Use @ see ` addEventListener ` instead . <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ param callback A callback function with type of ` ccScrollViewCallback ` . 
<nl> * / <nl> virtual void addEventListener ( const ccScrollViewCallback & callback ) ; <nl> - <nl> + <nl> / / override functions <nl> virtual void addChild ( Node * child ) override ; <nl> virtual void addChild ( Node * child , int localZOrder ) override ; <nl> class CC_GUI_DLL ScrollView : public Layout <nl> virtual void onTouchEnded ( Touch * touch , Event * unusedEvent ) override ; <nl> virtual void onTouchCancelled ( Touch * touch , Event * unusedEvent ) override ; <nl> virtual void update ( float dt ) override ; <nl> - <nl> - <nl> + <nl> + <nl> / * * <nl> * @ brief Toggle bounce enabled when scroll to the edge . <nl> * <nl> * @ param enabled True if enable bounce , false otherwise . <nl> * / <nl> void setBounceEnabled ( bool enabled ) ; <nl> - <nl> + <nl> / * * <nl> * @ brief Query bounce state . <nl> * <nl> * @ return True if bounce is enabled , false otherwise . <nl> * / <nl> bool isBounceEnabled ( ) const ; <nl> - <nl> + <nl> / * * <nl> * @ brief Toggle whether enable scroll inertia while scrolling . <nl> * <nl> * @ param enabled True if enable inertia , false otherwise . <nl> * / <nl> void setInertiaScrollEnabled ( bool enabled ) ; <nl> - <nl> + <nl> / * * <nl> * @ brief Query inertia scroll state . <nl> * <nl> * @ return True if inertia is enabled , false otherwise . <nl> * / <nl> bool isInertiaScrollEnabled ( ) const ; <nl> - <nl> + <nl> / * * <nl> * Set layout type for scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ param type Layout type enum . <nl> * / <nl> virtual void setLayoutType ( Type type ) override ; <nl> - <nl> + <nl> / * * <nl> * Get the layout type for scrollview . <nl> * <nl> class CC_GUI_DLL ScrollView : public Layout <nl> * @ return LayoutType <nl> * / <nl> virtual Type getLayoutType ( ) const override ; <nl> - <nl> + <nl> / * * <nl> * Return the " class name " of widget . <nl> * / <nl> virtual std : : string getDescription ( ) const override ; <nl> - <nl> + <nl> / * * <nl> * @ lua NA <nl> * / <nl> virtual void onEnter ( ) override ; <nl> - <nl> + <nl> / * * <nl> * When a widget is in a layout , you could call this method to get the next focused widget within a specified direction . 
<nl> * If the widget is not in a layout , it will return itself <nl> class CC_GUI_DLL ScrollView : public Layout <nl> <nl> CC_CONSTRUCTOR_ACCESS : <nl> virtual bool init ( ) override ; <nl> - <nl> + <nl> protected : <nl> virtual void initRenderer ( ) override ; <nl> - <nl> + <nl> virtual void onSizeChanged ( ) override ; <nl> virtual void doLayout ( ) override ; <nl> <nl> virtual Widget * createCloneInstance ( ) override ; <nl> virtual void copySpecialProperties ( Widget * model ) override ; <nl> virtual void copyClonedWidgetChildren ( Widget * model ) override ; <nl> - <nl> - <nl> + <nl> + <nl> void moveChildren ( float offsetX , float offsetY ) ; <nl> void autoScrollChildren ( float dt ) ; <nl> void bounceChildren ( float dt ) ; <nl> void checkBounceBoundary ( ) ; <nl> bool checkNeedBounce ( ) ; <nl> void startAutoScrollChildrenWithOriginalSpeed ( const Vec2 & dir , float v , bool attenuated , float acceleration ) ; <nl> - void startAutoScrollChildrenWithDestination ( const Vec2 & des , float time , bool attenuated ) ; <nl> + void startAutoScrollChildrenWithDestination ( const Vec2 & des , float second , bool attenuated ) ; <nl> void jumpToDestination ( const Vec2 & des ) ; <nl> void stopAutoScrollChildren ( ) ; <nl> void startBounceChildren ( float v ) ; <nl> void stopBounceChildren ( ) ; <nl> bool checkCustomScrollDestination ( float * touchOffsetX , float * touchOffsetY ) ; <nl> - <nl> + <nl> virtual bool scrollChildren ( float touchOffsetX , float touchOffsetY ) ; <nl> <nl> bool scrollChildrenVertical ( float touchOffsetX , float touchOffsetY ) ; <nl> bool scrollChildrenHorizontal ( float touchOffsetX , float touchOffestY ) ; <nl> bool scrollChildrenBoth ( float touchOffsetX , float touchOffsetY ) ; <nl> <nl> - <nl> + <nl> bool bounceScrollChildren ( float touchOffsetX , float touchOffsetY ) ; <nl> void startRecordSlidAction ( ) ; <nl> virtual void endRecordSlidAction ( ) ; <nl> - <nl> + <nl> / / ScrollViewProtocol <nl> virtual void handlePressLogic ( Touch * touch ) ; <nl> virtual void handleMoveLogic ( Touch * touch ) ; <nl> virtual void handleReleaseLogic ( Touch * touch ) ; <nl> - <nl> + <nl> virtual void interceptTouchEvent ( Widget : : TouchEventType event , Widget * sender , Touch * touch ) override ; <nl> - <nl> + <nl> void recordSlidTime ( float dt ) ; <nl> - <nl> + <nl> void scrollToTopEvent ( ) ; <nl> void scrollToBottomEvent ( ) ; <nl> void scrollToLeftEvent ( ) ; <nl> void scrollToRightEvent ( ) ; <nl> void scrollingEvent ( ) ; <nl> - <nl> + <nl> void bounceTopEvent ( ) ; <nl> void bounceBottomEvent ( ) ; <nl> void bounceLeftEvent ( ) ; <nl> void bounceRightEvent ( ) ; <nl> - <nl> + <nl> protected : <nl> Layout * _innerContainer ; <nl> - <nl> + <nl> Direction _direction ; <nl> Vec2 _autoScrollDir ; <nl> - <nl> + <nl> float _topBoundary ; <nl> float _bottomBoundary ; <nl> float _leftBoundary ; <nl> float _rightBoundary ; <nl> - <nl> + <nl> float _bounceTopBoundary ; <nl> float _bounceBottomBoundary ; <nl> float _bounceLeftBoundary ; <nl> float _bounceRightBoundary ; <nl> <nl> - <nl> + <nl> bool _autoScroll ; <nl> float _autoScrollAddUpTime ; <nl> - <nl> + <nl> float _autoScrollOriginalSpeed ; <nl> float _autoScrollAcceleration ; <nl> bool _isAutoScrollSpeedAttenuated ; <nl> bool _needCheckAutoScrollDestination ; <nl> Vec2 _autoScrollDestination ; <nl> - <nl> + <nl> bool _bePressed ; <nl> float _slidTime ; <nl> Vec2 _moveChildPoint ; <nl> float _childFocusCancelOffset ; <nl> - <nl> + <nl> bool _leftBounceNeeded ; <nl> bool _topBounceNeeded ; <nl> bool _rightBounceNeeded ; 
<nl> bool _bottomBounceNeeded ; <nl> - <nl> + <nl> bool _bounceEnabled ; <nl> bool _bouncing ; <nl> Vec2 _bounceDir ; <nl> float _bounceOriginalSpeed ; <nl> bool _inertiaScrollEnabled ; <nl> - <nl> + <nl> Ref * _scrollViewEventListener ; <nl> # if defined ( __GNUC__ ) & & ( ( __GNUC__ > = 4 ) | | ( ( __GNUC__ = = 3 ) & & ( __GNUC_MINOR__ > = 1 ) ) ) <nl> # pragma GCC diagnostic ignored " - Wdeprecated - declarations " <nl> | make scrollview time arguments more meaningful | cocos2d/cocos2d-x | 96f8842ee1f96569fec7ae20a9ea218f98e3c778 | 2015-04-15T08:37:11Z |
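
A note on the constants in the attenuated branch of startAutoScrollChildrenWithDestination, which this commit leaves intact while renaming "time" to "second": they are the unique uniformly decelerated motion that covers the remaining distance d in exactly t seconds and stops at the destination,

    \[ d = v_0 t + \tfrac{1}{2} a t^2, \qquad v_0 + a t = 0
       \quad\Longrightarrow\quad v_0 = \frac{2d}{t}, \qquad a = -\frac{2d}{t^2}, \]

which matches orSpeed = 2 * dis.getLength() / second and acceleration = -(2 * dis.getLength()) / (second * second) in the code above.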
mmm a / modules / planning / reference_line / reference_line . cc <nl> ppp b / modules / planning / reference_line / reference_line . cc <nl> <nl> # include " modules / common / log . h " <nl> # include " modules / common / math / angle . h " <nl> # include " modules / common / math / linear_interpolation . h " <nl> + # include " modules / common / math / vec2d . h " <nl> # include " modules / common / util / string_util . h " <nl> # include " modules / planning / common / planning_gflags . h " <nl> # include " modules / planning / math / double . h " <nl> ReferencePoint ReferenceLine : : get_reference_point ( const double s ) const { <nl> auto s0 = accumulated_s [ index - 1 ] ; <nl> auto s1 = accumulated_s [ index ] ; <nl> <nl> - return ReferenceLine : : interpolate ( p0 , s0 , p1 , s1 , s ) ; <nl> + return interpolate ( p0 , s0 , p1 , s1 , s ) ; <nl> } <nl> } <nl> <nl> double ReferenceLine : : find_min_distance_point ( const ReferencePoint & p0 , <nl> const double s1 , const double x , <nl> const double y ) { <nl> auto func_dist_square = [ & p0 , & p1 , & s0 , & s1 , & x , & y ] ( const double s ) { <nl> - auto p = ReferenceLine : : interpolate ( p0 , s0 , p1 , s1 , s ) ; <nl> + auto p = interpolate ( p0 , s0 , p1 , s1 , s ) ; <nl> double dx = p . x ( ) - x ; <nl> double dy = p . y ( ) - y ; <nl> return dx * dx + dy * dy ; <nl> ReferencePoint ReferenceLine : : get_reference_point ( const double x , <nl> reference_points_ [ index_start ] , s0 , reference_points_ [ index_end ] , s1 , x , <nl> y ) ; <nl> <nl> - return ReferenceLine : : interpolate ( reference_points_ [ index_start ] , s0 , <nl> - reference_points_ [ index_end ] , s1 , s ) ; <nl> + return interpolate ( reference_points_ [ index_start ] , s0 , <nl> + reference_points_ [ index_end ] , s1 , s ) ; <nl> } <nl> <nl> bool ReferenceLine : : get_point_in_cartesian_frame ( <nl> ReferencePoint ReferenceLine : : interpolate ( const ReferencePoint & p0 , <nl> const double s0 , <nl> const ReferencePoint & p1 , <nl> const double s1 , const double s ) { <nl> + if ( std : : fabs ( s0 - s1 ) < common : : math : : kMathEpsilon ) { <nl> + return p0 ; <nl> + } <nl> + DCHECK_LE ( s0 , s ) < < " s : " < < s < < " is less than s0 : " < < s0 ; <nl> + DCHECK_LE ( s , s1 ) < < " s : " < < s < < " is larger than s1 : " < < s1 ; <nl> + CHECK ( ! p0 . lane_waypoints ( ) . empty ( ) ) ; <nl> + CHECK ( ! p1 . lane_waypoints ( ) . empty ( ) ) ; <nl> ReferencePoint p = p1 ; <nl> - p . set_x ( common : : math : : lerp ( p0 . x ( ) , s0 , p1 . x ( ) , s1 , s ) ) ; <nl> - p . set_y ( common : : math : : lerp ( p0 . y ( ) , s0 , p1 . y ( ) , s1 , s ) ) ; <nl> - p . set_heading ( common : : math : : slerp ( p0 . heading ( ) , s0 , p1 . heading ( ) , s1 , s ) ) ; <nl> - p . set_kappa ( common : : math : : lerp ( p0 . kappa ( ) , s0 , p1 . kappa ( ) , s1 , s ) ) ; <nl> - p . set_dkappa ( common : : math : : lerp ( p0 . dkappa ( ) , s0 , p1 . dkappa ( ) , s1 , s ) ) ; <nl> - <nl> - / / lane boundary info , lane info will be the same as the p1 . <nl> - return p ; <nl> + const double x = common : : math : : lerp ( p0 . x ( ) , s0 , p1 . x ( ) , s1 , s ) ; <nl> + const double y = common : : math : : lerp ( p0 . y ( ) , s0 , p1 . y ( ) , s1 , s ) ; <nl> + const double heading = <nl> + common : : math : : slerp ( p0 . heading ( ) , s0 , p1 . heading ( ) , s1 , s ) ; <nl> + const double kappa = common : : math : : lerp ( p0 . kappa ( ) , s0 , p1 . kappa ( ) , s1 , s ) ; <nl> + const double dkappa = common : : math : : lerp ( p0 . dkappa ( ) , s0 , p1 . 
dkappa ( ) , s1 , s ) ; <nl> + const auto & p0_waypoint = p0 . lane_waypoints ( ) [ 0 ] ; <nl> + std : : vector < hdmap : : LaneWaypoint > waypoints ; <nl> + double upper_bound = 0 . 0 ; <nl> + double lower_bound = 0 . 0 ; <nl> + if ( ( s - s0 ) + p0_waypoint . s < = p0_waypoint . lane - > total_length ( ) ) { <nl> + const double lane_s = p0_waypoint . s + s - s0 ; <nl> + waypoints . emplace_back ( p0_waypoint . lane , lane_s ) ; <nl> + p0_waypoint . lane - > get_width ( lane_s , & upper_bound , & lower_bound ) ; <nl> + } <nl> + const auto & p1_waypoint = p1 . lane_waypoints ( ) [ 0 ] ; <nl> + if ( p1_waypoint . s - ( s1 - s ) > = 0 ) { <nl> + const double lane_s = p1_waypoint . s - ( s1 - s ) ; <nl> + waypoints . emplace_back ( p1_waypoint . lane , lane_s ) ; <nl> + p1_waypoint . lane - > get_width ( lane_s , & upper_bound , & lower_bound ) ; <nl> + } <nl> + <nl> + return ReferencePoint ( hdmap : : MapPathPoint ( { x , y } , heading , waypoints ) , kappa , <nl> + dkappa , lower_bound , upper_bound ) ; <nl> } <nl> <nl> const std : : vector < ReferencePoint > & ReferenceLine : : reference_points ( ) const { <nl> mmm a / modules / planning / reference_line / reference_line . h <nl> ppp b / modules / planning / reference_line / reference_line . h <nl> class ReferenceLine { <nl> double GetSpeedLimitFromPoint ( const common : : math : : Vec2d & point ) const ; <nl> <nl> private : <nl> + / * * <nl> + * @ brief Linearly interpolate p0 and p1 by s0 and s1 . <nl> + * The input has to satisfy condition : s0 < = s < = s1 <nl> + * p0 and p1 must have lane_waypoint . <nl> + * Note : it requires p0 and p1 are on the same lane , adjacent lanes , or <nl> + * parallel neighboring lanes . Otherwise the interpolated result may not <nl> + * valid . <nl> + * @ param p0 the first anchor point for interpolation . <nl> + * @ param s0 the longitutial distance ( s ) of p0 on current reference line . s0 <nl> + * < = s & & s0 < = s1 <nl> + * @ param p1 the second anchor point for interpolation <nl> + * @ param s1 the longitutial distance ( s ) of p1 on current reference line . s1 <nl> + * @ param s identifies the the middle point that is going to be interpolated . <nl> + * s > = s0 & & s < = s1 <nl> + * @ return The interpolated ReferencePoint . <nl> + * / <nl> static ReferencePoint interpolate ( const ReferencePoint & p0 , const double s0 , <nl> const ReferencePoint & p1 , const double s1 , <nl> const double s ) ; <nl> mmm a / modules / planning / reference_line / reference_line_smoother_test . cc <nl> ppp b / modules / planning / reference_line / reference_line_smoother_test . cc <nl> class ReferenceLineSmootherTest : public : : testing : : Test { <nl> std : : vector < ReferencePoint > ref_points ; <nl> const auto & points = lane_info_ptr - > points ( ) ; <nl> const auto & headings = lane_info_ptr - > headings ( ) ; <nl> + const auto & accumulate_s = lane_info_ptr - > accumulate_s ( ) ; <nl> for ( std : : size_t i = 0 ; i < points . size ( ) ; + + i ) { <nl> - ref_points . emplace_back ( points [ i ] , headings [ i ] , 0 . 0 , 0 . 0 , - 2 . 0 , 2 . 0 ) ; <nl> + std : : vector < hdmap : : LaneWaypoint > waypoint ; <nl> + waypoint . emplace_back ( lane_info_ptr , accumulate_s [ i ] ) ; <nl> + hdmap : : MapPathPoint map_path_point ( points [ i ] , headings [ i ] , waypoint ) ; <nl> + ref_points . emplace_back ( map_path_point , 0 . 0 , 0 . 0 , - 2 . 0 , 2 . 0 ) ; <nl> } <nl> reference_line_ . 
reset ( new ReferenceLine ( ref_points ) ) ; <nl> vehicle_position_ = points [ 0 ] ; <nl> mmm a / modules / planning / reference_line / reference_point . h <nl> ppp b / modules / planning / reference_line / reference_point . h <nl> class ReferencePoint : public hdmap : : MapPathPoint { <nl> public : <nl> ReferencePoint ( ) = default ; <nl> <nl> - ReferencePoint ( const MapPathPoint & map_path_point , <nl> - const double kappa , const double dkappa , <nl> - const double lower_bound , const double upper_bound ) ; <nl> + ReferencePoint ( const MapPathPoint & map_path_point , const double kappa , <nl> + const double dkappa , const double lower_bound , <nl> + const double upper_bound ) ; <nl> <nl> ReferencePoint ( const common : : math : : Vec2d & point , const double heading , <nl> const double kappa , const double dkappa , <nl> | refactor reference line interpolation function | ApolloAuto/apollo | 7adb1515ac1b53bf3e03bc403a79d2e4b67cd6fb | 2017-08-09T21:56:03Z |
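
The Apollo refactor above interpolates x, y, kappa, and dkappa with lerp but heading with slerp, i.e. angle-aware interpolation across the -pi/pi seam. A minimal sketch of the difference, using simplified free functions rather than Apollo's common::math versions:

    #include <cmath>
    #include <cstdio>

    const double kTwoPi = 6.283185307179586;

    // Plain linear interpolation between (s0, x0) and (s1, x1), sampled at s.
    double lerp(double x0, double s0, double x1, double s1, double s) {
        return x0 + (x1 - x0) * (s - s0) / (s1 - s0);
    }

    // Angle-aware interpolation: walk the shortest arc so headings near the
    // -pi/pi seam do not swing through zero.
    double slerp(double a0, double s0, double a1, double s1, double s) {
        double delta = std::remainder(a1 - a0, kTwoPi);
        return std::remainder(a0 + delta * (s - s0) / (s1 - s0), kTwoPi);
    }

    int main() {
        // Headings 3.1 and -3.1 are only ~0.08 rad apart across the seam.
        std::printf("lerp:  %.3f\n", lerp(3.1, 0.0, -3.1, 1.0, 0.5));   // ~0.000 (wrong way)
        std::printf("slerp: %.3f\n", slerp(3.1, 0.0, -3.1, 1.0, 0.5));  // ~±3.142 (stays at seam)
    }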
mmm a / src / wasm / module - compiler . cc <nl> ppp b / src / wasm / module - compiler . cc <nl> bool ExecuteJSToWasmWrapperCompilationUnits ( <nl> return true ; <nl> } <nl> <nl> + bool NeedsDeterministicCompile ( ) { <nl> + return FLAG_trace_wasm_decoder | | FLAG_wasm_num_compilation_tasks < = 1 ; <nl> + } <nl> + <nl> / / Run by the main thread and background tasks to take part in compilation . <nl> / / Returns whether any units were executed . <nl> bool ExecuteCompilationUnits ( <nl> bool ExecuteCompilationUnits ( <nl> / / These fields are initialized in a { BackgroundCompileScope } before <nl> / / starting compilation . <nl> double deadline = 0 ; <nl> + const bool deterministic = NeedsDeterministicCompile ( ) ; <nl> base : : Optional < CompilationEnv > env ; <nl> std : : shared_ptr < WireBytesStorage > wire_bytes ; <nl> std : : shared_ptr < const WasmModule > module ; <nl> bool ExecuteCompilationUnits ( <nl> } <nl> <nl> / / Get next unit . <nl> - if ( deadline < platform - > MonotonicallyIncreasingTime ( ) ) { <nl> + if ( deterministic | | deadline < platform - > MonotonicallyIncreasingTime ( ) ) { <nl> unit = { } ; <nl> } else { <nl> unit = compile_scope . compilation_state ( ) - > GetNextCompilationUnit ( <nl> void InitializeCompilationUnits ( Isolate * isolate , NativeModule * native_module ) { <nl> builder . Commit ( ) ; <nl> } <nl> <nl> - bool NeedsDeterministicCompile ( ) { <nl> - return FLAG_trace_wasm_decoder | | FLAG_wasm_num_compilation_tasks < = 1 ; <nl> - } <nl> - <nl> bool MayCompriseLazyFunctions ( const WasmModule * module , <nl> const WasmFeatures & enabled_features , <nl> bool lazy_module ) { <nl> | [ wasm ] Make compilation more predictable | v8/v8 | 7dd85c1d2f0ba799ad29e46fcd6c021ba62ca58a | 2019-09-19T10:08:04Z |
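
The V8 change above makes background compile tasks yield after every unit whenever determinism is requested (FLAG_trace_wasm_decoder set or at most one compilation task), by short-circuiting the deadline check with "deterministic ||". A toy sketch of that scheduling pattern, with a trivial queue standing in for the compilation state:

    #include <cstdio>
    #include <deque>

    struct Queue { std::deque<int> units; };

    // Returns whether any unit was executed, like ExecuteCompilationUnits.
    bool executeUnits(Queue& q, bool deterministic, double deadline,
                      double (*now)()) {
        bool executed = false;
        while (!q.units.empty()) {
            int unit = q.units.front();
            q.units.pop_front();
            std::printf("compiled unit %d\n", unit);
            executed = true;
            // Mirrors 'if (deterministic || deadline < now()) unit = {};':
            // deterministic mode yields after every single unit.
            if (deterministic || deadline < now()) break;
        }
        return executed;
    }

    double fakeNow() { return 0.0; }

    int main() {
        Queue q{{1, 2, 3}};
        // Each call compiles exactly one unit, giving a reproducible schedule.
        while (executeUnits(q, /*deterministic=*/true, /*deadline=*/1.0, fakeNow)) {}
    }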
mmm a / src / Access / LDAPAccessStorage . cpp <nl> ppp b / src / Access / LDAPAccessStorage . cpp <nl> void LDAPAccessStorage : : processRoleChange ( const UUID & id , const AccessEntityPtr <nl> auto update_func = [ & id ] ( const AccessEntityPtr & cached_entity ) - > AccessEntityPtr <nl> { <nl> auto user_ptr = typeid_cast < std : : shared_ptr < const User > > ( cached_entity ) ; <nl> - if ( user_ptr & & ! user_ptr - > granted_roles . roles . contains ( id ) ) <nl> + if ( user_ptr & & user_ptr - > granted_roles . roles . find ( id ) = = user_ptr - > granted_roles . roles . end ( ) ) <nl> { <nl> auto clone = user_ptr - > clone ( ) ; <nl> auto user_clone_ptr = typeid_cast < std : : shared_ptr < User > > ( clone ) ; <nl> void LDAPAccessStorage : : processRoleChange ( const UUID & id , const AccessEntityPtr <nl> auto update_func = [ & id ] ( const AccessEntityPtr & cached_entity ) - > AccessEntityPtr <nl> { <nl> auto user_ptr = typeid_cast < std : : shared_ptr < const User > > ( cached_entity ) ; <nl> - if ( user_ptr & & user_ptr - > granted_roles . roles . contains ( id ) ) <nl> + if ( user_ptr & & user_ptr - > granted_roles . roles . find ( id ) ! = user_ptr - > granted_roles . roles . end ( ) ) <nl> { <nl> auto clone = user_ptr - > clone ( ) ; <nl> auto user_clone_ptr = typeid_cast < std : : shared_ptr < User > > ( clone ) ; <nl> | GCC 9 compilation fix | ClickHouse/ClickHouse | c72765187b5d9f154fcffd4f3f254a75f176a5a0 | 2020-08-27T08:36:31Z |
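
The ClickHouse row above rewrites a member contains() call as find() != end(). Member contains() on standard associative containers is a C++20 addition, so a toolchain building in an earlier language mode (as with the GCC 9 configuration this commit targets) may not provide it; find() is the portable spelling. A small illustration, where HasRole is a hypothetical helper rather than ClickHouse code:

#include <set>

// Membership test that compiles both before and after C++20.
bool HasRole(const std::set<int>& granted, int id) {
#if __cplusplus >= 202002L
  return granted.contains(id);               // member contains() is C++20
#else
  return granted.find(id) != granted.end();  // portable pre-C++20 spelling
#endif
}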
mmm a / test / Compatibility / implicit_tupling_untupling_codegen . swift <nl> ppp b / test / Compatibility / implicit_tupling_untupling_codegen . swift <nl> <nl> / / RUN : % target - run - simple - swift | % FileCheck % s <nl> <nl> + / / REQUIRES : executable_test <nl> + <nl> / / Even though we test that type - checking and exhaustiveness checking work fine <nl> / / in the presence of implicit tupling / untupling in exhaustive_switch . swift , <nl> / / make sure that the " patched " patterns do not lead to incorrect codegen . <nl> | Merge pull request from drodriguez / mark - tupling - codegen - as - executable | apple/swift | e66ea71c51b4f438fc22a1e9a491658be88d52b2 | 2019-08-21T03:13:54Z |
mmm a / buildscripts / smoke . py <nl> ppp b / buildscripts / smoke . py <nl> def runTest ( test ) : <nl> # Blech . <nl> if os . path . basename ( path ) in [ " test " , " test . exe " , " perftest " , " perftest . exe " ] : <nl> argv = [ path ] <nl> - if " newUpdateFrameworkEnabled " in set_parameters : <nl> - argv + = [ " - - testNewUpdateFramework " ] <nl> # more blech <nl> elif os . path . basename ( path ) in [ ' mongos ' , ' mongos . exe ' ] : <nl> argv = [ path , " - - test " ] <nl> mmm a / src / mongo / SConscript <nl> ppp b / src / mongo / SConscript <nl> serverOnlyFiles = [ " db / curop . cpp " , <nl> " db / ops / delete . cpp " , <nl> " db / ops / query . cpp " , <nl> " db / ops / update . cpp " , <nl> - " db / ops / update_internal . cpp " , <nl> " db / parsed_query . cpp " , <nl> " db / query_runner . cpp " , <nl> " db / dbcommands . cpp " , <nl> mmm a / src / mongo / db / dbhelpers . cpp <nl> ppp b / src / mongo / db / dbhelpers . cpp <nl> namespace mongo { <nl> OpDebug debug ; <nl> Client : : Context context ( ns ) ; <nl> <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - <nl> - _updateObjectsNEW ( / * god = * / true , <nl> - ns , <nl> - obj , <nl> - / * pattern = * / BSONObj ( ) , <nl> - / * upsert = * / true , <nl> - / * multi = * / false , <nl> - logTheOp , <nl> - debug ) ; <nl> - <nl> - } <nl> - else { <nl> - <nl> - _updateObjects ( / * god = * / true , <nl> - ns , <nl> - obj , <nl> - / * pattern = * / BSONObj ( ) , <nl> - / * upsert = * / true , <nl> - / * multi = * / false , <nl> - logTheOp , <nl> - debug ) ; <nl> - <nl> - } <nl> + _updateObjects ( / * god = * / true , <nl> + ns , <nl> + obj , <nl> + / * pattern = * / BSONObj ( ) , <nl> + / * upsert = * / true , <nl> + / * multi = * / false , <nl> + logTheOp , <nl> + debug ) ; <nl> <nl> context . getClient ( ) - > curop ( ) - > done ( ) ; <nl> } <nl> mmm a / src / mongo / db / instance . cpp <nl> ppp b / src / mongo / db / instance . cpp <nl> namespace mongo { <nl> op . debug ( ) . query = query ; <nl> op . setQuery ( query ) ; <nl> <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - <nl> - / / New style . This only works with the new update framework , and moves mod parsing <nl> - / / out of the write lock . <nl> - / / <nl> - / / This code should look quite familiar , since it is basically the prelude code in <nl> - / / _updateObjectsNEW . We could factor it into a common function , but that would <nl> - / / require that we heap allocate the driver , which doesn ' t seem worth it right now , <nl> - / / especially considering that we will probably rewrite much of this code in the <nl> - / / near term . <nl> - <nl> - UpdateDriver : : Options options ; <nl> - options . multi = multi ; <nl> - options . upsert = upsert ; <nl> - <nl> - / / TODO : This is wasteful . We really shouldn ' t need to generate the oplog entry <nl> - / / just to throw it away if we are not generating an oplog . <nl> - options . logOp = true ; <nl> - <nl> - / / Select the right modifier options . We aren ' t in a replication context here , so <nl> - / / the only question is whether this update is against the ' config ' database , in <nl> - / / which case we want to disable checks , since config db docs can have field names <nl> - / / containing a dot ( " . " ) . <nl> - options . modOptions = ( NamespaceString ( ns ) . isConfigDB ( ) ) ? <nl> - ModifierInterface : : Options : : unchecked ( ) : <nl> - ModifierInterface : : Options : : normal ( ) ; <nl> - <nl> - UpdateDriver driver ( options ) ; <nl> - <nl> - Status status = driver . 
parse ( toupdate ) ; <nl> - if ( ! status . isOK ( ) ) { <nl> - uasserted ( 17009 , status . reason ( ) ) ; <nl> - } <nl> - <nl> - PageFaultRetryableSection s ; <nl> - while ( 1 ) { <nl> - try { <nl> - Lock : : DBWrite lk ( ns ) ; <nl> + / / This code should look quite familiar , since it is basically the prelude code in the <nl> + / / other overload of _updateObjectsNEW . We could factor it into a common function , but <nl> + / / that would require that we heap allocate the driver , which doesn ' t seem worth it <nl> + / / right now , especially considering that we will probably rewrite much of this code in <nl> + / / the near term . <nl> <nl> - / / void ReplSetImpl : : relinquish ( ) uses big write lock so this is thus <nl> - / / synchronized given our lock above . <nl> - uassert ( 17010 , " not master " , isMasterNs ( ns ) ) ; <nl> - <nl> - / / if this ever moves to outside of lock , need to adjust check <nl> - / / Client : : Context : : _finishInit <nl> - if ( ! broadcast & & handlePossibleShardedMessage ( m , 0 ) ) <nl> - return ; <nl> + UpdateDriver : : Options options ; <nl> + options . multi = multi ; <nl> + options . upsert = upsert ; <nl> <nl> - Client : : Context ctx ( ns ) ; <nl> + / / TODO : This is wasteful . We really shouldn ' t need to generate the oplog entry <nl> + / / just to throw it away if we are not generating an oplog . <nl> + options . logOp = true ; <nl> <nl> - UpdateResult res = updateObjects ( <nl> - & driver , <nl> - ns , toupdate , query , <nl> - upsert , multi , true , op . debug ( ) ) ; <nl> + / / Select the right modifier options . We aren ' t in a replication context here , so <nl> + / / the only question is whether this update is against the ' config ' database , in <nl> + / / which case we want to disable checks , since config db docs can have field names <nl> + / / containing a dot ( " . " ) . <nl> + options . modOptions = ( NamespaceString ( ns ) . isConfigDB ( ) ) ? <nl> + ModifierInterface : : Options : : unchecked ( ) : <nl> + ModifierInterface : : Options : : normal ( ) ; <nl> <nl> - / / for getlasterror <nl> - lastError . getSafe ( ) - > recordUpdate ( res . existing , res . num , res . upserted ) ; <nl> - break ; <nl> - } <nl> - catch ( PageFaultException & e ) { <nl> - e . touch ( ) ; <nl> - } <nl> - } <nl> + UpdateDriver driver ( options ) ; <nl> <nl> + status = driver . parse ( toupdate ) ; <nl> + if ( ! status . isOK ( ) ) { <nl> + uasserted ( 17009 , status . reason ( ) ) ; <nl> } <nl> - else { <nl> <nl> - / / This is the ' old style ' . We may or may not call the new update code , but we are <nl> - / / going to do so under the write lock in all cases . <nl> - <nl> - PageFaultRetryableSection s ; <nl> - while ( 1 ) { <nl> - try { <nl> - Lock : : DBWrite lk ( ns ) ; <nl> + PageFaultRetryableSection s ; <nl> + while ( 1 ) { <nl> + try { <nl> + Lock : : DBWrite lk ( ns ) ; <nl> <nl> - / / void ReplSetImpl : : relinquish ( ) uses big write lock so this is thus <nl> - / / synchronized given our lock above . <nl> - uassert ( 10054 , " not master " , isMasterNs ( ns ) ) ; <nl> + / / void ReplSetImpl : : relinquish ( ) uses big write lock so this is thus <nl> + / / synchronized given our lock above . <nl> + uassert ( 17010 , " not master " , isMasterNs ( ns ) ) ; <nl> <nl> - / / if this ever moves to outside of lock , need to adjust check <nl> - / / Client : : Context : : _finishInit <nl> - if ( ! 
broadcast & & handlePossibleShardedMessage ( m , 0 ) ) <nl> - return ; <nl> + / / if this ever moves to outside of lock , need to adjust check <nl> + / / Client : : Context : : _finishInit <nl> + if ( ! broadcast & & handlePossibleShardedMessage ( m , 0 ) ) <nl> + return ; <nl> <nl> - Client : : Context ctx ( ns ) ; <nl> + Client : : Context ctx ( ns ) ; <nl> <nl> - UpdateResult res = updateObjects ( <nl> - ns , toupdate , query , <nl> - upsert , multi , true , op . debug ( ) ) ; <nl> + UpdateResult res = updateObjects ( <nl> + & driver , <nl> + ns , toupdate , query , <nl> + upsert , multi , true , op . debug ( ) ) ; <nl> <nl> - / / for getlasterror <nl> - lastError . getSafe ( ) - > recordUpdate ( res . existing , res . num , res . upserted ) ; <nl> - break ; <nl> - } <nl> - catch ( PageFaultException & e ) { <nl> - e . touch ( ) ; <nl> - } <nl> + / / for getlasterror <nl> + lastError . getSafe ( ) - > recordUpdate ( res . existing , res . num , res . upserted ) ; <nl> + break ; <nl> + } <nl> + catch ( PageFaultException & e ) { <nl> + e . touch ( ) ; <nl> } <nl> } <nl> } <nl> namespace mongo { <nl> return ok ; <nl> } <nl> <nl> - void checkAndInsert ( const char * ns , / * modifies * / BSONObj & js ) { <nl> + void checkAndInsert ( const char * ns , / * modifies * / BSONObj & js ) { <nl> uassert ( 10059 , " object to insert too large " , js . objsize ( ) < = BSONObjMaxUserSize ) ; <nl> <nl> - / / Do not allow objects to be stored which violate okForStorageAsRoot <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - NamespaceString nsString ( ns ) ; <nl> - bool ok = nsString . isConfigDB ( ) | | nsString . isSystem ( ) | | js . okForStorageAsRoot ( ) ; <nl> - if ( ! ok ) { <nl> - LOG ( 1 ) < < " ns : " < < ns < < " , not okForStorageAsRoot : " < < js ; <nl> - } <nl> - uassert ( 17013 , <nl> - " Cannot insert object with _id field of array / regex or " <nl> - " with any field name prefixed with $ or containing a dot . " , <nl> - ok ) ; <nl> - } <nl> - else { <nl> - BSONObjIterator i ( js ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement e = i . next ( ) ; <nl> - <nl> - / / check no $ modifiers . note we only check top level . <nl> - / / ( scanning deep would be quite expensive ) <nl> - uassert ( 13511 , " document to insert can ' t have $ fields " , e . fieldName ( ) [ 0 ] ! = ' $ ' ) ; <nl> - <nl> - / / check no regexp for _id ( SERVER - 9502 ) <nl> - if ( str : : equals ( e . fieldName ( ) , " _id " ) ) { <nl> - uassert ( 16824 , " can ' t use a regex for _id " , e . type ( ) ! = RegEx ) ; <nl> - } <nl> - } <nl> + NamespaceString nsString ( ns ) ; <nl> + bool ok = nsString . isConfigDB ( ) | | nsString . isSystem ( ) | | js . okForStorageAsRoot ( ) ; <nl> + if ( ! ok ) { <nl> + LOG ( 1 ) < < " ns : " < < ns < < " , not okForStorageAsRoot : " < < js ; <nl> } <nl> + uassert ( 17013 , <nl> + " Cannot insert object with _id field of array / regex or " <nl> + " with any field name prefixed with $ or containing a dot . " , <nl> + ok ) ; <nl> <nl> theDataFileMgr . insertWithObjMod ( ns , <nl> / / May be modified in the call to add an _id field . <nl> mmm a / src / mongo / db / ops / update . cpp <nl> ppp b / src / mongo / db / ops / update . cpp <nl> <nl> # include " mongo / db / index_set . h " <nl> # include " mongo / db / namespace_details . h " <nl> # include " mongo / db / ops / update_driver . h " <nl> - # include " mongo / db / ops / update_internal . h " <nl> # include " mongo / db / pagefault . h " <nl> # include " mongo / db / pdfile . 
h " <nl> # include " mongo / db / query_optimizer . h " <nl> <nl> <nl> namespace mongo { <nl> <nl> - MONGO_EXPORT_SERVER_PARAMETER ( newUpdateFrameworkEnabled , bool , false ) ; <nl> - <nl> - bool isNewUpdateFrameworkEnabled ( ) { <nl> - return newUpdateFrameworkEnabled ; <nl> - } <nl> - <nl> - bool toggleNewUpdateFrameworkEnabled ( ) { <nl> - return newUpdateFrameworkEnabled = ! newUpdateFrameworkEnabled ; <nl> - } <nl> - <nl> void checkNoMods ( BSONObj o ) { <nl> BSONObjIterator i ( o ) ; <nl> while ( i . moreWithEOO ( ) ) { <nl> namespace mongo { <nl> } <nl> } <nl> <nl> - / * note : this is only ( as - is ) called for <nl> - <nl> - - not multi <nl> - - not mods is indexed <nl> - - not upsert <nl> - * / <nl> - static UpdateResult _updateById ( bool isOperatorUpdate , <nl> - int idIdxNo , <nl> - ModSet * mods , <nl> - NamespaceDetails * d , <nl> - NamespaceDetailsTransient * nsdt , <nl> - bool su , <nl> - const char * ns , <nl> - const BSONObj & updateobj , <nl> - BSONObj patternOrig , <nl> - bool logop , <nl> - OpDebug & debug , <nl> - bool fromMigrate = false ) { <nl> - <nl> - DiskLoc loc ; <nl> - { <nl> - IndexDetails & i = d - > idx ( idIdxNo ) ; <nl> - BSONObj key = i . getKeyFromQuery ( patternOrig ) ; <nl> - loc = QueryRunner : : fastFindSingle ( i , key ) ; <nl> - if ( loc . isNull ( ) ) { <nl> - / / no upsert support in _updateById yet , so we are done . <nl> - return UpdateResult ( 0 , 0 , 0 , BSONObj ( ) ) ; <nl> - } <nl> - } <nl> - Record * r = loc . rec ( ) ; <nl> - <nl> - if ( cc ( ) . allowedToThrowPageFaultException ( ) & & ! r - > likelyInPhysicalMemory ( ) ) { <nl> - throw PageFaultException ( r ) ; <nl> - } <nl> - <nl> - / * look for $ inc etc . note as listed here , all fields to inc must be this type , you can ' t set some <nl> - regular ones at the moment . * / <nl> - BSONObj newObj ; <nl> - if ( isOperatorUpdate ) { <nl> - const BSONObj & onDisk = loc . obj ( ) ; <nl> - auto_ptr < ModSetState > mss = mods - > prepare ( onDisk , false / * not an insertion * / ) ; <nl> - <nl> - if ( mss - > canApplyInPlace ( ) ) { <nl> - mss - > applyModsInPlace ( true ) ; <nl> - debug . fastmod = true ; <nl> - DEBUGUPDATE ( " \ t \ t \ t updateById doing in place update " ) ; <nl> - <nl> - newObj = onDisk ; <nl> - } <nl> - else { <nl> - newObj = mss - > createNewFromMods ( ) ; <nl> - checkTooLarge ( newObj ) ; <nl> - verify ( nsdt ) ; <nl> - theDataFileMgr . updateRecord ( ns , d , nsdt , r , loc , newObj . objdata ( ) , newObj . objsize ( ) , debug ) ; <nl> - } <nl> - <nl> - if ( logop ) { <nl> - DEV verify ( mods - > size ( ) ) ; <nl> - BSONObj pattern = patternOrig ; <nl> - BSONObj logObj = mss - > getOpLogRewrite ( ) ; <nl> - DEBUGUPDATE ( " \ t rewrite update : " < < logObj ) ; <nl> - <nl> - / / It is possible that the entire mod set was a no - op over this document . We <nl> - / / would have an empty log record in that case . If we call logOp , with an empty <nl> - / / record , that would be replicated as " clear this record " , which is not what <nl> - / / we want . Therefore , to get a no - op in the replica , we simply don ' t log . <nl> - if ( logObj . nFields ( ) ) { <nl> - logOp ( " u " , ns , logObj , & pattern , 0 , fromMigrate , & newObj ) ; <nl> - } <nl> - } <nl> - return UpdateResult ( 1 , 1 , 1 , BSONObj ( ) ) ; <nl> - <nl> - } / / end $ operator update <nl> - <nl> - / / regular update <nl> - BSONElementManipulator : : lookForTimestamps ( updateobj ) ; <nl> - checkNoMods ( updateobj ) ; <nl> - verify ( nsdt ) ; <nl> - theDataFileMgr . 
updateRecord ( ns , d , nsdt , r , loc , updateobj . objdata ( ) , updateobj . objsize ( ) , debug ) ; <nl> - if ( logop ) { <nl> - logOp ( " u " , ns , updateobj , & patternOrig , 0 , fromMigrate , & updateobj ) ; <nl> + void validateUpdate ( const char * ns , const BSONObj & updateobj , const BSONObj & patternOrig ) { <nl> + uassert ( 10155 , " cannot update reserved $ collection " , strchr ( ns , ' $ ' ) = = 0 ) ; <nl> + if ( strstr ( ns , " . system . " ) ) { <nl> + / * dm : it ' s very important that system . indexes is never updated as IndexDetails <nl> + has pointers into it * / <nl> + uassert ( 10156 , <nl> + str : : stream ( ) < < " cannot update system collection : " <nl> + < < ns < < " q : " < < patternOrig < < " u : " < < updateobj , <nl> + legalClientSystemNS ( ns , true ) ) ; <nl> } <nl> - return UpdateResult ( 1 , 0 , 1 , BSONObj ( ) ) ; <nl> } <nl> <nl> UpdateResult _updateObjects ( bool su , <nl> namespace mongo { <nl> const QueryPlanSelectionPolicy & planPolicy , <nl> bool forReplication ) { <nl> <nl> - DEBUGUPDATE ( " update : " < < ns <nl> - < < " update : " < < updateobj <nl> - < < " query : " < < patternOrig <nl> - < < " upsert : " < < upsert < < " multi : " < < multi ) ; <nl> - <nl> - Client & client = cc ( ) ; <nl> - <nl> - debug . updateobj = updateobj ; <nl> - <nl> - / / The idea with these here it to make them loop invariant for <nl> - / / multi updates , and thus be a bit faster for that case . The <nl> - / / pointers may be left invalid on a failed or terminal yield <nl> - / / recovery . <nl> - NamespaceDetails * d = nsdetails ( ns ) ; / / can be null if an upsert . . . <nl> - NamespaceDetailsTransient * nsdt = & NamespaceDetailsTransient : : get ( ns ) ; <nl> - <nl> - auto_ptr < ModSet > mods ; <nl> - bool isOperatorUpdate = updateobj . firstElementFieldName ( ) [ 0 ] = = ' $ ' ; <nl> - int modsIsIndexed = false ; / / really the # of indexes <nl> - if ( isOperatorUpdate ) { <nl> - mods . reset ( new ModSet ( updateobj , nsdt - > indexKeys ( ) , forReplication ) ) ; <nl> - modsIsIndexed = mods - > maxNumIndexUpdated ( ) ; <nl> - } <nl> - <nl> - if ( planPolicy . permitOptimalIdPlan ( ) & & ! multi & & isSimpleIdQuery ( patternOrig ) & & d & & <nl> - ! modsIsIndexed ) { <nl> - int idxNo = d - > findIdIndex ( ) ; <nl> - if ( idxNo > = 0 ) { <nl> - debug . idhack = true ; <nl> - <nl> - UpdateResult result = _updateById ( isOperatorUpdate , <nl> - idxNo , <nl> - mods . get ( ) , <nl> - d , <nl> - nsdt , <nl> - su , <nl> - ns , <nl> - updateobj , <nl> - patternOrig , <nl> - logop , <nl> - debug , <nl> - fromMigrate ) ; <nl> - if ( result . existing | | ! upsert ) { <nl> - return result ; <nl> - } <nl> - else if ( upsert & & ! isOperatorUpdate ) { <nl> - / / this handles repl inserts <nl> - checkNoMods ( updateobj ) ; <nl> - debug . upsert = true ; <nl> - BSONObj no = updateobj ; <nl> - theDataFileMgr . insertWithObjMod ( ns , no , false , su ) ; <nl> - if ( logop ) <nl> - logOp ( " i " , ns , no , 0 , 0 , fromMigrate , & no ) ; <nl> - <nl> - return UpdateResult ( 0 , 0 , 1 , no ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - int numModded = 0 ; <nl> - debug . 
nscanned = 0 ; <nl> - shared_ptr < Cursor > c = getOptimizedCursor ( ns , patternOrig , BSONObj ( ) , planPolicy ) ; <nl> - d = nsdetails ( ns ) ; <nl> - nsdt = & NamespaceDetailsTransient : : get ( ns ) ; <nl> - bool autoDedup = c - > autoDedup ( ) ; <nl> - <nl> - if ( c - > ok ( ) ) { <nl> - set < DiskLoc > seenObjects ; <nl> - MatchDetails details ; <nl> - auto_ptr < ClientCursor > cc ; <nl> - do { <nl> - <nl> - if ( cc . get ( ) = = 0 & & <nl> - client . allowedToThrowPageFaultException ( ) & & <nl> - ! c - > currLoc ( ) . isNull ( ) & & <nl> - ! c - > currLoc ( ) . rec ( ) - > likelyInPhysicalMemory ( ) ) { <nl> - throw PageFaultException ( c - > currLoc ( ) . rec ( ) ) ; <nl> - } <nl> - <nl> - bool atomic = c - > matcher ( ) & & c - > matcher ( ) - > docMatcher ( ) . atomic ( ) ; <nl> - <nl> - if ( ! atomic & & debug . nscanned > 0 ) { <nl> - / / we need to use a ClientCursor to yield <nl> - if ( cc . get ( ) = = 0 ) { <nl> - shared_ptr < Cursor > cPtr = c ; <nl> - cc . reset ( new ClientCursor ( QueryOption_NoCursorTimeout , cPtr , ns ) ) ; <nl> - } <nl> - <nl> - bool didYield ; <nl> - if ( ! cc - > yieldSometimes ( ClientCursor : : WillNeed , & didYield ) ) { <nl> - cc . release ( ) ; <nl> - break ; <nl> - } <nl> - if ( ! c - > ok ( ) ) { <nl> - break ; <nl> - } <nl> - <nl> - if ( didYield ) { <nl> - d = nsdetails ( ns ) ; <nl> - if ( ! d ) <nl> - break ; <nl> - nsdt = & NamespaceDetailsTransient : : get ( ns ) ; <nl> - if ( mods . get ( ) ) { <nl> - mods - > setIndexedStatus ( nsdt - > indexKeys ( ) ) ; <nl> - modsIsIndexed = mods - > maxNumIndexUpdated ( ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - } / / end yielding block <nl> - <nl> - debug . nscanned + + ; <nl> - <nl> - if ( mods . get ( ) & & mods - > hasDynamicArray ( ) ) { <nl> - details . requestElemMatchKey ( ) ; <nl> - } <nl> - <nl> - if ( ! c - > currentMatches ( & details ) ) { <nl> - c - > advance ( ) ; <nl> - continue ; <nl> - } <nl> - <nl> - Record * r = c - > _current ( ) ; <nl> - DiskLoc loc = c - > currLoc ( ) ; <nl> - <nl> - if ( c - > getsetdup ( loc ) & & autoDedup ) { <nl> - c - > advance ( ) ; <nl> - continue ; <nl> - } <nl> - <nl> - BSONObj pattern = patternOrig ; <nl> - <nl> - if ( logop ) { <nl> - BSONObj js = BSONObj : : make ( r ) ; <nl> - BSONObj idQuery = makeOplogEntryQuery ( js , multi ) ; <nl> - pattern = idQuery ; <nl> - } <nl> - <nl> - / * look for $ inc etc . note as listed here , all fields to inc must be this type , you can ' t set some <nl> - regular ones at the moment . * / <nl> - if ( isOperatorUpdate ) { <nl> - <nl> - if ( multi ) { <nl> - / / go to next record in case this one moves <nl> - c - > advance ( ) ; <nl> - <nl> - / / Update operations are deduped for cursors that implement their own <nl> - / / deduplication . In particular , some geo cursors are excluded . <nl> - if ( autoDedup ) { <nl> - <nl> - if ( seenObjects . count ( loc ) ) { <nl> - continue ; <nl> - } <nl> - <nl> - / / SERVER - 5198 Advance past the document to be modified , provided <nl> - / / deduplication is enabled , but see SERVER - 5725 . <nl> - while ( c - > ok ( ) & & loc = = c - > currLoc ( ) ) { <nl> - c - > advance ( ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - const BSONObj & onDisk = loc . obj ( ) ; <nl> - <nl> - ModSet * useMods = mods . get ( ) ; <nl> - <nl> - auto_ptr < ModSet > mymodset ; <nl> - if ( details . hasElemMatchKey ( ) & & mods - > hasDynamicArray ( ) ) { <nl> - useMods = mods - > fixDynamicArray ( details . elemMatchKey ( ) ) ; <nl> - mymodset . 
reset ( useMods ) ; <nl> - } <nl> - <nl> - auto_ptr < ModSetState > mss = useMods - > prepare ( onDisk , <nl> - false / * not an insertion * / ) ; <nl> - <nl> - bool willAdvanceCursor = multi & & c - > ok ( ) & & ( modsIsIndexed | | ! mss - > canApplyInPlace ( ) ) ; <nl> - <nl> - if ( willAdvanceCursor ) { <nl> - if ( cc . get ( ) ) { <nl> - cc - > setDoingDeletes ( true ) ; <nl> - } <nl> - c - > prepareToTouchEarlierIterate ( ) ; <nl> - } <nl> - <nl> - / / If we ' ve made it this far , " ns " must contain a valid collection name , and so <nl> - / / is of the form " db . collection " . Therefore , the following expression must <nl> - / / always be valid . " system . users " updates must never be done in place , in <nl> - / / order to ensure that they are validated inside DataFileMgr : : updateRecord ( . ) . <nl> - bool isSystemUsersMod = nsToCollectionSubstring ( ns ) = = " system . users " ; <nl> - <nl> - BSONObj newObj ; <nl> - if ( ! mss - > isUpdateIndexed ( ) & & mss - > canApplyInPlace ( ) & & ! isSystemUsersMod ) { <nl> - mss - > applyModsInPlace ( true ) ; / / const_cast < BSONObj & > ( onDisk ) ) ; <nl> - <nl> - DEBUGUPDATE ( " \ t \ t \ t doing in place update " ) ; <nl> - if ( ! multi ) <nl> - debug . fastmod = true ; <nl> - <nl> - if ( modsIsIndexed ) { <nl> - seenObjects . insert ( loc ) ; <nl> - } <nl> - newObj = loc . obj ( ) ; <nl> - d - > paddingFits ( ) ; <nl> - } <nl> - else { <nl> - newObj = mss - > createNewFromMods ( ) ; <nl> - checkTooLarge ( newObj ) ; <nl> - DiskLoc newLoc = theDataFileMgr . updateRecord ( ns , <nl> - d , <nl> - nsdt , <nl> - r , <nl> - loc , <nl> - newObj . objdata ( ) , <nl> - newObj . objsize ( ) , <nl> - debug ) ; <nl> - <nl> - if ( newLoc ! = loc | | modsIsIndexed ) { <nl> - / / log ( ) < < " Moved obj " < < newLoc . obj ( ) [ " _id " ] < < " from " < < loc < < " to " < < newLoc < < endl ; <nl> - / / object moved , need to make sure we don ' get again <nl> - seenObjects . insert ( newLoc ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - if ( logop ) { <nl> - DEV verify ( mods - > size ( ) ) ; <nl> - BSONObj logObj = mss - > getOpLogRewrite ( ) ; <nl> - DEBUGUPDATE ( " \ t rewrite update : " < < logObj ) ; <nl> - <nl> - / / It is possible that the entire mod set was a no - op over this <nl> - / / document . We would have an empty log record in that case . If we <nl> - / / call logOp , with an empty record , that would be replicated as " clear <nl> - / / this record " , which is not what we want . Therefore , to get a no - op <nl> - / / in the replica , we simply don ' t log . <nl> - if ( logObj . nFields ( ) ) { <nl> - logOp ( " u " , ns , logObj , & pattern , 0 , fromMigrate , & newObj ) ; <nl> - } <nl> - } <nl> - numModded + + ; <nl> - if ( ! multi ) <nl> - return UpdateResult ( 1 , 1 , numModded , BSONObj ( ) ) ; <nl> - if ( willAdvanceCursor ) <nl> - c - > recoverFromTouchingEarlierIterate ( ) ; <nl> - <nl> - getDur ( ) . commitIfNeeded ( ) ; <nl> - <nl> - continue ; <nl> - } <nl> - <nl> - uassert ( 10158 , " multi update only works with $ operators " , ! multi ) ; <nl> - <nl> - BSONElementManipulator : : lookForTimestamps ( updateobj ) ; <nl> - checkNoMods ( updateobj ) ; <nl> - theDataFileMgr . updateRecord ( ns , d , nsdt , r , loc , updateobj . objdata ( ) , updateobj . objsize ( ) , debug , su ) ; <nl> - if ( logop ) { <nl> - DEV wassert ( ! su ) ; / / super used doesn ' t get logged , this would be bad . 
<nl> - logOp ( " u " , ns , updateobj , & pattern , 0 , fromMigrate , & updateobj ) ; <nl> - } <nl> - return UpdateResult ( 1 , 0 , 1 , BSONObj ( ) ) ; <nl> - } while ( c - > ok ( ) ) ; <nl> - } / / endif <nl> - <nl> - if ( numModded ) <nl> - return UpdateResult ( 1 , 1 , numModded , BSONObj ( ) ) ; <nl> - <nl> - if ( upsert ) { <nl> - if ( updateobj . firstElementFieldName ( ) [ 0 ] = = ' $ ' ) { <nl> - / / upsert of an $ operation . build a default object <nl> - BSONObj newObj = mods - > createNewFromQuery ( patternOrig ) ; <nl> - checkNoMods ( newObj ) ; <nl> - debug . fastmodinsert = true ; <nl> - theDataFileMgr . insertWithObjMod ( ns , newObj , false , su ) ; <nl> - if ( logop ) <nl> - logOp ( " i " , ns , newObj , 0 , 0 , fromMigrate , & newObj ) ; <nl> - <nl> - return UpdateResult ( 0 , 1 , 1 , newObj ) ; <nl> - } <nl> - uassert ( 10159 , " multi update only works with $ operators " , ! multi ) ; <nl> - checkNoMods ( updateobj ) ; <nl> - debug . upsert = true ; <nl> - BSONObj no = updateobj ; <nl> - theDataFileMgr . insertWithObjMod ( ns , no , false , su ) ; <nl> - if ( logop ) <nl> - logOp ( " i " , ns , no , 0 , 0 , fromMigrate , & no ) ; <nl> - return UpdateResult ( 0 , 0 , 1 , no ) ; <nl> - } <nl> - <nl> - return UpdateResult ( 0 , isOperatorUpdate , 0 , BSONObj ( ) ) ; <nl> - } <nl> - <nl> - void validateUpdate ( const char * ns , const BSONObj & updateobj , const BSONObj & patternOrig ) { <nl> - uassert ( 10155 , " cannot update reserved $ collection " , strchr ( ns , ' $ ' ) = = 0 ) ; <nl> - if ( strstr ( ns , " . system . " ) ) { <nl> - / * dm : it ' s very important that system . indexes is never updated as IndexDetails <nl> - has pointers into it * / <nl> - uassert ( 10156 , <nl> - str : : stream ( ) < < " cannot update system collection : " <nl> - < < ns < < " q : " < < patternOrig < < " u : " < < updateobj , <nl> - legalClientSystemNS ( ns , true ) ) ; <nl> - } <nl> - } <nl> - <nl> - UpdateResult _updateObjectsNEW ( bool su , <nl> - const char * ns , <nl> - const BSONObj & updateobj , <nl> - const BSONObj & patternOrig , <nl> - bool upsert , <nl> - bool multi , <nl> - bool logop , <nl> - OpDebug & debug , <nl> - RemoveSaver * rs , <nl> - bool fromMigrate , <nl> - const QueryPlanSelectionPolicy & planPolicy , <nl> - bool forReplication ) { <nl> - <nl> / / TODO : Put this logic someplace central and check based on constants ( maybe using the <nl> / / list of actually excluded config collections , and not global for the config db ) . <nl> NamespaceString nsStr ( ns ) ; <nl> namespace mongo { <nl> uasserted ( 16840 , status . 
reason ( ) ) ; <nl> } <nl> <nl> - return _updateObjectsNEW ( & driver , su , ns , updateobj , patternOrig , <nl> - upsert , multi , logop , debug , rs , fromMigrate , <nl> - planPolicy , forReplication ) ; <nl> + return _updateObjects ( & driver , su , ns , updateobj , patternOrig , <nl> + upsert , multi , logop , debug , rs , fromMigrate , <nl> + planPolicy , forReplication ) ; <nl> } <nl> <nl> - UpdateResult _updateObjectsNEW ( UpdateDriver * driver , <nl> - bool su , <nl> - const char * ns , <nl> - const BSONObj & updateobj , <nl> - const BSONObj & patternOrig , <nl> - bool upsert , <nl> - bool multi , <nl> - bool logop , <nl> - OpDebug & debug , <nl> - RemoveSaver * rs , <nl> - bool fromMigrate , <nl> - const QueryPlanSelectionPolicy & planPolicy , <nl> - bool forReplication ) { <nl> + UpdateResult _updateObjects ( UpdateDriver * driver , <nl> + bool su , <nl> + const char * ns , <nl> + const BSONObj & updateobj , <nl> + const BSONObj & patternOrig , <nl> + bool upsert , <nl> + bool multi , <nl> + bool logop , <nl> + OpDebug & debug , <nl> + RemoveSaver * rs , <nl> + bool fromMigrate , <nl> + const QueryPlanSelectionPolicy & planPolicy , <nl> + bool forReplication ) { <nl> <nl> NamespaceDetails * d = nsdetails ( ns ) ; <nl> NamespaceDetailsTransient * nsdt = & NamespaceDetailsTransient : : get ( ns ) ; <nl> namespace mongo { <nl> <nl> validateUpdate ( ns , updateobj , patternOrig ) ; <nl> <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - <nl> - UpdateResult ur = _updateObjectsNEW ( false , ns , updateobj , patternOrig , <nl> - upsert , multi , logop , <nl> - debug , NULL , fromMigrate , planPolicy ) ; <nl> - debug . nupdated = ur . num ; <nl> - return ur ; <nl> - } <nl> - else { <nl> - <nl> - UpdateResult ur = _updateObjects ( false , ns , updateobj , patternOrig , <nl> - upsert , multi , logop , <nl> - debug , NULL , fromMigrate , planPolicy ) ; <nl> - debug . nupdated = ur . num ; <nl> - return ur ; <nl> - } <nl> + UpdateResult ur = _updateObjects ( false , ns , updateobj , patternOrig , <nl> + upsert , multi , logop , <nl> + debug , NULL , fromMigrate , planPolicy ) ; <nl> + debug . nupdated = ur . num ; <nl> + return ur ; <nl> } <nl> <nl> UpdateResult updateObjects ( UpdateDriver * driver , <nl> namespace mongo { <nl> <nl> validateUpdate ( ns , updateobj , patternOrig ) ; <nl> <nl> - verify ( isNewUpdateFrameworkEnabled ( ) ) ; <nl> - <nl> - UpdateResult ur = _updateObjectsNEW ( driver , false , ns , updateobj , patternOrig , <nl> - upsert , multi , logop , <nl> - debug , NULL , fromMigrate , planPolicy ) ; <nl> + UpdateResult ur = _updateObjects ( driver , false , ns , updateobj , patternOrig , <nl> + upsert , multi , logop , <nl> + debug , NULL , fromMigrate , planPolicy ) ; <nl> debug . nupdated = ur . num ; <nl> return ur ; <nl> } <nl> namespace mongo { <nl> <nl> validateUpdate ( ns , updateobj , patternOrig ) ; <nl> <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - <nl> - UpdateResult ur = _updateObjectsNEW ( false , <nl> - ns , <nl> - updateobj , <nl> - patternOrig , <nl> - upsert , <nl> - multi , <nl> - logop , <nl> - debug , <nl> - NULL / * no remove saver * / , <nl> - fromMigrate , <nl> - planPolicy , <nl> - true / * for replication * / ) ; <nl> - debug . nupdated = ur . 
num ; <nl> - return ur ; <nl> - <nl> - } <nl> - else { <nl> - <nl> - UpdateResult ur = _updateObjects ( false , <nl> - ns , <nl> - updateobj , <nl> - patternOrig , <nl> - upsert , <nl> - multi , <nl> - logop , <nl> - debug , <nl> - NULL / * no remove saver * / , <nl> - fromMigrate , <nl> - planPolicy , <nl> - true / * for replication * / ) ; <nl> - debug . nupdated = ur . num ; <nl> - return ur ; <nl> + UpdateResult ur = _updateObjects ( false , <nl> + ns , <nl> + updateobj , <nl> + patternOrig , <nl> + upsert , <nl> + multi , <nl> + logop , <nl> + debug , <nl> + NULL / * no remove saver * / , <nl> + fromMigrate , <nl> + planPolicy , <nl> + true / * for replication * / ) ; <nl> + debug . nupdated = ur . num ; <nl> + return ur ; <nl> <nl> - } <nl> } <nl> <nl> BSONObj applyUpdateOperators ( const BSONObj & from , const BSONObj & operators ) { <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - UpdateDriver : : Options opts ; <nl> - opts . multi = false ; <nl> - opts . upsert = false ; <nl> - UpdateDriver driver ( opts ) ; <nl> - Status status = driver . parse ( operators ) ; <nl> - if ( ! status . isOK ( ) ) { <nl> - uasserted ( 16838 , status . reason ( ) ) ; <nl> - } <nl> - <nl> - mutablebson : : Document doc ( from , mutablebson : : Document : : kInPlaceDisabled ) ; <nl> - status = driver . update ( StringData ( ) , & doc , NULL / * not oplogging * / ) ; <nl> - if ( ! status . isOK ( ) ) { <nl> - uasserted ( 16839 , status . reason ( ) ) ; <nl> - } <nl> - <nl> - return doc . getObject ( ) ; <nl> + UpdateDriver : : Options opts ; <nl> + opts . multi = false ; <nl> + opts . upsert = false ; <nl> + UpdateDriver driver ( opts ) ; <nl> + Status status = driver . parse ( operators ) ; <nl> + if ( ! status . isOK ( ) ) { <nl> + uasserted ( 16838 , status . reason ( ) ) ; <nl> } <nl> - else { <nl> - ModSet mods ( operators ) ; <nl> - return mods . prepare ( from , false / * not an insertion * / ) - > createNewFromMods ( ) ; <nl> + <nl> + mutablebson : : Document doc ( from , mutablebson : : Document : : kInPlaceDisabled ) ; <nl> + status = driver . update ( StringData ( ) , & doc , NULL / * not oplogging * / ) ; <nl> + if ( ! status . isOK ( ) ) { <nl> + uasserted ( 16839 , status . reason ( ) ) ; <nl> } <nl> + <nl> + return doc . getObject ( ) ; <nl> } <nl> <nl> } / / namespace mongo <nl> mmm a / src / mongo / db / ops / update . h <nl> ppp b / src / mongo / db / ops / update . h <nl> namespace mongo { <nl> <nl> class RemoveSaver ; <nl> <nl> - / * * Returns true if updates are supposed to be handle by the new update framework * / <nl> - bool isNewUpdateFrameworkEnabled ( ) ; <nl> - <nl> - / * * switches state from enabled / disabled ; returns new state * / <nl> - bool toggleNewUpdateFrameworkEnabled ( ) ; <nl> - <nl> / * returns true if an existing object was updated , false if no existing object was found . 
<nl> multi - update multiple objects - mostly useful with things like $ set <nl> su - allow access to system namespaces ( super user ) <nl> namespace mongo { <nl> = QueryPlanSelectionPolicy : : any ( ) , <nl> bool forReplication = false ) ; <nl> <nl> - UpdateResult _updateObjectsNEW ( bool su , <nl> - const char * ns , <nl> - const BSONObj & updateobj , <nl> - const BSONObj & pattern , <nl> - bool upsert , <nl> - bool multi , <nl> - bool logop , <nl> - OpDebug & debug , <nl> - RemoveSaver * rs = 0 , <nl> - bool fromMigrate = false , <nl> - const QueryPlanSelectionPolicy & planPolicy <nl> - = QueryPlanSelectionPolicy : : any ( ) , <nl> - bool forReplication = false ) ; <nl> - <nl> - UpdateResult _updateObjectsNEW ( UpdateDriver * driver , <nl> - bool su , <nl> - const char * ns , <nl> - const BSONObj & updateobj , <nl> - const BSONObj & pattern , <nl> - bool upsert , <nl> - bool multi , <nl> - bool logop , <nl> - OpDebug & debug , <nl> - RemoveSaver * rs = 0 , <nl> - bool fromMigrate = false , <nl> - const QueryPlanSelectionPolicy & planPolicy <nl> - = QueryPlanSelectionPolicy : : any ( ) , <nl> - bool forReplication = false ) ; <nl> + UpdateResult _updateObjects ( UpdateDriver * driver , <nl> + bool su , <nl> + const char * ns , <nl> + const BSONObj & updateobj , <nl> + const BSONObj & pattern , <nl> + bool upsert , <nl> + bool multi , <nl> + bool logop , <nl> + OpDebug & debug , <nl> + RemoveSaver * rs = 0 , <nl> + bool fromMigrate = false , <nl> + const QueryPlanSelectionPolicy & planPolicy <nl> + = QueryPlanSelectionPolicy : : any ( ) , <nl> + bool forReplication = false ) ; <nl> <nl> <nl> / * * <nl> deleted file mode 100644 <nl> index bb5856921c4d . . 000000000000 <nl> mmm a / src / mongo / db / ops / update_internal . cpp <nl> ppp / dev / null <nl> <nl> - / / @ file update_internal . cpp <nl> - <nl> - / * * <nl> - * Copyright ( C ) 2012 10gen Inc . <nl> - * <nl> - * This program is free software : you can redistribute it and / or modify <nl> - * it under the terms of the GNU Affero General Public License , version 3 , <nl> - * as published by the Free Software Foundation . <nl> - * <nl> - * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> - * GNU Affero General Public License for more details . <nl> - * <nl> - * You should have received a copy of the GNU Affero General Public License <nl> - * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> - * / <nl> - <nl> - # include " mongo / pch . h " <nl> - <nl> - # include " mongo / db / ops / update_internal . h " <nl> - <nl> - # include < algorithm > / / for max <nl> - <nl> - # include " mongo / db / field_ref . h " <nl> - # include " mongo / db / jsobjmanipulator . h " <nl> - # include " mongo / db / pdfile . h " <nl> - # include " mongo / db / repl / oplog . h " <nl> - # include " mongo / util / mongoutils / str . 
h " <nl> - <nl> - / / # define DEBUGUPDATE ( x ) cout < < x < < endl ; <nl> - # define DEBUGUPDATE ( x ) <nl> - <nl> - namespace mongo { <nl> - <nl> - const char * Mod : : modNames [ ] = { " $ inc " , " $ set " , " $ push " , " $ pushAll " , " $ pull " , " $ pullAll " , " $ pop " , " $ unset " , <nl> - " $ bitand " , " $ bitor " , " $ bit " , " $ addToSet " , " $ rename " , " $ rename " , <nl> - " $ setOnInsert " <nl> - } ; <nl> - unsigned Mod : : modNamesNum = sizeof ( Mod : : modNames ) / sizeof ( char * ) ; <nl> - <nl> - bool Mod : : _pullElementMatch ( BSONElement & toMatch ) const { <nl> - <nl> - if ( elt . type ( ) ! = Object ) { <nl> - / / if elt isn ' t an object , then comparison will work <nl> - return toMatch . woCompare ( elt , false ) = = 0 ; <nl> - } <nl> - <nl> - if ( matcherOnPrimitive ) <nl> - return matcher - > matches ( toMatch . wrap ( " " ) ) ; <nl> - <nl> - if ( toMatch . type ( ) ! = Object ) { <nl> - / / looking for an object , so this can ' t match <nl> - return false ; <nl> - } <nl> - <nl> - / / now we have an object on both sides <nl> - return matcher - > matches ( toMatch . embeddedObject ( ) ) ; <nl> - } <nl> - <nl> - void Mod : : appendIncremented ( BSONBuilderBase & builder , const BSONElement & in , ModState & ms ) const { <nl> - BSONType a = in . type ( ) ; <nl> - BSONType b = elt . type ( ) ; <nl> - <nl> - if ( a = = NumberDouble | | b = = NumberDouble ) { <nl> - ms . incType = NumberDouble ; <nl> - ms . incdouble = elt . numberDouble ( ) + in . numberDouble ( ) ; <nl> - } <nl> - else if ( a = = NumberLong | | b = = NumberLong ) { <nl> - ms . incType = NumberLong ; <nl> - ms . inclong = elt . numberLong ( ) + in . numberLong ( ) ; <nl> - } <nl> - else { <nl> - int x = elt . numberInt ( ) + in . numberInt ( ) ; <nl> - if ( x < 0 & & elt . numberInt ( ) > 0 & & in . numberInt ( ) > 0 ) { <nl> - / / overflow <nl> - ms . incType = NumberLong ; <nl> - ms . inclong = elt . numberLong ( ) + in . numberLong ( ) ; <nl> - } <nl> - else { <nl> - ms . incType = NumberInt ; <nl> - ms . incint = elt . numberInt ( ) + in . numberInt ( ) ; <nl> - } <nl> - } <nl> - <nl> - ms . appendIncValue ( builder , false ) ; <nl> - } <nl> - <nl> - void appendUnset ( BSONBuilderBase & builder ) { <nl> - if ( builder . isArray ( ) ) { <nl> - builder . appendNull ( ) ; <nl> - } <nl> - } <nl> - <nl> - void Mod : : apply ( BSONBuilderBase & builder , BSONElement in , ModState & ms ) const { <nl> - if ( ms . dontApply ) { <nl> - / / Pass the original element through unchanged . <nl> - builder < < in ; <nl> - return ; <nl> - } <nl> - <nl> - switch ( op ) { <nl> - <nl> - case INC : { <nl> - appendIncremented ( builder , in , ms ) ; <nl> - / / We don ' t need to " fix " this operation into a $ set , for oplog purposes , <nl> - / / here . ModState : : appendForOpLog will do that for us . It relies on the new value <nl> - / / being in inc { int , long , double } inside the ModState that wraps around this Mod . <nl> - break ; <nl> - } <nl> - <nl> - case SET_ON_INSERT : <nl> - / / There is a corner case that would land us here ( making a change to an existing <nl> - / / field with $ setOnInsert ) . If we ' re in an upsert , and the query portion of the <nl> - / / update creates a field , we can modify it with $ setOnInsert . This degenerates <nl> - / / into a $ set , so we fall through to the next case . <nl> - ms . fixedOpName = " $ set " ; <nl> - / / Fall through . <nl> - <nl> - case SET : { <nl> - _checkForAppending ( elt ) ; <nl> - builder . 
appendAs ( elt , shortFieldName ) ; <nl> - break ; <nl> - } <nl> - <nl> - case UNSET : { <nl> - appendUnset ( builder ) ; <nl> - break ; <nl> - } <nl> - <nl> - case PUSH : { <nl> - uassert ( 10131 , " $ push can only be applied to an array " , in . type ( ) = = Array ) ; <nl> - <nl> - / / <nl> - / / We can be in a single element push case , a " push all " case , or a " push all " case <nl> - / / with a slice requirement ( ie , a " push to size " ) . In each of these , we decide <nl> - / / differently how much of the existing - and of the parameter - array to copy to the <nl> - / / final object . <nl> - / / <nl> - <nl> - / / Start the resulting array ' s builder . <nl> - BSONArrayBuilder bb ( builder . subarrayStart ( shortFieldName ) ) ; <nl> - <nl> - / / If in the single element push case , we ' ll copy all elements of the existing <nl> - / / array and add the new one . <nl> - if ( ! isEach ( ) ) { <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - bb . append ( elt ) ; <nl> - <nl> - / / We don ' t want to log a positional $ set for which the ' _checkForAppending ' test <nl> - / / won ' t pass . If we ' re in that case , fall back to non - optimized logging . <nl> - if ( ( elt . type ( ) = = Object & & elt . embeddedObject ( ) . okForStorage ( ) ) | | <nl> - ( elt . type ( ) ! = Object ) ) { <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forcePositional = true ; <nl> - ms . position = bb . arrSize ( ) - 1 ; <nl> - bb . done ( ) ; <nl> - } <nl> - else { <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - / / If we ' re in the " push all " case , we ' ll copy all element of both the existing and <nl> - / / parameter arrays . <nl> - else if ( isEach ( ) & & ! isSliceOnly ( ) & & ! isSliceAndSort ( ) ) { <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - BSONObjIterator j ( getEach ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - bb . append ( j . next ( ) ) ; <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - <nl> - / / If we ' re in the " push with a $ each " case with slice , we have to decide how much <nl> - / / of each of the existing and parameter arrays to copy to the final object . <nl> - else if ( isSliceOnly ( ) ) { <nl> - long long slice = getSlice ( ) ; <nl> - BSONObj eachArray = getEach ( ) ; <nl> - long long arraySize = in . embeddedObject ( ) . nFields ( ) ; <nl> - long long eachArraySize = eachArray . nFields ( ) ; <nl> - <nl> - / / Zero slice is equivalent to resetting the array in the final object , so <nl> - / / we won ' t copy anything . <nl> - if ( slice = = 0 ) { <nl> - / / no - op <nl> - } <nl> - <nl> - / / If the parameter array alone is larger than the slice , then only copy <nl> - / / object from that array . <nl> - else if ( slice < = eachArraySize ) { <nl> - long long skip = eachArraySize - slice ; <nl> - BSONObjIterator j ( getEach ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - if ( skip - - > 0 ) { <nl> - j . next ( ) ; <nl> - continue ; <nl> - } <nl> - bb . append ( j . 
next ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - / / If the parameter array is not sufficient to fill the slice , then some ( or all ) <nl> - / / the elements from the existing array will be copied too . <nl> - else { <nl> - long long skip = std : : max ( 0LL , arraySize - ( slice - eachArraySize ) ) ; <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - if ( skip - - > 0 ) { <nl> - i . next ( ) ; <nl> - continue ; <nl> - } <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - BSONObjIterator j ( getEach ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - bb . append ( j . next ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - <nl> - / / If we ' re in the " push all " case ( $ push with a $ each ) with sort , we have to <nl> - / / concatenate the existing array with the $ each array , sort the result , and then <nl> - / / decide how much of each of the resulting work area to copy to the final object . <nl> - else { <nl> - long long slice = getSlice ( ) ; <nl> - <nl> - / / Zero slice is equivalent to resetting the array in the final object , so <nl> - / / we only go into sorting if there is anything to sort . <nl> - if ( slice > 0 ) { <nl> - vector < BSONObj > workArea ; <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - workArea . push_back ( i . next ( ) . Obj ( ) ) ; <nl> - } <nl> - BSONObjIterator j ( getEach ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - workArea . push_back ( j . next ( ) . Obj ( ) ) ; <nl> - } <nl> - ProjectKeyCmp cmp ( getSort ( ) ) ; <nl> - sort ( workArea . begin ( ) , workArea . end ( ) , cmp ) ; <nl> - <nl> - long long skip = std : : max ( 0LL , <nl> - ( long long ) workArea . size ( ) - slice ) ; <nl> - for ( vector < BSONObj > : : iterator it = workArea . begin ( ) ; <nl> - it ! = workArea . end ( ) ; <nl> - + + it ) { <nl> - if ( skip - - > 0 ) { <nl> - continue ; <nl> - } <nl> - bb . append ( * it ) ; <nl> - } <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - <nl> - break ; <nl> - } <nl> - <nl> - case ADDTOSET : { <nl> - uassert ( 12592 , " $ addToSet can only be applied to an array " , in . type ( ) = = Array ) ; <nl> - BSONArrayBuilder bb ( builder . subarrayStart ( shortFieldName ) ) ; <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - <nl> - if ( isEach ( ) ) { <nl> - <nl> - BSONElementSet toadd ; <nl> - parseEach ( toadd ) ; <nl> - <nl> - while ( i . more ( ) ) { <nl> - BSONElement cur = i . next ( ) ; <nl> - bb . append ( cur ) ; <nl> - toadd . erase ( cur ) ; <nl> - } <nl> - <nl> - { <nl> - BSONObjIterator i ( getEach ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement e = i . next ( ) ; <nl> - if ( toadd . count ( e ) ) { <nl> - bb . append ( e ) ; <nl> - toadd . erase ( e ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - else { <nl> - <nl> - bool found = false ; <nl> - int pos = 0 ; <nl> - int count = 0 ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement cur = i . next ( ) ; <nl> - bb . append ( cur ) ; <nl> - if ( elt . 
woCompare ( cur , false ) = = 0 ) { <nl> - found = true ; <nl> - pos = count ; <nl> - } <nl> - count + + ; <nl> - } <nl> - <nl> - if ( ! found ) { <nl> - bb . append ( elt ) ; <nl> - } <nl> - <nl> - / / We don ' t want to log a positional $ set for which the ' _checkForAppending ' <nl> - / / test won ' t pass . If we ' re in that case , fall back to non - optimized logging . <nl> - if ( ( elt . type ( ) = = Object & & elt . embeddedObject ( ) . okForStorage ( ) ) | | <nl> - ( elt . type ( ) ! = Object ) ) { <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forcePositional = true ; <nl> - ms . position = found ? pos : bb . arrSize ( ) - 1 ; <nl> - bb . done ( ) ; <nl> - } <nl> - else { <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - break ; <nl> - } <nl> - <nl> - case PUSH_ALL : { <nl> - uassert ( 10132 , " $ pushAll can only be applied to an array " , in . type ( ) = = Array ) ; <nl> - uassert ( 10133 , " $ pushAll has to be passed an array " , elt . type ( ) ) ; <nl> - <nl> - BSONArrayBuilder bb ( builder . subarrayStart ( shortFieldName ) ) ; <nl> - <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - <nl> - i = BSONObjIterator ( elt . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - break ; <nl> - } <nl> - <nl> - case PULL : <nl> - case PULL_ALL : { <nl> - uassert ( 10134 , " $ pull / $ pullAll can only be applied to an array " , in . type ( ) = = Array ) ; <nl> - BSONArrayBuilder bb ( builder . subarrayStart ( shortFieldName ) ) ; <nl> - <nl> - / / temporarily record the things to pull . only use this set while ' elt ' in scope . <nl> - BSONElementSet toPull ; <nl> - if ( op = = PULL_ALL ) { <nl> - BSONObjIterator j ( elt . embeddedObject ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - toPull . insert ( j . next ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement e = i . next ( ) ; <nl> - bool allowed = true ; <nl> - <nl> - if ( op = = PULL ) { <nl> - allowed = ! _pullElementMatch ( e ) ; <nl> - } <nl> - else { <nl> - allowed = ( toPull . find ( e ) = = toPull . end ( ) ) ; <nl> - } <nl> - <nl> - if ( allowed ) <nl> - bb . append ( e ) ; <nl> - } <nl> - <nl> - / / If this is the last element of the array , then we want to write the empty array to the <nl> - / / oplog . <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - break ; <nl> - } <nl> - <nl> - case POP : { <nl> - uassert ( 10135 , " $ pop can only be applied to an array " , in . type ( ) = = Array ) ; <nl> - BSONArrayBuilder bb ( builder . subarrayStart ( shortFieldName ) ) ; <nl> - <nl> - <nl> - BSONObjIterator i ( in . embeddedObject ( ) ) ; <nl> - if ( elt . isNumber ( ) & & elt . number ( ) < 0 ) { <nl> - / / pop from front <nl> - if ( i . more ( ) ) { <nl> - i . next ( ) ; <nl> - } <nl> - <nl> - while ( i . more ( ) ) { <nl> - bb . append ( i . next ( ) ) ; <nl> - } <nl> - } <nl> - else { <nl> - / / pop from back <nl> - while ( i . more ( ) ) { <nl> - BSONElement arrI = i . next ( ) ; <nl> - if ( i . 
more ( ) ) { <nl> - bb . append ( arrI ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( bb . done ( ) . getOwned ( ) ) ; <nl> - break ; <nl> - } <nl> - <nl> - case BIT : { <nl> - uassert ( 10136 , " $ bit needs an object " , elt . type ( ) = = Object ) ; <nl> - uassert ( 10137 , " $ bit can only be applied to numbers " , in . isNumber ( ) ) ; <nl> - uassert ( 10138 , " $ bit cannot update a value of type double " , in . type ( ) ! = NumberDouble ) ; <nl> - <nl> - int x = in . numberInt ( ) ; <nl> - long long y = in . numberLong ( ) ; <nl> - <nl> - BSONObjIterator it ( elt . embeddedObject ( ) ) ; <nl> - while ( it . more ( ) ) { <nl> - BSONElement e = it . next ( ) ; <nl> - uassert ( 10139 , " $ bit field must be number " , e . isNumber ( ) ) ; <nl> - if ( str : : equals ( e . fieldName ( ) , " and " ) ) { <nl> - switch ( in . type ( ) ) { <nl> - case NumberInt : x = x & e . numberInt ( ) ; break ; <nl> - case NumberLong : y = y & e . numberLong ( ) ; break ; <nl> - default : verify ( 0 ) ; <nl> - } <nl> - } <nl> - else if ( str : : equals ( e . fieldName ( ) , " or " ) ) { <nl> - switch ( in . type ( ) ) { <nl> - case NumberInt : x = x | e . numberInt ( ) ; break ; <nl> - case NumberLong : y = y | e . numberLong ( ) ; break ; <nl> - default : verify ( 0 ) ; <nl> - } <nl> - } <nl> - else { <nl> - uasserted ( 9016 , str : : stream ( ) < < " unknown $ bit operation : " < < e . fieldName ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - switch ( in . type ( ) ) { <nl> - <nl> - case NumberInt : <nl> - builder . append ( shortFieldName , x ) ; <nl> - / / By recording the result of the bit manipulation into the ModSet , we ' ll be <nl> - / / set up so that this $ bit operation be " fixed " as a $ set of the final result <nl> - / / in the oplog . This will happen in appendForOpLog and what triggers it is <nl> - / / setting the incType in the ModSet that is around this Mod . <nl> - ms . incType = NumberInt ; <nl> - ms . incint = x ; <nl> - break ; <nl> - <nl> - case NumberLong : <nl> - / / Please see comment on fixing this $ bit into a $ set for logging purposes in <nl> - / / the NumberInt case . <nl> - builder . append ( shortFieldName , y ) ; <nl> - ms . incType = NumberLong ; <nl> - ms . inclong = y ; <nl> - break ; <nl> - <nl> - default : verify ( 0 ) ; <nl> - } <nl> - <nl> - break ; <nl> - } <nl> - <nl> - case RENAME_FROM : { <nl> - / / We don ' t need to " fix " this operation into a $ set here . ModState : : appendForOpLog <nl> - / / will do that for us . It relies on the field name being stored on this Mod . <nl> - break ; <nl> - } <nl> - <nl> - case RENAME_TO : { <nl> - / / We don ' t need to " fix " this operation into a $ set here , for the same reason we <nl> - / / didn ' t either with RENAME_FROM . <nl> - ms . handleRename ( builder , shortFieldName ) ; <nl> - break ; <nl> - } <nl> - <nl> - default : <nl> - uasserted ( 9017 , str : : stream ( ) < < " Mod : : apply can ' t handle type : " < < op ) ; <nl> - } <nl> - } <nl> - <nl> - / / - 1 inside a non - object ( non - object could be array ) <nl> - / / 0 missing <nl> - / / 1 found <nl> - int validRenamePath ( BSONObj obj , const char * path ) { <nl> - while ( const char * p = strchr ( path , ' . ' ) ) { <nl> - string left ( path , p - path ) ; <nl> - BSONElement e = obj . getField ( left ) ; <nl> - if ( e . eoo ( ) ) { <nl> - return 0 ; <nl> - } <nl> - if ( e . type ( ) ! = Object ) { <nl> - return - 1 ; <nl> - } <nl> - obj = e . 
embeddedObject ( ) ; <nl> - path = p + 1 ; <nl> - } <nl> - return ! obj . getField ( path ) . eoo ( ) ; <nl> - } <nl> - <nl> - auto_ptr < ModSetState > ModSet : : prepare ( const BSONObj & obj , bool insertion ) const { <nl> - DEBUGUPDATE ( " \ t start prepare " ) ; <nl> - auto_ptr < ModSetState > mss ( new ModSetState ( obj , <nl> - _numIndexAlwaysUpdated , <nl> - _numIndexMaybeUpdated ) ) ; <nl> - <nl> - <nl> - / / Perform this check first , so that we don ' t leave a partially modified object on uassert . <nl> - for ( ModHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; + + i ) { <nl> - DEBUGUPDATE ( " \ t \ t prepare : " < < i - > first ) ; <nl> - mss - > _mods [ i - > first ] . reset ( new ModState ( ) ) ; <nl> - ModState & ms = * mss - > _mods [ i - > first ] ; <nl> - <nl> - const Mod & m = i - > second ; <nl> - <nl> - / / Check for any positional operators that have not been replaced with a numeric field <nl> - / / name ( from a query match element ) . <nl> - / / Only perform this positional operator validation in ' strictApply ' mode . When <nl> - / / replicating from a legacy primary that does not implement this validation , the <nl> - / / secondary bypasses validation and remains consistent with the primary . <nl> - if ( m . strictApply ) { <nl> - FieldRef fieldRef ; <nl> - fieldRef . parse ( m . fieldName ) ; <nl> - StringData positionalOpField ( " $ " ) ; <nl> - for ( size_t i = 0 ; i < fieldRef . numParts ( ) ; + + i ) { <nl> - uassert ( 16650 , <nl> - " Cannot apply the positional operator without a corresponding query " <nl> - " field containing an array . " , <nl> - fieldRef . getPart ( i ) . compare ( positionalOpField ) ! = 0 ) ; <nl> - } <nl> - } <nl> - <nl> - BSONElement e = obj . getFieldDotted ( m . fieldName ) ; <nl> - <nl> - ms . m = & m ; <nl> - ms . old = e ; <nl> - <nl> - if ( m . op = = Mod : : RENAME_FROM ) { <nl> - int source = validRenamePath ( obj , m . fieldName ) ; <nl> - uassert ( 13489 , " $ rename source field invalid " , source ! = - 1 ) ; <nl> - if ( source ! = 1 ) { <nl> - ms . dontApply = true ; <nl> - } <nl> - continue ; <nl> - } <nl> - <nl> - if ( m . op = = Mod : : RENAME_TO ) { <nl> - int source = validRenamePath ( obj , m . renameFrom ( ) ) ; <nl> - if ( source = = 1 ) { <nl> - int target = validRenamePath ( obj , m . fieldName ) ; <nl> - uassert ( 13490 , " $ rename target field invalid " , target ! = - 1 ) ; <nl> - ms . newVal = obj . getFieldDotted ( m . renameFrom ( ) ) ; <nl> - mss - > amIInPlacePossible ( false ) ; <nl> - } <nl> - else { <nl> - ms . dontApply = true ; <nl> - } <nl> - continue ; <nl> - } <nl> - <nl> - if ( m . op ! = Mod : : SET_ON_INSERT & & e . eoo ( ) ) { <nl> - mss - > amIInPlacePossible ( m . op = = Mod : : UNSET ) ; <nl> - continue ; <nl> - } <nl> - <nl> - switch ( m . op ) { <nl> - case Mod : : INC : <nl> - uassert ( 10140 , " Cannot apply $ inc modifier to non - number " , e . isNumber ( ) | | e . eoo ( ) ) ; <nl> - if ( mss - > amIInPlacePossible ( e . isNumber ( ) ) ) { <nl> - / / check more typing info here <nl> - if ( m . elt . type ( ) ! = e . type ( ) ) { <nl> - / / if i ' m incrementing with a double , then the storage has to be a double <nl> - mss - > amIInPlacePossible ( m . elt . type ( ) ! = NumberDouble ) ; <nl> - } <nl> - <nl> - / / check for overflow <nl> - if ( e . type ( ) = = NumberInt & & e . numberLong ( ) + m . elt . 
numberLong ( ) > numeric_limits < int > : : max ( ) ) { <nl> - mss - > amIInPlacePossible ( false ) ; <nl> - } <nl> - } <nl> - break ; <nl> - <nl> - case Mod : : SET : <nl> - mss - > amIInPlacePossible ( m . elt . type ( ) = = e . type ( ) & & <nl> - m . elt . valuesize ( ) = = e . valuesize ( ) ) ; <nl> - break ; <nl> - <nl> - case Mod : : SET_ON_INSERT : <nl> - / / If the document exists ( i . e . this is an update , not an insert ) $ setOnInsert <nl> - / / becomes a no - op . <nl> - if ( ! insertion ) { <nl> - ms . dontApply = true ; <nl> - mss - > amIInPlacePossible ( true ) ; <nl> - } <nl> - else { <nl> - mss - > amIInPlacePossible ( false ) ; <nl> - } <nl> - break ; <nl> - <nl> - case Mod : : PUSH : <nl> - case Mod : : PUSH_ALL : <nl> - uassert ( 10141 , <nl> - " Cannot apply $ push / $ pushAll modifier to non - array " , <nl> - e . type ( ) = = Array | | e . eoo ( ) ) ; <nl> - <nl> - / / Currently , we require the base array of a $ sort to be made of <nl> - / / objects ( as opposed to base types ) . <nl> - if ( ! e . eoo ( ) & & m . isEach ( ) & & m . isSliceAndSort ( ) ) { <nl> - BSONObjIterator i ( e . embeddedObject ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement arrayItem = i . next ( ) ; <nl> - uassert ( 16638 , <nl> - " $ sort can only be applied to an array of objects " , <nl> - arrayItem . type ( ) = = Object ) ; <nl> - } <nl> - } <nl> - mss - > amIInPlacePossible ( false ) ; <nl> - break ; <nl> - <nl> - case Mod : : PULL : <nl> - case Mod : : PULL_ALL : { <nl> - uassert ( 10142 , <nl> - " Cannot apply $ pull / $ pullAll modifier to non - array " , <nl> - e . type ( ) = = Array | | e . eoo ( ) ) ; <nl> - <nl> - / / temporarily record the things to pull . only use this set while ' m . elt ' in scope . <nl> - BSONElementSet toPull ; <nl> - if ( m . op = = Mod : : PULL_ALL ) { <nl> - BSONObjIterator j ( m . elt . embeddedObject ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - toPull . insert ( j . next ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - BSONObjIterator i ( e . embeddedObject ( ) ) ; <nl> - while ( mss - > _inPlacePossible & & i . more ( ) ) { <nl> - BSONElement arrI = i . next ( ) ; <nl> - if ( m . op = = Mod : : PULL ) { <nl> - mss - > amIInPlacePossible ( ! m . _pullElementMatch ( arrI ) ) ; <nl> - } <nl> - else if ( m . op = = Mod : : PULL_ALL ) { <nl> - mss - > amIInPlacePossible ( toPull . find ( arrI ) = = toPull . end ( ) ) ; <nl> - } <nl> - } <nl> - break ; <nl> - } <nl> - <nl> - case Mod : : POP : { <nl> - uassert ( 10143 , <nl> - " Cannot apply $ pop modifier to non - array " , <nl> - e . type ( ) = = Array | | e . eoo ( ) ) ; <nl> - mss - > amIInPlacePossible ( e . embeddedObject ( ) . isEmpty ( ) ) ; <nl> - break ; <nl> - } <nl> - <nl> - case Mod : : ADDTOSET : { <nl> - uassert ( 12591 , <nl> - " Cannot apply $ addToSet modifier to non - array " , <nl> - e . type ( ) = = Array | | e . eoo ( ) ) ; <nl> - <nl> - BSONObjIterator i ( e . embeddedObject ( ) ) ; <nl> - if ( m . isEach ( ) ) { <nl> - BSONElementSet toadd ; <nl> - m . parseEach ( toadd ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement arrI = i . next ( ) ; <nl> - toadd . erase ( arrI ) ; <nl> - } <nl> - mss - > amIInPlacePossible ( toadd . size ( ) = = 0 ) ; <nl> - } <nl> - else { <nl> - bool found = false ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement arrI = i . next ( ) ; <nl> - if ( arrI . woCompare ( m .
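The numeric_limits test above is what decides whether an int32 field can absorb a $inc in place: if the sum would leave 32-bit range, the document must be rewritten so the field can widen. A standalone restatement (note the original tests only the positive bound; this sketch guards both directions):

```cpp
#include <cstdint>
#include <iostream>
#include <limits>

// True when an int32 field can take the increment in place. The sum is
// computed in 64 bits so the comparison itself cannot overflow.
bool incFitsInPlace(int32_t current, int64_t increment) {
    const int64_t sum = static_cast<int64_t>(current) + increment;
    return sum <= std::numeric_limits<int32_t>::max() &&
           sum >= std::numeric_limits<int32_t>::min();
}

int main() {
    std::cout << incFitsInPlace(2147483640, 10) << "\n";  // 0: must rewrite wider
    std::cout << incFitsInPlace(100, 10) << "\n";         // 1: in-place is fine
    return 0;
}
```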
elt , false ) = = 0 ) { <nl> - found = true ; <nl> - break ; <nl> - } <nl> - } <nl> - mss - > amIInPlacePossible ( found ) ; <nl> - } <nl> - break ; <nl> - } <nl> - <nl> - default : <nl> - / / mods we don ' t know about shouldn ' t be done in place <nl> - mss - > amIInPlacePossible ( false ) ; <nl> - } <nl> - } <nl> - <nl> - DEBUGUPDATE ( " \ t mss \ n " < < mss - > toString ( ) < < " \ t - - " ) ; <nl> - <nl> - return mss ; <nl> - } <nl> - <nl> - const char * ModState : : getOpLogName ( ) const { <nl> - if ( dontApply ) { <nl> - return NULL ; <nl> - } <nl> - <nl> - if ( incType ) { <nl> - return " $ set " ; <nl> - } <nl> - <nl> - if ( m - > op = = Mod : : RENAME_FROM ) { <nl> - return " $ unset " ; <nl> - } <nl> - <nl> - if ( m - > op = = Mod : : RENAME_TO ) { <nl> - return " $ set " ; <nl> - } <nl> - <nl> - return fixedOpName ? fixedOpName : Mod : : modNames [ op ( ) ] ; <nl> - } <nl> - <nl> - <nl> - void ModState : : appendForOpLog ( BSONObjBuilder & bb ) const { <nl> - / / dontApply logic is deprecated for all but $ rename . <nl> - if ( dontApply ) { <nl> - return ; <nl> - } <nl> - <nl> - if ( incType ) { <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t \ t appendForOpLog inc fieldname : " < < m - > fieldName <nl> - < < " short : " < < m - > shortFieldName ) ; <nl> - appendIncValue ( bb , true ) ; <nl> - return ; <nl> - } <nl> - <nl> - if ( m - > op = = Mod : : RENAME_FROM ) { <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t \ t appendForOpLog RENAME_FROM fieldName : " < < m - > fieldName ) ; <nl> - bb . append ( m - > fieldName , 1 ) ; <nl> - return ; <nl> - } <nl> - <nl> - if ( m - > op = = Mod : : RENAME_TO ) { <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t \ t appendForOpLog RENAME_TO fieldName : " < < m - > fieldName ) ; <nl> - bb . appendAs ( newVal , m - > fieldName ) ; <nl> - return ; <nl> - } <nl> - <nl> - const char * name = fixedOpName ? fixedOpName : Mod : : modNames [ op ( ) ] ; <nl> - <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t \ t appendForOpLog name : " < < name < < " fixed : " < < fixed <nl> - < < " fn : " < < m - > fieldName ) ; <nl> - <nl> - if ( strcmp ( name , " $ unset " ) = = 0 ) { <nl> - bb . append ( m - > fieldName , 1 ) ; <nl> - return ; <nl> - } <nl> - <nl> - if ( fixed ) { <nl> - bb . appendAs ( * fixed , m - > fieldName ) ; <nl> - } <nl> - else if ( ! fixedArray . isEmpty ( ) | | forceEmptyArray ) { <nl> - bb . append ( m - > fieldName , fixedArray ) ; <nl> - } <nl> - else if ( forcePositional ) { <nl> - string positionalField = str : : stream ( ) < < m - > fieldName < < " . " < < position ; <nl> - bb . appendAs ( m - > elt , positionalField . c_str ( ) ) ; <nl> - } <nl> - else { <nl> - bb . appendAs ( m - > elt , m - > fieldName ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - typedef map < string , vector < ModState * > > NamedModMap ; <nl> - <nl> - BSONObj ModSetState : : getOpLogRewrite ( ) const { <nl> - NamedModMap names ; <nl> - for ( ModStateHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; + + i ) { <nl> - const char * name = i - > second - > getOpLogName ( ) ; <nl> - if ( ! name ) <nl> - continue ; <nl> - names [ name ] . push_back ( i - > second . get ( ) ) ; <nl> - } <nl> - <nl> - BSONObjBuilder b ; <nl> - for ( NamedModMap : : const_iterator i = names . begin ( ) ; <nl> - i ! = names . end ( ) ; <nl> - + + i ) { <nl> - BSONObjBuilder bb ( b . subobjStart ( i - > first ) ) ; <nl> - const vector < ModState * > & mods = i - > second ; <nl> - for ( unsigned j = 0 ; j < mods . size ( ) ; j + + ) { <nl> - mods [ j ] - > appendForOpLog ( bb ) ; <nl> - } <nl> - bb . 
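getOpLogName above reduces to a small mapping: mods whose concrete result was captured replicate as $set, a $rename replicates as a paired $unset of the source and $set of the target, and everything else keeps its own operator name. Restated as a sketch (the enum and oplogName are illustrative and cover only a few ops):

```cpp
#include <cstdio>

enum Op { INC, SET, UNSET, RENAME_FROM, RENAME_TO };

// Condensed restatement of ModState::getOpLogName above. `fixedToSet` plays
// the role of the incType / fixedOpName checks.
const char* oplogName(Op op, bool fixedToSet) {
    if (fixedToSet)
        return "$set";
    switch (op) {
        case RENAME_FROM: return "$unset";  // drop the source field
        case RENAME_TO:   return "$set";    // materialize the target field
        case INC:         return "$inc";
        case SET:         return "$set";
        case UNSET:       return "$unset";
    }
    return "?";  // unreachable with the ops above
}

int main() {
    std::printf("%s / %s\n", oplogName(RENAME_FROM, false),
                oplogName(RENAME_TO, false));  // $unset / $set
    return 0;
}
```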
doneFast ( ) ; <nl> - } <nl> - return b . obj ( ) ; <nl> - } <nl> - <nl> - string ModState : : toString ( ) const { <nl> - stringstream ss ; <nl> - if ( fixedOpName ) <nl> - ss < < " fixedOpName : " < < fixedOpName ; <nl> - if ( fixed ) <nl> - ss < < " fixed : " < < fixed ; <nl> - return ss . str ( ) ; <nl> - } <nl> - <nl> - void ModState : : handleRename ( BSONBuilderBase & newObjBuilder , const char * shortFieldName ) { <nl> - newObjBuilder . appendAs ( newVal , shortFieldName ) ; <nl> - BSONObjBuilder b ; <nl> - b . appendAs ( newVal , shortFieldName ) ; <nl> - verify ( _objData . isEmpty ( ) ) ; <nl> - _objData = b . obj ( ) ; <nl> - newVal = _objData . firstElement ( ) ; <nl> - } <nl> - <nl> - void ModSetState : : applyModsInPlace ( bool isOnDisk ) { <nl> - / / TODO i think this assert means that we can get rid of the isOnDisk param <nl> - / / and just use isOwned as the determination <nl> - DEV verify ( isOnDisk = = ! _obj . isOwned ( ) ) ; <nl> - <nl> - for ( ModStateHolder : : iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; + + i ) { <nl> - ModState & m = * i - > second ; <nl> - <nl> - if ( m . dontApply ) { <nl> - continue ; <nl> - } <nl> - <nl> - switch ( m . m - > op ) { <nl> - case Mod : : UNSET : <nl> - m . fixedOpName = " $ unset " ; <nl> - break ; <nl> - <nl> - case Mod : : ADDTOSET : <nl> - m . fixedOpName = " $ set " ; <nl> - m . fixed = & ( m . old ) ; <nl> - break ; <nl> - <nl> - case Mod : : RENAME_FROM : <nl> - case Mod : : RENAME_TO : <nl> - / / this should have been handled by prepare <nl> - break ; <nl> - <nl> - case Mod : : PULL : <nl> - case Mod : : PULL_ALL : <nl> - / / this should have been handled by prepare <nl> - m . fixedOpName = " $ set " ; <nl> - m . fixed = & ( m . old ) ; <nl> - break ; <nl> - <nl> - case Mod : : POP : <nl> - verify ( m . old . isABSONObj ( ) & & m . old . Obj ( ) . isEmpty ( ) ) ; <nl> - m . fixedOpName = " $ set " ; <nl> - m . fixed = & ( m . old ) ; <nl> - break ; <nl> - / / [ dm ] the BSONElementManipulator statements below are for replication ( correct ? ) <nl> - <nl> - case Mod : : INC : <nl> - if ( isOnDisk ) <nl> - m . m - > IncrementMe ( m . old ) ; <nl> - else <nl> - m . m - > incrementMe ( m . old ) ; <nl> - m . fixedOpName = " $ set " ; <nl> - m . fixed = & ( m . old ) ; <nl> - break ; <nl> - <nl> - case Mod : : SET : <nl> - if ( isOnDisk ) <nl> - BSONElementManipulator ( m . old ) . ReplaceTypeAndValue ( m . m - > elt ) ; <nl> - else <nl> - BSONElementManipulator ( m . old ) . replaceTypeAndValue ( m . m - > elt ) ; <nl> - break ; <nl> - <nl> - case Mod : : SET_ON_INSERT : <nl> - / / this should have been handled by prepare <nl> - break ; <nl> - <nl> - default : <nl> - uassert ( 13478 , " can ' t apply mod in place - shouldn ' t have gotten here " , 0 ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - void ModSetState : : _appendNewFromMods ( const string & root , <nl> - ModState & modState , <nl> - BSONBuilderBase & builder , <nl> - set < string > & onedownseen ) { <nl> - Mod & m = * ( ( Mod * ) ( modState . m ) ) ; / / HACK <nl> - switch ( m . op ) { <nl> - / / unset / pull / pullAll on nothing does nothing , so don ' t append anything . Still , <nl> - / / explicitly log that the target array was reset . <nl> - case Mod : : POP : <nl> - case Mod : : PULL : <nl> - case Mod : : PULL_ALL : <nl> - case Mod : : UNSET : <nl> - modState . 
fixedOpName = " $ unset " ; <nl> - return ; <nl> - <nl> - / / $ rename / $ setOnInsert may involve dotted path creation , so we want to make sure we ' re <nl> - / / not creating a path here for a rename that ' s a no - op . In other words if we ' re <nl> - / / issuing a { $ rename : { a . b : c . d } } that ' s a no - op , we don ' t want to create the a and <nl> - / / c paths here . See test NestedNoName in the ' repl ' suite . <nl> - case Mod : : RENAME_FROM : <nl> - case Mod : : RENAME_TO : <nl> - case Mod : : SET_ON_INSERT : <nl> - if ( modState . dontApply ) { <nl> - return ; <nl> - } <nl> - <nl> - default : <nl> - ; / / fall through <nl> - } <nl> - const char * temp = modState . fieldName ( ) ; <nl> - temp + = root . size ( ) ; <nl> - const char * dot = strchr ( temp , ' . ' ) ; <nl> - if ( dot ) { <nl> - string nr ( modState . fieldName ( ) , 0 , 1 + ( dot - modState . fieldName ( ) ) ) ; <nl> - string nf ( temp , 0 , dot - temp ) ; <nl> - if ( onedownseen . count ( nf ) ) <nl> - return ; <nl> - onedownseen . insert ( nf ) ; <nl> - BSONObjBuilder bb ( builder . subobjStart ( nf ) ) ; <nl> - / / Always insert an object , even if the field name is numeric . <nl> - createNewObjFromMods ( nr , bb , BSONObj ( ) ) ; <nl> - bb . done ( ) ; <nl> - } <nl> - else { <nl> - appendNewFromMod ( modState , builder ) ; <nl> - } <nl> - } <nl> - <nl> - bool ModSetState : : duplicateFieldName ( const BSONElement & a , const BSONElement & b ) { <nl> - return <nl> - ! a . eoo ( ) & & <nl> - ! b . eoo ( ) & & <nl> - ( a . rawdata ( ) ! = b . rawdata ( ) ) & & <nl> - str : : equals ( a . fieldName ( ) , b . fieldName ( ) ) ; <nl> - } <nl> - <nl> - ModSetState : : ModStateRange ModSetState : : modsForRoot ( const string & root ) { <nl> - ModStateHolder : : iterator mstart = _mods . lower_bound ( root ) ; <nl> - StringBuilder buf ; <nl> - buf < < root < < ( char ) 255 ; <nl> - ModStateHolder : : iterator mend = _mods . lower_bound ( buf . str ( ) ) ; <nl> - return make_pair ( mstart , mend ) ; <nl> - } <nl> - <nl> - void ModSetState : : createNewObjFromMods ( const string & root , <nl> - BSONObjBuilder & builder , <nl> - const BSONObj & obj ) { <nl> - BSONObjIteratorSorted es ( obj ) ; <nl> - createNewFromMods ( root , builder , es , modsForRoot ( root ) , LexNumCmp ( true ) ) ; <nl> - } <nl> - <nl> - void ModSetState : : createNewArrayFromMods ( const string & root , <nl> - BSONArrayBuilder & builder , <nl> - const BSONArray & arr ) { <nl> - BSONArrayIteratorSorted es ( arr ) ; <nl> - ModStateRange objectOrderedRange = modsForRoot ( root ) ; <nl> - ModStateHolder arrayOrderedMods ( LexNumCmp ( false ) ) ; <nl> - arrayOrderedMods . insert ( objectOrderedRange . first , objectOrderedRange . second ) ; <nl> - ModStateRange arrayOrderedRange ( arrayOrderedMods . begin ( ) , arrayOrderedMods . end ( ) ) ; <nl> - createNewFromMods ( root , builder , es , arrayOrderedRange , LexNumCmp ( false ) ) ; <nl> - } <nl> - <nl> - void ModSetState : : createNewFromMods ( const string & root , <nl> - BSONBuilderBase & builder , <nl> - BSONIteratorSorted & es , <nl> - const ModStateRange & modRange , <nl> - const LexNumCmp & lexNumCmp ) { <nl> - <nl> - DEBUGUPDATE ( " \ t \ t createNewFromMods root : " < < root ) ; <nl> - ModStateHolder : : iterator m = modRange . first ; <nl> - const ModStateHolder : : const_iterator mend = modRange . second ; <nl> - BSONElement e = es . next ( ) ; <nl> - <nl> - set < string > onedownseen ; <nl> - BSONElement prevE ; <nl> - while ( ! e . eoo ( ) & & m ! 
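modsForRoot above brackets every mod under a dotted prefix with two lower_bound calls; the upper bound key is the root plus byte 255, which sorts after any byte that can occur in a field name. The same idiom on a plain std::map, as a sketch assuming byte-wise key ordering (the real holder uses LexNumCmp) and that 0xFF never appears in a key:

```cpp
#include <iostream>
#include <map>
#include <string>

int main() {
    std::map<std::string, int> mods = {
        {"a.b", 1}, {"a.c", 2}, {"ab", 3}, {"b", 4},
    };

    // Half-open range of all keys that start with "a.": std::string compares
    // bytes as unsigned char, so root + '\xFF' bounds the prefix from above.
    const std::string root = "a.";
    auto first = mods.lower_bound(root);
    auto last = mods.lower_bound(root + '\xFF');

    for (auto it = first; it != last; ++it)
        std::cout << it->first << "\n";  // prints a.b and a.c, not ab or b
    return 0;
}
```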
= mend ) { <nl> - <nl> - if ( duplicateFieldName ( prevE , e ) ) { <nl> - / / Just copy through an element with a duplicate field name . <nl> - builder . append ( e ) ; <nl> - prevE = e ; <nl> - e = es . next ( ) ; <nl> - continue ; <nl> - } <nl> - prevE = e ; <nl> - <nl> - string field = root + e . fieldName ( ) ; <nl> - FieldCompareResult cmp = compareDottedFieldNames ( m - > second - > m - > fieldName , field , <nl> - lexNumCmp ) ; <nl> - <nl> - DEBUGUPDATE ( " \ t \ t \ t field : " < < field < < " \ t mod : " <nl> - < < m - > second - > m - > fieldName < < " \ t cmp : " < < cmp <nl> - < < " \ t short : " < < e . fieldName ( ) ) ; <nl> - <nl> - switch ( cmp ) { <nl> - <nl> - case LEFT_SUBFIELD : { / / Mod is embedded under this element <nl> - <nl> - / / SERVER - 4781 <nl> - bool isObjOrArr = e . type ( ) = = Object | | e . type ( ) = = Array ; <nl> - if ( ! isObjOrArr ) { <nl> - if ( m - > second - > m - > strictApply ) { <nl> - uasserted ( 10145 , <nl> - str : : stream ( ) < < " LEFT_SUBFIELD only supports Object : " < < field <nl> - < < " not : " < < e . type ( ) ) ; <nl> - } <nl> - else { <nl> - / / Since we ' re not applying the mod , we keep what was there before <nl> - builder . append ( e ) ; <nl> - <nl> - / / Skip both as we ' re not applying this mod . Note that we ' ll advance <nl> - / / the iterator on the mod side for all the mods that are under the <nl> - / / root we are now . <nl> - e = es . next ( ) ; <nl> - m + + ; <nl> - while ( m ! = mend & & <nl> - ( compareDottedFieldNames ( m - > second - > m - > fieldName , <nl> - field , <nl> - lexNumCmp ) = = LEFT_SUBFIELD ) ) { <nl> - m + + ; <nl> - } <nl> - continue ; <nl> - } <nl> - } <nl> - <nl> - if ( onedownseen . count ( e . fieldName ( ) ) = = 0 ) { <nl> - onedownseen . insert ( e . fieldName ( ) ) ; <nl> - if ( e . type ( ) = = Object ) { <nl> - BSONObjBuilder bb ( builder . subobjStart ( e . fieldName ( ) ) ) ; <nl> - stringstream nr ; nr < < root < < e . fieldName ( ) < < " . " ; <nl> - createNewObjFromMods ( nr . str ( ) , bb , e . Obj ( ) ) ; <nl> - bb . done ( ) ; <nl> - } <nl> - else { <nl> - BSONArrayBuilder ba ( builder . subarrayStart ( e . fieldName ( ) ) ) ; <nl> - stringstream nr ; nr < < root < < e . fieldName ( ) < < " . " ; <nl> - createNewArrayFromMods ( nr . str ( ) , ba , BSONArray ( e . embeddedObject ( ) ) ) ; <nl> - ba . done ( ) ; <nl> - } <nl> - / / inc both as we handled both <nl> - e = es . next ( ) ; <nl> - m + + ; <nl> - while ( m ! = mend & & <nl> - ( compareDottedFieldNames ( m - > second - > m - > fieldName , field , lexNumCmp ) = = <nl> - LEFT_SUBFIELD ) ) { <nl> - m + + ; <nl> - } <nl> - } <nl> - else { <nl> - massert ( 16069 , " ModSet : : createNewFromMods - " <nl> - " SERVER - 4777 unhandled duplicate field " , 0 ) ; <nl> - } <nl> - continue ; <nl> - } <nl> - case LEFT_BEFORE : / / Mod on a field that doesn ' t exist <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t creating new field for : " < < m - > second - > m - > fieldName ) ; <nl> - _appendNewFromMods ( root , * m - > second , builder , onedownseen ) ; <nl> - m + + ; <nl> - continue ; <nl> - case SAME : <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t applying mod on : " < < m - > second - > m - > fieldName ) ; <nl> - m - > second - > apply ( builder , e ) ; <nl> - e = es . next ( ) ; <nl> - m + + ; <nl> - continue ; <nl> - case RIGHT_BEFORE : / / field that doesn ' t have a MOD <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t just copying " ) ; <nl> - builder . append ( e ) ; / / if array , ignore field name <nl> - e = es . 
next ( ) ; <nl> - continue ; <nl> - case RIGHT_SUBFIELD : <nl> - massert ( 10399 , " ModSet : : createNewFromMods - RIGHT_SUBFIELD should be impossible " , 0 ) ; <nl> - break ; <nl> - default : <nl> - massert ( 10400 , " unhandled case " , 0 ) ; <nl> - } <nl> - } <nl> - <nl> - / / finished looping the mods , just adding the rest of the elements <nl> - while ( ! e . eoo ( ) ) { <nl> - DEBUGUPDATE ( " \ t \ t \ t copying : " < < e . fieldName ( ) ) ; <nl> - builder . append ( e ) ; / / if array , ignore field name <nl> - e = es . next ( ) ; <nl> - } <nl> - <nl> - / / do mods that don ' t have fields already <nl> - for ( ; m ! = mend ; m + + ) { <nl> - DEBUGUPDATE ( " \ t \ t \ t \ t appending from mod at end : " < < m - > second - > m - > fieldName ) ; <nl> - _appendNewFromMods ( root , * m - > second , builder , onedownseen ) ; <nl> - } <nl> - } <nl> - <nl> - BSONObj ModSetState : : createNewFromMods ( ) { <nl> - BSONObjBuilder b ( ( int ) ( _obj . objsize ( ) * 1 . 1 ) ) ; <nl> - createNewObjFromMods ( " " , b , _obj ) ; <nl> - return _newFromMods = b . obj ( ) ; <nl> - } <nl> - <nl> - string ModSetState : : toString ( ) const { <nl> - stringstream ss ; <nl> - for ( ModStateHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; + + i ) { <nl> - ss < < " \ t \ t " < < i - > first < < " \ t " < < i - > second - > toString ( ) < < " \ n " ; <nl> - } <nl> - return ss . str ( ) ; <nl> - } <nl> - <nl> - bool ModSetState : : isUpdateIndexedSlow ( ) const { <nl> - / / There may be indices over fields for which Mods are no - ops . In other words , if a <nl> - / / Mod touches an index field but that Mod is a no - op , this update may be <nl> - / / considered one that does not update indices . <nl> - if ( _numIndexMaybeUpdated = = 0 ) { <nl> - return false ; <nl> - } <nl> - else { <nl> - for ( ModStateHolder : : const_iterator it = _mods . begin ( ) ; <nl> - it ! = _mods . end ( ) ; <nl> - + + it ) { <nl> - const Mod * m = it - > second - > m ; <nl> - shared_ptr < ModState > ms = it - > second ; <nl> - <nl> - switch ( m - > op ) { <nl> - case Mod : : SET_ON_INSERT : <nl> - case Mod : : RENAME_FROM : <nl> - case Mod : : RENAME_TO : <nl> - if ( m - > isIndexed & & ! ms - > dontApply ) { <nl> - return true ; <nl> - } <nl> - break ; <nl> - <nl> - default : <nl> - / / no - op <nl> - break ; <nl> - } <nl> - } <nl> - <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - <nl> - BSONObj ModSet : : createNewFromQuery ( const BSONObj & query ) { <nl> - BSONObj newObj ; <nl> - <nl> - { <nl> - BSONObjBuilder bb ; <nl> - EmbeddedBuilder eb ( & bb ) ; <nl> - BSONObjIteratorSorted i ( query ) ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement e = i . next ( ) ; <nl> - if ( e . fieldName ( ) [ 0 ] = = ' $ ' ) / / for $ atomic and anything else we add <nl> - continue ; <nl> - <nl> - if ( e . type ( ) = = Object & & e . embeddedObject ( ) . firstElementFieldName ( ) [ 0 ] = = ' $ ' ) { <nl> - / / we have something like { x : { $ gt : 5 } } <nl> - / / this can be a query piece <nl> - / / or can be a dbref or something <nl> - <nl> - int op = e . embeddedObject ( ) . firstElement ( ) . getGtLtOp ( ) ; <nl> - if ( op > 0 ) { <nl> - / / This means this is a $ gt type filter , so don ' t make it part of the new <nl> - / / object . <nl> - continue ; <nl> - } <nl> - <nl> - if ( str : : equals ( e . embeddedObject ( ) . firstElement ( ) . fieldName ( ) , " $ not " ) ) { <nl> - / / A $ not filter operator is not detected in getGtLtOp ( ) and should not <nl> - / / become part of the new object . 
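createNewFromQuery above seeds an upsert document from the query, dropping top-level $ fields and any operator expression such as {$gt: 5} or {$not: ...}. A toy restatement with strings standing in for BSON values; the leading-'$' test on the value is a stand-in for the getGtLtOp and $not checks:

```cpp
#include <iostream>
#include <map>
#include <string>

int main() {
    // Query stand-in: values beginning with '$' mark operator expressions
    // that must not become part of the seeded document.
    std::map<std::string, std::string> query = {
        {"name", "ada"}, {"age", "$gt:30"}, {"$atomic", "1"},
    };

    std::map<std::string, std::string> seed;
    for (const auto& kv : query) {
        if (kv.first[0] == '$')
            continue;  // e.g. $atomic, never copied
        if (!kv.second.empty() && kv.second[0] == '$')
            continue;  // filter pieces like {$gt: 5} or {$not: ...}
        seed[kv.first] = kv.second;
    }

    for (const auto& kv : seed)
        std::cout << kv.first << "=" << kv.second << "\n";  // name=ada only
    return 0;
}
```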
<nl> - continue ; <nl> - } <nl> - } <nl> - <nl> - eb . appendAs ( e , e . fieldName ( ) ) ; <nl> - } <nl> - eb . done ( ) ; <nl> - newObj = bb . obj ( ) ; <nl> - } <nl> - <nl> - auto_ptr < ModSetState > mss = prepare ( newObj , true / * this is an insertion * / ) ; <nl> - <nl> - if ( mss - > canApplyInPlace ( ) ) <nl> - mss - > applyModsInPlace ( false ) ; <nl> - else <nl> - newObj = mss - > createNewFromMods ( ) ; <nl> - <nl> - return newObj ; <nl> - } <nl> - <nl> - / * get special operations like $ inc <nl> - { $ inc : { a : 1 , b : 1 } } <nl> - { $ set : { a : 77 } } <nl> - { $ push : { a : 55 } } <nl> - { $ pushAll : { a : [ 77 , 88 ] } } <nl> - { $ pull : { a : 66 } } <nl> - { $ pullAll : { a : [ 99 , 1010 ] } } <nl> - NOTE : MODIFIES source from object ! <nl> - * / <nl> - ModSet : : ModSet ( const BSONObj & from , <nl> - const IndexPathSet & idxKeys , <nl> - bool forReplication ) <nl> - : _numIndexMaybeUpdated ( 0 ) <nl> - , _numIndexAlwaysUpdated ( 0 ) <nl> - , _hasDynamicArray ( false ) { <nl> - <nl> - BSONObjIterator it ( from ) ; <nl> - <nl> - while ( it . more ( ) ) { <nl> - BSONElement e = it . next ( ) ; <nl> - const char * fn = e . fieldName ( ) ; <nl> - <nl> - uassert ( 10147 , " Invalid modifier specified : " + string ( fn ) , e . type ( ) = = Object ) ; <nl> - BSONObj j = e . embeddedObject ( ) ; <nl> - DEBUGUPDATE ( " \ t " < < j ) ; <nl> - <nl> - BSONObjIterator jt ( j ) ; <nl> - Mod : : Op op = opFromStr ( fn ) ; <nl> - <nl> - while ( jt . more ( ) ) { <nl> - BSONElement f = jt . next ( ) ; / / x : 44 <nl> - <nl> - const char * fieldName = f . fieldName ( ) ; <nl> - <nl> - / / Allow remove of invalid field name in case it was inserted before this check <nl> - / / was added ( ~ version 2 . 1 ) . <nl> - uassert ( 15896 , <nl> - " Modified field name may not start with $ " , <nl> - fieldName [ 0 ] ! = ' $ ' | | op = = Mod : : UNSET ) ; <nl> - uassert ( 10148 , <nl> - " Mod on _id not allowed " , <nl> - strcmp ( fieldName , " _id " ) ! = 0 ) ; <nl> - uassert ( 10149 , <nl> - " Invalid mod field name , may not end in a period " , <nl> - fieldName [ strlen ( fieldName ) - 1 ] ! = ' . ' ) ; <nl> - uassert ( 10150 , <nl> - " Field name duplication not allowed with modifiers " , <nl> - ! haveModForField ( fieldName ) ) ; <nl> - uassert ( 10151 , <nl> - " have conflicting mods in update " , <nl> - ! haveConflictingMod ( fieldName ) ) ; <nl> - uassert ( 10152 , <nl> - " Modifier $ inc allowed for numbers only " , <nl> - f . isNumber ( ) | | op ! = Mod : : INC ) ; <nl> - uassert ( 10153 , <nl> - " Modifier $ pushAll / pullAll allowed for arrays only " , <nl> - f . type ( ) = = Array | | ( op ! = Mod : : PUSH_ALL & & op ! = Mod : : PULL_ALL ) ) ; <nl> - <nl> - / / Check whether $ each , $ slice , and $ sort syntax for $ push is correct . <nl> - if ( ( op = = Mod : : PUSH ) & & ( f . type ( ) = = Object ) ) { <nl> - BSONObj pushObj = f . embeddedObject ( ) ; <nl> - if ( pushObj . nFields ( ) > 0 & & <nl> - strcmp ( pushObj . firstElement ( ) . fieldName ( ) , " $ each " ) = = 0 ) { <nl> - uassert ( 16564 , <nl> - " $ each term needs to occur alone ( or with $ slice / $ sort ) " , <nl> - pushObj . nFields ( ) < = 3 ) ; <nl> - uassert ( 16565 , <nl> - " $ each requires an array value " , <nl> - pushObj . firstElement ( ) . type ( ) = = Array ) ; <nl> - <nl> - / / If both $ slice and $ sort are present , they may be switched . <nl> - if ( pushObj . nFields ( ) > 1 ) { <nl> - BSONObjIterator i ( pushObj ) ; <nl> - i . 
next ( ) ; <nl> - <nl> - bool seenSlice = false ; <nl> - bool seenSort = false ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement nextElem = i . next ( ) ; <nl> - <nl> - if ( str : : equals ( nextElem . fieldName ( ) , " $ slice " ) ) { <nl> - uassert ( 16567 , " $ slice appeared twice " , ! seenSlice ) ; <nl> - seenSlice = true ; <nl> - uassert ( 16568 , <nl> - " $ slice value must be a numeric integer " , <nl> - nextElem . type ( ) = = NumberInt | | <nl> - nextElem . type ( ) = = NumberLong | | <nl> - ( nextElem . type ( ) = = NumberDouble & & <nl> - nextElem . numberDouble ( ) = = <nl> - ( long long ) nextElem . numberDouble ( ) ) ) ; <nl> - uassert ( 16640 , <nl> - " $ slice value must be negative or zero " , <nl> - nextElem . number ( ) < = 0 ) ; <nl> - } <nl> - else if ( str : : equals ( nextElem . fieldName ( ) , " $ sort " ) ) { <nl> - uassert ( 16647 , " $ sort appeared twice " , ! seenSort ) ; <nl> - seenSort = true ; <nl> - uassert ( 16648 , <nl> - " $ sort component of $ push must be an object " , <nl> - nextElem . type ( ) = = Object ) ; <nl> - <nl> - BSONObjIterator j ( nextElem . embeddedObject ( ) ) ; <nl> - while ( j . more ( ) ) { <nl> - BSONElement fieldSortElem = j . next ( ) ; <nl> - uassert ( 16641 , <nl> - " $ sort elements ' values must be either 1 or - 1 " , <nl> - ( fieldSortElem . type ( ) = = NumberInt | | <nl> - fieldSortElem . type ( ) = = NumberLong | | <nl> - ( fieldSortElem . type ( ) = = NumberDouble & & <nl> - fieldSortElem . numberDouble ( ) = = <nl> - ( long long ) fieldSortElem . numberDouble ( ) ) ) & & <nl> - ( fieldSortElem . Number ( ) = = 1 | | <nl> - fieldSortElem . Number ( ) = = - 1 ) ) ; <nl> - <nl> - FieldRef sortField ; <nl> - sortField . parse ( fieldSortElem . fieldName ( ) ) ; <nl> - uassert ( 16690 , <nl> - " $ sort field cannot be empty " , <nl> - sortField . numParts ( ) > 0 ) ; <nl> - <nl> - for ( size_t i = 0 ; i < sortField . numParts ( ) ; i + + ) { <nl> - uassert ( 16691 , <nl> - " empty field in dotted sort pattern " , <nl> - sortField . getPart ( i ) . size ( ) > 0 ) ; <nl> - } <nl> - } <nl> - <nl> - / / Finally , check if the $ each is made of objects ( as opposed <nl> - / / to basic types ) . Currently , $ sort only supports operating <nl> - / / on arrays of objects . <nl> - BSONObj eachArray = pushObj . firstElement ( ) . embeddedObject ( ) ; <nl> - BSONObjIterator k ( eachArray ) ; <nl> - while ( k . more ( ) ) { <nl> - BSONElement eachItem = k . next ( ) ; <nl> - uassert ( 16642 , <nl> - " $ sort requires $ each to be an array of objects " , <nl> - eachItem . type ( ) = = Object ) ; <nl> - } <nl> - <nl> - } <nl> - else { <nl> - uasserted ( 16643 , <nl> - " $ each term takes only $ slice ( and optionally " <nl> - " $ sort ) as complements " ) ; <nl> - } <nl> - } <nl> - <nl> - uassert ( 16644 , " cannot have a $ sort without a $ slice " , seenSlice ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - if ( op = = Mod : : RENAME_TO ) { <nl> - uassert ( 13494 , " $ rename target must be a string " , f . type ( ) = = String ) ; <nl> - const char * target = f . valuestr ( ) ; <nl> - uassert ( 13495 , <nl> - " $ rename source must differ from target " , <nl> - strcmp ( fieldName , target ) !
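The uasserts above admit exactly three shapes for a $push argument beyond a plain value: {$each: [...]}, {$each: [...], $slice: -N}, and {$each: [...], $slice: -N, $sort: {...}}, with $slice non-positive and $sort never appearing without $slice. A compressed restatement of those shape rules; PushSpec and isValidPushSpec are illustrative names, not code from this file:

```cpp
#include <cassert>

// Flattened view of a {$push: {field: spec}} argument, just enough to encode
// the shape rules checked above.
struct PushSpec {
    bool hasEach;
    bool hasSlice;
    bool hasSort;
    long long slice;  // value of $slice when present
};

bool isValidPushSpec(const PushSpec& s) {
    if (!s.hasEach)
        return !s.hasSlice && !s.hasSort;  // plain $push value
    if (s.hasSlice && s.slice > 0)
        return false;                      // "must be negative or zero"
    if (s.hasSort && !s.hasSlice)
        return false;                      // no $sort without $slice
    return true;
}

int main() {
    assert(isValidPushSpec({true, true, true, -3}));   // $each + $slice + $sort
    assert(!isValidPushSpec({true, false, true, 0}));  // $sort without $slice
    return 0;
}
```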
= 0 ) ; <nl> - uassert ( 13496 , <nl> - " invalid mod field name , source may not be empty " , <nl> - fieldName [ 0 ] ) ; <nl> - uassert ( 13479 , <nl> - " invalid mod field name , target may not be empty " , <nl> - target [ 0 ] ) ; <nl> - uassert ( 13480 , <nl> - " invalid mod field name , source may not begin or end in period " , <nl> - fieldName [ 0 ] ! = ' . ' & & fieldName [ strlen ( fieldName ) - 1 ] ! = ' . ' ) ; <nl> - uassert ( 13481 , <nl> - " invalid mod field name , target may not begin or end in period " , <nl> - target [ 0 ] ! = ' . ' & & target [ strlen ( target ) - 1 ] ! = ' . ' ) ; <nl> - uassert ( 13482 , <nl> - " $ rename affecting _id not allowed " , <nl> - ! ( fieldName [ 0 ] = = ' _ ' & & fieldName [ 1 ] = = ' i ' & & fieldName [ 2 ] = = ' d ' <nl> - & & ( ! fieldName [ 3 ] | | fieldName [ 3 ] = = ' . ' ) ) ) ; <nl> - uassert ( 13483 , <nl> - " $ rename affecting _id not allowed " , <nl> - ! ( target [ 0 ] = = ' _ ' & & target [ 1 ] = = ' i ' & & target [ 2 ] = = ' d ' <nl> - & & ( ! target [ 3 ] | | target [ 3 ] = = ' . ' ) ) ) ; <nl> - uassert ( 13484 , <nl> - " field name duplication not allowed with $ rename target " , <nl> - ! haveModForField ( target ) ) ; <nl> - uassert ( 13485 , <nl> - " conflicting mods not allowed with $ rename target " , <nl> - ! haveConflictingMod ( target ) ) ; <nl> - uassert ( 13486 , <nl> - " $ rename target may not be a parent of source " , <nl> - ! ( strncmp ( fieldName , target , strlen ( target ) ) = = 0 <nl> - & & fieldName [ strlen ( target ) ] = = ' . ' ) ) ; <nl> - uassert ( 13487 , <nl> - " $ rename source may not be dynamic array " , <nl> - strstr ( fieldName , " . $ " ) = = 0 ) ; <nl> - uassert ( 13488 , <nl> - " $ rename target may not be dynamic array " , <nl> - strstr ( target , " . $ " ) = = 0 ) ; <nl> - <nl> - Mod from ; <nl> - from . init ( Mod : : RENAME_FROM , f , forReplication ) ; <nl> - from . setFieldName ( fieldName ) ; <nl> - setIndexedStatus ( from , idxKeys ) ; <nl> - _mods [ from . fieldName ] = from ; <nl> - <nl> - Mod to ; <nl> - to . init ( Mod : : RENAME_TO , f , forReplication ) ; <nl> - to . setFieldName ( target ) ; <nl> - setIndexedStatus ( to , idxKeys ) ; <nl> - _mods [ to . fieldName ] = to ; <nl> - <nl> - DEBUGUPDATE ( " \ t \ t " < < fieldName < < " \ t " < < from . fieldName < < " \ t " < < to . fieldName ) ; <nl> - continue ; <nl> - } <nl> - <nl> - _hasDynamicArray = _hasDynamicArray | | strstr ( fieldName , " . $ " ) > 0 ; <nl> - <nl> - Mod m ; <nl> - m . init ( op , f , forReplication ) ; <nl> - m . setFieldName ( f . fieldName ( ) ) ; <nl> - setIndexedStatus ( m , idxKeys ) ; <nl> - _mods [ m . fieldName ] = m ; <nl> - <nl> - DEBUGUPDATE ( " \ t \ t " < < fieldName < < " \ t " < < m . fieldName < < " \ t " < < _hasDynamicArray ) ; <nl> - } <nl> - } <nl> - <nl> - } <nl> - <nl> - ModSet * ModSet : : fixDynamicArray ( const string & elemMatchKey ) const { <nl> - ModSet * n = new ModSet ( ) ; <nl> - n - > _numIndexMaybeUpdated = _numIndexMaybeUpdated ; <nl> - n - > _numIndexAlwaysUpdated = _numIndexAlwaysUpdated ; <nl> - n - > _hasDynamicArray = _hasDynamicArray ; <nl> - for ( ModHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; i + + ) { <nl> - string s = i - > first ; <nl> - size_t idx = s . find ( " . $ " ) ; <nl> - if ( idx = = string : : npos ) { <nl> - n - > _mods [ s ] = i - > second ; <nl> - continue ; <nl> - } <nl> - StringBuilder buf ; <nl> - buf < < s . substr ( 0 , idx + 1 ) < < elemMatchKey < < s . substr ( idx + 2 ) ; <nl> - string fixed = buf . 
str ( ) ; <nl> - DEBUGUPDATE ( " fixed dynamic : " < < s < < " - - > > " < < fixed ) ; <nl> - n - > _mods [ fixed ] = i - > second ; <nl> - ModHolder : : iterator temp = n - > _mods . find ( fixed ) ; <nl> - temp - > second . setFieldName ( temp - > first . c_str ( ) ) ; <nl> - } <nl> - return n ; <nl> - } <nl> - <nl> - void ModSet : : setIndexedStatus ( const IndexPathSet & idxKeys ) { <nl> - for ( ModHolder : : iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; + + i ) <nl> - setIndexedStatus ( i - > second , idxKeys ) ; <nl> - } <nl> - <nl> - void ModSet : : setIndexedStatus ( Mod & m , const IndexPathSet & idxKeys ) { <nl> - if ( idxKeys . mightBeIndexed ( m . fieldName ) ) { <nl> - m . isIndexed = true ; <nl> - <nl> - / / Some mods may be no - ops depending on the document they are applied <nl> - / / on . Determining how many indices will actually be used can only be <nl> - / / determined for sure after looking at that target document . <nl> - switch ( m . op ) { <nl> - <nl> - case Mod : : SET_ON_INSERT : <nl> - case Mod : : RENAME_FROM : <nl> - case Mod : : RENAME_TO : <nl> - _numIndexMaybeUpdated + + ; <nl> - break ; <nl> - <nl> - default : <nl> - _numIndexAlwaysUpdated + + ; <nl> - <nl> - } <nl> - } <nl> - else { <nl> - m . isIndexed = false ; <nl> - } <nl> - } <nl> - <nl> - <nl> - } / / namespace mongo <nl> deleted file mode 100644 <nl> index 620f5d4889c4 . . 000000000000 <nl> mmm a / src / mongo / db / ops / update_internal . h <nl> ppp / dev / null <nl> <nl> - / / @ file update_internal . h <nl> - <nl> - / * * <nl> - * Copyright ( C ) 2012 10gen Inc . <nl> - * <nl> - * This program is free software : you can redistribute it and / or modify <nl> - * it under the terms of the GNU Affero General Public License , version 3 , <nl> - * as published by the Free Software Foundation . <nl> - * <nl> - * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> - * GNU Affero General Public License for more details . <nl> - * <nl> - * You should have received a copy of the GNU Affero General Public License <nl> - * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> - * / <nl> - <nl> - # include " mongo / db / index_set . h " <nl> - # include " mongo / db / jsobj . h " <nl> - # include " mongo / db / jsobjmanipulator . h " <nl> - # include " mongo / db / matcher . h " <nl> - # include " mongo / util / embedded_builder . h " <nl> - # include " mongo / util / mongoutils / str . h " <nl> - # include " mongo / util / stringutils . h " <nl> - <nl> - using namespace mongoutils ; <nl> - <nl> - namespace mongo { <nl> - <nl> - class ModState ; <nl> - class ModSetState ; <nl> - <nl> - / * Used for modifiers such as $ inc , $ set , $ push , . . . <nl> - * stores the info about a single operation <nl> - * once created should never be modified <nl> - * / <nl> - struct Mod { <nl> - / / See opFromStr below <nl> - / / 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 <nl> - enum Op { INC , SET , PUSH , PUSH_ALL , PULL , PULL_ALL , POP , UNSET , BITAND , BITOR , BIT , ADDTOSET , RENAME_FROM , RENAME_TO , SET_ON_INSERT } op ; <nl> - <nl> - static const char * modNames [ ] ; <nl> - static unsigned modNamesNum ; <nl> - <nl> - const char * fieldName ; <nl> - const char * shortFieldName ; <nl> - <nl> - / / Determines if this mod must absolutely be applied .
In some replication scenarios , a <nl> - / / failed apply of a mod does not constitute an error . In those cases , setting strict <nl> - / / to off would not throw errors . <nl> - bool strictApply ; <nl> - <nl> - / / Determines if an index is going to be updated as part of the application of this <nl> - / / mod . <nl> - bool isIndexed ; <nl> - <nl> - BSONElement elt ; / / x : 5 note : this is the actual element from the updateobj <nl> - boost : : shared_ptr < Matcher > matcher ; <nl> - bool matcherOnPrimitive ; <nl> - <nl> - void init ( Op o , BSONElement & e , bool forReplication ) { <nl> - op = o ; <nl> - elt = e ; <nl> - strictApply = ! forReplication ; <nl> - isIndexed = false ; <nl> - if ( op = = PULL & & e . type ( ) = = Object ) { <nl> - BSONObj t = e . embeddedObject ( ) ; <nl> - if ( t . firstElement ( ) . getGtLtOp ( ) = = 0 ) { <nl> - matcher . reset ( new Matcher ( t ) ) ; <nl> - matcherOnPrimitive = false ; <nl> - } <nl> - else { <nl> - matcher . reset ( new Matcher ( BSON ( " " < < t ) ) ) ; <nl> - matcherOnPrimitive = true ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - void setFieldName ( const char * s ) { <nl> - fieldName = s ; <nl> - shortFieldName = strrchr ( fieldName , ' . ' ) ; <nl> - if ( shortFieldName ) <nl> - shortFieldName + + ; <nl> - else <nl> - shortFieldName = fieldName ; <nl> - } <nl> - <nl> - / * * <nl> - * @ param in increments the actual value inside in <nl> - * / <nl> - void incrementMe ( BSONElement & in ) const { <nl> - BSONElementManipulator manip ( in ) ; <nl> - switch ( in . type ( ) ) { <nl> - case NumberDouble : <nl> - manip . setNumber ( elt . numberDouble ( ) + in . numberDouble ( ) ) ; <nl> - break ; <nl> - case NumberLong : <nl> - manip . setLong ( elt . numberLong ( ) + in . numberLong ( ) ) ; <nl> - break ; <nl> - case NumberInt : <nl> - manip . setInt ( elt . numberInt ( ) + in . numberInt ( ) ) ; <nl> - break ; <nl> - default : <nl> - verify ( 0 ) ; <nl> - } <nl> - } <nl> - void IncrementMe ( BSONElement & in ) const { <nl> - BSONElementManipulator manip ( in ) ; <nl> - switch ( in . type ( ) ) { <nl> - case NumberDouble : <nl> - manip . SetNumber ( elt . numberDouble ( ) + in . numberDouble ( ) ) ; <nl> - break ; <nl> - case NumberLong : <nl> - manip . SetLong ( elt . numberLong ( ) + in . numberLong ( ) ) ; <nl> - break ; <nl> - case NumberInt : <nl> - manip . SetInt ( elt . numberInt ( ) + in . numberInt ( ) ) ; <nl> - break ; <nl> - default : <nl> - verify ( 0 ) ; <nl> - } <nl> - } <nl> - <nl> - void appendIncremented ( BSONBuilderBase & bb , const BSONElement & in , ModState & ms ) const ; <nl> - <nl> - bool operator < ( const Mod & other ) const { <nl> - return strcmp ( fieldName , other . fieldName ) < 0 ; <nl> - } <nl> - <nl> - bool arrayDep ( ) const { <nl> - switch ( op ) { <nl> - case PUSH : <nl> - case PUSH_ALL : <nl> - case POP : <nl> - return true ; <nl> - default : <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - void apply ( BSONBuilderBase & b , BSONElement in , ModState & ms ) const ; <nl> - <nl> - / * * <nl> - * @ return true iff toMatch should be removed from the array <nl> - * / <nl> - bool _pullElementMatch ( BSONElement & toMatch ) const ; <nl> - <nl> - void _checkForAppending ( const BSONElement & e ) const { <nl> - if ( e . type ( ) = = Object ) { <nl> - / / this is a tiny bit slow , but rare and important <nl> - / / only when setting something TO an object , not setting something in an object <nl> - / / and it checks for { $ set : { x : { ' a . 
b ' : 1 } } } <nl> - / / which we feel has been common <nl> - uassert ( 12527 , " not okForStorage " , e . embeddedObject ( ) . okForStorage ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - bool isEach ( ) const { <nl> - if ( elt . type ( ) ! = Object ) <nl> - return false ; <nl> - BSONElement e = elt . embeddedObject ( ) . firstElement ( ) ; <nl> - if ( e . type ( ) ! = Array ) <nl> - return false ; <nl> - return strcmp ( e . fieldName ( ) , " $ each " ) = = 0 ; <nl> - } <nl> - <nl> - BSONObj getEach ( ) const { <nl> - return elt . embeddedObjectUserCheck ( ) . firstElement ( ) . embeddedObjectUserCheck ( ) ; <nl> - } <nl> - <nl> - void parseEach ( BSONElementSet & s ) const { <nl> - BSONObjIterator i ( getEach ( ) ) ; <nl> - while ( i . more ( ) ) { <nl> - s . insert ( i . next ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - bool isSliceOnly ( ) const { <nl> - if ( elt . type ( ) ! = Object ) <nl> - return false ; <nl> - BSONObj obj = elt . embeddedObject ( ) ; <nl> - if ( obj . nFields ( ) ! = 2 ) <nl> - return false ; <nl> - BSONObjIterator i ( obj ) ; <nl> - i . next ( ) ; <nl> - BSONElement elemSlice = i . next ( ) ; <nl> - return strcmp ( elemSlice . fieldName ( ) , " $ slice " ) = = 0 ; <nl> - } <nl> - <nl> - long long getSlice ( ) const { <nl> - / / The $ slice may be the second or the third element in the field object . <nl> - / / { < field name > : { $ each : [ < each array > ] , $ slice : - N , $ sort : < pattern > } } <nl> - / / ' elt ' here is the BSONElement above . <nl> - BSONObj obj = elt . embeddedObject ( ) ; <nl> - BSONObjIterator i ( obj ) ; <nl> - i . next ( ) ; <nl> - BSONElement elem = i . next ( ) ; <nl> - if ( ! str : : equals ( elem . fieldName ( ) , " $ slice " ) ) { <nl> - elem = i . next ( ) ; <nl> - } <nl> - dassert ( elem . isNumber ( ) ) ; <nl> - <nl> - / / For now , we ' re only supporting slicing from the back of the array , i . e . <nl> - / / negative slice . But the code now is wired in the opposite way : trimming from the <nl> - / / back of the array is positive . <nl> - / / TODO : fix this . <nl> - return - elem . numberLong ( ) ; <nl> - } <nl> - <nl> - bool isSliceAndSort ( ) const { <nl> - if ( elt . type ( ) ! = Object ) <nl> - return false ; <nl> - BSONObj obj = elt . embeddedObject ( ) ; <nl> - if ( obj . nFields ( ) ! = 3 ) <nl> - return false ; <nl> - BSONObjIterator i ( obj ) ; <nl> - i . next ( ) ; <nl> - <nl> - / / Slice and sort may be switched . <nl> - bool seenSlice = false ; <nl> - bool seenSort = false ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement elem = i . next ( ) ; <nl> - if ( str : : equals ( elem . fieldName ( ) , " $ slice " ) ) { <nl> - if ( seenSlice ) return false ; <nl> - seenSlice = true ; <nl> - } <nl> - else if ( str : : equals ( elem . fieldName ( ) , " $ sort " ) ) { <nl> - if ( seenSort ) return false ; <nl> - seenSort = true ; <nl> - if ( elem . type ( ) ! = Object ) return false ; <nl> - } <nl> - else { <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - / / If present , the $ sort element would have been checked during ModSet construction . <nl> - return seenSlice & & seenSort ; <nl> - } <nl> - <nl> - BSONObj getSort ( ) const { <nl> - / / The $ sort may be the second or the third element in the field object . <nl> - / / { < field name > : { $ each : [ < each array > ] , $ slice : - N , $ sort : < pattern > } } <nl> - / / ' elt ' here is the BSONElement above . <nl> - BSONObj obj = elt . embeddedObject ( ) ; <nl> - BSONObjIterator i ( obj ) ; <nl> - i . next ( ) ; <nl> - BSONElement elem = i . next ( ) ; <nl> - if ( !
str : : equals ( elem . fieldName ( ) , " $ sort " ) ) { <nl> - elem = i . next ( ) ; <nl> - } <nl> - return elem . embeddedObject ( ) ; <nl> - } <nl> - <nl> - const char * renameFrom ( ) const { <nl> - massert ( 13492 , " mod must be RENAME_TO type " , op = = Mod : : RENAME_TO ) ; <nl> - return elt . fieldName ( ) ; <nl> - } <nl> - } ; <nl> - <nl> - / * * <nl> - * stores a set of Mods <nl> - * once created , should never be changed <nl> - * / <nl> - class ModSet : boost : : noncopyable { <nl> - typedef map < string , Mod > ModHolder ; <nl> - ModHolder _mods ; <nl> - int _numIndexMaybeUpdated ; <nl> - int _numIndexAlwaysUpdated ; <nl> - bool _hasDynamicArray ; <nl> - <nl> - static Mod : : Op opFromStr ( const char * fn ) { <nl> - verify ( fn [ 0 ] = = ' $ ' ) ; <nl> - switch ( fn [ 1 ] ) { <nl> - case ' i ' : { <nl> - if ( fn [ 2 ] = = ' n ' & & fn [ 3 ] = = ' c ' & & fn [ 4 ] = = 0 ) <nl> - return Mod : : INC ; <nl> - break ; <nl> - } <nl> - case ' s ' : { <nl> - if ( fn [ 2 ] = = ' e ' & & fn [ 3 ] = = ' t ' ) { <nl> - if ( fn [ 4 ] = = 0 ) { <nl> - return Mod : : SET ; <nl> - } <nl> - else if ( fn [ 4 ] = = ' O ' & & fn [ 5 ] = = ' n ' & & fn [ 6 ] = = ' I ' & & fn [ 7 ] = = ' n ' & & <nl> - fn [ 8 ] = = ' s ' & & fn [ 9 ] = = ' e ' & & fn [ 10 ] = = ' r ' & & fn [ 11 ] = = ' t ' & & <nl> - fn [ 12 ] = = 0 ) { <nl> - return Mod : : SET_ON_INSERT ; <nl> - } <nl> - } <nl> - break ; <nl> - } <nl> - case ' p ' : { <nl> - if ( fn [ 2 ] = = ' u ' ) { <nl> - if ( fn [ 3 ] = = ' s ' & & fn [ 4 ] = = ' h ' ) { <nl> - if ( fn [ 5 ] = = 0 ) <nl> - return Mod : : PUSH ; <nl> - if ( fn [ 5 ] = = ' A ' & & fn [ 6 ] = = ' l ' & & fn [ 7 ] = = ' l ' & & fn [ 8 ] = = 0 ) <nl> - return Mod : : PUSH_ALL ; <nl> - } <nl> - else if ( fn [ 3 ] = = ' l ' & & fn [ 4 ] = = ' l ' ) { <nl> - if ( fn [ 5 ] = = 0 ) <nl> - return Mod : : PULL ; <nl> - if ( fn [ 5 ] = = ' A ' & & fn [ 6 ] = = ' l ' & & fn [ 7 ] = = ' l ' & & fn [ 8 ] = = 0 ) <nl> - return Mod : : PULL_ALL ; <nl> - } <nl> - } <nl> - else if ( fn [ 2 ] = = ' o ' & & fn [ 3 ] = = ' p ' & & fn [ 4 ] = = 0 ) <nl> - return Mod : : POP ; <nl> - break ; <nl> - } <nl> - case ' u ' : { <nl> - if ( fn [ 2 ] = = ' n ' & & fn [ 3 ] = = ' s ' & & fn [ 4 ] = = ' e ' & & fn [ 5 ] = = ' t ' & & fn [ 6 ] = = 0 ) <nl> - return Mod : : UNSET ; <nl> - break ; <nl> - } <nl> - case ' b ' : { <nl> - if ( fn [ 2 ] = = ' i ' & & fn [ 3 ] = = ' t ' ) { <nl> - if ( fn [ 4 ] = = 0 ) <nl> - return Mod : : BIT ; <nl> - if ( fn [ 4 ] = = ' a ' & & fn [ 5 ] = = ' n ' & & fn [ 6 ] = = ' d ' & & fn [ 7 ] = = 0 ) <nl> - return Mod : : BITAND ; <nl> - if ( fn [ 4 ] = = ' o ' & & fn [ 5 ] = = ' r ' & & fn [ 6 ] = = 0 ) <nl> - return Mod : : BITOR ; <nl> - } <nl> - break ; <nl> - } <nl> - case ' a ' : { <nl> - if ( fn [ 2 ] = = ' d ' & & fn [ 3 ] = = ' d ' ) { <nl> - / / add <nl> - if ( fn [ 4 ] = = ' T ' & & fn [ 5 ] = = ' o ' & & fn [ 6 ] = = ' S ' & & fn [ 7 ] = = ' e ' & & fn [ 8 ] = = ' t ' & & fn [ 9 ] = = 0 ) <nl> - return Mod : : ADDTOSET ; <nl> - <nl> - } <nl> - break ; <nl> - } <nl> - case ' r ' : { <nl> - if ( fn [ 2 ] = = ' e ' & & fn [ 3 ] = = ' n ' & & fn [ 4 ] = = ' a ' & & fn [ 5 ] = = ' m ' & & fn [ 6 ] = = ' e ' ) { <nl> - return Mod : : RENAME_TO ; / / with this return code we handle both RENAME_TO and RENAME_FROM <nl> - } <nl> - break ; <nl> - } <nl> - default : break ; <nl> - } <nl> - uassert ( 10161 , " Invalid modifier specified " + string ( fn ) , false ) ; <nl> - return Mod : : INC ; <nl> - } <nl> - <nl> - ModSet ( ) { } <nl> - <nl> - / * * <nl> - * if applying
this mod would require updating an index , set such condition in ' m ' , <nl> - * and update the number of indices touched in ' this ' ModSet . <nl> - * / <nl> - void setIndexedStatus ( Mod & m , const IndexPathSet & idxKeys ) ; <nl> - <nl> - public : <nl> - <nl> - ModSet ( const BSONObj & from , <nl> - const IndexPathSet & idxKeys = IndexPathSet ( ) , <nl> - bool forReplication = false ) ; <nl> - <nl> - / * * <nl> - * re - check if this mod is impacted by indexes <nl> - * / <nl> - void setIndexedStatus ( const IndexPathSet & idxKeys ) ; <nl> - <nl> - / / TODO : this is inefficient - should probably just handle when iterating <nl> - ModSet * fixDynamicArray ( const string & elemMatchKey ) const ; <nl> - <nl> - bool hasDynamicArray ( ) const { return _hasDynamicArray ; } <nl> - <nl> - / * * <nl> - * creates a ModSetState suitable for operation on obj <nl> - * doesn ' t change or modify this ModSet or any underlying Mod <nl> - * <nl> - * flag ' insertion ' differentiates between obj existing prior to this update . <nl> - * / <nl> - auto_ptr < ModSetState > prepare ( const BSONObj & obj , bool insertion = false ) const ; <nl> - <nl> - / * * <nl> - * given a query pattern , builds an object suitable for an upsert <nl> - * will take the query spec and combine all $ operators <nl> - * / <nl> - BSONObj createNewFromQuery ( const BSONObj & query ) ; <nl> - <nl> - int maxNumIndexUpdated ( ) const { return _numIndexMaybeUpdated + _numIndexAlwaysUpdated ; } <nl> - <nl> - unsigned size ( ) const { return _mods . size ( ) ; } <nl> - <nl> - bool haveModForField ( const char * fieldName ) const { <nl> - return _mods . find ( fieldName ) ! = _mods . end ( ) ; <nl> - } <nl> - <nl> - bool haveConflictingMod ( const string & fieldName ) { <nl> - size_t idx = fieldName . find ( ' . ' ) ; <nl> - if ( idx = = string : : npos ) <nl> - idx = fieldName . size ( ) ; <nl> - <nl> - ModHolder : : const_iterator start = _mods . lower_bound ( fieldName . substr ( 0 , idx ) ) ; <nl> - for ( ; start ! = _mods . end ( ) ; start + + ) { <nl> - FieldCompareResult r = compareDottedFieldNames ( fieldName , start - > first , <nl> - LexNumCmp ( true ) ) ; <nl> - switch ( r ) { <nl> - case LEFT_SUBFIELD : return true ; <nl> - case LEFT_BEFORE : return false ; <nl> - case SAME : return true ; <nl> - case RIGHT_BEFORE : return false ; <nl> - case RIGHT_SUBFIELD : return true ; <nl> - } <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - } ; <nl> - <nl> - / * * <nl> - * Comparator between two BSONObjects that takes in consideration only the keys and <nl> - * direction described in the sort pattern . <nl> - * / <nl> - struct ProjectKeyCmp { <nl> - BSONObj sortPattern ; <nl> - <nl> - ProjectKeyCmp ( BSONObj pattern ) : sortPattern ( pattern ) { } <nl> - <nl> - int operator ( ) ( const BSONObj & left , const BSONObj & right ) const { <nl> - BSONObj keyLeft = left . extractFields ( sortPattern , true ) ; <nl> - BSONObj keyRight = right . extractFields ( sortPattern , true ) ; <nl> - return keyLeft . 
woCompare ( keyRight , sortPattern ) < 0 ; <nl> - } <nl> - } ; <nl> - <nl> - / * * <nl> - * stores any information about a single Mod operating on a single Object <nl> - * / <nl> - class ModState : boost : : noncopyable { <nl> - public : <nl> - const Mod * m ; <nl> - BSONElement old ; <nl> - BSONElement newVal ; <nl> - BSONObj _objData ; <nl> - <nl> - const char * fixedOpName ; <nl> - BSONElement * fixed ; <nl> - BSONArray fixedArray ; <nl> - bool forceEmptyArray ; <nl> - bool forcePositional ; <nl> - int position ; <nl> - int DEPRECATED_pushStartSize ; <nl> - <nl> - BSONType incType ; <nl> - int incint ; <nl> - double incdouble ; <nl> - long long inclong ; <nl> - <nl> - bool dontApply ; <nl> - <nl> - ModState ( ) { <nl> - fixedOpName = 0 ; <nl> - fixed = 0 ; <nl> - forceEmptyArray = false ; <nl> - forcePositional = false ; <nl> - position = 0 ; <nl> - DEPRECATED_pushStartSize = - 1 ; <nl> - incType = EOO ; <nl> - dontApply = false ; <nl> - } <nl> - <nl> - Mod : : Op op ( ) const { <nl> - return m - > op ; <nl> - } <nl> - <nl> - const char * fieldName ( ) const { <nl> - return m - > fieldName ; <nl> - } <nl> - <nl> - bool DEPRECATED_needOpLogRewrite ( ) const { <nl> - if ( dontApply ) <nl> - return false ; <nl> - <nl> - if ( fixed | | fixedOpName | | incType ) <nl> - return true ; <nl> - <nl> - switch ( op ( ) ) { <nl> - case Mod : : RENAME_FROM : <nl> - case Mod : : RENAME_TO : <nl> - return true ; <nl> - case Mod : : BIT : <nl> - case Mod : : BITAND : <nl> - case Mod : : BITOR : <nl> - return true ; <nl> - default : <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - const char * getOpLogName ( ) const ; <nl> - void appendForOpLog ( BSONObjBuilder & b ) const ; <nl> - <nl> - void apply ( BSONBuilderBase & b , BSONElement in ) { <nl> - m - > apply ( b , in , * this ) ; <nl> - } <nl> - <nl> - void appendIncValue ( BSONBuilderBase & b , bool useFullName ) const { <nl> - const char * n = useFullName ? m - > fieldName : m - > shortFieldName ; <nl> - <nl> - switch ( incType ) { <nl> - case NumberDouble : <nl> - b . append ( n , incdouble ) ; break ; <nl> - case NumberLong : <nl> - b . append ( n , inclong ) ; break ; <nl> - case NumberInt : <nl> - b . 
append ( n , incint ) ; break ; <nl> - default : <nl> - verify ( 0 ) ; <nl> - } <nl> - } <nl> - <nl> - string toString ( ) const ; <nl> - <nl> - void handleRename ( BSONBuilderBase & newObjBuilder , const char * shortFieldName ) ; <nl> - } ; <nl> - <nl> - / * * <nl> - * this is used to hold state , meta data while applying a ModSet to a BSONObj <nl> - * the goal is to make ModSet const so its re - usable <nl> - * / <nl> - class ModSetState : boost : : noncopyable { <nl> - typedef map < string , shared_ptr < ModState > , LexNumCmp > ModStateHolder ; <nl> - typedef pair < const ModStateHolder : : iterator , const ModStateHolder : : iterator > ModStateRange ; <nl> - const BSONObj & _obj ; <nl> - ModStateHolder _mods ; <nl> - bool _inPlacePossible ; <nl> - BSONObj _newFromMods ; / / keep this data alive , as oplog generation may depend on it <nl> - int _numIndexAlwaysUpdated ; <nl> - int _numIndexMaybeUpdated ; <nl> - <nl> - ModSetState ( const BSONObj & obj , int numIndexAlwaysUpdated , int numIndexMaybeUpdated ) <nl> - : _obj ( obj ) <nl> - , _mods ( LexNumCmp ( true ) ) <nl> - , _inPlacePossible ( true ) <nl> - , _numIndexAlwaysUpdated ( numIndexAlwaysUpdated ) <nl> - , _numIndexMaybeUpdated ( numIndexMaybeUpdated ) { <nl> - } <nl> - <nl> - / * * <nl> - * @ return if in place is still possible <nl> - * / <nl> - bool amIInPlacePossible ( bool inPlacePossible ) { <nl> - if ( ! inPlacePossible ) <nl> - _inPlacePossible = false ; <nl> - return _inPlacePossible ; <nl> - } <nl> - <nl> - ModStateRange modsForRoot ( const string & root ) ; <nl> - <nl> - void createNewObjFromMods ( const string & root , BSONObjBuilder & b , const BSONObj & obj ) ; <nl> - void createNewArrayFromMods ( const string & root , BSONArrayBuilder & b , <nl> - const BSONArray & arr ) ; <nl> - <nl> - void createNewFromMods ( const string & root , BSONBuilderBase & b , BSONIteratorSorted & es , <nl> - const ModStateRange & modRange , const LexNumCmp & lexNumCmp ) ; <nl> - <nl> - void _appendNewFromMods ( const string & root , ModState & m , BSONBuilderBase & b , set < string > & onedownseen ) ; <nl> - <nl> - void appendNewFromMod ( ModState & ms , BSONBuilderBase & b ) { <nl> - if ( ms . dontApply ) { <nl> - return ; <nl> - } <nl> - <nl> - / / const Mod & m = * ( ms . m ) ; / / HACK <nl> - Mod & m = * ( ( Mod * ) ( ms . m ) ) ; / / HACK <nl> - <nl> - switch ( m . op ) { <nl> - <nl> - case Mod : : PUSH : { <nl> - ms . fixedOpName = " $ set " ; <nl> - if ( m . isEach ( ) ) { <nl> - BSONObj arr = m . getEach ( ) ; <nl> - if ( ! m . isSliceOnly ( ) & & ! m . isSliceAndSort ( ) ) { <nl> - b . appendArray ( m . shortFieldName , arr ) ; <nl> - <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arr . getOwned ( ) ) ; <nl> - } <nl> - else if ( m . isSliceOnly ( ) & & ( m . getSlice ( ) > = arr . nFields ( ) ) ) { <nl> - b . appendArray ( m . shortFieldName , arr ) ; <nl> - <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arr . getOwned ( ) ) ; <nl> - } <nl> - else if ( m . isSliceOnly ( ) ) { <nl> - BSONArrayBuilder arrBuilder ( b . subarrayStart ( m . shortFieldName ) ) ; <nl> - long long skip = arr . nFields ( ) - m . getSlice ( ) ; <nl> - BSONObjIterator j ( arr ) ; <nl> - while ( j . more ( ) ) { <nl> - if ( skip - - > 0 ) { <nl> - j . next ( ) ; <nl> - continue ; <nl> - } <nl> - arrBuilder . append ( j . next ( ) ) ; <nl> - } <nl> - <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arrBuilder . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - else if ( m . 
isSliceAndSort ( ) ) { <nl> - long long slice = m . getSlice ( ) ; <nl> - <nl> - / / Sort the $ each array over sortPattern . <nl> - vector < BSONObj > workArea ; <nl> - BSONObjIterator j ( arr ) ; <nl> - while ( j . more ( ) ) { <nl> - workArea . push_back ( j . next ( ) . Obj ( ) ) ; <nl> - } <nl> - ProjectKeyCmp cmp ( m . getSort ( ) ) ; <nl> - sort ( workArea . begin ( ) , workArea . end ( ) , cmp ) ; <nl> - <nl> - / / Slice to the appropriate size . If slice is zero , that ' s equivalent <nl> - / / to resetting the array , ie , a no - op . <nl> - BSONArrayBuilder arrBuilder ( b . subarrayStart ( m . shortFieldName ) ) ; <nl> - if ( slice > 0 ) { <nl> - long long skip = std : : max ( 0LL , <nl> - ( long long ) workArea . size ( ) - slice ) ; <nl> - for ( vector < BSONObj > : : iterator it = workArea . begin ( ) ; <nl> - it ! = workArea . end ( ) ; <nl> - + + it ) { <nl> - if ( skip - - > 0 ) { <nl> - continue ; <nl> - } <nl> - arrBuilder . append ( * it ) ; <nl> - } <nl> - } <nl> - <nl> - / / Log the full resulting array . <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arrBuilder . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - } <nl> - else { <nl> - BSONObjBuilder arr ( b . subarrayStart ( m . shortFieldName ) ) ; <nl> - arr . appendAs ( m . elt , " 0 " ) ; <nl> - <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arr . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - break ; <nl> - } <nl> - <nl> - case Mod : : ADDTOSET : { <nl> - ms . fixedOpName = " $ set " ; <nl> - if ( m . isEach ( ) ) { <nl> - / / Remove any duplicates in given array <nl> - BSONArrayBuilder arr ( b . subarrayStart ( m . shortFieldName ) ) ; <nl> - BSONElementSet toadd ; <nl> - m . parseEach ( toadd ) ; <nl> - BSONObjIterator i ( m . getEach ( ) ) ; <nl> - / / int n = 0 ; <nl> - while ( i . more ( ) ) { <nl> - BSONElement e = i . next ( ) ; <nl> - if ( toadd . count ( e ) ) { <nl> - arr . append ( e ) ; <nl> - toadd . erase ( e ) ; <nl> - } <nl> - } <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arr . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - else { <nl> - BSONArrayBuilder arr ( b . subarrayStart ( m . shortFieldName ) ) ; <nl> - arr . append ( m . elt ) ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( arr . done ( ) . getOwned ( ) ) ; <nl> - } <nl> - break ; <nl> - } <nl> - <nl> - case Mod : : PUSH_ALL : { <nl> - b . appendAs ( m . elt , m . shortFieldName ) ; <nl> - ms . fixedOpName = " $ set " ; <nl> - ms . forceEmptyArray = true ; <nl> - ms . fixedArray = BSONArray ( m . elt . Obj ( ) ) ; <nl> - break ; <nl> - } <nl> - <nl> - case Mod : : POP : <nl> - case Mod : : PULL : <nl> - case Mod : : PULL_ALL : <nl> - case Mod : : UNSET : <nl> - / / No - op b / c unset / pull of nothing does nothing . Still , explicitly log that <nl> - / / the target array was reset . <nl> - ms . fixedOpName = " $ unset " ; <nl> - break ; <nl> - <nl> - case Mod : : INC : <nl> - case Mod : : SET_ON_INSERT : <nl> - ms . fixedOpName = " $ set " ; <nl> - case Mod : : SET : { <nl> - m . _checkForAppending ( m . elt ) ; <nl> - b . appendAs ( m . elt , m . shortFieldName ) ; <nl> - break ; <nl> - } <nl> - <nl> - / / shouldn ' t see RENAME_FROM here <nl> - case Mod : : RENAME_TO : <nl> - ms . handleRename ( b , m . shortFieldName ) ; <nl> - break ; <nl> - <nl> - default : <nl> - stringstream ss ; <nl> - ss < < " unknown mod in appendNewFromMod : " < < m . op ; <nl> - throw UserException ( 9015 , ss .
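The isSliceAndSort branch above sorts the combined array with ProjectKeyCmp and then keeps only the last `slice` elements by skipping `size - slice` leading entries. The core of that, detached from BSON, with plain ints standing in for the sorted objects:

```cpp
#include <algorithm>
#include <iostream>
#include <vector>

int main() {
    // Stand-in for the $each array after it has been appended to the field.
    std::vector<int> arr = {5, 1, 4, 2, 3};
    const long long slice = 3;  // from {$slice: -3}, after the sign flip
                                // getSlice() performs

    // std::sort plays the role of sorting with ProjectKeyCmp.
    std::sort(arr.begin(), arr.end());

    // Keep the last `slice` elements, exactly like the skip loop above.
    const long long skip =
        std::max(0LL, static_cast<long long>(arr.size()) - slice);
    arr.erase(arr.begin(), arr.begin() + skip);

    for (int v : arr)
        std::cout << v << ' ';  // prints: 3 4 5
    return 0;
}
```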
str ( ) ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - / * * @ return true iff the elements aren ' t eoo ( ) , are distinct , and share a field name . * / <nl> - static bool duplicateFieldName ( const BSONElement & a , const BSONElement & b ) ; <nl> - <nl> - public : <nl> - <nl> - bool canApplyInPlace ( ) const { <nl> - return _inPlacePossible ; <nl> - } <nl> - <nl> - bool isUpdateIndexed ( ) const { <nl> - if ( _numIndexAlwaysUpdated ! = 0 ) { <nl> - return true ; <nl> - } <nl> - <nl> - return isUpdateIndexedSlow ( ) ; <nl> - } <nl> - <nl> - bool isUpdateIndexedSlow ( ) const ; <nl> - <nl> - / * * <nl> - * modified underlying _obj <nl> - * @ param isOnDisk - true means this is an on disk object , and this update needs to be made durable <nl> - * / <nl> - void applyModsInPlace ( bool isOnDisk ) ; <nl> - <nl> - BSONObj createNewFromMods ( ) ; <nl> - <nl> - / / re - writing for oplog <nl> - <nl> - bool DEPRECATED_needOpLogRewrite ( ) const { <nl> - for ( ModStateHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; i + + ) <nl> - if ( i - > second - > DEPRECATED_needOpLogRewrite ( ) ) <nl> - return true ; <nl> - return false ; <nl> - } <nl> - <nl> - BSONObj getOpLogRewrite ( ) const ; <nl> - <nl> - bool DEPRECATED_haveArrayDepMod ( ) const { <nl> - for ( ModStateHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; i + + ) <nl> - if ( i - > second - > m - > arrayDep ( ) ) <nl> - return true ; <nl> - return false ; <nl> - } <nl> - <nl> - void DEPRECATED_appendSizeSpecForArrayDepMods ( BSONObjBuilder & b ) const { <nl> - for ( ModStateHolder : : const_iterator i = _mods . begin ( ) ; i ! = _mods . end ( ) ; i + + ) { <nl> - const ModState & m = * i - > second ; <nl> - if ( m . m - > arrayDep ( ) ) { <nl> - if ( m . DEPRECATED_pushStartSize = = - 1 ) <nl> - b . appendNull ( m . fieldName ( ) ) ; <nl> - else <nl> - b < < m . fieldName ( ) < < BSON ( " $ size " < < m . DEPRECATED_pushStartSize ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - string toString ( ) const ; <nl> - <nl> - friend class ModSet ; <nl> - } ; <nl> - <nl> - } / / namespace mongo <nl> mmm a / src / mongo / db / repl / rs_rollback . cpp <nl> ppp b / src / mongo / db / repl / rs_rollback . cpp <nl> namespace mongo { <nl> / / todo faster . . . <nl> OpDebug debug ; <nl> updates + + ; <nl> - if ( isNewUpdateFrameworkEnabled ( ) ) { <nl> - <nl> - _updateObjectsNEW ( / * god * / true , <nl> - d . ns , <nl> - i - > second , <nl> - pattern , <nl> - / * upsert = * / true , <nl> - / * multi = * / false , <nl> - / * logtheop = * / false , <nl> - debug , <nl> - rs . get ( ) ) ; <nl> - <nl> - } <nl> - else { <nl> - <nl> - _updateObjects ( / * god * / true , <nl> - d . ns , <nl> - i - > second , <nl> - pattern , <nl> - / * upsert = * / true , <nl> - / * multi = * / false , <nl> - / * logtheop = * / false , <nl> - debug , <nl> - rs . get ( ) ) ; <nl> - <nl> - } <nl> + _updateObjects ( / * god * / true , <nl> + d . ns , <nl> + i - > second , <nl> + pattern , <nl> + / * upsert = * / true , <nl> + / * multi = * / false , <nl> + / * logtheop = * / false , <nl> + debug , <nl> + rs . get ( ) ) ; <nl> } <nl> } <nl> catch ( DBException & e ) { <nl> mmm a / src / mongo / dbtests / framework . cpp <nl> ppp b / src / mongo / dbtests / framework . 
cpp <nl> namespace mongo { <nl> ( " bigfiles " , " use big datafiles instead of smallfiles which is the default " ) <nl> ( " filter , f " , po : : value < string > ( ) , " string substring filter on test name " ) <nl> ( " verbose , v " , " verbose " ) <nl> - ( " testNewUpdateFramework " , " test the new update framework " ) <nl> ( " useNewQueryFramework " , " use the new query framework " ) <nl> ( " dur " , " enable journaling ( currently the default ) " ) <nl> ( " nodur " , " disable journaling " ) <nl> namespace mongo { <nl> return EXIT_CLEAN ; <nl> } <nl> <nl> - if ( params . count ( " testNewUpdateFramework " ) & & ! mongo : : isNewUpdateFrameworkEnabled ( ) ) { <nl> - mongo : : toggleNewUpdateFrameworkEnabled ( ) ; <nl> - } <nl> - <nl> if ( params . count ( " useNewQueryFramework " ) ) { <nl> mongo : : enableNewQueryFramework ( ) ; <nl> } <nl> mmm a / src / mongo / dbtests / repltests . cpp <nl> ppp b / src / mongo / dbtests / repltests . cpp <nl> namespace ReplTests { <nl> add < Idempotence : : EmptyPush > ( ) ; <nl> add < Idempotence : : EmptyPushSparseIndex > ( ) ; <nl> add < Idempotence : : PushAll > ( ) ; <nl> - <nl> - / / The new update framework does not allow field names with a leading ' $ ' to be <nl> - / / pushed . <nl> - if ( ! isNewUpdateFrameworkEnabled ( ) ) <nl> - add < Idempotence : : PushWithDollarSigns > ( ) ; <nl> - <nl> add < Idempotence : : PushSlice > ( ) ; <nl> add < Idempotence : : PushSliceInitiallyInexistent > ( ) ; <nl> add < Idempotence : : PushSliceToZero > ( ) ; <nl> namespace ReplTests { <nl> add < Idempotence : : SingletonNoRename > ( ) ; <nl> add < Idempotence : : IndexedSingletonNoRename > ( ) ; <nl> add < Idempotence : : AddToSetEmptyMissing > ( ) ; <nl> - <nl> - / / The new update framework does not allow field names with a leading ' $ ' to be <nl> - / / added to a set . <nl> - if ( ! isNewUpdateFrameworkEnabled ( ) ) <nl> - add < Idempotence : : AddToSetWithDollarSigns > ( ) ; <nl> - <nl> add < Idempotence : : ReplaySetPreexistingNoOpPull > ( ) ; <nl> add < Idempotence : : ReplayArrayFieldNotAppended > ( ) ; <nl> add < DeleteOpIsIdBased > ( ) ; <nl> mmm a / src / mongo / dbtests / updatetests . cpp <nl> ppp b / src / mongo / dbtests / updatetests . cpp <nl> <nl> # include " mongo / db / json . h " <nl> # include " mongo / db / lasterror . h " <nl> # include " mongo / db / ops / update . h " <nl> - # include " mongo / db / ops / update_internal . h " <nl> # include " mongo / dbtests / dbtests . h " <nl> <nl> namespace UpdateTests { <nl> namespace UpdateTests { <nl> } <nl> } ; <nl> <nl> + namespace { <nl> + <nl> + / * * <nl> + * Comparator between two BSONObjects that takes in consideration only the keys and <nl> + * direction described in the sort pattern . <nl> + * <nl> + * TODO : This was pulled from update_internal . h , we should verify that these tests work <nl> + * with the new update framework $ push sorter . <nl> + * / <nl> + struct ProjectKeyCmp { <nl> + BSONObj sortPattern ; <nl> + <nl> + ProjectKeyCmp ( BSONObj pattern ) : sortPattern ( pattern ) { } <nl> + <nl> + int operator ( ) ( const BSONObj & left , const BSONObj & right ) const { <nl> + BSONObj keyLeft = left . extractFields ( sortPattern , true ) ; <nl> + BSONObj keyRight = right . extractFields ( sortPattern , true ) ; <nl> + return keyLeft . 
woCompare ( keyRight , sortPattern ) < 0 ; <nl> + } <nl> + } ; <nl> + <nl> + } / / namespace <nl> + <nl> class PushSortSortMixed { <nl> public : <nl> void run ( ) { <nl> namespace UpdateTests { <nl> } <nl> } ; <nl> <nl> - class IndexModSet : public SetBase { <nl> - public : <nl> - void run ( ) { <nl> - client ( ) . ensureIndex ( ns ( ) , BSON ( " a . b " < < 1 ) ) ; <nl> - client ( ) . insert ( ns ( ) , fromjson ( " { ' _id ' : 0 , a : { b : 3 } } " ) ) ; <nl> - client ( ) . update ( ns ( ) , Query ( ) , fromjson ( " { $ set : { ' a . b ' : 4 } } " ) ) ; <nl> - ASSERT_EQUALS ( fromjson ( " { ' _id ' : 0 , a : { b : 4 } } " ) , client ( ) . findOne ( ns ( ) , Query ( ) ) ) ; <nl> - ASSERT_EQUALS ( fromjson ( " { ' _id ' : 0 , a : { b : 4 } } " ) , client ( ) . findOne ( ns ( ) , fromjson ( " { ' a . b ' : 4 } " ) ) ) ; / / make sure the index works <nl> - } <nl> - } ; <nl> - <nl> - <nl> class PreserveIdWithIndex : public SetBase { / / Not using $ set , but base class is still useful <nl> public : <nl> void run ( ) { <nl> namespace UpdateTests { <nl> mutablebson : : unordered ( client ( ) . findOne ( ns ( ) , BSONObj ( ) ) ) ) ; <nl> } <nl> } ; <nl> - <nl> - namespace ModSetTests { <nl> - <nl> - class internal1 { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj b = BSON ( " $ inc " < < BSON ( " x " < < 1 < < " a . b " < < 1 ) ) ; <nl> - ModSet m ( b ) ; <nl> - <nl> - ASSERT ( m . haveModForField ( " x " ) ) ; <nl> - ASSERT ( m . haveModForField ( " a . b " ) ) ; <nl> - ASSERT ( ! m . haveModForField ( " y " ) ) ; <nl> - ASSERT ( ! m . haveModForField ( " a . c " ) ) ; <nl> - ASSERT ( ! m . haveModForField ( " a " ) ) ; <nl> - <nl> - ASSERT ( m . haveConflictingMod ( " x " ) ) ; <nl> - ASSERT ( m . haveConflictingMod ( " a " ) ) ; <nl> - ASSERT ( m . haveConflictingMod ( " a . b " ) ) ; <nl> - ASSERT ( ! m . haveConflictingMod ( " a . bc " ) ) ; <nl> - ASSERT ( ! m . haveConflictingMod ( " a . c " ) ) ; <nl> - ASSERT ( ! m . haveConflictingMod ( " a . a " ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class Base { <nl> - public : <nl> - <nl> - virtual ~ Base ( ) { } <nl> - <nl> - <nl> - void test ( BSONObj morig , BSONObj in , BSONObj wanted ) { <nl> - BSONObj m = morig . copy ( ) ; <nl> - ModSet set ( m ) ; <nl> - <nl> - BSONObj out = set . prepare ( in ) - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( wanted , out ) ; <nl> - } <nl> - } ; <nl> - <nl> - class inc1 : public Base { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj m = BSON ( " $ inc " < < BSON ( " x " < < 1 ) ) ; <nl> - test ( m , BSON ( " x " < < 5 ) , BSON ( " x " < < 6 ) ) ; <nl> - test ( m , BSON ( " a " < < 5 ) , BSON ( " a " < < 5 < < " x " < < 1 ) ) ; <nl> - test ( m , BSON ( " z " < < 5 ) , BSON ( " x " < < 1 < < " z " < < 5 ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class inc2 : public Base { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj m = BSON ( " $ inc " < < BSON ( " a . b " < < 1 ) ) ; <nl> - test ( m , BSONObj ( ) , BSON ( " a " < < BSON ( " b " < < 1 ) ) ) ; <nl> - test ( m , BSON ( " a " < < BSON ( " b " < < 2 ) ) , BSON ( " a " < < BSON ( " b " < < 3 ) ) ) ; <nl> - <nl> - m = BSON ( " $ inc " < < BSON ( " a . b " < < 1 < < " a . 
c " < < 1 ) ) ; <nl> - test ( m , BSONObj ( ) , BSON ( " a " < < BSON ( " b " < < 1 < < " c " < < 1 ) ) ) ; <nl> - <nl> - <nl> - } <nl> - } ; <nl> - <nl> - class set1 : public Base { <nl> - public : <nl> - void run ( ) { <nl> - test ( BSON ( " $ set " < < BSON ( " x " < < 17 ) ) , BSONObj ( ) , BSON ( " x " < < 17 ) ) ; <nl> - test ( BSON ( " $ set " < < BSON ( " x " < < 17 ) ) , BSON ( " x " < < 5 ) , BSON ( " x " < < 17 ) ) ; <nl> - <nl> - test ( BSON ( " $ set " < < BSON ( " x . a " < < 17 ) ) , BSON ( " z " < < 5 ) , BSON ( " x " < < BSON ( " a " < < 17 ) < < " z " < < 5 ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class push1 : public Base { <nl> - public : <nl> - void run ( ) { <nl> - test ( BSON ( " $ push " < < BSON ( " a " < < 5 ) ) , fromjson ( " { a : [ 1 ] } " ) , fromjson ( " { a : [ 1 , 5 ] } " ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - <nl> - class IncRewriteInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 2 ) ; <nl> - BSONObj mod = BSON ( " $ inc " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < 3 ) ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / Check if not applying in place changes anything . <nl> - class InRewriteForceNotInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 2 ) ; <nl> - BSONObj mod = BSON ( " $ inc " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < 3 ) ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class IncRewriteNestedArray { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ inc " < < BSON ( " a . 0 " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a . 0 " < < 3 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class IncRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 2 ) ; <nl> - BSONObj mod = BSON ( " $ inc " < < BSON ( " a " < < 1 ) < < " $ set " < < BSON ( " b " < < 2 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < 3 < < " b " < < 2 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class IncRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " c " < < 1 ) ; <nl> - BSONObj mod = BSON ( " $ inc " < < BSON ( " a " < < 1 ) < < " $ set " < < BSON ( " b " < < 2 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < 1 < < " b " < < 2 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / A no - op $ setOnInsert would not interfere with in - placeness and won ' t log . <nl> - class SetOnInsertRewriteInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 2 ) ; <nl> - BSONObj mod = BSON ( " $ setOnInsert " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSONObj ( ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / A no - op $ setOnInsert that was forced not in - place doesn ' t log . <nl> - class SetOnInsertRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 2 ) ; <nl> - BSONObj mod = BSON ( " $ setOnInsert " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - / / force not in place <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSONObj ( ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / Push is never applied in place <nl> - class PushRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " a " < < 3 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a . 2 " < < 3 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushSliceRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - / / { $ push : { a : { $ each : [ 3 ] , $ slice : - 2 } } } <nl> - BSONObj pushObj = BSON ( " $ each " < < BSON_ARRAY ( 3 ) < < " $ slice " < < - 2 ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " a " < < pushObj ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 2 < < 3 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushSortRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " x " < < BSON_ARRAY ( BSON ( " a " < < 1 ) < < <nl> - BSON ( " a " < < 2 ) ) ) ; <nl> - / / { $ push : { a : { $ each : [ { a : 3 } ] , $ slice : - 2 , $ sort : { a : 1 } } } } <nl> - BSONObj pushObj = BSON ( " $ each " < < BSON_ARRAY ( BSON ( " a " < < 3 ) ) < < <nl> - " $ slice " < < - 2 < < <nl> - " $ sort " < < BSON ( " a " < < 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " x " < < pushObj ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " x " < < <nl> - BSON_ARRAY ( BSON ( " a " < < 2 ) < < <nl> - BSON ( " a " < < 3 ) ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 1 ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushSliceRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 1 ) ; <nl> - / / { $ push : { a : { $ each : [ 1 , 2 ] , $ slice : - 2 } } } <nl> - BSONObj pushObj = BSON ( " $ each " < < BSON_ARRAY ( 1 < < 2 ) < < " $ slice " < < - 2 ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " a " < < pushObj ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushSliceRewriteNested { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = fromjson ( " { a : { b : [ 1 , 2 ] } } " ) ; <nl> - BSONObj mod = fromjson ( " { $ push : { ' a . b ' : { $ each : [ 3 ] , $ slice : - 2 } } } " ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a . b " < < BSON_ARRAY ( 2 < < 3 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushSortRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 1 ) ; <nl> - / / { $ push : { x : { $ each : [ { a : 1 } , { a : 2 } ] , $ slice : - 2 , $ sort : { a : 1 } } } } <nl> - BSONObj pushObj = BSON ( " $ each " < < BSON_ARRAY ( BSON ( " a " < < 1 ) < < <nl> - BSON ( " a " < < 2 ) ) < < <nl> - " $ slice " < < - 2 < < <nl> - " $ sort " < < BSON ( " a " < < 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ push " < < BSON ( " x " < < pushObj ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " x " < < BSON_ARRAY ( BSON ( " a " < < 1 ) < < <nl> - BSON ( " a " < < 2 ) ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushAllRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj modAll = BSON ( " $ pushAll " < < BSON ( " a " < < BSON_ARRAY ( 3 < < 4 < < 5 ) ) ) ; <nl> - ModSet modSetAll ( modAll ) ; <nl> - auto_ptr < ModSetState > modSetStateAll = modSetAll . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetStateAll - > canApplyInPlace ( ) ) ; <nl> - modSetStateAll - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 < < 3 < < 4 < < 5 ) ) ) , <nl> - modSetStateAll - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PushAllRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 1 ) ; <nl> - BSONObj modAll = BSON ( " $ pushAll " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 < < 3 ) ) ) ; <nl> - ModSet modSetAll ( modAll ) ; <nl> - auto_ptr < ModSetState > modSetStateAll = modSetAll . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetStateAll - > canApplyInPlace ( ) ) ; <nl> - modSetStateAll - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 < < 3 ) ) ) , <nl> - modSetStateAll - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / Pull is only in place if it ' s a no - op . <nl> - class PullRewriteInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj modMatcher = BSON ( " $ pull " < < BSON ( " a " < < BSON ( " $ gt " < < 3 ) ) ) ; <nl> - ModSet modSetMatcher ( modMatcher ) ; <nl> - auto_ptr < ModSetState > modSetStateMatcher = modSetMatcher . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetStateMatcher - > canApplyInPlace ( ) ) ; <nl> - modSetStateMatcher - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ) , <nl> - modSetStateMatcher - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PullRewriteForceNotInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj modMatcher = BSON ( " $ pull " < < BSON ( " a " < < BSON ( " $ gt " < < 3 ) ) ) ; <nl> - ModSet modSetMatcher ( modMatcher ) ; <nl> - auto_ptr < ModSetState > modSetStateMatcher = modSetMatcher . prepare ( obj ) ; <nl> - modSetStateMatcher - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ) , <nl> - modSetStateMatcher - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PullRewriteNonExistingUnsets { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj ; <nl> - BSONObj modMatcher = BSON ( " $ pull " < < BSON ( " a " < < BSON ( " $ gt " < < 3 ) ) ) ; <nl> - ModSet modSetMatcher ( modMatcher ) ; <nl> - auto_ptr < ModSetState > modSetStateMatcher = modSetMatcher . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetStateMatcher - > canApplyInPlace ( ) ) ; <nl> - modSetStateMatcher - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ unset " < < BSON ( " a " < < 1 ) ) , <nl> - modSetStateMatcher - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PullRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ pull " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 2 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PullRewriteLastExistingField { <nl> - public : <nl> - void run ( ) { <nl> - / / check last pull corner case <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ pull " < < BSON ( " a " < < 2 ) ) ; <nl> - ModSet modSetLast ( mod ) ; <nl> - auto_ptr < ModSetState > modSetStateLast = modSetLast . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetStateLast - > canApplyInPlace ( ) ) ; <nl> - modSetStateLast - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSONArray ( ) ) ) , <nl> - modSetStateLast - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PullRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 1 ) ; <nl> - BSONObj mod = BSON ( " $ pull " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ unset " < < BSON ( " a " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class TwoNestedPulls { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = fromjson ( " { a : { b : [ 1 , 2 ] , c : [ 1 , 2 ] } } " ) ; <nl> - BSONObj mod = fromjson ( " { $ pull : { ' a . b ' : 2 , ' a . c ' : 2 } } " ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( fromjson ( " { $ set : { ' a . b ' : [ 1 ] , ' a . c ' : [ 1 ] } } " ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / Pop is only applied in place if the target array remains the same size ( i . e . if <nl> - / / it is empty already . <nl> - class PopRewriteEmptyArray { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSONArray ( ) ) ; <nl> - BSONObj mod = BSON ( " $ pop " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSONArray ( ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PopRewriteLastElement { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ pop " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSONArray ( ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PopRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ pop " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PopRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ pop " < < BSON ( " b " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ unset " < < BSON ( " b " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / AddToSet is in place if it is a no - op . <nl> - class AddToSetRewriteInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ addToSet " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class AddToSetRewriteForceNotInPlace { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 < < 2 ) ) ; <nl> - BSONObj mod = BSON ( " $ addToSet " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a . 0 " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class AddToSetRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ addToSet " < < BSON ( " a " < < 2 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a . 1 " < < 2 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class AddToSetRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < BSON_ARRAY ( 1 ) ) ; <nl> - BSONObj mod = BSON ( " $ addToSet " < < BSON ( " b " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " b " < < BSON_ARRAY ( 1 ) ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / Rename doesn ' t log if both fields are not present . <nl> - class RenameRewriteBothNonExistent { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 1 ) ; <nl> - BSONObj mod = BSON ( " $ rename " < < BSON ( " b " < < " c " ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSONObj ( ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class RenameRewriteExistingToField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " b " < < 100 ) ; <nl> - BSONObj mod = BSON ( " $ rename " < < BSON ( " a " < < " b " ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > applyModsInPlace ( false ) ; <nl> - ASSERT_EQUALS ( BSONObj ( ) , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class RenameRewriteExistingFromField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 100 ) ; <nl> - BSONObj mod = BSON ( " $ rename " < < BSON ( " a " < < " b " ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " b " < < 100 ) < < " $ unset " < < BSON ( " a " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class RenameRewriteBothExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 100 < < " b " < < 200 ) ; <nl> - BSONObj mod = BSON ( " $ rename " < < BSON ( " a " < < " b " ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " b " < < 100 ) < < " $ unset " < < BSON ( " a " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - / / $ bit is never applied in place currently <nl> - class BitRewriteExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 0 ) ; <nl> - BSONObj mod = BSON ( " $ bit " < < BSON ( " a " < < BSON ( " or " < < 1 ) ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . 
prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " a " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class BitRewriteNonExistingField { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 0 ) ; <nl> - BSONObj mod = BSON ( " $ bit " < < BSON ( " b " < < BSON ( " or " < < 1 ) ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " b " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class SetIsNotRewritten { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 0 ) ; <nl> - BSONObj mod = BSON ( " $ set " < < BSON ( " b " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ set " < < BSON ( " b " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class UnsetIsNotRewritten { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " a " < < 0 ) ; <nl> - BSONObj mod = BSON ( " $ unset " < < BSON ( " a " < < 1 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_FALSE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - modSetState - > createNewFromMods ( ) ; <nl> - ASSERT_EQUALS ( BSON ( " $ unset " < < BSON ( " a " < < 1 ) ) , <nl> - modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class MultiSets { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj obj = BSON ( " _id " < < 1 < < " a " < < 1 < < " b " < < 1 ) ; <nl> - BSONObj mod = BSON ( " $ set " < < BSON ( " a " < < 2 < < " b " < < 2 ) ) ; <nl> - ModSet modSet ( mod ) ; <nl> - auto_ptr < ModSetState > modSetState = modSet . prepare ( obj ) ; <nl> - ASSERT_TRUE ( modSetState - > canApplyInPlace ( ) ) ; <nl> - ASSERT_EQUALS ( mod , modSetState - > getOpLogRewrite ( ) ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PositionalWithoutElemMatchKey { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSONObj ( ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " a . $ " < < 1 ) ) ; <nl> - ModSet modSet ( modSpec ) ; <nl> - <nl> - / / A positional operator must be replaced with an array index before calling <nl> - / / prepare ( ) . <nl> - ASSERT_THROWS ( modSet . prepare ( querySpec ) , UserException ) ; <nl> - } <nl> - } ; <nl> - <nl> - class PositionalWithoutNestedElemMatchKey { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSONObj ( ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " a . b . c . $ . e . f " < < 1 ) ) ; <nl> - ModSet modSet ( modSpec ) ; <nl> - <nl> - / / A positional operator must be replaced with an array index before calling <nl> - / / prepare ( ) . <nl> - ASSERT_THROWS ( modSet . prepare ( querySpec ) , UserException ) ; <nl> - } <nl> - } ; <nl> - <nl> - class DbrefPassesPositionalValidation { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSONObj ( ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " a . $ ref " < < " foo " < < " a . 
$ id " < < 0 ) ) ; <nl> - ModSet modSet ( modSpec ) ; <nl> - <nl> - / / A positional operator must be replaced with an array index before calling <nl> - / / prepare ( ) , but $ prefixed fields encoding dbrefs are allowed . <nl> - modSet . prepare ( querySpec ) ; / / Does not throw . <nl> - } <nl> - } ; <nl> - <nl> - class NoPositionalValidationOnReplication { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSONObj ( ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " a . $ " < < 1 ) ) ; <nl> - ModSet modSet ( modSpec , IndexPathSet ( ) , true ) ; <nl> - <nl> - / / No positional operator validation is performed if a ModSet is ' forReplication ' . <nl> - modSet . prepare ( querySpec ) ; / / Does not throw . <nl> - } <nl> - } ; <nl> - <nl> - class NoPositionalValidationOnPartialFixedArrayReplication { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSONObj ( BSON ( " a . b " < < 1 ) ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " a . $ . b . $ " < < 1 ) ) ; <nl> - ModSet modSet ( modSpec , IndexPathSet ( ) , true ) ; <nl> - <nl> - / / Attempt to fix the positional operator fields . <nl> - scoped_ptr < ModSet > fixedMods ( modSet . fixDynamicArray ( " 0 " ) ) ; <nl> - <nl> - / / The first positional field is replaced , but the second is not ( until SERVER - 831 <nl> - / / is implemented ) . <nl> - ASSERT ( fixedMods - > haveModForField ( " a . 0 . b . $ " ) ) ; <nl> - <nl> - / / No positional operator validation is performed if a ModSet is ' forReplication ' , <nl> - / / even after an attempt to fix the positional operator fields . <nl> - fixedMods - > prepare ( querySpec ) ; / / Does not throw . <nl> - } <nl> - } ; <nl> - <nl> - class CreateNewFromQueryExcludeNot { <nl> - public : <nl> - void run ( ) { <nl> - BSONObj querySpec = BSON ( " a " < < BSON ( " $ not " < < BSON ( " $ lt " < < 1 ) ) ) ; <nl> - BSONObj modSpec = BSON ( " $ set " < < BSON ( " b " < < 1 ) ) ; <nl> - ModSet modSet ( modSpec ) ; <nl> - <nl> - / / Because a $ not operator is applied to the ' a ' field , the ' a ' field is excluded <nl> - / / from the resulting document . <nl> - ASSERT_EQUALS ( BSON ( " b " < < 1 ) , modSet . 
createNewFromQuery ( querySpec ) ) ; <nl> - } <nl> - } ; <nl> - } ; <nl> <nl> namespace basic { <nl> class Base : public ClientBase { <nl> namespace UpdateTests { <nl> add < DontDropEmpty > ( ) ; <nl> add < InsertInEmpty > ( ) ; <nl> add < IndexParentOfMod > ( ) ; <nl> - add < IndexModSet > ( ) ; <nl> add < PreserveIdWithIndex > ( ) ; <nl> add < CheckNoMods > ( ) ; <nl> add < UpdateMissingToNull > ( ) ; <nl> add < TwoModsWithinDuplicatedField > ( ) ; <nl> add < ThreeModsWithinDuplicatedField > ( ) ; <nl> add < TwoModsBeforeExistingField > ( ) ; <nl> - <nl> - add < ModSetTests : : internal1 > ( ) ; <nl> - add < ModSetTests : : inc1 > ( ) ; <nl> - add < ModSetTests : : inc2 > ( ) ; <nl> - add < ModSetTests : : set1 > ( ) ; <nl> - add < ModSetTests : : push1 > ( ) ; <nl> - <nl> - add < ModSetTests : : IncRewriteInPlace > ( ) ; <nl> - add < ModSetTests : : InRewriteForceNotInPlace > ( ) ; <nl> - add < ModSetTests : : IncRewriteNestedArray > ( ) ; <nl> - add < ModSetTests : : IncRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : IncRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : SetOnInsertRewriteInPlace > ( ) ; <nl> - add < ModSetTests : : SetOnInsertRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PushRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PushSliceRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PushSortRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PushRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : PushSliceRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : PushSliceRewriteNested > ( ) ; <nl> - add < ModSetTests : : PushSortRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : PushAllRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PushAllRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : PullRewriteInPlace > ( ) ; <nl> - add < ModSetTests : : PullRewriteForceNotInPlace > ( ) ; <nl> - add < ModSetTests : : PullRewriteNonExistingUnsets > ( ) ; <nl> - add < ModSetTests : : PullRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PullRewriteLastExistingField > ( ) ; <nl> - add < ModSetTests : : PullRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : TwoNestedPulls > ( ) ; <nl> - add < ModSetTests : : PopRewriteEmptyArray > ( ) ; <nl> - add < ModSetTests : : PopRewriteLastElement > ( ) ; <nl> - add < ModSetTests : : PopRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : PopRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : AddToSetRewriteInPlace > ( ) ; <nl> - add < ModSetTests : : AddToSetRewriteForceNotInPlace > ( ) ; <nl> - add < ModSetTests : : AddToSetRewriteExistingField > ( ) ; <nl> - add < ModSetTests : : AddToSetRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : RenameRewriteBothNonExistent > ( ) ; <nl> - add < ModSetTests : : RenameRewriteExistingToField > ( ) ; <nl> - add < ModSetTests : : RenameRewriteExistingFromField > ( ) ; <nl> - add < ModSetTests : : RenameRewriteBothExistingField > ( ) ; <nl> - add < ModSetTests : : BitRewriteExistingField > ( ) ; <nl> - / / XXX $ bit over non - existing field is missing . Probably out of scope to fix it here . 
<nl> - / / add < ModSetTests : : BitRewriteNonExistingField > ( ) ; <nl> - add < ModSetTests : : SetIsNotRewritten > ( ) ; <nl> - add < ModSetTests : : UnsetIsNotRewritten > ( ) ; <nl> - add < ModSetTests : : MultiSets > ( ) ; <nl> - add < ModSetTests : : PositionalWithoutElemMatchKey > ( ) ; <nl> - add < ModSetTests : : PositionalWithoutNestedElemMatchKey > ( ) ; <nl> - add < ModSetTests : : DbrefPassesPositionalValidation > ( ) ; <nl> - add < ModSetTests : : NoPositionalValidationOnReplication > ( ) ; <nl> - add < ModSetTests : : NoPositionalValidationOnPartialFixedArrayReplication > ( ) ; <nl> - add < ModSetTests : : CreateNewFromQueryExcludeNot > ( ) ; <nl> - <nl> add < basic : : inc1 > ( ) ; <nl> add < basic : : inc2 > ( ) ; <nl> add < basic : : inc3 > ( ) ; <nl> | SERVER - 10497 Remove legacy update framework | mongodb/mongo | dda99507af1eef8368d5d26a226d4d94f50d5a30 | 2013-08-13T22:02:42Z |
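The removed appendNewFromMod() $push branch sorts the $each array with ProjectKeyCmp and then keeps only the trailing $slice elements. The standalone sketch below restates that sort-then-slice step in plain C++, with ints standing in for BSONObj and std::sort's default ordering standing in for ProjectKeyCmp; the assumption that the slice count arrives already normalized to a non-negative value is suggested by the `if ( slice > 0 )` guard in the deleted code. This is an illustration of the semantics, not the MongoDB implementation.

#include <algorithm>
#include <cassert>
#include <vector>

// Sort-then-slice semantics of $push { $each: [...], $slice: N, $sort: ... }
// for a missing target field, as in the removed appendNewFromMod() branch:
// sort the $each array, then keep only the last `slice` elements. A slice of
// zero resets the array to empty, per the comment in the deleted code.
std::vector<int> sortAndSlice(std::vector<int> each, long long slice) {
    std::sort(each.begin(), each.end());  // ProjectKeyCmp orders BSONObjs by sort pattern
    if (slice <= 0) {
        return {};                        // $slice: 0 leaves an empty array
    }
    long long skip = std::max(0LL, static_cast<long long>(each.size()) - slice);
    return std::vector<int>(each.begin() + skip, each.end());
}

int main() {
    // Mirrors the PushSortRewriteExistingField scenario: keep the 2 largest.
    std::vector<int> out = sortAndSlice({3, 1, 2}, 2);
    assert(out.size() == 2 && out[0] == 2 && out[1] == 3);
    return 0;
}

The skip computation matches the `skip -- > 0` idiom in the deleted loops: count how many leading elements fall outside the window and drop them before copying.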
mmm a / tensorflow / lite / micro / examples / micro_speech / CMSIS / Makefile . inc <nl> ppp b / tensorflow / lite / micro / examples / micro_speech / CMSIS / Makefile . inc <nl> ifneq ( $ ( filter CMSIS , $ ( ALL_TAGS ) ) , ) <nl> tensorflow / lite / micro / examples / micro_speech / CMSIS / hanning . h \ <nl> tensorflow / lite / micro / examples / micro_speech / CMSIS / sin_1k . h \ <nl> third_party / CMSIS_ext / README . md \ <nl> - third_party / CMSIS_ext / arm_cmplx_mag_squared_q10p6 . h <nl> PREPROCESSOR_TEST_SRCS + = $ ( CMSIS_PREPROCESSOR_SRCS ) <nl> PREPROCESSOR_TEST_HDRS + = $ ( CMSIS_PREPROCESSOR_HDRS ) <nl> ifneq ( $ ( filter CMSIS , $ ( ALL_TAGS ) ) , ) <nl> MICRO_SPEECH_HDRS + = $ ( CMSIS_PREPROCESSOR_HDRS ) <nl> <nl> THIRD_PARTY_CC_SRCS + = \ <nl> - $ ( MAKEFILE_DIR ) / downloads / CMSIS_ext / arm_cmplx_mag_squared_q10p6 . c \ <nl> $ ( MAKEFILE_DIR ) / downloads / cmsis / CMSIS / DSP / Source / BasicMathFunctions / arm_mult_q15 . c \ <nl> $ ( MAKEFILE_DIR ) / downloads / cmsis / CMSIS / DSP / Source / TransformFunctions / arm_bitreversal . c \ <nl> $ ( MAKEFILE_DIR ) / downloads / cmsis / CMSIS / DSP / Source / TransformFunctions / arm_rfft_init_q15 . c \ <nl> | Removed non - existent ` arm_cmplx_mag_squared_q10p6 . c ` from ` micro_speech ` example Make | tensorflow/tensorflow | 95a7f08f1eacebb6d436dde8ab3d03e29c5a9536 | 2020-02-07T09:24:33Z |
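The Makefile change above only deletes build references to arm_cmplx_mag_squared_q10p6.c and its header; the file's contents are not part of this document. Judging by the name alone, it computed the squared magnitude of interleaved complex q15 samples with a Q10.6 fixed-point result. The sketch below is a hypothetical reconstruction for orientation only: the function name, the signature, and in particular the right-shift by 24 that rescales to Q10.6 are assumptions, not recovered code.

#include <cstddef>
#include <cstdint>

// Hypothetical kernel in the spirit of the removed file: src holds
// interleaved complex q15 data (re, im, re, im, ...), dst receives one
// magnitude-squared value per complex sample. The 64-bit accumulator avoids
// overflow at re = im = -32768; the shift to an assumed Q10.6 output format
// is a guess based on the "q10p6" suffix in the file name.
void cmplx_mag_squared_q10p6(const int16_t* src, int16_t* dst, size_t num_samples) {
    for (size_t i = 0; i < num_samples; ++i) {
        int64_t re = src[2 * i];
        int64_t im = src[2 * i + 1];
        int64_t acc = re * re + im * im;          // q15 * q15 products sum in 2.30
        dst[i] = static_cast<int16_t>(acc >> 24); // rescale to the assumed Q10.6
    }
}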
mmm a / jstests / core / getmore_invalidation . js <nl> ppp b / jstests / core / getmore_invalidation . js <nl> <nl> <nl> var count ; <nl> var cursor ; <nl> + var nextDoc ; <nl> var x ; <nl> var y ; <nl> <nl> <nl> / / of the geo near search . Just make sure that we can exhaust the cursor without crashing . <nl> assert . gte ( cursor . itcount ( ) , 0 ) ; <nl> <nl> + / / Case # 8 : 2d near with mutation invalidation . <nl> + t . drop ( ) ; <nl> + t . ensureIndex ( { geo : " 2d " } ) ; <nl> + for ( x = - 1 ; x < 1 ; x + + ) { <nl> + for ( y = - 1 ; y < 1 ; y + + ) { <nl> + assert . writeOK ( t . insert ( { geo : [ x , y ] } ) ) ; <nl> + } <nl> + } <nl> + <nl> + cursor = t . find ( { geo : { $ near : [ 0 , 0 ] , $ maxDistance : 5 } } ) . batchSize ( 2 ) ; <nl> + cursor . next ( ) ; <nl> + cursor . next ( ) ; <nl> + <nl> + / / Update all documents in the collection to have position [ 15 , 15 ] . <nl> + assert . writeOK ( t . update ( { } , { $ set : { geo : [ 15 , 15 ] } } , false , true ) ) ; <nl> + <nl> + / / The old version of the document should be returned ( the update should not be reflected in the <nl> + / / results of the near search ) . <nl> + nextDoc = cursor . next ( ) ; <nl> + printjson ( nextDoc ) ; <nl> + assert . neq ( [ 15 , 15 ] , nextDoc . geo ) ; <nl> + assert ( nextDoc . geo [ 0 ] = = = 0 | | nextDoc . geo [ 1 ] = = = 0 ) ; <nl> + <nl> + / / Case # 9 : 2dsphere near with mutation invalidation . <nl> + t . drop ( ) ; <nl> + t . ensureIndex ( { geo : " 2dsphere " } ) ; <nl> + for ( x = - 1 ; x < 1 ; x + + ) { <nl> + for ( y = - 1 ; y < 1 ; y + + ) { <nl> + assert . writeOK ( t . insert ( { geo : [ x , y ] } ) ) ; <nl> + } <nl> + } <nl> + <nl> + cursor = t . find ( { geo : { $ nearSphere : [ 0 , 0 ] , $ maxDistance : 5 } } ) . batchSize ( 2 ) ; <nl> + cursor . next ( ) ; <nl> + cursor . next ( ) ; <nl> + <nl> + / / Update all documents in the collection to have position [ 15 , 15 ] . <nl> + assert . writeOK ( t . update ( { } , { $ set : { geo : [ 15 , 15 ] } } , false , true ) ) ; <nl> + <nl> + / / The old version of the document should be returned ( the update should not be reflected in the <nl> + / / results of the near search ) . <nl> + nextDoc = cursor . next ( ) ; <nl> + printjson ( nextDoc ) ; <nl> + assert . neq ( [ 15 , 15 ] , nextDoc . geo ) ; <nl> + assert ( nextDoc . geo [ 0 ] = = = 0 | | nextDoc . geo [ 1 ] = = = 0 ) ; <nl> + <nl> } ) ( ) ; <nl> mmm a / src / mongo / db / catalog / collection . cpp <nl> ppp b / src / mongo / db / catalog / collection . cpp <nl> namespace mongo { <nl> } <nl> } <nl> <nl> - / / Broadcast the mutation so that query results stay correct . <nl> - _cursorManager . invalidateDocument ( txn , oldLocation , INVALIDATION_MUTATION ) ; <nl> invariant ( txnId = = txn - > recoveryUnit ( ) - > getMyTransactionCount ( ) ) ; <nl> return newLocation ; <nl> } <nl> namespace mongo { <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> + Status Collection : : recordStoreGoingToUpdateInPlace ( OperationContext * txn , <nl> + const RecordId & loc ) { <nl> + / / Broadcast the mutation so that query results stay correct . <nl> + _cursorManager . invalidateDocument ( txn , loc , INVALIDATION_MUTATION ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> <nl> Status Collection : : updateDocumentWithDamages ( OperationContext * txn , <nl> const RecordId & loc , <nl> mmm a / src / mongo / db / catalog / collection . h <nl> ppp b / src / mongo / db / catalog / collection . 
h <nl> namespace mongo { <nl> * this is NOT safe through a yield right now <nl> * not sure if it will be , or what yet <nl> * / <nl> - class Collection : CappedDocumentDeleteCallback , UpdateMoveNotifier { <nl> + class Collection : CappedDocumentDeleteCallback , UpdateNotifier { <nl> public : <nl> Collection ( OperationContext * txn , <nl> const StringData & fullNS , <nl> namespace mongo { <nl> const char * oldBuffer , <nl> size_t oldSize ) ; <nl> <nl> + Status recordStoreGoingToUpdateInPlace ( OperationContext * txn , <nl> + const RecordId & loc ) ; <nl> + <nl> Status aboutToDeleteCapped ( OperationContext * txn , const RecordId & loc ) ; <nl> <nl> / * * <nl> mmm a / src / mongo / db / storage / devnull / devnull_kv_engine . cpp <nl> ppp b / src / mongo / db / storage / devnull / devnull_kv_engine . cpp <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> return StatusWith < RecordId > ( oldLocation ) ; <nl> } <nl> <nl> mmm a / src / mongo / db / storage / in_memory / in_memory_record_store . cpp <nl> ppp b / src / mongo / db / storage / in_memory / in_memory_record_store . cpp <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> InMemoryRecord * oldRecord = recordFor ( loc ) ; <nl> int oldLen = oldRecord - > size ; <nl> <nl> namespace mongo { <nl> 10003 ) ; <nl> } <nl> <nl> + if ( notifier ) { <nl> + / / The in - memory KV engine uses the invalidation framework ( does not support <nl> + / / doc - locking ) , and therefore must notify that it is updating a document . <nl> + Status callbackStatus = notifier - > recordStoreGoingToUpdateInPlace ( txn , loc ) ; <nl> + if ( ! callbackStatus . isOK ( ) ) { <nl> + return StatusWith < RecordId > ( callbackStatus ) ; <nl> + } <nl> + } <nl> + <nl> InMemoryRecord newRecord ( len ) ; <nl> memcpy ( newRecord . data . get ( ) , data , len ) ; <nl> <nl> mmm a / src / mongo / db / storage / in_memory / in_memory_record_store . h <nl> ppp b / src / mongo / db / storage / in_memory / in_memory_record_store . h <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) ; <nl> + UpdateNotifier * notifier ) ; <nl> <nl> virtual bool updateWithDamagesSupported ( ) const ; <nl> <nl> mmm a / src / mongo / db / storage / mmap_v1 / heap_record_store_btree . h <nl> ppp b / src / mongo / db / storage / mmap_v1 / heap_record_store_btree . h <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> invariant ( false ) ; <nl> } <nl> <nl> mmm a / src / mongo / db / storage / mmap_v1 / record_store_v1_base . cpp <nl> ppp b / src / mongo / db / storage / mmap_v1 / record_store_v1_base . cpp <nl> namespace mongo { <nl> const char * data , <nl> int dataSize , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> Record * oldRecord = recordFor ( DiskLoc : : fromRecordId ( oldLocation ) ) ; <nl> if ( oldRecord - > netLength ( ) > = dataSize ) { <nl> + / / Make sure to notify other queries before we do an in - place update . <nl> + if ( notifier ) { <nl> + Status callbackStatus = notifier - > recordStoreGoingToUpdateInPlace ( txn , <nl> + oldLocation ) ; <nl> + if ( ! callbackStatus . 
isOK ( ) ) <nl> + return StatusWith < RecordId > ( callbackStatus ) ; <nl> + } <nl> + <nl> / / we fit <nl> memcpy ( txn - > recoveryUnit ( ) - > writingPtr ( oldRecord - > data ( ) , dataSize ) , data , dataSize ) ; <nl> return StatusWith < RecordId > ( oldLocation ) ; <nl> mmm a / src / mongo / db / storage / mmap_v1 / record_store_v1_base . h <nl> ppp b / src / mongo / db / storage / mmap_v1 / record_store_v1_base . h <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) ; <nl> + UpdateNotifier * notifier ) ; <nl> <nl> virtual bool updateWithDamagesSupported ( ) const ; <nl> <nl> mmm a / src / mongo / db / storage / record_store . h <nl> ppp b / src / mongo / db / storage / record_store . h <nl> namespace mongo { <nl> / * * <nl> * @ see RecordStore : : updateRecord <nl> * / <nl> - class UpdateMoveNotifier { <nl> + class UpdateNotifier { <nl> public : <nl> - virtual ~ UpdateMoveNotifier ( ) { } <nl> + virtual ~ UpdateNotifier ( ) { } <nl> virtual Status recordStoreGoingToMove ( OperationContext * txn , <nl> const RecordId & oldLocation , <nl> const char * oldBuffer , <nl> size_t oldSize ) = 0 ; <nl> + virtual Status recordStoreGoingToUpdateInPlace ( OperationContext * txn , <nl> + const RecordId & loc ) = 0 ; <nl> } ; <nl> <nl> / * * <nl> namespace mongo { <nl> bool enforceQuota ) = 0 ; <nl> <nl> / * * <nl> - * @ param notifier - this is called if the document is moved <nl> - * it is to be called after the document has been written to new <nl> - * location , before deleted from old . <nl> + * @ param notifier - Only used by record stores which do not support doc - locking . <nl> + * In the case of a document move , this is called after the document <nl> + * has been written to the new location , but before it is deleted from <nl> + * the old location . <nl> + * In the case of an in - place update , this is called just before the <nl> + * in - place write occurs . <nl> * @ return Status or RecordId , RecordId might be different <nl> * / <nl> virtual StatusWith < RecordId > updateRecord ( OperationContext * txn , <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) = 0 ; <nl> + UpdateNotifier * notifier ) = 0 ; <nl> <nl> / * * <nl> * @ return Returns ' false ' if this record store does not implement <nl> mmm a / src / mongo / db / storage / record_store_test_updaterecord . cpp <nl> ppp b / src / mongo / db / storage / record_store_test_updaterecord . cpp <nl> namespace mongo { <nl> } <nl> } <nl> <nl> - / / Insert a record , try to update it , and examine how the UpdateMoveNotifier is called . <nl> + / / Insert a record , try to update it , and examine how the UpdateNotifier is called . <nl> TEST ( RecordStoreTestHarness , UpdateRecordWithMoveNotifier ) { <nl> scoped_ptr < HarnessHelper > harnessHelper ( newHarnessHelper ( ) ) ; <nl> scoped_ptr < RecordStore > rs ( harnessHelper - > newNonCappedRecordStore ( ) ) ; <nl> namespace mongo { <nl> { <nl> scoped_ptr < OperationContext > opCtx ( harnessHelper - > newOperationContext ( ) ) ; <nl> { <nl> - UpdateMoveNotifierSpy umn ( opCtx . get ( ) , loc , oldData . c_str ( ) , oldData . size ( ) ) ; <nl> + UpdateNotifierSpy umn ( opCtx . get ( ) , loc , oldData . c_str ( ) , oldData . size ( ) ) ; <nl> <nl> WriteUnitOfWork uow ( opCtx . get ( ) ) ; <nl> StatusWith < RecordId > res = rs - > updateRecord ( opCtx . get ( ) , <nl> namespace mongo { <nl> false , <nl> & umn ) ; <nl> ASSERT_OK ( res . 
getStatus ( ) ) ; <nl> - / / UpdateMoveNotifier : : recordStoreGoingToMove ( ) called only if <nl> + / / UpdateNotifier : : recordStoreGoingToMove ( ) called only if <nl> / / the RecordId for the record changes <nl> if ( loc = = res . getValue ( ) ) { <nl> - ASSERT_EQUALS ( 0 , umn . getNumCalls ( ) ) ; <nl> + ASSERT_EQUALS ( 0 , umn . numMoveCallbacks ( ) ) ; <nl> + / / Only MMAP v1 is required to use the UpdateNotifier for in - place updates , <nl> + / / so the number of callbacks is expected to be 0 for non - MMAP storage engines . <nl> + ASSERT_GTE ( 1 , umn . numInPlaceCallbacks ( ) ) ; <nl> } else { <nl> - ASSERT_EQUALS ( 1 , umn . getNumCalls ( ) ) ; <nl> + ASSERT_EQUALS ( 1 , umn . numMoveCallbacks ( ) ) ; <nl> + ASSERT_EQUALS ( 0 , umn . numInPlaceCallbacks ( ) ) ; <nl> } <nl> loc = res . getValue ( ) ; <nl> uow . commit ( ) ; <nl> mmm a / src / mongo / db / storage / record_store_test_updaterecord . h <nl> ppp b / src / mongo / db / storage / record_store_test_updaterecord . h <nl> <nl> namespace mongo { <nl> namespace { <nl> <nl> - class UpdateMoveNotifierSpy : public UpdateMoveNotifier { <nl> + class UpdateNotifierSpy : public UpdateNotifier { <nl> public : <nl> - UpdateMoveNotifierSpy ( OperationContext * txn , const RecordId & loc , <nl> + UpdateNotifierSpy ( OperationContext * txn , const RecordId & loc , <nl> const char * buf , size_t size ) <nl> - : _txn ( txn ) , _loc ( loc ) , _data ( buf , size ) , nCalls ( 0 ) { <nl> + : _txn ( txn ) , <nl> + _loc ( loc ) , <nl> + _data ( buf , size ) , <nl> + nMoveCalls ( 0 ) , <nl> + nInPlaceCalls ( 0 ) { <nl> } <nl> <nl> - ~ UpdateMoveNotifierSpy ( ) { } <nl> + ~ UpdateNotifierSpy ( ) { } <nl> <nl> Status recordStoreGoingToMove ( OperationContext * txn , <nl> const RecordId & oldLocation , <nl> const char * oldBuffer , <nl> size_t oldSize ) { <nl> - nCalls + + ; <nl> + nMoveCalls + + ; <nl> ASSERT_EQUALS ( _txn , txn ) ; <nl> ASSERT_EQUALS ( _loc , oldLocation ) ; <nl> ASSERT_EQUALS ( _data , oldBuffer ) ; <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> - int getNumCalls ( ) const { return nCalls ; } <nl> + Status recordStoreGoingToUpdateInPlace ( OperationContext * txn , <nl> + const RecordId & loc ) { <nl> + nInPlaceCalls + + ; <nl> + ASSERT_EQUALS ( _txn , txn ) ; <nl> + ASSERT_EQUALS ( _loc , loc ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + int numMoveCallbacks ( ) const { return nMoveCalls ; } <nl> + <nl> + int numInPlaceCallbacks ( ) const { return nInPlaceCalls ; } <nl> <nl> private : <nl> OperationContext * _txn ; <nl> RecordId _loc ; <nl> std : : string _data ; <nl> <nl> - int nCalls ; / / to verify that recordStoreGoingToMove ( ) gets called once <nl> + / / To verify the number of callbacks to the notifier . <nl> + int nMoveCalls ; <nl> + int nInPlaceCalls ; <nl> } ; <nl> <nl> } / / namespace <nl> mmm a / src / mongo / db / storage / rocks / rocks_record_store . cpp <nl> ppp b / src / mongo / db / storage / rocks / rocks_record_store . cpp <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> RocksRecoveryUnit * ru = RocksRecoveryUnit : : getRocksRecoveryUnit ( txn ) ; <nl> if ( ! ru - > transaction ( ) - > registerWrite ( _getTransactionID ( loc ) ) ) { <nl> throw WriteConflictException ( ) ; <nl> mmm a / src / mongo / db / storage / rocks / rocks_record_store . h <nl> ppp b / src / mongo / db / storage / rocks / rocks_record_store . 
h <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) ; <nl> + UpdateNotifier * notifier ) ; <nl> <nl> virtual bool updateWithDamagesSupported ( ) const ; <nl> <nl> mmm a / src / mongo / db / storage / wiredtiger / wiredtiger_record_store . cpp <nl> ppp b / src / mongo / db / storage / wiredtiger / wiredtiger_record_store . cpp <nl> namespace { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) { <nl> + UpdateNotifier * notifier ) { <nl> WiredTigerCursor curwrap ( _uri , _instanceId , true , txn ) ; <nl> curwrap . assertInActiveTxn ( ) ; <nl> WT_CURSOR * c = curwrap . get ( ) ; <nl> mmm a / src / mongo / db / storage / wiredtiger / wiredtiger_record_store . h <nl> ppp b / src / mongo / db / storage / wiredtiger / wiredtiger_record_store . h <nl> namespace mongo { <nl> const char * data , <nl> int len , <nl> bool enforceQuota , <nl> - UpdateMoveNotifier * notifier ) ; <nl> + UpdateNotifier * notifier ) ; <nl> <nl> virtual bool updateWithDamagesSupported ( ) const ; <nl> <nl> | SERVER - 16959 send INVALIDATION_MUTATION message prior to an in - place update | mongodb/mongo | 34f4776efba9205b9969232bce905f681ac8bac3 | 2015-01-22T00:23:45Z |
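The split into recordStoreGoingToMove and recordStoreGoingToUpdateInPlace above implies a particular call order in the record store's update path. The following is a minimal standalone sketch of that contract, with stand-in Status and RecordId types rather than MongoDB's real ones:

// Minimal sketch (stand-in types, not MongoDB's actual code) of how a record
// store drives the two UpdateNotifier callbacks: the in-place callback fires
// just before overwriting the old record, while the move callback fires after
// the copy to the new location but before the old copy is deleted.
#include <cstddef>
#include <cstring>

struct Status {
    bool ok;
    static Status OK() { return Status{true}; }
    bool isOK() const { return ok; }
};
using RecordId = long long;

struct UpdateNotifier {
    virtual ~UpdateNotifier() {}
    virtual Status recordStoreGoingToMove(RecordId oldLoc,
                                          const char* oldBuf, size_t oldSize) = 0;
    virtual Status recordStoreGoingToUpdateInPlace(RecordId loc) = 0;
};

Status updateRecord(char* oldBuf, size_t oldSize, RecordId loc,
                    const char* data, size_t len, UpdateNotifier* notifier) {
    if (len <= oldSize) {  // record still fits: update in place
        if (notifier) {
            Status s = notifier->recordStoreGoingToUpdateInPlace(loc);
            if (!s.isOK()) return s;
        }
        std::memcpy(oldBuf, data, len);
        return Status::OK();
    }
    // ... write the record to a new location here ...
    if (notifier) {
        Status s = notifier->recordStoreGoingToMove(loc, oldBuf, oldSize);
        if (!s.isOK()) return s;
    }
    // ... now delete the record at the old location ...
    return Status::OK();
}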
mmm a / stdlib / core / FloatingPoint . swift . gyb <nl> ppp b / stdlib / core / FloatingPoint . swift . gyb <nl> def getInfinityExponent ( bits ) : <nl> public struct $ { Self } { <nl> var value : Builtin . FPIEEE $ { bits } <nl> <nl> + / / / Create an instance initialized to zero . <nl> @ transparent public <nl> init ( ) { <nl> var zero : Int64 = 0 <nl> public struct $ { Self } { <nl> value = v <nl> } <nl> <nl> + / / / Create an instance initialized to ` value ` . <nl> @ transparent public <nl> init ( _ value : $ { Self } ) { self = value } <nl> } <nl> extension $ { Self } : FloatingPointType { <nl> return _toBitPattern ( ) & mask <nl> } <nl> <nl> + / / / The positive infinity . <nl> public static var infinity : $ { Self } { <nl> return _fromBitPattern ( $ { getInfBitPattern ( bits ) } ) <nl> } <nl> <nl> + / / / A quiet NaN . <nl> public static var NaN : $ { Self } { <nl> return quietNaN <nl> } <nl> <nl> + / / / A quiet NaN . <nl> public static var quietNaN : $ { Self } { <nl> return _fromBitPattern ( $ { getQuietNaNBitPattern ( bits ) } ) <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is negative <nl> public var isSignMinus : Bool { <nl> return __getSignBit ( ) = = 1 <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is normal ( not zero , subnormal , infinity , or <nl> + / / / NaN ) . <nl> public var isNormal : Bool { <nl> var biasedExponent = __getBiasedExponent ( ) <nl> return biasedExponent ! = $ { getInfinityExponent ( bits ) } & & <nl> biasedExponent ! = 0 <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is zero , subnormal , or normal ( not infinity <nl> + / / / or NaN ) . <nl> public var isFinite : Bool { <nl> return __getBiasedExponent ( ) ! = $ { getInfinityExponent ( bits ) } <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is + 0 . 0 or - 0 . 0 . <nl> public var isZero : Bool { <nl> / / Mask out the sign bit . <nl> var mask : _BitsType = ( 1 < < ( $ { bits } - 1 ) ) - 1 <nl> return ( _toBitPattern ( ) & mask ) = = 0 <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is subnormal . <nl> public var isSubnormal : Bool { <nl> if __getBiasedExponent ( ) = = 0 { <nl> return __getSignificand ( ) ! = 0 <nl> extension $ { Self } : FloatingPointType { <nl> / / condition is going to be faster . <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is infinity . <nl> public var isInfinite : Bool { <nl> if __getBiasedExponent ( ) = = $ { getInfinityExponent ( bits ) } { <nl> return __getSignificand ( ) = = 0 <nl> extension $ { Self } : FloatingPointType { <nl> / / return abs ( self ) = = $ { Self } . infinity ( ) <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is NaN . <nl> public var isNaN : Bool { <nl> if __getBiasedExponent ( ) = = $ { getInfinityExponent ( bits ) } { <nl> return __getSignificand ( ) ! = 0 <nl> extension $ { Self } : FloatingPointType { <nl> / / return self ! = self <nl> } <nl> <nl> + / / / ` true ` iff ` self ` is a signaling NaN . <nl> public var isSignaling : Bool { <nl> if __getBiasedExponent ( ) = = $ { getInfinityExponent ( bits ) } { <nl> / / IEEE - 754R 2008 6 . 2 . 1 : A signaling NaN bit string should be encoded <nl> extension $ { Self } : FloatingPointType { <nl> <nl> / / Not @ transparent because the function is too complex . <nl> extension $ { Self } / * : FloatingPointType * / { <nl> + / / / The IEEE 754 " class " of this type . 
<nl> public var floatingPointClass : FloatingPointClassification { <nl> get { <nl> var biasedExponent = __getBiasedExponent ( ) <nl> | [ stdlib ] Propagate FloatingPointType docs | apple/swift | b2d0ef6fefc27729a7998ae7c294e994c4851906 | 2014-09-23T18:52:21Z |
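The predicates documented above (isNormal, isFinite, isNaN, and so on) follow the standard IEEE 754 classification. For readers outside Swift, the same classes can be inspected with C++'s std::fpclassify; this small standalone program only illustrates the classification, not the Swift stdlib itself:

// Standalone illustration of the IEEE 754 classes the Swift predicates above
// document, using the <cmath> classification functions.
#include <cmath>
#include <cstdio>

static const char* fpClass(double v) {
    switch (std::fpclassify(v)) {
        case FP_NAN:       return "NaN";
        case FP_INFINITE:  return "infinite";
        case FP_ZERO:      return "zero";       // covers +0.0 and -0.0
        case FP_SUBNORMAL: return "subnormal";
        default:           return "normal";     // FP_NORMAL
    }
}

int main() {
    const double vals[] = {0.0, -0.0, 1.5, 5e-324, HUGE_VAL, NAN};
    for (double v : vals)
        std::printf("%g -> %s (isFinite=%d, isSignMinus=%d)\n",
                    v, fpClass(v), (int)std::isfinite(v), (int)std::signbit(v));
}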
mmm a / jstests / slowNightly / index_retry . js <nl> ppp b / jstests / slowNightly / index_retry . js <nl> assert . soon ( <nl> } <nl> return true ; <nl> } , <nl> - ' index builds successfully ' <nl> + ' index builds successfully ' , <nl> + 60000 <nl> ) ; <nl> <nl> print ( " Index built " ) ; <nl> | Make test timeout longer for slow machines | mongodb/mongo | e972491122daadd7e7cfff888de46611d008d11b | 2012-12-27T16:21:46Z |
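assert.soon in the jstests harness polls a condition until it holds or the timeout (raised to 60000 ms here) expires. A rough C++ analogue of such a poll-until-true helper, written from the visible call shape assert.soon(condition, message, timeoutMs):

// Rough analogue of assert.soon(cond, msg, timeoutMs): repeatedly evaluate a
// predicate until it returns true or the deadline passes.
#include <chrono>
#include <functional>
#include <stdexcept>
#include <string>
#include <thread>

void assertSoon(const std::function<bool()>& pred, const std::string& msg,
                std::chrono::milliseconds timeout = std::chrono::milliseconds(60000)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (pred()) return;                                    // condition met
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    throw std::runtime_error("assert.soon timed out: " + msg); // test failure
}

int main() {
    int progress = 0;
    assertSoon([&] { return ++progress >= 5; }, "index builds successfully");
}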
new file mode 100644 <nl> index 0000000000 . . 8b13789179 <nl> mmm / dev / null <nl> ppp b / . vsts - ci . yml <nl> @ @ - 0 , 0 + 1 @ @ <nl> + <nl> | Create . vsts - ci . yml | microsoft/LightGBM | 707ee62ce2a21a7feff55ba78a648fd4c79c65f4 | 2018-06-22T04:16:17Z |
deleted file mode 100755 <nl> index 03f8115354 . . 0000000000 <nl> mmm a / selfdrive / modeld / test / opencl_hooks / build . sh <nl> ppp / dev / null <nl> <nl> - # ! / bin / sh <nl> - gcc - fPIC - I / data / openpilot / phonelibs / opencl / include - shared hook . c <nl> - <nl> deleted file mode 100644 <nl> index f2ee2c0d51 . . 0000000000 <nl> mmm a / selfdrive / modeld / test / opencl_hooks / hook . c <nl> ppp / dev / null <nl> <nl> - # include < stdio . h > <nl> - # include < stdlib . h > <nl> - # include < dlfcn . h > <nl> - # include < CL / cl . h > <nl> - # include < stdint . h > <nl> - # include < time . h > <nl> - <nl> - static inline uint64_t nanos_since_boot ( ) { <nl> - struct timespec t ; <nl> - clock_gettime ( CLOCK_BOOTTIME , & t ) ; <nl> - return t . tv_sec * 1000000000ULL + t . tv_nsec ; <nl> - } <nl> - <nl> - struct kernel { <nl> - cl_kernel k ; <nl> - const char * name ; <nl> - cl_program p ; <nl> - } ; <nl> - <nl> - <nl> - int k_index = 0 ; <nl> - struct kernel kk [ 0x1000 ] = { 0 } ; <nl> - <nl> - FILE * f = NULL ; <nl> - <nl> - cl_program clCreateProgramWithSource ( cl_context context , <nl> - cl_uint count , <nl> - const char * * strings , <nl> - const size_t * lengths , <nl> - cl_int * errcode_ret ) { <nl> - printf ( " clCreateProgramWithSource : % d \ n " , count ) ; <nl> - <nl> - if ( f = = NULL ) { <nl> - f = fopen ( " / tmp / kernels . cl " , " w " ) ; <nl> - } <nl> - <nl> - fprintf ( f , " / * * * * * * * * * * * * * * * * * * * * * * * * * PROGRAM BREAK * * * * * * * * * * * * * * * * * * * * * * * * * * * * / \ n " ) ; <nl> - for ( int i = 0 ; i < count ; i + + ) { <nl> - fprintf ( f , " % s \ n " , strings [ i ] ) ; <nl> - if ( i ! = 0 ) fprintf ( f , " / * * * * * * * * * * * * * * * * * * * * * * * * * SECTION BREAK * * * * * * * * * * * * * * * * * * * * * * * * * * * * / \ n " ) ; <nl> - } <nl> - fflush ( f ) ; <nl> - <nl> - cl_program ( * my_clCreateProgramWithSource ) ( cl_context context , <nl> - cl_uint count , <nl> - const char * * strings , <nl> - const size_t * lengths , <nl> - cl_int * errcode_ret ) = dlsym ( RTLD_NEXT , " REAL_clCreateProgramWithSource " ) ; <nl> - <nl> - return my_clCreateProgramWithSource ( context , count , strings , lengths , errcode_ret ) ; <nl> - } <nl> - <nl> - cl_program clCreateProgramWithBinary ( cl_context context , <nl> - cl_uint num_devices , <nl> - const cl_device_id * device_list , <nl> - const size_t * lengths , <nl> - const unsigned char * * binaries , <nl> - cl_int * binary_status , <nl> - cl_int * errcode_ret ) { <nl> - printf ( " clCreateProgramWithBinary \ n " ) ; <nl> - <nl> - cl_program ( * my_clCreateProgramWithBinary ) ( cl_context context , <nl> - cl_uint num_devices , <nl> - const cl_device_id * device_list , <nl> - const size_t * lengths , <nl> - const unsigned char * * binaries , <nl> - cl_int * binary_status , <nl> - cl_int * errcode_ret ) = dlsym ( RTLD_NEXT , " REAL_clCreateProgramWithBinary " ) ; <nl> - <nl> - return my_clCreateProgramWithBinary ( context , num_devices , device_list , lengths , binaries , binary_status , errcode_ret ) ; <nl> - } <nl> - <nl> - cl_kernel clCreateKernel ( cl_program program , const char * kernel_name , cl_int * errcode_ret ) { <nl> - cl_kernel ( * my_clCreateKernel ) ( cl_program program , const char * kernel_name , cl_int * errcode_ret ) ; <nl> - my_clCreateKernel = dlsym ( RTLD_NEXT , " REAL_clCreateKernel " ) ; <nl> - cl_kernel ret = my_clCreateKernel ( program , kernel_name , errcode_ret ) ; <nl> - / / printf ( " clCreateKernel : % s - > % p \ n " , kernel_name , 
ret ) ; <nl> - <nl> - char * tmp = ( char * ) malloc ( strlen ( kernel_name ) + 1 ) ; <nl> - strcpy ( tmp , kernel_name ) ; <nl> - <nl> - kk [ k_index ] . k = ret ; <nl> - kk [ k_index ] . name = tmp ; <nl> - kk [ k_index ] . p = program ; <nl> - k_index + + ; <nl> - return ret ; <nl> - } <nl> - <nl> - <nl> - uint64_t start_time = 0 ; <nl> - int cnt = 0 ; <nl> - <nl> - cl_int clEnqueueNDRangeKernel ( cl_command_queue command_queue , <nl> - cl_kernel kernel , <nl> - cl_uint work_dim , <nl> - const size_t * global_work_offset , <nl> - const size_t * global_work_size , <nl> - const size_t * local_work_size , <nl> - cl_uint num_events_in_wait_list , <nl> - const cl_event * event_wait_list , <nl> - cl_event * event ) { <nl> - <nl> - cl_int ( * my_clEnqueueNDRangeKernel ) ( cl_command_queue , cl_kernel , cl_uint , <nl> - const size_t * , const size_t * , const size_t * , <nl> - cl_uint , const cl_event * , cl_event * ) = NULL ; <nl> - my_clEnqueueNDRangeKernel = dlsym ( RTLD_NEXT , " REAL_clEnqueueNDRangeKernel " ) ; <nl> - <nl> - if ( start_time = = 0 ) { <nl> - start_time = nanos_since_boot ( ) ; <nl> - } <nl> - <nl> - / / get kernel name <nl> - const char * name = NULL ; <nl> - cl_program p ; <nl> - for ( int i = 0 ; i < k_index ; i + + ) { <nl> - if ( kk [ i ] . k = = kernel ) { <nl> - name = kk [ i ] . name ; <nl> - p = kk [ i ] . p ; <nl> - break ; <nl> - } <nl> - } <nl> - <nl> - uint64_t tb = nanos_since_boot ( ) ; <nl> - cl_int ret = my_clEnqueueNDRangeKernel ( command_queue , kernel , work_dim , <nl> - global_work_offset , global_work_size , local_work_size , <nl> - num_events_in_wait_list , event_wait_list , event ) ; <nl> - uint64_t te = nanos_since_boot ( ) ; <nl> - <nl> - printf ( " % 10lu run % 8d in % 5ld us command_queue : % p work_dim : % d event : % p " , ( tb - start_time ) / 1000 , cnt + + , ( te - tb ) / 1000 , <nl> - command_queue , work_dim , event ) ; <nl> - for ( int i = 0 ; i < work_dim ; i + + ) { <nl> - printf ( " % 4zu " , global_work_size [ i ] ) ; <nl> - } <nl> - printf ( " % p % s \ n " , p , name ) ; <nl> - return ret ; <nl> - } <nl> - <nl> - void * dlsym ( void * handle , const char * symbol ) { <nl> - void * ( * my_dlsym ) ( void * handle , const char * symbol ) = ( void * ) dlopen - 0x2d4 ; <nl> - if ( memcmp ( " REAL_ " , symbol , 5 ) = = 0 ) { <nl> - return my_dlsym ( handle , symbol + 5 ) ; <nl> - } else if ( strcmp ( " clEnqueueNDRangeKernel " , symbol ) = = 0 ) { <nl> - return clEnqueueNDRangeKernel ; <nl> - } else if ( strcmp ( " clCreateKernel " , symbol ) = = 0 ) { <nl> - return clCreateKernel ; <nl> - } else if ( strcmp ( " clCreateProgramWithSource " , symbol ) = = 0 ) { <nl> - return clCreateProgramWithSource ; <nl> - } else if ( strcmp ( " clCreateProgramWithBinary " , symbol ) = = 0 ) { <nl> - return clCreateProgramWithBinary ; <nl> - } else { <nl> - printf ( " dlsym % s \ n " , symbol ) ; <nl> - return my_dlsym ( handle , symbol ) ; <nl> - } <nl> - } <nl> - <nl> mmm a / selfdrive / modeld / thneed / thneed . cc <nl> ppp b / selfdrive / modeld / thneed / thneed . 
cc <nl> void Thneed : : execute ( float * * finputs , float * foutput , bool slow ) { <nl> } <nl> } <nl> <nl> + / / TODO : with a different way of getting the input and output buffers , we don ' t have to intercept CL at all <nl> + <nl> cl_int ( * my_clSetKernelArg ) ( cl_kernel kernel , cl_uint arg_index , size_t arg_size , const void * arg_value ) = NULL ; <nl> - cl_int clSetKernelArg ( cl_kernel kernel , cl_uint arg_index , size_t arg_size , const void * arg_value ) { <nl> + cl_int thneed_clSetKernelArg ( cl_kernel kernel , cl_uint arg_index , size_t arg_size , const void * arg_value ) { <nl> if ( my_clSetKernelArg = = NULL ) my_clSetKernelArg = reinterpret_cast < decltype ( my_clSetKernelArg ) > ( dlsym ( RTLD_NEXT , " REAL_clSetKernelArg " ) ) ; <nl> if ( arg_value ! = NULL ) { <nl> g_args [ std : : make_pair ( kernel , arg_index ) ] = std : : string ( ( char * ) arg_value , arg_size ) ; <nl> cl_int clSetKernelArg ( cl_kernel kernel , cl_uint arg_index , size_t arg_size , cons <nl> } <nl> <nl> cl_int ( * my_clEnqueueNDRangeKernel ) ( cl_command_queue , cl_kernel , cl_uint , const size_t * , const size_t * , const size_t * , cl_uint , const cl_event * , cl_event * ) = NULL ; <nl> - cl_int clEnqueueNDRangeKernel ( cl_command_queue command_queue , <nl> + cl_int thneed_clEnqueueNDRangeKernel ( cl_command_queue command_queue , <nl> cl_kernel kernel , <nl> cl_uint work_dim , <nl> const size_t * global_work_offset , <nl> cl_int clEnqueueNDRangeKernel ( cl_command_queue command_queue , <nl> / / # define SAVE_KERNELS <nl> <nl> # ifdef SAVE_KERNELS <nl> - std : : map < cl_program , std : : string > program_source ; <nl> - # endif <nl> + std : : map < cl_program , std : : string > program_source ; <nl> <nl> cl_program ( * my_clCreateProgramWithSource ) ( cl_context context , cl_uint count , const char * * strings , const size_t * lengths , cl_int * errcode_ret ) = NULL ; <nl> - cl_program clCreateProgramWithSource ( cl_context context , cl_uint count , const char * * strings , const size_t * lengths , cl_int * errcode_ret ) { <nl> + cl_program thneed_clCreateProgramWithSource ( cl_context context , cl_uint count , const char * * strings , const size_t * lengths , cl_int * errcode_ret ) { <nl> if ( my_clCreateProgramWithSource = = NULL ) my_clCreateProgramWithSource = reinterpret_cast < decltype ( my_clCreateProgramWithSource ) > ( dlsym ( RTLD_NEXT , " REAL_clCreateProgramWithSource " ) ) ; <nl> assert ( count = = 1 ) ; <nl> size_t my_lengths [ 1 ] ; <nl> my_lengths [ 0 ] = lengths [ 0 ] ; <nl> <nl> - # ifdef SAVE_KERNELS <nl> char fn [ 0x100 ] ; <nl> snprintf ( fn , sizeof ( fn ) , " / tmp / program_ % zu . 
cl " , strlen ( strings [ 0 ] ) ) ; <nl> FILE * f = fopen ( fn , " wb " ) ; <nl> cl_program clCreateProgramWithSource ( cl_context context , cl_uint count , const ch <nl> } <nl> <nl> program_source [ ret ] = strings [ 0 ] ; <nl> - # endif <nl> <nl> cl_program ret = my_clCreateProgramWithSource ( context , count , strings , my_lengths , errcode_ret ) ; <nl> return ret ; <nl> } <nl> + # endif <nl> <nl> void * dlsym ( void * handle , const char * symbol ) { <nl> void * ( * my_dlsym ) ( void * handle , const char * symbol ) = ( void * ( * ) ( void * handle , const char * symbol ) ) ( ( uintptr_t ) dlopen - 0x2d4 ) ; <nl> if ( memcmp ( " REAL_ " , symbol , 5 ) = = 0 ) { <nl> return my_dlsym ( handle , symbol + 5 ) ; <nl> } else if ( strcmp ( " clEnqueueNDRangeKernel " , symbol ) = = 0 ) { <nl> - return ( void * ) clEnqueueNDRangeKernel ; <nl> + return ( void * ) thneed_clEnqueueNDRangeKernel ; <nl> } else if ( strcmp ( " clSetKernelArg " , symbol ) = = 0 ) { <nl> - return ( void * ) clSetKernelArg ; <nl> + return ( void * ) thneed_clSetKernelArg ; <nl> + # ifdef SAVE_KERNELS <nl> } else if ( strcmp ( " clCreateProgramWithSource " , symbol ) = = 0 ) { <nl> - return ( void * ) clCreateProgramWithSource ; <nl> + return ( void * ) thneed_clCreateProgramWithSource ; <nl> + # endif <nl> } else { <nl> return my_dlsym ( handle , symbol ) ; <nl> } <nl> | remove the clCreateProgramWithSource interceptor ( ) | commaai/openpilot | b8571710e09e58b9e67170a9924eef79f32c480b | 2020-05-24T10:33:36Z |
mmm a / etc / roots . pem <nl> ppp b / etc / roots . pem <nl> lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy / XZxMOIQIwBeF1Ad5o7Sof <nl> TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR <nl> mmm - - END CERTIFICATEmmm - - <nl> <nl> - # Issuer : CN = Certplus Root CA G1 O = Certplus <nl> - # Subject : CN = Certplus Root CA G1 O = Certplus <nl> - # Label : " Certplus Root CA G1 " <nl> - # Serial : 1491911565779898356709731176965615564637713 <nl> - # MD5 Fingerprint : 7f : 09 : 9c : f7 : d9 : b9 : 5c : 69 : 69 : 56 : d5 : 37 : 3e : 14 : 0d : 42 <nl> - # SHA1 Fingerprint : 22 : fd : d0 : b7 : fd : a2 : 4e : 0d : ac : 49 : 2c : a0 : ac : a6 : 7b : 6a : 1f : e3 : f7 : 66 <nl> - # SHA256 Fingerprint : 15 : 2a : 40 : 2b : fc : df : 2c : d5 : 48 : 05 : 4d : 22 : 75 : b3 : 9c : 7f : ca : 3e : c0 : 97 : 80 : 78 : b0 : f0 : ea : 76 : e5 : 61 : a6 : c7 : 43 : 3e <nl> mmmmmmBEGIN CERTIFICATEmmm - - <nl> - MIIFazCCA1OgAwIBAgISESBVg + QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA <nl> - MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy <nl> - dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa <nl> - MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy <nl> - dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB <nl> - ANpQh7bauKk + nWT6VjOaVj0W5QOVsjQcmm1iBdTYj + eJZJ + 622SLZOZ5KmHNr49a <nl> - iZFluVj8tANfkT8tEBXgfs + 8 / H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt <nl> - 6kuJPKNxQv4c / dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1 / oA / caP <nl> - 0FG7Yn2ksYyy / yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi / jQu3rfKG3akt62f <nl> - 6ajUeD94 / vI4CTYd0hYCyOwqaK / 1jpTvLRN6HkJKHRUxrgwEV / xhc / MxVoYxgKDE <nl> - EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v + WZxcIbekN <nl> - 1iNQMLAVdBM + 5S / / Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc <nl> - h2c0798wct3zyT8j / zXhviEpIDCB5BmlIOklynMxdCm + 4kLV87ImZsdo / Rmz5yCT <nl> - mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z / fyL8inqh3SV <nl> - 4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU + V / YDI + HLlJWvEYLF7bY5KinPO <nl> - WftwenMGE9nTdDckQQoRb5fc5 + R + ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud <nl> - DwEB / wQEAwIBBjAPBgNVHRMBAf8EBTADAQH / MB0GA1UdDgQWBBSowcCbkahDFXxd <nl> - Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq <nl> - hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh / k4DgYzDLDKTudQSk0YcbX8ACh <nl> - 66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT + Hern + X + 2B50ioUHj3 / MeXrKls3N / U / 7 <nl> - / SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE + 2J2winq14J2by5BS <nl> - S7CTKtQ + FjPlnsZlFT5kOwQ / 2wyPX1wdaR + v8 + khjPPvl / aatxm2hHSco1S1cE5j <nl> - 2FddUyGbQJJD + tZ3VTNPZNX70Cxqjm0lpu + F6ALEUz65noe8zDUa3qHpimOHZR4R <nl> - Kttjd5cUvpoUmRGywO6wT / gUITJDT5 + rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr <nl> - RHpejXDbl85IULFzk / bwg2D5zfHhMf1bfHEhYxQUqq / F3pN + aLHsIqKqkHWetUNy <nl> - 6mSjhEv9DKgma3GX7lZjZuhCVPnHHd / Qj1vfyDBviP4NxDMcU6ij / UgQ8uQKTuEV <nl> - V / xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz + e86i9sgktxChL8Bq4fA1SCC28a5 <nl> - g4VCXA9DO2pJNdWY9BW / + mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl <nl> - + + O / QmueD6i9a5jc2NvLi6Td11n0bt3 + qsOR0C5CB8AMTVPNJLFMWx5R9N / pkvo = <nl> mmmmmmEND CERTIFICATEmmm - - <nl> - <nl> - # Issuer : CN = Certplus Root CA G2 O = Certplus <nl> - # Subject : CN = Certplus Root CA G2 O = Certplus <nl> - # Label : " Certplus Root CA G2 " <nl> - # Serial : 1492087096131536844209563509228951875861589 <nl> - # MD5 Fingerprint : a7 : ee : c4 : 78 : 2d : 1b : ee : 2d : b9 : 29 : ce : d6 : a7 : 96 : 32 : 31 <nl> - # SHA1 Fingerprint : 4f : 65 : 8e : 1f : e9 : 06 : d8 : 28 : 02 : e9 : 54 : 47 
: 41 : c9 : 54 : 25 : 5d : 69 : cc : 1a <nl> - # SHA256 Fingerprint : 6c : c0 : 50 : 41 : e6 : 44 : 5e : 74 : 69 : 6c : 4c : fb : c9 : f8 : 0f : 54 : 3b : 7e : ab : bb : 44 : b4 : ce : 6f : 78 : 7c : 6a : 99 : 71 : c4 : 2f : 17 <nl> mmmmmmBEGIN CERTIFICATEmmm - - <nl> - MIICHDCCAaKgAwIBAgISESDZkc6uo + jF5 / / pAq / Pc7xVMAoGCCqGSM49BAMDMD4x <nl> - CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs <nl> - dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x <nl> - CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs <nl> - dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3 / BFGtat <nl> - 93nwHcmsltaeTpwftEIRyoa / bfuFo8XlGVzX7qY / aWfYeOKmycTbLXku54uNAm8x <nl> - Ik0G42ByRZ0OQneezs / lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P <nl> - AQH / BAQDAgEGMA8GA1UdEwEB / wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj <nl> - FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG <nl> - SM49BAMDA2gAMGUCMHD + sAvZ94OX7PNVHdTcswYO / jOYnYs5kGuUIe22113WTNch <nl> - p + e / IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal <nl> - U5ORGpOucGpnutee5WEaXw = = <nl> mmmmmmEND CERTIFICATEmmm - - <nl> - <nl> - # Issuer : CN = OpenTrust Root CA G1 O = OpenTrust <nl> - # Subject : CN = OpenTrust Root CA G1 O = OpenTrust <nl> - # Label : " OpenTrust Root CA G1 " <nl> - # Serial : 1492036577811947013770400127034825178844775 <nl> - # MD5 Fingerprint : 76 : 00 : cc : 81 : 29 : cd : 55 : 5e : 88 : 6a : 7a : 2e : f7 : 4d : 39 : da <nl> - # SHA1 Fingerprint : 79 : 91 : e8 : 34 : f7 : e2 : ee : dd : 08 : 95 : 01 : 52 : e9 : 55 : 2d : 14 : e9 : 58 : d5 : 7e <nl> - # SHA256 Fingerprint : 56 : c7 : 71 : 28 : d9 : 8c : 18 : d9 : 1b : 4c : fd : ff : bc : 25 : ee : 91 : 03 : d4 : 75 : 8e : a2 : ab : ad : 82 : 6a : 90 : f3 : 45 : 7d : 46 : 0e : b4 <nl> mmmmmmBEGIN CERTIFICATEmmm - - <nl> - MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA <nl> - MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w <nl> - ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw <nl> - MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU <nl> - T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK <nl> - AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b <nl> - wiTXj / HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX <nl> - / uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij / swjm4eTrwSSTilZHcYTSSjFR0 <nl> - 77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP <nl> - uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx <nl> - p2NX5Ntqp66 / K7nJ5rInieV + mhxNaMbBGN4zK1FGSxyO9z0M + Yo0FMT7MzUj8czx <nl> - Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ / GbI4Kq3ywgsNw2 <nl> - TgOzfALU5nsaqocTvz6hdLubDuHAk5 / XpGbKuxs74zD0M1mKB3IDVedzagMxbm + W <nl> - G + Oin6 + Sx + 31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw <nl> - vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY <nl> - EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO <nl> - BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH / BAUwAwEB / zAdBgNVHQ4EFgQUl0YhVyE1 <nl> - 2jZVx / PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx / PxN3DlCPaTKbYw <nl> - DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E <nl> - PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr / jyTwyqkxf3kf <nl> - gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN + VxbS <nl> - FXJfLkur1J1juONI5f6ELlgKn0Md / rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv / TW0 <nl> - V8 / bhUiZucJvbI / NeJWsZCj9VrDDb8O + WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P <nl> - 
XlZs5VVZV6Xf8YpmMIzUUmI4d7S + KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g / 4 / I <nl> - i + GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t <nl> - TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 <nl> - 09S5zvE / bw4cHjdx5RiHdRk / ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS / Ky <nl> - Pu1svf0OnWZzsD2097 + o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ <nl> - AwSQiumPv + i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn / + KuYj <nl> - 1oxx <nl> mmmmmmEND CERTIFICATEmmm - - <nl> - <nl> - # Issuer : CN = OpenTrust Root CA G2 O = OpenTrust <nl> - # Subject : CN = OpenTrust Root CA G2 O = OpenTrust <nl> - # Label : " OpenTrust Root CA G2 " <nl> - # Serial : 1492012448042702096986875987676935573415441 <nl> - # MD5 Fingerprint : 57 : 24 : b6 : 59 : 24 : 6b : ae : c8 : fe : 1c : 0c : 20 : f2 : c0 : 4e : eb <nl> - # SHA1 Fingerprint : 79 : 5f : 88 : 60 : c5 : ab : 7c : 3d : 92 : e6 : cb : f4 : 8d : e1 : 45 : cd : 11 : ef : 60 : 0b <nl> - # SHA256 Fingerprint : 27 : 99 : 58 : 29 : fe : 6a : 75 : 15 : c1 : bf : e8 : 48 : f9 : c4 : 76 : 1d : b1 : 6c : 22 : 59 : 29 : 25 : 7b : f4 : 0d : 08 : 94 : f2 : 9e : a8 : ba : f2 <nl> mmmmmmBEGIN CERTIFICATEmmm - - <nl> - MIIFbzCCA1egAwIBAgISESChaRu / vbm9UpaPI + hIvyYRMA0GCSqGSIb3DQEBDQUA <nl> - MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w <nl> - ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw <nl> - MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU <nl> - T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK <nl> - AoICAQDMtlelM5QQgTJT32F + D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG + Ntmh <nl> - / LzubKh8NBpxGuga2F8ORAbtp + Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e <nl> - CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP + yatcfD7J6xcvDH1urqWPyKwlCm / 6 <nl> - 1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v / v6wNj0OxmXsWEH4db0fE <nl> - FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9 / ca1TS <nl> - gSuyzpJLHB9G + h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH / 1PCZ1Eb3X <nl> - G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW + vj3CzMpSZy <nl> - YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L + epJUzpM5ChaH <nl> - vGOz9bGTXOBut9Dq + WIyiET7vycotjCVXRIouZW + j1MY5aIYFuJWpLIsEPUdN6b4 <nl> - t / bQWVyJ98LVtZR00dX + G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA + aC / <nl> - gh7PU3 + 06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO <nl> - BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH / BAUwAwEB / zAdBgNVHQ4EFgQUajn6QiL3 <nl> - 5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w <nl> - DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx + 4BkJamz <nl> - Gj5oXScmp7oq4fBXgwpkTx4idBvpkF / wrM / / T2h6OKQQbA2xx6R3gBi2oihEdqc0 <nl> - nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO + qT <nl> - RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ / n9iyJVvttN7jLpT <nl> - wm + bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G / Tvw / HRwkqWOOAgfZDC2 <nl> - t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa <nl> - TkZQh + D5wVOAHrut + 0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 <nl> - o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY / X0VdiLWK2gKgW0VU <nl> - 3jg9CcCoSmVGFvyqv1ROTVu + OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA <nl> - iN1nE28daCSLT7d0geX0YJ96Vdc + N9oWaz53rK4YcJUIeSkDiv7BO7M / Gg + kO14f <nl> - WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI / n + UL3PIEM <nl> - S1IK <nl> mmmmmmEND CERTIFICATEmmm - - <nl> - <nl> - # Issuer : CN = OpenTrust Root CA G3 O = OpenTrust <nl> - # Subject : CN = OpenTrust Root CA G3 O = OpenTrust <nl> - # Label : " OpenTrust Root CA G3 " 
<nl> - # Serial : 1492104908271485653071219941864171170455615 <nl> - # MD5 Fingerprint : 21 : 37 : b4 : 17 : 16 : 92 : 7b : 67 : 46 : 70 : a9 : 96 : d7 : a8 : 13 : 24 <nl> - # SHA1 Fingerprint : 6e : 26 : 64 : f3 : 56 : bf : 34 : 55 : bf : d1 : 93 : 3f : 7c : 01 : de : d8 : 13 : da : 8a : a6 <nl> - # SHA256 Fingerprint : b7 : c3 : 62 : 31 : 70 : 6e : 81 : 07 : 8c : 36 : 7c : b8 : 96 : 19 : 8f : 1e : 32 : 08 : dd : 92 : 69 : 49 : dd : 8f : 57 : 09 : a4 : 10 : f7 : 5b : 62 : 92 <nl> mmmmmmBEGIN CERTIFICATEmmm - - <nl> - MIICITCCAaagAwIBAgISESDm + Ez8JLC + BUCs2oMbNGA / MAoGCCqGSM49BAMDMEAx <nl> - CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U <nl> - cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow <nl> - QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl <nl> - blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm <nl> - 3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d <nl> - oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G <nl> - A1UdDwEB / wQEAwIBBjAPBgNVHRMBAf8EBTADAQH / MB0GA1UdDgQWBBRHd8MUi2I5 <nl> - DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK <nl> - BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7 + BFjNAk2z8 + e2AcG + q <nl> - j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74 + nBCZx <nl> - 4nxp5V2a + EEfOzmTk51V6s2N8fvB <nl> mmmmmmEND CERTIFICATEmmm - - <nl> - <nl> # Issuer : CN = ISRG Root X1 O = Internet Security Research Group <nl> # Subject : CN = ISRG Root X1 O = Internet Security Research Group <nl> # Label : " ISRG Root X1 " <nl> MA8GA1UdEwEB / wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX <nl> ytRrJPOwPYdGWBrssd9v + 1a6cGvHOMzosYxPD / fxZ3YOg9AeUY8CMD32IygmTMZg <nl> h5Mmm7I1HrrW9zzRHM76JTymGoEVW / MSD2zuZYrJh6j5B + BimoxcSg = = <nl> mmm - - END CERTIFICATEmmm - - <nl> + <nl> + # Issuer : CN = GlobalSign O = GlobalSign OU = GlobalSign Root CA - R6 <nl> + # Subject : CN = GlobalSign O = GlobalSign OU = GlobalSign Root CA - R6 <nl> + # Label : " GlobalSign Root CA - R6 " <nl> + # Serial : 1417766617973444989252670301619537 <nl> + # MD5 Fingerprint : 4f : dd : 07 : e4 : d4 : 22 : 64 : 39 : 1e : 0c : 37 : 42 : ea : d1 : c6 : ae <nl> + # SHA1 Fingerprint : 80 : 94 : 64 : 0e : b5 : a7 : a1 : ca : 11 : 9c : 1f : dd : d5 : 9f : 81 : 02 : 63 : a7 : fb : d1 <nl> + # SHA256 Fingerprint : 2c : ab : ea : fe : 37 : d0 : 6c : a2 : 2a : ba : 73 : 91 : c0 : 03 : 3d : 25 : 98 : 29 : 52 : c4 : 53 : 64 : 73 : 49 : 76 : 3a : 3a : b5 : ad : 6c : cf : 69 <nl> + mmm - - BEGIN CERTIFICATEmmm - - <nl> + MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb / RVEwDQYJKoZIhvcNAQEMBQAwTDEg <nl> + MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh <nl> + bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx <nl> + MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET <nl> + MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ <nl> + KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI <nl> + xutbPK6DuEGSMxSkb3 / pKszGsIhrxbaJ0cay / xTOURQh7ErdG1rG1ofuTToVBu1k <nl> + ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD <nl> + aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL + + gmNQ0PAYid / kD3n16qIfKtJw <nl> + LnvnvJO7bVPiSHyMEAc4 / 2ayd2F + 4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw <nl> + 1CM69106yqLbnQneXUQtkPGBzVeS + n68UARjNN9rkxi + azayOeSsJDa38O + 2HBNX <nl> + k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 <nl> + SXcwvHE35absIQh1 / OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC + n + 7o / h <nl> + bguyCLNhZglqsQY6ZZZZwPA1 / 
cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n <nl> + WUx2OVvq + aWh2IMP0f / fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY <nl> + rZxCRXluDocZXFSxZba / jJvcE + kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce <nl> + MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB / wQEAwIBBjAPBgNVHRMBAf8EBTAD <nl> + AQH / MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx / B / yGdToDAfBgNVHSMEGDAWgBSu <nl> + bAWjkxPioufi1xzWx / B / yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN <nl> + nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc + ZfwFSY1XS + wc3iEZGt <nl> + Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU + sQghoefEQzd5Mr61 <nl> + 55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU / yDXNOd8v9EDERm8tLj <nl> + vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn + Xds + qkxV / ZoVqW / hpvvf <nl> + cDDpw + 5CRu3CkwWJ + n1jez / QcYF8AOiYrg54NMMl + 68KnyBr3TsTjxKM4kEaSHpz <nl> + oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t + uA / iU3 / gKbaKxCXcPu9czc8FB10jZp <nl> + nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI + z1TIvWfs <nl> + pA9MRf / TuTAjB0yPEL + GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x + v <nl> + JJUEeKgDu + 6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R <nl> + 8k8HWV + LLUNS60YMlOH1Zkd5d9VUWx + tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 <nl> + 5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA = <nl> + mmm - - END CERTIFICATEmmm - - <nl> | update roots . pem | grpc/grpc | a49a2a824d2bb0948dd3723d82a0c456d8a5b3e2 | 2018-08-08T16:17:44Z |
mmm a / java / core / src / test / proto / com / google / protobuf / map_lite_test . proto <nl> ppp b / java / core / src / test / proto / com / google / protobuf / map_lite_test . proto <nl> <nl> <nl> syntax = " proto3 " ; <nl> <nl> - package map_test ; <nl> + package map_lite_test ; <nl> <nl> - option java_package = " map_test " ; <nl> + option optimize_for = LITE_RUNTIME ; <nl> + option java_package = " map_lite_test " ; <nl> option java_outer_classname = " MapTestProto " ; <nl> <nl> message TestMap { <nl> | Fixed Java tests | protocolbuffers/protobuf | c81b4202d1ea7054d9722c93271bbccc7df5df01 | 2018-11-09T19:40:27Z |
mmm a / tensorflow / python / keras / optimizer_v2 / optimizer_v2 . py <nl> ppp b / tensorflow / python / keras / optimizer_v2 / optimizer_v2 . py <nl> def _prepare ( self , var_list ) : <nl> def _create_hypers ( self ) : <nl> if self . _hypers_created : <nl> return <nl> - for name , value in self . _hyper . items ( ) : <nl> + # Iterate hyper values deterministically . <nl> + for name , value in sorted ( self . _hyper . items ( ) ) : <nl> if isinstance ( value , ops . Tensor ) or callable ( value ) : <nl> continue <nl> else : <nl> | Make hyper variable creation deterministic across threads . | tensorflow/tensorflow | 9bca3674908cca0eafe72cbc5a560f09dd7ab445 | 2019-03-06T02:42:05Z |
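The one-line fix above makes variable creation order reproducible by iterating the hyperparameter dict in sorted order. The same idea in C++, where std::unordered_map likewise gives no stable iteration order:

// Iteration order of an unordered_map is unspecified, so materialize and
// sort the keys first when creation order must be reproducible across runs.
#include <algorithm>
#include <cstdio>
#include <string>
#include <unordered_map>
#include <vector>

int main() {
    std::unordered_map<std::string, double> hyper =
        {{"learning_rate", 0.01}, {"beta_1", 0.9}, {"beta_2", 0.999}};

    std::vector<std::string> names;
    names.reserve(hyper.size());
    for (const auto& kv : hyper) names.push_back(kv.first);
    std::sort(names.begin(), names.end());      // deterministic order

    for (const auto& name : names)              // create variables in fixed order
        std::printf("create %s = %g\n", name.c_str(), hyper[name]);
}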
mmm a / src / app / file / gif_format . cpp <nl> ppp b / src / app / file / gif_format . cpp <nl> class GifDecoder { <nl> , m_remap ( 256 ) <nl> , m_hasLocalColormaps ( false ) <nl> , m_firstLocalColormap ( nullptr ) { <nl> - / / LOG ( " GIF background index = % d \ n " , ( int ) m_gifFile - > SBackGroundColor ) ; <nl> + DLOG ( " [ GifDecoder ] GIF background index = % d \ n " , ( int ) m_gifFile - > SBackGroundColor ) ; <nl> } <nl> <nl> ~ GifDecoder ( ) { <nl> class GifDecoder { <nl> UniquePtr < Image > frameImage ( <nl> readFrameIndexedImage ( frameBounds ) ) ; <nl> <nl> - / / LOG ( " Frame [ % d ] transparent index = % d \ n " , ( int ) m_frameNum , m_localTransparentIndex ) ; <nl> + DLOG ( " [ GifDecoder ] Frame [ % d ] transparent index = % d \ n " , ( int ) m_frameNum , m_localTransparentIndex ) ; <nl> <nl> if ( m_frameNum = = 0 ) { <nl> if ( m_localTransparentIndex > = 0 ) <nl> class GifDecoder { <nl> m_localTransparentIndex = ( extension [ 1 ] & 1 ) ? extension [ 4 ] : - 1 ; <nl> m_frameDelay = ( extension [ 3 ] < < 8 ) | extension [ 2 ] ; <nl> <nl> - / / LOG ( " Disposal method : % d \ nTransparent index : % d \ nFrame delay : % d \ n " , <nl> - / / m_disposalMethod , m_localTransparentIndex , m_frameDelay ) ; <nl> + DLOG ( " [ GifDecoder ] Disposal method : % d \ n Transparent index : % d \ n Frame delay : % d \ n " , <nl> + m_disposalMethod , m_localTransparentIndex , m_frameDelay ) ; <nl> } <nl> } <nl> <nl> class GifEncoder { <nl> } <nl> } <nl> <nl> - / / LOG ( " frameBounds = % d % d % d % d prev = % d % d % d % d next = % d % d % d % d \ n " , <nl> - / / frameBounds . x , frameBounds . y , frameBounds . w , frameBounds . h , <nl> - / / prev . x , prev . y , prev . w , prev . h , <nl> - / / next . x , next . y , next . w , next . h ) ; <nl> + DLOG ( " [ GifEncoder ] frameBounds = % d % d % d % d prev = % d % d % d % d next = % d % d % d % d \ n " , <nl> + frameBounds . x , frameBounds . y , frameBounds . w , frameBounds . h , <nl> + prev . x , prev . y , prev . w , prev . h , <nl> + next . x , next . y , next . w , next . h ) ; <nl> } <nl> } <nl> <nl> | Enable some debugging messages in gif decoder / encoder | aseprite/aseprite | bab13c473974bb9a432b09be478527768927c488 | 2015-09-22T14:42:50Z |
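The commented-out LOG calls become DLOG calls above; the macro itself is defined elsewhere in the Aseprite tree. A generic debug-only logging macro of the kind DLOG stands for might look like this (an assumed shape, not Aseprite's actual definition):

// Debug-only logging: in release builds (NDEBUG) the macro compiles away
// entirely, so the calls above cost nothing outside debug builds.
#include <cstdio>

#ifdef NDEBUG
#  define DLOG(...) ((void)0)
#else
#  define DLOG(...) std::fprintf(stderr, __VA_ARGS__)
#endif

int main() {
    int frame = 0, transparentIndex = -1;
    DLOG("[GifDecoder] Frame [%d] transparent index = %d\n",
         frame, transparentIndex);
    (void)frame; (void)transparentIndex;  // silence unused warnings in NDEBUG
}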
mmm a / xbmc / GUIPassword . cpp <nl> ppp b / xbmc / GUIPassword . cpp <nl> bool CGUIPassword : : IsItemUnlocked ( CFileItem * pItem , const CStdString & strType ) <nl> pItem - > m_iBadPwdCount = 0 ; <nl> pItem - > m_iHasLock = 1 ; <nl> g_passwordManager . LockSource ( strType , strLabel , false ) ; <nl> - g_settings . UpdateSource ( strType , strLabel , " badpwdcount " , itoa ( pItem - > m_iBadPwdCount , buffer , 10 ) ) ; <nl> + sprintf ( buffer , " % i " , pItem - > m_iBadPwdCount ) ; <nl> + g_settings . UpdateSource ( strType , strLabel , " badpwdcount " , buffer ) ; <nl> g_settings . SaveSources ( ) ; <nl> break ; <nl> } <nl> bool CGUIPassword : : IsItemUnlocked ( CFileItem * pItem , const CStdString & strType ) <nl> / / password entry failed <nl> if ( 0 ! = g_guiSettings . GetInt ( " masterlock . maxretries " ) ) <nl> pItem - > m_iBadPwdCount + + ; <nl> - g_settings . UpdateSource ( strType , strLabel , " badpwdcount " , itoa ( pItem - > m_iBadPwdCount , buffer , 10 ) ) ; <nl> + sprintf ( buffer , " % i " , pItem - > m_iBadPwdCount ) ; <nl> + g_settings . UpdateSource ( strType , strLabel , " badpwdcount " , buffer ) ; <nl> g_settings . SaveSources ( ) ; <nl> break ; <nl> } <nl> mmm a / xbmc / cores / DllLoader / exports / emu_kernel32 . cpp <nl> ppp b / xbmc / cores / DllLoader / exports / emu_kernel32 . cpp <nl> extern " C " int WINAPI dllMultiByteToWideChar ( UINT CodePage , DWORD dwFlags , LPCST <nl> destinationBuffer = ( LPWSTR ) malloc ( destinationBufferSize * sizeof ( WCHAR ) ) ; <nl> } <nl> <nl> + # ifdef _WIN32 <nl> int ret = MultiByteToWideChar ( CodePage , dwFlags , lpMultiByteStr , cbMultiByte , destinationBuffer , destinationBufferSize ) ; <nl> + # else <nl> + int ret = 0 ; <nl> + # endif <nl> <nl> if ( ret > 0 ) <nl> { <nl> extern " C " int WINAPI dllWideCharToMultiByte ( UINT CodePage , DWORD dwFlags , LPCWS <nl> destinationBuffer = ( LPSTR ) malloc ( destinationBufferSize * sizeof ( char ) ) ; <nl> } <nl> <nl> + # ifdef _WIN32 <nl> int ret = WideCharToMultiByte ( CodePage , dwFlags , lpWideCharStr , cchWideChar , destinationBuffer , destinationBufferSize , lpDefaultChar , lpUsedDefaultChar ) ; <nl> + # else <nl> + int ret = 0 ; <nl> + # endif <nl> <nl> if ( ret > 0 ) <nl> { <nl> mmm a / xbmc / cores / DllLoader / exports / emu_msvcrt . cpp <nl> ppp b / xbmc / cores / DllLoader / exports / emu_msvcrt . cpp <nl> extern " C " <nl> <nl> memcpy ( var , envstring , value_start - envstring ) ; <nl> var [ value_start - envstring ] = 0 ; <nl> - strupr ( var ) ; <nl> + char * temp = var ; <nl> + while ( * temp ) <nl> + { <nl> + * temp = ( char ) toupper ( * temp ) ; <nl> + temp + + ; <nl> + } <nl> <nl> strncpy ( value , value_start + 1 , size ) ; <nl> if ( size ) <nl> mmm a / xbmc / filesystem / FileRTV . cpp <nl> ppp b / xbmc / filesystem / FileRTV . cpp <nl> bool CFileRTV : : Open ( const char * strHostName , const char * strFileName , int iport ) <nl> if ( iport ) <nl> { <nl> char buffer [ 10 ] ; <nl> + sprintf ( buffer , " % i " , iport ) ; <nl> strHostAndPort + = ' : ' ; <nl> - strHostAndPort + = itoa ( iport , buffer , 10 ) ; <nl> + strHostAndPort + = buffer ; <nl> } <nl> <nl> / / Get the file size of strFileName . If size is 0 or negative , file doesn ' t exist so exit . <nl> mmm a / xbmc / filesystem / RTVDirectory . cpp <nl> ppp b / xbmc / filesystem / RTVDirectory . cpp <nl> bool CRTVDirectory : : GetDirectory ( const CStdString & strPath , CFileItemList & items <nl> if ( url . HasPort ( ) ) <nl> { <nl> char buffer [ 10 ] ; <nl> + sprintf ( buffer , " % i " , url . 
GetPort ( ) ) ; <nl> strHostAndPort + = ' : ' ; <nl> - strHostAndPort + = itoa ( url . GetPort ( ) , buffer , 10 ) ; <nl> + strHostAndPort + = buffer ; <nl> } <nl> <nl> / / No path given , list shows from ReplayGuide <nl> mmm a / xbmc / interfaces / http - api / XBMCConfiguration . cpp <nl> ppp b / xbmc / interfaces / http - api / XBMCConfiguration . cpp <nl> int CXbmcConfiguration : : BookmarkSize ( int eid , webs_t wp , CStdString & response , <nl> if ( pShares ) <nl> { <nl> char buffer [ 10 ] ; <nl> + sprintf ( buffer , " % i " , pShares - > size ( ) ) ; <nl> <nl> - if ( eid ! = - 1 ) <nl> - ejSetResult ( eid , itoa ( pShares - > size ( ) , buffer , 10 ) ) ; <nl> + if ( eid ! = - 1 ) <nl> + ejSetResult ( eid , buffer ) ; <nl> else <nl> { <nl> CStdString tmp ; <nl> - tmp . Format ( " % s " , itoa ( pShares - > size ( ) , buffer , 10 ) ) ; <nl> + tmp . Format ( " % i " , pShares - > size ( ) ) ; <nl> response = " " + tmp ; <nl> } <nl> <nl> mmm a / xbmc / linux / ConvUtils . cpp <nl> ppp b / xbmc / linux / ConvUtils . cpp <nl> <nl> # include < ctype . h > <nl> # include < errno . h > <nl> <nl> - <nl> - / * <nl> - * * The following two functions together make up an itoa ( ) <nl> - * * implementation . Function i2a ( ) is a ' private ' function <nl> - * * called by the public itoa ( ) function . <nl> - * * <nl> - * * itoa ( ) takes three arguments : <nl> - * * 1 ) the integer to be converted , <nl> - * * 2 ) a pointer to a character conversion buffer , <nl> - * * 3 ) the radix for the conversion <nl> - * * which can range between 2 and 36 inclusive <nl> - * * range errors on the radix default it to base10 <nl> - * / <nl> - <nl> - static char * i2a ( unsigned i , char * a , unsigned r ) <nl> - { <nl> - if ( i / r > 0 ) a = i2a ( i / r , a , r ) ; <nl> - * a = " 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ " [ i % r ] ; <nl> - return a + 1 ; <nl> - } <nl> - <nl> - char * itoa ( int i , char * a , int r ) <nl> - { <nl> - if ( ( r < 2 ) | | ( r > 36 ) ) r = 10 ; <nl> - if ( i < 0 ) { <nl> - * a = ' - ' ; <nl> - * i2a ( - ( unsigned ) i , a + 1 , r ) = 0 ; <nl> - } else * i2a ( i , a , r ) = 0 ; <nl> - return a ; <nl> - } <nl> - <nl> void OutputDebugString ( LPCTSTR lpOuputString ) <nl> { <nl> } <nl> <nl> - void strlwr ( char * string ) <nl> - { <nl> - while ( * string ) <nl> - { <nl> - * string = ( char ) tolower ( * string ) ; <nl> - string + + ; <nl> - } <nl> - } <nl> - <nl> - void strupr ( char * string ) <nl> - { <nl> - while ( * string ) <nl> - { <nl> - * string = ( char ) toupper ( * string ) ; <nl> - string + + ; <nl> - } <nl> - } <nl> - <nl> LONGLONG Int32x32To64 ( LONG Multiplier , LONG Multiplicand ) <nl> { <nl> LONGLONG result = Multiplier ; <nl> LONGLONG Int32x32To64 ( LONG Multiplier , LONG Multiplicand ) <nl> return result ; <nl> } <nl> <nl> - int WideCharToMultiByte ( <nl> - UINT CodePage , <nl> - DWORD dwFlags , <nl> - LPCWSTR lpWideCharStr , <nl> - int cchWideChar , <nl> - LPSTR lpMultiByteStr , <nl> - int cbMultiByte , <nl> - LPCSTR lpDefaultChar , <nl> - LPBOOL lpUsedDefaultChar <nl> - ) { <nl> - <nl> - / / TODO : need to implement WideCharToMultiByte <nl> - return 0 ; <nl> - } <nl> - <nl> - int MultiByteToWideChar ( <nl> - UINT CodePage , <nl> - DWORD dwFlags , <nl> - LPCSTR lpMultiByteStr , <nl> - int cbMultiByte , <nl> - LPWSTR lpWideCharStr , <nl> - int cchWideChar <nl> - ) { <nl> - <nl> - / / TODO : need to implement MultiByteToWideChar <nl> - return 0 ; <nl> - <nl> - } <nl> - <nl> DWORD GetLastError ( ) <nl> { <nl> return errno ; <nl> mmm a / xbmc / linux / ConvUtils . 
h <nl> ppp b / xbmc / linux / ConvUtils . h <nl> <nl> * / <nl> # include " PlatformDefs . h " / / UINT DWORD LPCSTR LPSTR LPBOOL . . . <nl> <nl> - int WideCharToMultiByte ( <nl> - UINT CodePage , <nl> - DWORD dwFlags , <nl> - LPCWSTR lpWideCharStr , <nl> - int cchWideChar , <nl> - LPSTR lpMultiByteStr , <nl> - int cbMultiByte , <nl> - LPCSTR lpDefaultChar , <nl> - LPBOOL lpUsedDefaultChar <nl> - ) ; <nl> - <nl> - int MultiByteToWideChar ( <nl> - UINT CodePage , <nl> - DWORD dwFlags , <nl> - LPCSTR lpMultiByteStr , <nl> - int cbMultiByte , <nl> - LPWSTR lpWideCharStr , <nl> - int cchWideChar <nl> - ) ; <nl> - <nl> - <nl> DWORD GetLastError ( ) ; <nl> VOID SetLastError ( DWORD dwErrCode ) ; <nl> <nl> mmm a / xbmc / linux / PlatformDefs . h <nl> ppp b / xbmc / linux / PlatformDefs . h <nl> typedef struct _D3DMATRIX { <nl> # define FILE_SHARE_WRITE 0x00000002 <nl> # define FILE_SHARE_DELETE 0x00000004 <nl> <nl> - <nl> - / / String <nl> - char * itoa ( int i , char * a , int r ) ; <nl> - void strlwr ( char * string ) ; <nl> - void strupr ( char * string ) ; <nl> - <nl> / / Audio stuff <nl> typedef struct tWAVEFORMATEX <nl> { <nl> | Merge pull request from cptspiff / remove - convutils | xbmc/xbmc | 1d39bd9a5d64b26a96e642ee3ac388155bdf88c1 | 2012-03-26T08:36:08Z |
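itoa, strlwr, and strupr are non-standard, which is why the xbmc change replaces them with sprintf and explicit toupper loops. Shown standalone, the portable replacements (using bounded snprintf rather than the sprintf in the diff):

// Portable replacements for the removed non-standard helpers: snprintf or
// std::to_string instead of itoa, std::transform instead of strupr.
#include <algorithm>
#include <cctype>
#include <cstdio>
#include <string>

int main() {
    int port = 8080;

    char buffer[16];
    std::snprintf(buffer, sizeof(buffer), "%i", port);  // replaces itoa(port, buffer, 10)

    std::string s = std::to_string(port);               // C++11 alternative

    std::string label = "Movies";
    std::transform(label.begin(), label.end(), label.begin(),
                   [](unsigned char c) { return static_cast<char>(std::toupper(c)); });

    std::printf("%s %s %s\n", buffer, s.c_str(), label.c_str());
}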
mmm a / dbms / src / Functions / randConstant . cpp <nl> ppp b / dbms / src / Functions / randConstant . cpp <nl> namespace DB <nl> { <nl> <nl> template < typename ToType , typename Name > <nl> - class PreparedFunctionRandomConstant : public PreparedFunctionImpl <nl> + class PreparedFunctionRandomConstant : public IExecutableFunctionImpl <nl> { <nl> public : <nl> explicit PreparedFunctionRandomConstant ( ToType value_ ) : value ( value_ ) { } <nl> <nl> String getName ( ) const override { return Name : : name ; } <nl> <nl> - protected : <nl> - void executeImpl ( Block & block , const ColumnNumbers & , size_t result , size_t input_rows_count ) override <nl> + void execute ( Block & block , const ColumnNumbers & , size_t result , size_t input_rows_count ) override <nl> { <nl> block . getByPosition ( result ) . column = DataTypeNumber < ToType > ( ) . createColumnConst ( input_rows_count , value ) ; <nl> } <nl> class PreparedFunctionRandomConstant : public PreparedFunctionImpl <nl> } ; <nl> <nl> template < typename ToType , typename Name > <nl> - class FunctionBaseRandomConstant : public IFunctionBase <nl> + class FunctionBaseRandomConstant : public IFunctionBaseImpl <nl> { <nl> public : <nl> explicit FunctionBaseRandomConstant ( ToType value_ , DataTypes argument_types_ ) <nl> class FunctionBaseRandomConstant : public IFunctionBase <nl> return return_type ; <nl> } <nl> <nl> - ExecutableFunctionPtr prepare ( const Block & , const ColumnNumbers & , size_t ) const override <nl> + ExecutableFunctionImplPtr prepare ( const Block & , const ColumnNumbers & , size_t ) const override <nl> { <nl> - return std : : make_shared < PreparedFunctionRandomConstant < ToType , Name > > ( value ) ; <nl> + return std : : make_unique < PreparedFunctionRandomConstant < ToType , Name > > ( value ) ; <nl> } <nl> <nl> bool isDeterministic ( ) const override { return false ; } <nl> class FunctionBaseRandomConstant : public IFunctionBase <nl> } ; <nl> <nl> template < typename ToType , typename Name > <nl> - class FunctionBuilderRandomConstant : public FunctionBuilderImpl <nl> + class FunctionBuilderRandomConstant : public IFunctionOverloadResolverImpl <nl> { <nl> public : <nl> static constexpr auto name = Name : : name ; <nl> class FunctionBuilderRandomConstant : public FunctionBuilderImpl <nl> bool isVariadic ( ) const override { return true ; } <nl> size_t getNumberOfArguments ( ) const override { return 0 ; } <nl> <nl> - void checkNumberOfArguments ( size_t number_of_arguments ) const override <nl> + void checkNumberOfArgumentsIfVariadic ( size_t number_of_arguments ) const override <nl> { <nl> if ( number_of_arguments > 1 ) <nl> throw Exception ( " Number of arguments for function " + getName ( ) + " doesn ' t match : passed " <nl> class FunctionBuilderRandomConstant : public FunctionBuilderImpl <nl> ErrorCodes : : NUMBER_OF_ARGUMENTS_DOESNT_MATCH ) ; <nl> } <nl> <nl> - static FunctionOverloadResolverPtr create ( const Context & ) <nl> + static FunctionOverloadResolverImplPtr create ( const Context & ) <nl> { <nl> - return std : : make_shared < FunctionBuilderRandomConstant < ToType , Name > > ( ) ; <nl> + return std : : make_unique < FunctionBuilderRandomConstant < ToType , Name > > ( ) ; <nl> } <nl> <nl> - protected : <nl> - DataTypePtr getReturnTypeImpl ( const DataTypes & ) const override { return std : : make_shared < DataTypeNumber < ToType > > ( ) ; } <nl> + DataTypePtr getReturnType ( const DataTypes & ) const override { return std : : make_shared < DataTypeNumber < ToType > > ( ) ; } <nl> <nl> - 
FunctionBasePtr buildImpl ( const ColumnsWithTypeAndName & arguments , const DataTypePtr & ) const override <nl> + FunctionBaseImplPtr build ( const ColumnsWithTypeAndName & arguments , const DataTypePtr & ) const override <nl> { <nl> DataTypes argument_types ; <nl> <nl> class FunctionBuilderRandomConstant : public FunctionBuilderImpl <nl> RandImpl : : execute ( reinterpret_cast < char * > ( vec_to . data ( ) ) , sizeof ( ToType ) ) ; <nl> ToType value = vec_to [ 0 ] ; <nl> <nl> - return std : : make_shared < FunctionBaseRandomConstant < ToType , Name > > ( value , argument_types ) ; <nl> + return std : : make_unique < FunctionBaseRandomConstant < ToType , Name > > ( value , argument_types ) ; <nl> } <nl> } ; <nl> <nl> mmm a / dbms / src / Functions / today . cpp <nl> ppp b / dbms / src / Functions / today . cpp <nl> <nl> namespace DB <nl> { <nl> <nl> - class PreparedFunctionToday : public PreparedFunctionImpl <nl> + class PreparedFunctionToday : public IExecutableFunctionImpl <nl> { <nl> public : <nl> explicit PreparedFunctionToday ( time_t time_ ) : day_value ( time_ ) { } <nl> <nl> String getName ( ) const override { return " today " ; } <nl> <nl> - protected : <nl> - void executeImpl ( Block & block , const ColumnNumbers & , size_t result , size_t input_rows_count ) override <nl> + void execute ( Block & block , const ColumnNumbers & , size_t result , size_t input_rows_count ) override <nl> { <nl> block . getByPosition ( result ) . column = DataTypeDate ( ) . createColumnConst ( input_rows_count , day_value ) ; <nl> } <nl> class PreparedFunctionToday : public PreparedFunctionImpl <nl> DayNum day_value ; <nl> } ; <nl> <nl> - class FunctionBaseToday : public IFunctionBase <nl> + class FunctionBaseToday : public IFunctionBaseImpl <nl> { <nl> public : <nl> explicit FunctionBaseToday ( DayNum day_value_ ) : day_value ( day_value_ ) , return_type ( std : : make_shared < DataTypeDate > ( ) ) { } <nl> class FunctionBaseToday : public IFunctionBase <nl> return return_type ; <nl> } <nl> <nl> - ExecutableFunctionPtr prepare ( const Block & , const ColumnNumbers & , size_t ) const override <nl> + ExecutableFunctionImplPtr prepare ( const Block & , const ColumnNumbers & , size_t ) const override <nl> { <nl> - return std : : make_shared < PreparedFunctionToday > ( day_value ) ; <nl> + return std : : make_unique < PreparedFunctionToday > ( day_value ) ; <nl> } <nl> <nl> bool isDeterministic ( ) const override { return false ; } <nl> class FunctionBaseToday : public IFunctionBase <nl> DataTypePtr return_type ; <nl> } ; <nl> <nl> - class FunctionBuilderToday : public FunctionBuilderImpl <nl> + class FunctionBuilderToday : public IFunctionOverloadResolverImpl <nl> { <nl> public : <nl> static constexpr auto name = " today " ; <nl> class FunctionBuilderToday : public FunctionBuilderImpl <nl> <nl> size_t getNumberOfArguments ( ) const override { return 0 ; } <nl> <nl> - static FunctionOverloadResolverPtr create ( const Context & ) { return std : : make_shared < FunctionBuilderToday > ( ) ; } <nl> + static FunctionOverloadResolverImplPtr create ( const Context & ) { return std : : make_unique < FunctionBuilderToday > ( ) ; } <nl> <nl> - protected : <nl> - DataTypePtr getReturnTypeImpl ( const DataTypes & ) const override { return std : : make_shared < DataTypeDate > ( ) ; } <nl> + DataTypePtr getReturnType ( const DataTypes & ) const override { return std : : make_shared < DataTypeDate > ( ) ; } <nl> <nl> - FunctionBasePtr buildImpl ( const ColumnsWithTypeAndName & , const DataTypePtr & ) const override 
<nl> + FunctionBaseImplPtr build ( const ColumnsWithTypeAndName & , const DataTypePtr & ) const override <nl> { <nl> - return std : : make_shared < FunctionBaseToday > ( DateLUT : : instance ( ) . toDayNum ( time ( nullptr ) ) ) ; <nl> + return std : : make_unique < FunctionBaseToday > ( DateLUT : : instance ( ) . toDayNum ( time ( nullptr ) ) ) ; <nl> } <nl> } ; <nl> <nl> | update now and today . | ClickHouse/ClickHouse | d7b08b166b87620240f9273c65133b3a7e2be515 | 2019-12-12T14:55:35Z |
mmm a / tools / emterpretify . py <nl> ppp b / tools / emterpretify . py <nl> <nl> ' 18 ' : ' EQ ' , # [ lx , ly , lz ] ly = ly = = lz <nl> ' 20 ' : ' SLT ' , # [ lx , ly , lz ] ly = ly < lz ( 32 - bit signed ) <nl> ' 21 ' : ' ULT ' , # [ lx , ly , lz ] ly = ly < lz ( 32 - bit unsigned ) <nl> + ' 22 ' : ' SLE ' , # [ lx , ly , lz ] ly = ly < = lz ( 32 - bit signed ) <nl> + ' 23 ' : ' ULE ' , # [ lx , ly , lz ] ly = ly < = lz ( 32 - bit unsigned ) <nl> ' 30 ' : ' AND ' , # [ lx , ly , lz ] ly = ly & lz <nl> ' 100 ' : ' LOAD8 ' , # [ lx , ly , 0 ] lx = HEAP8 [ ly > > 0 ] <nl> ' 110 ' : ' LOAD16 ' , # [ lx , ly , 0 ] lx = HEAP16 [ ly > > 1 ] <nl> def get_coerced_access ( l , s = ' i ' , unsigned = False ) : <nl> CASES [ ROPCODES [ ' EQ ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' ) + ' ) = = ( ' + get_coerced_access ( ' lz ' ) + ' ) | 0 ; ' <nl> CASES [ ROPCODES [ ' SLT ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' ) + ' ) < ( ' + get_coerced_access ( ' lz ' ) + ' ) | 0 ; ' <nl> CASES [ ROPCODES [ ' ULT ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' , unsigned = True ) + ' ) < ( ' + get_coerced_access ( ' lz ' , unsigned = True ) + ' ) | 0 ; ' <nl> + CASES [ ROPCODES [ ' SLE ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' ) + ' ) < = ( ' + get_coerced_access ( ' lz ' ) + ' ) | 0 ; ' <nl> + CASES [ ROPCODES [ ' ULE ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' , unsigned = True ) + ' ) < = ( ' + get_coerced_access ( ' lz ' , unsigned = True ) + ' ) | 0 ; ' <nl> CASES [ ROPCODES [ ' AND ' ] ] = get_access ( ' lx ' ) + ' = ( ' + get_coerced_access ( ' ly ' ) + ' ) & ( ' + get_coerced_access ( ' lz ' ) + ' ) | 0 ; ' <nl> CASES [ ROPCODES [ ' LOAD8 ' ] ] = get_access ( ' lx ' ) + ' = ' + ' HEAP8 [ ' + get_access ( ' ly ' ) + ' > > 0 ] ; ' <nl> CASES [ ROPCODES [ ' LOAD16 ' ] ] = get_access ( ' lx ' ) + ' = ' + ' HEAP16 [ ' + get_access ( ' ly ' ) + ' > > 1 ] ; ' <nl> mmm a / tools / js - optimizer . js <nl> ppp b / tools / js - optimizer . js <nl> function emterpretify ( ast ) { <nl> <nl> switch ( node [ 1 ] ) { <nl> case ' & ' : return makeMath ( node , ASM_INT , ASM_SIGNED ) ; <nl> - case ' > = ' : <nl> - case ' + ' : case ' - ' : case ' < ' : case ' / ' : case ' = = ' : { <nl> + case ' > = ' : case ' > ' : <nl> + case ' + ' : case ' - ' : case ' < ' : case ' < = ' : case ' / ' : case ' = = ' : { <nl> var type = getCombinedType ( node [ 2 ] , node [ 3 ] , asmData , typeHint ) ; <nl> var sign = getCombinedSign ( node [ 2 ] , node [ 3 ] , signHint ) ; <nl> - if ( node [ 1 ] = = = ' > = ' ) { <nl> + if ( node [ 1 ] = = = ' > = ' | | node [ 1 ] = = = ' > ' ) { <nl> if ( type = = = ASM_INT ) { / / float / double comparisons are not antisymmetrical due to NaNs <nl> var temp = node [ 2 ] ; <nl> node [ 2 ] = node [ 3 ] ; <nl> node [ 3 ] = temp ; <nl> - node [ 1 ] = ' < ' ; <nl> + node [ 1 ] = node [ 1 ] = = = ' > = ' ? ' < ' : ' < = ' ; <nl> } else throw ' ex ' + type ; <nl> } <nl> return makeMath ( node , type , sign ) ; <nl> function emterpretify ( ast ) { <nl> else opcode = ' ULT ' ; <nl> break ; <nl> } <nl> + case ' < = ' : { <nl> + if ( sign = = = ASM_SIGNED ) opcode = ' SLE ' ; <nl> + else opcode = ' ULE ' ; <nl> + break ; <nl> + } <nl> case ' = = ' : opcode = ' EQ ' ; break ; <nl> case ' & ' : opcode = ' AND ' ; break ; <nl> default : throw ' bad ' ; <nl> | SLE , ULE | emscripten-core/emscripten | 18a8e2356dc3e3fd9090b36c2bcfcdf28de02bb5 | 2014-09-21T04:45:19Z |
mmm a / ports / proj / portfile . cmake <nl> ppp b / ports / proj / portfile . cmake <nl> vcpkg_install_cmake ( <nl> - DPROJ4_TESTS = NO <nl> ) <nl> <nl> + # Rename library and adapt cmake configuration <nl> + # N . B . debug cmake export is not copied , as it ' s not relocatable <nl> + file ( READ $ { CURRENT_PACKAGES_DIR } / share / proj / proj4 - targets - release . cmake PROJ4_TARGETS_RELEASE ) <nl> + string ( REPLACE " proj_4_9 . lib " " proj . lib " PROJ4_TARGETS_RELEASE $ { PROJ4_TARGETS_RELEASE } ) <nl> + file ( WRITE $ { CURRENT_PACKAGES_DIR } / share / proj / proj4 - targets - release . cmake $ { PROJ4_TARGETS_RELEASE } ) <nl> + <nl> + file ( RENAME $ { CURRENT_PACKAGES_DIR } / lib / proj_4_9 . lib $ { CURRENT_PACKAGES_DIR } / lib / proj . lib ) <nl> + file ( RENAME $ { CURRENT_PACKAGES_DIR } / debug / lib / proj_4_9_d . lib $ { CURRENT_PACKAGES_DIR } / debug / lib / projd . lib ) <nl> + <nl> # Remove duplicate headers installed from debug build <nl> file ( REMOVE_RECURSE $ { CURRENT_PACKAGES_DIR } / debug / include ) <nl> # Remove data installed from debug build <nl> | [ proj ] Rename library to " proj . lib " / " projd . lib " | microsoft/vcpkg | c969fad3f99c5c6b0ef50d7f30a66d0a12c59d2c | 2016-11-28T14:02:51Z |
mmm a / arangod / RestHandler / RestDocumentHandler . cpp <nl> ppp b / arangod / RestHandler / RestDocumentHandler . cpp <nl> bool RestDocumentHandler : : readSingleDocument ( bool generateBody ) { <nl> <nl> res = trx . read ( & document , key ) ; <nl> <nl> + TRI_primary_collection_t * primary = trx . primaryCollection ( ) ; <nl> + assert ( primary ! = 0 ) ; <nl> + TRI_shaper_t * shaper = primary - > _shaper ; <nl> + <nl> / / register a barrier . will be destroyed automatically <nl> - Barrier barrier ( trx . primaryCollection ( ) ) ; <nl> + Barrier barrier ( primary ) ; <nl> <nl> res = trx . finish ( res ) ; <nl> <nl> bool RestDocumentHandler : : readSingleDocument ( bool generateBody ) { <nl> <nl> if ( ifNoneRid = = 0 ) { <nl> if ( ifRid = = 0 | | ifRid = = rid ) { <nl> - generateDocument ( resolver , collection , document , trx . shaper ( ) , generateBody ) ; <nl> + generateDocument ( resolver , collection , document , shaper , generateBody ) ; <nl> } <nl> else { <nl> generatePreconditionFailed ( collection , document - > _key , rid ) ; <nl> bool RestDocumentHandler : : readSingleDocument ( bool generateBody ) { <nl> } <nl> else { <nl> if ( ifRid = = 0 | | ifRid = = rid ) { <nl> - generateDocument ( resolver , collection , document , trx . shaper ( ) , generateBody ) ; <nl> + generateDocument ( resolver , collection , document , shaper , generateBody ) ; <nl> } <nl> else { <nl> generatePreconditionFailed ( collection , document - > _key , rid ) ; <nl> bool RestDocumentHandler : : modifyDocument ( bool isPatch ) { <nl> } <nl> <nl> TRI_voc_rid_t rid = 0 ; <nl> + TRI_primary_collection_t * primary = trx . primaryCollection ( ) ; <nl> + assert ( primary ! = 0 ) ; <nl> + TRI_shaper_t * shaper = primary - > _shaper ; <nl> <nl> if ( isPatch ) { <nl> / / patching an existing document <nl> bool RestDocumentHandler : : modifyDocument ( bool isPatch ) { <nl> <nl> assert ( oldDocument ) ; <nl> <nl> - TRI_shaper_t * shaper = trx . shaper ( ) ; <nl> TRI_shaped_json_t shapedJson ; <nl> TRI_EXTRACT_SHAPED_JSON_MARKER ( shapedJson , oldDocument - > _data ) ; <nl> TRI_json_t * old = TRI_JsonShapedJson ( shaper , & shapedJson ) ; <nl> deleted file mode 100644 <nl> index 04dbb07c31a . . 00000000000 <nl> mmm a / arangod / Utils / CollectionReadLock . h <nl> ppp / dev / null <nl> <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief " safe " collection read lock <nl> - / / / <nl> - / / / @ file <nl> - / / / <nl> - / / / DISCLAIMER <nl> - / / / <nl> - / / / Copyright 2004 - 2012 triAGENS GmbH , Cologne , Germany <nl> - / / / <nl> - / / / Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> - / / / you may not use this file except in compliance with the License . <nl> - / / / You may obtain a copy of the License at <nl> - / / / <nl> - / / / http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - / / / <nl> - / / / Unless required by applicable law or agreed to in writing , software <nl> - / / / distributed under the License is distributed on an " AS IS " BASIS , <nl> - / / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - / / / See the License for the specific language governing permissions and <nl> - / / / limitations under the License . 
<nl> - / / / <nl> - / / / Copyright holder is triAGENS GmbH , Cologne , Germany <nl> - / / / <nl> - / / / @ author Jan Steemann <nl> - / / / @ author Copyright 2011 - 2012 , triAGENS GmbH , Cologne , Germany <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - # ifndef TRIAGENS_UTILS_COLLECTION_READ_LOCK_H <nl> - # define TRIAGENS_UTILS_COLLECTION_READ_LOCK_H 1 <nl> - <nl> - # include " VocBase / primary - collection . h " <nl> - <nl> - using namespace std ; <nl> - <nl> - namespace triagens { <nl> - namespace arango { <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - class CollectionReadLock <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - class CollectionReadLock { <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief CollectionReadLock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - private : <nl> - CollectionReadLock ( const CollectionReadLock & ) ; <nl> - CollectionReadLock & operator = ( const CollectionReadLock & ) ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - constructors and destructors <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - public : <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief create the lock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - CollectionReadLock ( TRI_primary_collection_t * primary ) : _primary ( primary ) { <nl> - _primary - > beginRead ( _primary ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / 
/ / / / / / / / / / / / / / / <nl> - / / / @ brief destroy the lock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - ~ CollectionReadLock ( ) { <nl> - _primary - > endRead ( _primary ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - private variables <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - private : <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief the collection <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - TRI_primary_collection_t * _primary ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - } ; <nl> - <nl> - } <nl> - } <nl> - <nl> - # endif <nl> - <nl> - / / Local Variables : <nl> - / / mode : outline - minor <nl> - / / outline - regexp : " ^ \ \ ( / / / @ brief \ \ | / / / { @ inheritDoc } \ \ | / / / @ addtogroup \ \ | / / / @ page \ \ | / / - - SECTION - - \ \ | / / / @ \ \ } \ \ ) " <nl> - / / End : <nl> deleted file mode 100644 <nl> index 5616e2d992c . . 00000000000 <nl> mmm a / arangod / Utils / CollectionWriteLock . h <nl> ppp / dev / null <nl> <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief " safe " collection write lock <nl> - / / / <nl> - / / / @ file <nl> - / / / <nl> - / / / DISCLAIMER <nl> - / / / <nl> - / / / Copyright 2004 - 2012 triAGENS GmbH , Cologne , Germany <nl> - / / / <nl> - / / / Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> - / / / you may not use this file except in compliance with the License . <nl> - / / / You may obtain a copy of the License at <nl> - / / / <nl> - / / / http : / / www . apache . org / licenses / LICENSE - 2 . 
0 <nl> - / / / <nl> - / / / Unless required by applicable law or agreed to in writing , software <nl> - / / / distributed under the License is distributed on an " AS IS " BASIS , <nl> - / / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> - / / / See the License for the specific language governing permissions and <nl> - / / / limitations under the License . <nl> - / / / <nl> - / / / Copyright holder is triAGENS GmbH , Cologne , Germany <nl> - / / / <nl> - / / / @ author Jan Steemann <nl> - / / / @ author Copyright 2011 - 2012 , triAGENS GmbH , Cologne , Germany <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - # ifndef TRIAGENS_UTILS_COLLECTION_WRITE_LOCK_H <nl> - # define TRIAGENS_UTILS_COLLECTION_WRITE_LOCK_H 1 <nl> - <nl> - # include " VocBase / primary - collection . h " <nl> - <nl> - using namespace std ; <nl> - <nl> - namespace triagens { <nl> - namespace arango { <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - class CollectionWriteLock <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - class CollectionWriteLock { <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief CollectionWriteLock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - private : <nl> - CollectionWriteLock ( const CollectionWriteLock & ) ; <nl> - CollectionWriteLock & operator = ( const CollectionWriteLock & ) ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - constructors and destructors <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - public : <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief create the lock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / 
/ / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - CollectionWriteLock ( TRI_primary_collection_t * primary ) : _primary ( primary ) { <nl> - _primary - > beginWrite ( _primary ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief destroy the lock <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - ~ CollectionWriteLock ( ) { <nl> - _primary - > endWrite ( _primary ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / - - SECTION - - private variables <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ addtogroup ArangoDB <nl> - / / / @ { <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - private : <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief the collection <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - TRI_primary_collection_t * _primary ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ } <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - } ; <nl> - <nl> - } <nl> - } <nl> - <nl> - # endif <nl> - <nl> - / / Local Variables : <nl> - / / mode : outline - minor <nl> - / / outline - regexp : " ^ \ \ ( / / / @ brief \ \ | / / / { @ inheritDoc } \ \ | / / / @ addtogroup \ \ | / / / @ page \ \ | / / - - SECTION - - \ \ | / / / @ \ \ } \ \ ) " <nl> - / / End : <nl> mmm a / arangod / Utils / SingleCollectionTransaction . h <nl> ppp b / arangod / Utils / SingleCollectionTransaction . 
h <nl> namespace triagens { <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief return the collection ' s shaper <nl> + / / / @ brief return the collection ' s barrier list <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - inline TRI_shaper_t * shaper ( ) { <nl> - return primaryCollection ( ) - > _shaper ; <nl> + inline TRI_barrier_list_t * barrierList ( ) { <nl> + return & primaryCollection ( ) - > _barrierList ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief return the collection ' s barrier list <nl> + / / / @ brief explicitly lock the underlying collection for read access <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - inline TRI_barrier_list_t * barrierList ( ) { <nl> - return & primaryCollection ( ) - > _barrierList ; <nl> + <nl> + int lockRead ( ) { <nl> + TRI_primary_collection_t * primary = primaryCollection ( ) ; <nl> + <nl> + return this - > lockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / arangod / Utils / SingleCollectionWriteTransaction . h <nl> ppp b / arangod / Utils / SingleCollectionWriteTransaction . h <nl> <nl> <nl> # include " Utils / CollectionNameResolver . h " <nl> # include " Utils / SingleCollectionTransaction . h " <nl> - # include " Utils / CollectionWriteLock . h " <nl> <nl> # include " ShapedJson / shaped - json . h " <nl> # include " VocBase / transaction . h " <nl> namespace triagens { <nl> return _synchronous ; <nl> } <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief explicitly lock the underlying collection for write access <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + int lockWrite ( ) { <nl> + TRI_primary_collection_t * primary = this - > primaryCollection ( ) ; <nl> + <nl> + return this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief create a single document within a transaction , using json <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / arangod / Utils / Transaction . h <nl> ppp b / arangod / Utils / Transaction . h <nl> <nl> <nl> # include " Logger / Logger . h " <nl> # include " Utils / CollectionNameResolver . h " <nl> - # include " Utils / CollectionReadLock . h " <nl> - # include " Utils / CollectionWriteLock . 
h " <nl> <nl> # define TRX_LOG if ( false ) std : : cout <nl> <nl> namespace triagens { <nl> this - > _hints | = ( TRI_transaction_hint_t ) hint ; <nl> } <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief read - or write - lock a collection <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + int lockExplicit ( TRI_primary_collection_t * const primary , <nl> + const TRI_transaction_type_e type ) { <nl> + if ( this - > _trx = = 0 ) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> + <nl> + if ( ( this - > status ( ) = = TRI_TRANSACTION_RUNNING & & ! this - > isEmbedded ( ) ) | | <nl> + this - > status ( ) = = TRI_TRANSACTION_COMMITTED | | <nl> + this - > status ( ) = = TRI_TRANSACTION_ABORTED | | <nl> + this - > status ( ) = = TRI_TRANSACTION_FINISHED ) { <nl> + return TRI_ERROR_TRANSACTION_INVALID_STATE ; <nl> + } <nl> + <nl> + if ( this - > isEmbedded ( ) ) { <nl> + / / locking is a no - op in embedded transactions <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> + if ( type = = TRI_TRANSACTION_READ ) { <nl> + primary - > beginRead ( primary ) ; <nl> + } <nl> + else { <nl> + primary - > beginWrite ( primary ) ; <nl> + } <nl> + <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief read - or write - unlock a collection <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + int unlockExplicit ( TRI_primary_collection_t * const primary , <nl> + const TRI_transaction_type_e type ) { <nl> + if ( this - > _trx = = 0 ) { <nl> + return TRI_ERROR_INTERNAL ; <nl> + } <nl> + <nl> + if ( ( this - > status ( ) = = TRI_TRANSACTION_RUNNING & & ! this - > isEmbedded ( ) ) | | <nl> + this - > status ( ) = = TRI_TRANSACTION_COMMITTED | | <nl> + this - > status ( ) = = TRI_TRANSACTION_ABORTED | | <nl> + this - > status ( ) = = TRI_TRANSACTION_FINISHED ) { <nl> + return TRI_ERROR_TRANSACTION_INVALID_STATE ; <nl> + } <nl> + <nl> + if ( this - > isEmbedded ( ) ) { <nl> + / / locking is a no - op in embedded transactions <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> + if ( type = = TRI_TRANSACTION_READ ) { <nl> + primary - > endRead ( primary ) ; <nl> + } <nl> + else { <nl> + primary - > endWrite ( primary ) ; <nl> + } <nl> + <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief read any ( random ) document <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> if ( * barrier = = 0 ) { <nl> return TRI_ERROR_OUT_OF_MEMORY ; <nl> } <nl> - <nl> - CollectionReadLock lock ( primary ) ; <nl> + <nl> + / / READ - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + <nl> if ( primary - > _primaryIndex . 
_nrUsed = = 0 ) { <nl> TRI_FreeBarrier ( * barrier ) ; <nl> * barrier = 0 ; <nl> namespace triagens { <nl> <nl> * mptr = ( TRI_doc_mptr_t * ) beg [ pos ] ; <nl> } <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + / / READ - LOCK END <nl> <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> namespace triagens { <nl> TRI_doc_operation_context_t context ; <nl> TRI_InitReadContextPrimaryCollection ( & context , primary ) ; <nl> <nl> - CollectionReadLock lock ( primary ) ; <nl> + / / READ - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> <nl> - return primary - > read ( & context , mptr , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + int res = primary - > read ( & context , mptr , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + / / READ - LOCK END <nl> + <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> TRI_doc_operation_context_t context ; <nl> TRI_InitReadContextPrimaryCollection ( & context , primary ) ; <nl> <nl> - CollectionReadLock lock ( primary ) ; <nl> + / / READ - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> <nl> if ( primary - > _primaryIndex . _nrUsed > 0 ) { <nl> void * * ptr = primary - > _primaryIndex . _table ; <nl> namespace triagens { <nl> } <nl> } <nl> } <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + / / READ - LOCK END <nl> <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> namespace triagens { <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> <nl> - CollectionReadLock lock ( primary ) ; <nl> + / / READ - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> <nl> if ( primary - > _primaryIndex . 
_nrUsed = = 0 ) { <nl> / / nothing to do <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + / / READ - LOCK END <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> <nl> namespace triagens { <nl> } <nl> } <nl> } <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_READ ) ; <nl> + / / READ - LOCK END <nl> <nl> if ( count = = 0 ) { <nl> / / barrier not needed , kill it <nl> namespace triagens { <nl> TRI_doc_operation_context_t context ; <nl> TRI_InitContextPrimaryCollection ( & context , primary , TRI_DOC_UPDATE_ERROR , forceSync ) ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> <nl> - return primary - > createJson ( & context , markerType , mptr , json , data ) ; <nl> + int res = primary - > createJson ( & context , markerType , mptr , json , data ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> + <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> TRI_doc_operation_context_t context ; <nl> TRI_InitContextPrimaryCollection ( & context , primary , TRI_DOC_UPDATE_ERROR , forceSync ) ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> <nl> - return primary - > create ( & context , markerType , mptr , shaped , data , key ) ; <nl> + int res = primary - > create ( & context , markerType , mptr , shaped , data , key ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> + <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> context . _expectedRid = expectedRevision ; <nl> context . _previousRid = actualRevision ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + <nl> + int res = primary - > updateJson ( & context , mptr , json , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> <nl> - return primary - > updateJson ( & context , mptr , json , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> context . _expectedRid = expectedRevision ; <nl> context . _previousRid = actualRevision ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> <nl> - return primary - > update ( & context , mptr , shaped , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + int res = primary - > update ( & context , mptr , shaped , ( TRI_voc_key_t ) key . 
c_str ( ) ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> + <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> context . _expectedRid = expectedRevision ; <nl> context . _previousRid = actualRevision ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + <nl> + int res = primary - > destroy ( & context , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> <nl> - return primary - > destroy ( & context , ( TRI_voc_key_t ) key . c_str ( ) ) ; <nl> + return res ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> namespace triagens { <nl> <nl> res = TRI_ERROR_NO_ERROR ; <nl> <nl> - CollectionWriteLock lock ( primary ) ; <nl> + / / WRITE - LOCK START <nl> + this - > lockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> <nl> for ( size_t i = 0 ; i < n ; + + i ) { <nl> const string & id = ids [ i ] ; <nl> namespace triagens { <nl> break ; <nl> } <nl> } <nl> + <nl> + this - > unlockExplicit ( primary , TRI_TRANSACTION_WRITE ) ; <nl> + / / WRITE - LOCK END <nl> <nl> return res ; <nl> } <nl> mmm a / arangod / V8Server / v8 - vocbase . cpp <nl> ppp b / arangod / V8Server / v8 - vocbase . cpp <nl> static v8 : : Handle < v8 : : Value > ReplaceVocbaseCol ( const bool useCollection , <nl> return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( res , " cannot replace document " , true ) ) ) ; <nl> } <nl> <nl> - TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 1 ] , trx . shaper ( ) ) ; <nl> - if ( ! holder . registerShapedJson ( trx . shaper ( ) , shaped ) ) { <nl> + TRI_primary_collection_t * primary = trx . primaryCollection ( ) ; <nl> + <nl> + TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 1 ] , primary - > _shaper ) ; <nl> + if ( ! holder . registerShapedJson ( primary - > _shaper , shaped ) ) { <nl> return scope . Close ( v8 : : ThrowException ( <nl> TRI_CreateErrorObject ( TRI_errno ( ) , <nl> " < data > cannot be converted into JSON shape " ) ) ) ; <nl> static v8 : : Handle < v8 : : Value > SaveVocbaseCol ( SingleCollectionWriteTransaction < Em <nl> } <nl> } <nl> <nl> - TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 0 ] , trx - > shaper ( ) ) ; <nl> - if ( ! holder . registerShapedJson ( trx - > shaper ( ) , shaped ) ) { <nl> + TRI_primary_collection_t * primary = trx - > primaryCollection ( ) ; <nl> + <nl> + TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 0 ] , primary - > _shaper ) ; <nl> + if ( ! holder . registerShapedJson ( primary - > _shaper , shaped ) ) { <nl> return scope . Close ( v8 : : ThrowException ( <nl> TRI_CreateErrorObject ( TRI_errno ( ) , <nl> " < data > cannot be converted into JSON shape " ) ) ) ; <nl> static v8 : : Handle < v8 : : Value > SaveEdgeCol ( SingleCollectionWriteTransaction < Embed <nl> return scope . Close ( v8 : : ThrowException ( err ) ) ; <nl> } <nl> edge . 
_toCid = toCollection - > _cid ; <nl> + <nl> + TRI_primary_collection_t * primary = trx - > primaryCollection ( ) ; <nl> <nl> / / extract shaped data <nl> - TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 2 ] , trx - > shaper ( ) ) ; <nl> - if ( ! holder . registerShapedJson ( trx - > shaper ( ) , shaped ) ) { <nl> + TRI_shaped_json_t * shaped = TRI_ShapedJsonV8Object ( argv [ 2 ] , primary - > _shaper ) ; <nl> + if ( ! holder . registerShapedJson ( primary - > _shaper , shaped ) ) { <nl> return scope . Close ( v8 : : ThrowException ( <nl> TRI_CreateErrorObject ( TRI_errno ( ) , <nl> " < data > cannot be converted into JSON shape " ) ) ) ; <nl> static v8 : : Handle < v8 : : Value > UpdateVocbaseCol ( const bool useCollection , <nl> return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( res , " cannot update document " , true ) ) ) ; <nl> } <nl> <nl> + TRI_primary_collection_t * primary = trx . primaryCollection ( ) ; <nl> + <nl> assert ( document ) ; <nl> <nl> TRI_shaped_json_t shaped ; <nl> TRI_EXTRACT_SHAPED_JSON_MARKER ( shaped , document - > _data ) ; <nl> - TRI_json_t * old = TRI_JsonShapedJson ( trx . shaper ( ) , & shaped ) ; <nl> + TRI_json_t * old = TRI_JsonShapedJson ( primary - > _shaper , & shaped ) ; <nl> <nl> - if ( ! holder . registerJson ( trx . shaper ( ) - > _memoryZone , old ) ) { <nl> + if ( ! holder . registerJson ( primary - > _shaper - > _memoryZone , old ) ) { <nl> return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( TRI_ERROR_OUT_OF_MEMORY ) ) ) ; <nl> } <nl> <nl> static v8 : : Handle < v8 : : Value > JS_LookupFulltextIndexVocbaseCol ( v8 : : Arguments con <nl> <nl> static v8 : : Handle < v8 : : Value > JS_FiguresVocbaseCol ( v8 : : Arguments const & argv ) { <nl> v8 : : HandleScope scope ; <nl> - <nl> + <nl> TRI_vocbase_col_t * collection = TRI_UnwrapClass < TRI_vocbase_col_t > ( argv . Holder ( ) , WRP_VOCBASE_COL_TYPE ) ; <nl> <nl> if ( collection = = 0 ) { <nl> static v8 : : Handle < v8 : : Value > JS_FiguresVocbaseCol ( v8 : : Arguments const & argv ) { <nl> <nl> v8 : : Handle < v8 : : Object > result = v8 : : Object : : New ( ) ; <nl> <nl> - TRI_READ_LOCK_STATUS_VOCBASE_COL ( collection ) ; <nl> - TRI_vocbase_col_status_e status = collection - > _status ; <nl> - <nl> - if ( status ! = TRI_VOC_COL_STATUS_LOADED ) { <nl> - TRI_READ_UNLOCK_STATUS_VOCBASE_COL ( collection ) ; <nl> - return scope . Close ( result ) ; <nl> + CollectionNameResolver resolver ( collection - > _vocbase ) ; <nl> + SingleCollectionReadOnlyTransaction < EmbeddableTransaction < V8TransactionContext > > trx ( collection - > _vocbase , resolver , collection - > _cid ) ; <nl> + int res = trx . begin ( ) ; <nl> + if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> + return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( res , " cannot fetch figures " , true ) ) ) ; <nl> } <nl> - <nl> - TRI_primary_collection_t * primary = collection - > _collection ; <nl> <nl> - if ( primary = = 0 ) { <nl> - TRI_READ_UNLOCK_STATUS_VOCBASE_COL ( collection ) ; <nl> - return scope . Close ( v8 : : ThrowException ( v8 : : String : : New ( " illegal collection pointer " ) ) ) ; <nl> - } <nl> + trx . lockRead ( ) ; <nl> <nl> + TRI_primary_collection_t * primary = collection - > _collection ; <nl> TRI_doc_collection_info_t * info = primary - > figures ( primary ) ; <nl> - <nl> - TRI_READ_UNLOCK_STATUS_VOCBASE_COL ( collection ) ; <nl> <nl> + res = trx . finish ( res ) ; <nl> + if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> + return scope . 
Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( res , " cannot fetch figures " , true ) ) ) ; <nl> + } <nl> + <nl> if ( info = = NULL ) { <nl> v8 : : Handle < v8 : : Object > errorObject = TRI_CreateErrorObject ( TRI_ERROR_OUT_OF_MEMORY ) ; <nl> <nl> static v8 : : Handle < v8 : : Value > JS_StatusVocbaseCol ( v8 : : Arguments const & argv ) { <nl> static v8 : : Handle < v8 : : Value > JS_RevisionVocbaseCol ( v8 : : Arguments const & argv ) { <nl> v8 : : HandleScope scope ; <nl> <nl> - v8 : : Handle < v8 : : Object > err ; <nl> - TRI_vocbase_col_t const * collection = UseCollection ( argv . Holder ( ) , & err ) ; <nl> + TRI_vocbase_col_t * collection = TRI_UnwrapClass < TRI_vocbase_col_t > ( argv . Holder ( ) , WRP_VOCBASE_COL_TYPE ) ; <nl> <nl> if ( collection = = 0 ) { <nl> - return scope . Close ( v8 : : ThrowException ( err ) ) ; <nl> + return scope . Close ( v8 : : ThrowException ( v8 : : String : : New ( " illegal collection pointer " ) ) ) ; <nl> } <nl> <nl> - TRI_primary_collection_t * primary = collection - > _collection ; <nl> - <nl> - if ( ! TRI_IS_DOCUMENT_COLLECTION ( collection - > _type ) ) { <nl> - TRI_ReleaseCollection ( collection ) ; <nl> - return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( TRI_ERROR_INTERNAL , " unknown collection type " ) ) ) ; <nl> + CollectionNameResolver resolver ( collection - > _vocbase ) ; <nl> + SingleCollectionReadOnlyTransaction < EmbeddableTransaction < V8TransactionContext > > trx ( collection - > _vocbase , resolver , collection - > _cid ) ; <nl> + int res = trx . begin ( ) ; <nl> + if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> + return scope . Close ( v8 : : ThrowException ( TRI_CreateErrorObject ( res , " cannot fetch revision " , true ) ) ) ; <nl> } <nl> - <nl> - primary - > beginRead ( primary ) ; <nl> + <nl> + trx . lockRead ( ) ; <nl> + TRI_primary_collection_t * primary = collection - > _collection ; <nl> TRI_voc_rid_t rid = primary - > base . _info . _rid ; <nl> - primary - > endRead ( primary ) ; <nl> <nl> - TRI_ReleaseCollection ( collection ) ; <nl> - <nl> + trx . finish ( res ) ; <nl> + <nl> return scope . Close ( V8RevisionId ( rid ) ) ; <nl> } <nl> <nl> mmm a / arangod / VocBase / primary - collection . c <nl> ppp b / arangod / VocBase / primary - collection . c <nl> static void FreeDatafileInfo ( TRI_associative_pointer_t * const files ) { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief returns information about the collection <nl> + / / / note : the collection lock must be held when calling this function <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> static TRI_doc_collection_info_t * Figures ( TRI_primary_collection_t * primary ) { <nl> static TRI_doc_collection_info_t * Figures ( TRI_primary_collection_t * primary ) { <nl> return NULL ; <nl> } <nl> <nl> - primary - > beginRead ( primary ) ; <nl> - <nl> for ( i = 0 ; i < primary - > _datafileInfo . _nrAlloc ; + + i ) { <nl> TRI_doc_datafile_info_t * d = primary - > _datafileInfo . 
_table [ i ] ; <nl> <nl> static TRI_doc_collection_info_t * Figures ( TRI_primary_collection_t * primary ) { <nl> <nl> info - > _numberShapes = ( TRI_voc_ssize_t ) primary - > _shaper - > numShapes ( primary - > _shaper ) ; <nl> <nl> - primary - > endRead ( primary ) ; <nl> - <nl> return info ; <nl> } <nl> <nl> | removed lock classes | arangodb/arangodb | 8a6225865f869ba9052086bbbb5fd458812eb30b | 2013-02-04T13:40:14Z |
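The ArangoDB commit above deletes the RAII guards CollectionReadLock and CollectionWriteLock and replaces each use with explicit lockExplicit/unlockExplicit calls bracketed by READ-LOCK/WRITE-LOCK comments. A minimal C++ sketch of the RAII pattern being removed; CollectionLike and its beginRead/endRead members are hypothetical stand-ins for TRI_primary_collection_t, not ArangoDB API:

#include <cassert>

// Hypothetical stand-in for TRI_primary_collection_t.
struct CollectionLike {
  int readers = 0;
  void beginRead() { ++readers; }
  void endRead() { assert(readers > 0); --readers; }
};

// RAII read lock: acquired in the constructor, released in the
// destructor, so no early return can leak the lock.
class ScopedReadLock {
 public:
  explicit ScopedReadLock(CollectionLike* c) : _c(c) { _c->beginRead(); }
  ~ScopedReadLock() { _c->endRead(); }
  ScopedReadLock(const ScopedReadLock&) = delete;
  ScopedReadLock& operator=(const ScopedReadLock&) = delete;

 private:
  CollectionLike* _c;
};

int main() {
  CollectionLike coll;
  {
    ScopedReadLock lock(&coll);  // lock held inside this scope
    assert(coll.readers == 1);
  }                              // destructor releases the lock here
  assert(coll.readers == 0);
}

The trade-off visible in the diff: the guard cannot leak a lock on an early return, but the explicit lockExplicit/unlockExplicit pair lets the transaction skip locking entirely when it runs embedded in an outer transaction.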
mmm a / modules / dnn / src / layers / pooling_layer . cpp <nl> ppp b / modules / dnn / src / layers / pooling_layer . cpp <nl> class PoolingLayerImpl : public PoolingLayer <nl> <nl> # if CV_SIMD128 <nl> const int * ofsptr = ofsbuf . empty ( ) ? 0 : ( const int * ) & ofsbuf [ 0 ] ; <nl> + if ( poolingType = = MAX & & ! compMaxIdx & & ! ofsptr ) <nl> + CV_Error ( Error : : StsBadArg , " ofsbuf should be initialized in this mode " ) ; <nl> v_float32x4 idx00 ( 0 . f , ( float ) stride_w , ( float ) ( stride_w * 2 ) , ( float ) ( stride_w * 3 ) ) ; <nl> v_float32x4 ones = v_setall_f32 ( 1 . f ) ; <nl> v_float32x4 idx_delta = v_setall_f32 ( ( float ) ( inp_width - kernel_w ) ) ; <nl> mmm a / modules / imgcodecs / src / grfmt_tiff . cpp <nl> ppp b / modules / imgcodecs / src / grfmt_tiff . cpp <nl> bool TiffEncoder : : writeLibTiff ( const std : : vector < Mat > & img_vec , const std : : vect <nl> } <nl> default : <nl> { <nl> + TIFFClose ( pTiffHandle ) ; <nl> return false ; <nl> } <nl> } <nl> | Merge pull request from mshabunin : fix - static - issues - 9 | opencv/opencv | fda7bb95d0baeb8e28766ab4ced284c5aceabbe2 | 2018-03-02T11:34:53Z |
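The grfmt_tiff.cpp hunk above fixes a classic early-return resource leak: the default case of the switch returned false without closing the open TIFF handle, so the fix adds TIFFClose before bailing out. A minimal sketch of the same close-on-every-exit-path pattern, written against <cstdio> instead of libtiff so it stays self-contained; writeRecord and its format are invented for illustration:

#include <cstdio>

// Returns true on success. Every exit path after fopen must release
// the handle; the default branch mirrors the case the OpenCV hunk patches.
bool writeRecord(const char* path, int type) {
  std::FILE* f = std::fopen(path, "wb");
  if (f == nullptr)
    return false;        // nothing acquired yet, nothing to release

  switch (type) {
    case 1:
      std::fputs("one\n", f);
      break;
    default:
      std::fclose(f);    // the fix: close before the early return
      return false;
  }

  std::fclose(f);
  return true;
}

int main() {
  return writeRecord("/tmp/record.bin", 1) ? 0 : 1;
}

An RAII wrapper around the handle would make the leak impossible by construction, but the one-line close matches the C-style control flow of the surrounding encoder.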
mmm a / include / swift / ABI / Metadata . h <nl> ppp b / include / swift / ABI / Metadata . h <nl> struct TargetStoredClassMetadataBounds { <nl> using StoredClassMetadataBounds = <nl> TargetStoredClassMetadataBounds < InProcess > ; <nl> <nl> + template < typename Runtime > <nl> + struct TargetResilientSuperclass { <nl> + / / / The superclass of this class . This pointer can be interpreted <nl> + / / / using the superclass reference kind stored in the type context <nl> + / / / descriptor flags . It is null if the class has no formal superclass . <nl> + / / / <nl> + / / / Note that SwiftObject , the implicit superclass of all Swift root <nl> + / / / classes when building with ObjC compatibility , does not appear here . <nl> + TargetRelativeDirectPointer < Runtime , const void , / * nullable * / true > Superclass ; <nl> + } ; <nl> + <nl> template < typename Runtime > <nl> class TargetClassDescriptor final <nl> : public TargetTypeContextDescriptor < Runtime > , <nl> public TrailingGenericContextObjects < TargetClassDescriptor < Runtime > , <nl> TargetTypeGenericContextDescriptorHeader , <nl> / * additional trailing objects : * / <nl> + TargetResilientSuperclass < Runtime > , <nl> TargetForeignMetadataInitialization < Runtime > , <nl> TargetSingletonMetadataInitialization < Runtime > , <nl> TargetVTableDescriptorHeader < Runtime > , <nl> class TargetClassDescriptor final <nl> using TrailingGenericContextObjects = <nl> TrailingGenericContextObjects < TargetClassDescriptor < Runtime > , <nl> TargetTypeGenericContextDescriptorHeader , <nl> + TargetResilientSuperclass < Runtime > , <nl> TargetForeignMetadataInitialization < Runtime > , <nl> TargetSingletonMetadataInitialization < Runtime > , <nl> TargetVTableDescriptorHeader < Runtime > , <nl> class TargetClassDescriptor final <nl> using VTableDescriptorHeader = TargetVTableDescriptorHeader < Runtime > ; <nl> using OverrideTableHeader = TargetOverrideTableHeader < Runtime > ; <nl> using MethodOverrideDescriptor = TargetMethodOverrideDescriptor < Runtime > ; <nl> + using ResilientSuperclass = TargetResilientSuperclass < Runtime > ; <nl> using ForeignMetadataInitialization = <nl> TargetForeignMetadataInitialization < Runtime > ; <nl> using SingletonMetadataInitialization = <nl> class TargetClassDescriptor final <nl> using TrailingGenericContextObjects : : getGenericParams ; <nl> using TargetTypeContextDescriptor < Runtime > : : getTypeContextDescriptorFlags ; <nl> <nl> - / / / The superclass of this class . This pointer can be interpreted <nl> - / / / using the superclass reference kind stored in the type context <nl> - / / / descriptor flags . It is null if the class has no formal superclass . <nl> - / / / <nl> - / / / Note that SwiftObject , the implicit superclass of all Swift root <nl> - / / / classes when building with ObjC compatibility , does not appear here . <nl> - TargetRelativeDirectPointer < Runtime , const void , / * nullable * / true > Superclass ; <nl> - <nl> - / / / Does this class have a formal superclass ? <nl> - bool hasSuperclass ( ) const { <nl> - return ! Superclass . isNull ( ) ; <nl> - } <nl> - <nl> - TypeReferenceKind getSuperclassReferenceKind ( ) const { <nl> - return getTypeContextDescriptorFlags ( ) . class_getSuperclassReferenceKind ( ) ; <nl> + TypeReferenceKind getResilientSuperclassReferenceKind ( ) const { <nl> + return getTypeContextDescriptorFlags ( ) <nl> + . 
class_getResilientSuperclassReferenceKind ( ) ; <nl> } <nl> <nl> union { <nl> class TargetClassDescriptor final <nl> <nl> using TrailingGenericContextObjects : : numTrailingObjects ; <nl> <nl> + size_t numTrailingObjects ( OverloadToken < ResilientSuperclass > ) const { <nl> + return this - > hasResilientSuperclass ( ) ? 1 : 0 ; <nl> + } <nl> + <nl> size_t numTrailingObjects ( OverloadToken < ForeignMetadataInitialization > ) const { <nl> return this - > hasForeignMetadataInitialization ( ) ? 1 : 0 ; <nl> } <nl> class TargetClassDescriptor final <nl> } <nl> <nl> public : <nl> + const TargetRelativeDirectPointer < Runtime , const void , / * nullable * / true > & <nl> + getResilientSuperclass ( ) const { <nl> + assert ( this - > hasResilientSuperclass ( ) ) ; <nl> + return this - > template getTrailingObjects < ResilientSuperclass > ( ) - > Superclass ; <nl> + } <nl> + <nl> const ForeignMetadataInitialization & getForeignMetadataInitialization ( ) const { <nl> assert ( this - > hasForeignMetadataInitialization ( ) ) ; <nl> return * this - > template getTrailingObjects < ForeignMetadataInitialization > ( ) ; <nl> mmm a / include / swift / ABI / MetadataValues . h <nl> ppp b / include / swift / ABI / MetadataValues . h <nl> class TypeContextDescriptorFlags : public FlagSet < uint16_t > { <nl> <nl> / / Type - specific flags : <nl> <nl> - / / / The kind of reference that this class makes to its superclass <nl> + / / / The kind of reference that this class makes to its resilient superclass <nl> / / / descriptor . A TypeReferenceKind . <nl> / / / <nl> / / / Only meaningful for class descriptors . <nl> - Class_SuperclassReferenceKind = 9 , <nl> - Class_SuperclassReferenceKind_width = 3 , <nl> + Class_ResilientSuperclassReferenceKind = 9 , <nl> + Class_ResilientSuperclassReferenceKind_width = 3 , <nl> <nl> / / / Whether the immediate class members in this metadata are allocated <nl> / / / at negative offsets . For now , we don ' t use this . <nl> class TypeContextDescriptorFlags : public FlagSet < uint16_t > { <nl> class_areImmediateMembersNegative , <nl> class_setAreImmediateMembersNegative ) <nl> <nl> - FLAGSET_DEFINE_FIELD_ACCESSORS ( Class_SuperclassReferenceKind , <nl> - Class_SuperclassReferenceKind_width , <nl> + FLAGSET_DEFINE_FIELD_ACCESSORS ( Class_ResilientSuperclassReferenceKind , <nl> + Class_ResilientSuperclassReferenceKind_width , <nl> TypeReferenceKind , <nl> - class_getSuperclassReferenceKind , <nl> - class_setSuperclassReferenceKind ) <nl> + class_getResilientSuperclassReferenceKind , <nl> + class_setResilientSuperclassReferenceKind ) <nl> } ; <nl> <nl> / / / Flags for protocol context descriptors . These values are used as the <nl> mmm a / include / swift / Remote / MetadataReader . h <nl> ppp b / include / swift / Remote / MetadataReader . h <nl> class MetadataReader { <nl> Optional < ClassMetadataBounds > <nl> readMetadataBoundsOfSuperclass ( ContextDescriptorRef subclassRef ) { <nl> auto subclass = cast < TargetClassDescriptor < Runtime > > ( subclassRef ) ; <nl> + if ( ! subclass - > hasResilientSuperclass ( ) ) <nl> + return ClassMetadataBounds : : forSwiftRootClass ( ) ; <nl> <nl> auto rawSuperclass = <nl> - resolveNullableRelativeField ( subclassRef , subclass - > Superclass ) ; <nl> + resolveNullableRelativeField ( subclassRef , <nl> + subclass - > getResilientSuperclass ( ) ) ; <nl> if ( ! 
rawSuperclass ) { <nl> return ClassMetadataBounds : : forSwiftRootClass ( ) ; <nl> } <nl> <nl> return forTypeReference < ClassMetadataBounds > ( <nl> - subclass - > getSuperclassReferenceKind ( ) , * rawSuperclass , <nl> + subclass - > getResilientSuperclassReferenceKind ( ) , * rawSuperclass , <nl> [ & ] ( ContextDescriptorRef superclass ) <nl> - > Optional < ClassMetadataBounds > { <nl> if ( ! isa < TargetClassDescriptor < Runtime > > ( superclass ) ) <nl> mmm a / lib / IRGen / GenMeta . cpp <nl> ppp b / lib / IRGen / GenMeta . cpp <nl> namespace { <nl> asImpl ( ) . addReflectionFieldDescriptor ( ) ; <nl> asImpl ( ) . addLayoutInfo ( ) ; <nl> asImpl ( ) . addGenericSignature ( ) ; <nl> + asImpl ( ) . maybeAddResilientSuperclass ( ) ; <nl> asImpl ( ) . maybeAddMetadataInitialization ( ) ; <nl> } <nl> <nl> namespace { <nl> setCommonFlags ( flags ) ; <nl> return flags . getOpaqueValue ( ) ; <nl> } <nl> - <nl> + <nl> + void maybeAddResilientSuperclass ( ) { } <nl> + <nl> void addReflectionFieldDescriptor ( ) { <nl> / / Structs are reflectable unless we emit them with opaque reflection <nl> / / metadata . <nl> namespace { <nl> return flags . getOpaqueValue ( ) ; <nl> } <nl> <nl> + void maybeAddResilientSuperclass ( ) { } <nl> + <nl> void addReflectionFieldDescriptor ( ) { <nl> / / Some enum layout strategies ( viz . C compatible layout ) aren ' t <nl> / / supported by reflection . <nl> namespace { <nl> / / Non - null unless the type is foreign . <nl> ClassMetadataLayout * MetadataLayout = nullptr ; <nl> <nl> - Optional < TypeEntityReference > SuperClassRef ; <nl> + Optional < TypeEntityReference > ResilientSuperClassRef ; <nl> <nl> SILVTable * VTable ; <nl> bool Resilient ; <nl> namespace { <nl> <nl> MetadataLayout = & IGM . getClassMetadataLayout ( Type ) ; <nl> <nl> - if ( auto superclassDecl = getType ( ) - > getSuperclassDecl ( ) ) <nl> - SuperClassRef = IGM . getTypeEntityReference ( superclassDecl ) ; <nl> + if ( auto superclassDecl = getType ( ) - > getSuperclassDecl ( ) ) { <nl> + if ( MetadataLayout & & MetadataLayout - > hasResilientSuperclass ( ) ) <nl> + ResilientSuperClassRef = IGM . getTypeEntityReference ( superclassDecl ) ; <nl> + } <nl> <nl> addVTableEntries ( getType ( ) ) ; <nl> } <nl> namespace { <nl> flags . class_setHasResilientSuperclass ( true ) ; <nl> } <nl> <nl> - if ( SuperClassRef ) { <nl> - flags . class_setSuperclassReferenceKind ( SuperClassRef - > getKind ( ) ) ; <nl> + if ( ResilientSuperClassRef ) { <nl> + flags . class_setResilientSuperclassReferenceKind ( <nl> + ResilientSuperClassRef - > getKind ( ) ) ; <nl> } <nl> <nl> return flags . getOpaqueValue ( ) ; <nl> } <nl> <nl> + void maybeAddResilientSuperclass ( ) { <nl> + / / RelativeDirectPointer < const void , / * nullable * / true > SuperClass ; <nl> + if ( ResilientSuperClassRef ) { <nl> + B . addRelativeAddress ( ResilientSuperClassRef - > getValue ( ) ) ; <nl> + } <nl> + } <nl> + <nl> void addReflectionFieldDescriptor ( ) { <nl> / / Classes are always reflectable , unless reflection is disabled or this <nl> / / is a foreign class . <nl> namespace { <nl> void addLayoutInfo ( ) { <nl> auto properties = getType ( ) - > getStoredProperties ( ) ; <nl> <nl> - / / RelativeDirectPointer < const void , / * nullable * / true > SuperClass ; <nl> - if ( SuperClassRef ) { <nl> - B . addRelativeAddress ( SuperClassRef - > getValue ( ) ) ; <nl> - } else { <nl> - B . 
addInt32 ( 0 ) ; <nl> - } <nl> - <nl> / / union { <nl> / / uint32_t MetadataNegativeSizeInWords ; <nl> / / RelativeDirectPointer < StoredClassMetadataBounds > <nl> mmm a / stdlib / public / runtime / Metadata . cpp <nl> ppp b / stdlib / public / runtime / Metadata . cpp <nl> static ClassMetadataBounds computeMetadataBoundsFromSuperclass ( <nl> <nl> / / Compute the bounds for the superclass , extending it to the minimum <nl> / / bounds of a Swift class . <nl> - if ( const void * superRef = description - > Superclass . get ( ) ) { <nl> + if ( const void * superRef = description - > getResilientSuperclass ( ) ) { <nl> bounds = computeMetadataBoundsForSuperclass ( superRef , <nl> - description - > getSuperclassReferenceKind ( ) ) ; <nl> + description - > getResilientSuperclassReferenceKind ( ) ) ; <nl> } else { <nl> bounds = ClassMetadataBounds : : forSwiftRootClass ( ) ; <nl> } <nl> | [ ABI ] Only emit resilient superclass descriptor references in class metadata . | apple/swift | a4778e1c0c5eabf98fc85abc6dd93822e7fde3cb | 2018-10-04T03:05:28Z |
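The accessors renamed in the Swift commit above pack a 3-bit TypeReferenceKind at bit offset 9 of the 16-bit type context descriptor flags (Class_ResilientSuperclassReferenceKind = 9, width = 3). A minimal C++ sketch of that get/set encoding; the free functions below are illustrative only, not the FLAGSET_DEFINE_FIELD_ACCESSORS machinery from MetadataValues.h:

#include <cassert>
#include <cstdint>

// 3-bit field at bit offset 9 of a 16-bit flags word.
constexpr unsigned kKindShift = 9;
constexpr uint16_t kKindMask = 0x7u << kKindShift;

uint16_t setResilientSuperclassReferenceKind(uint16_t flags, unsigned kind) {
  assert(kind < 8 && "kind must fit in 3 bits");
  // Clear the old field, then OR in the new value at the right offset.
  return static_cast<uint16_t>((flags & ~kKindMask) | (kind << kKindShift));
}

unsigned getResilientSuperclassReferenceKind(uint16_t flags) {
  return (flags & kKindMask) >> kKindShift;
}

int main() {
  uint16_t flags = 0;
  flags = setResilientSuperclassReferenceKind(flags, 5);
  assert(getResilientSuperclassReferenceKind(flags) == 5);
  flags = setResilientSuperclassReferenceKind(flags, 2);  // overwrite in place
  assert(getResilientSuperclassReferenceKind(flags) == 2);
}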
mmm a / bson / bsonobj . h <nl> ppp b / bson / bsonobj . h <nl> namespace mongo { <nl> massert ( 10334 , s , 0 ) ; <nl> } <nl> } <nl> - / * <nl> - # pragma pack ( 1 ) <nl> - static struct EmptyObject { <nl> - EmptyObject ( ) { <nl> - len = 5 ; <nl> - jstype = EOO ; <nl> - } <nl> - int len ; <nl> - char jstype ; <nl> - } emptyObject ; <nl> - # pragma pack ( ) <nl> - * / <nl> } ; <nl> ostream & operator < < ( ostream & s , const BSONObj & o ) ; <nl> ostream & operator < < ( ostream & s , const BSONElement & e ) ; <nl> mmm a / bson / oid . h <nl> ppp b / bson / oid . h <nl> <nl> namespace mongo { <nl> <nl> # pragma pack ( 1 ) <nl> - <nl> / * * Object ID type . <nl> BSON objects typically have an _id field for the object id . This field should be the first <nl> member of the object when present . class OID is a special type that is a 12 byte id which <nl> | cruft / pack cleaning | mongodb/mongo | 0afdef1676fcc0e822540f0c1b8c701d4a27a670 | 2010-05-17T20:50:47Z |
mmm a / CHANGELOG . md <nl> ppp b / CHANGELOG . md <nl> <nl> # # ClickHouse release 19 . 7 . 6 . 1 , 2019 - 07 - 05 <nl> <nl> # # # Bug Fix <nl> - * Fix push require columns with join . [ # 5192 ] ( https : / / github . com / yandex / ClickHouse / pull / 5192 ) ( [ Winter Zhang ] ( https : / / github . com / zhang2014 ) ) <nl> + * Fix performance regression in some queries with JOIN . [ # 5192 ] ( https : / / github . com / yandex / ClickHouse / pull / 5192 ) ( [ Winter Zhang ] ( https : / / github . com / zhang2014 ) ) <nl> <nl> # # ClickHouse release 19 . 9 . 2 . 4 , 2019 - 06 - 24 <nl> <nl> | Update CHANGELOG . md | ClickHouse/ClickHouse | 6515445a05e081ca8f62a092ee4d1dc07e4b46c9 | 2019-07-08T19:25:38Z |
mmm a / test / cpp / qps / driver . cc <nl> ppp b / test / cpp / qps / driver . cc <nl> std : : unique_ptr < ScenarioResult > RunScenario ( <nl> stats . request_results ( i ) . count ( ) ; <nl> } <nl> result - > add_client_stats ( ) - > CopyFrom ( stats ) ; <nl> - / / That final status should be the last message on the client stream <nl> - GPR_ASSERT ( ! client - > stream - > Read ( & client_status ) ) ; <nl> } else { <nl> gpr_log ( GPR_ERROR , " Couldn ' t get final status from client % zu " , i ) ; <nl> } <nl> | remove the last invalid read , which cause hanging issue in driver | grpc/grpc | 1ead5c19057c9ea09d5134bba5a9cae42d14d555 | 2019-10-28T17:00:25Z |
mmm a / dbms / src / Functions / FunctionHelpers . cpp <nl> ppp b / dbms / src / Functions / FunctionHelpers . cpp <nl> Block createBlockWithNestedColumns ( const Block & block , ColumnNumbers args , size <nl> if ( col . type - > isNullable ( ) ) <nl> { <nl> bool is_const = col . column - > isConst ( ) ; <nl> - auto const_col = static_cast < const ColumnConst * > ( col . column . get ( ) ) ; <nl> + auto const_col = typeid_cast < const ColumnConst * > ( col . column . get ( ) ) ; <nl> <nl> if ( is_const & & ! const_col - > getDataColumn ( ) . isNullable ( ) ) <nl> throw Exception ( " Column at position " + toString ( i + 1 ) + " with type " + col . type - > getName ( ) + <nl> mmm a / dbms / src / Functions / IFunction . cpp <nl> ppp b / dbms / src / Functions / IFunction . cpp <nl> bool defaultImplementationForNulls ( <nl> const ColumnWithTypeAndName & source_col = temporary_block . getByPosition ( result ) ; <nl> ColumnWithTypeAndName & dest_col = block . getByPosition ( result ) ; <nl> <nl> - if ( source_col . column - > isConst ( ) ) <nl> - dest_col . column = source_col . column ; <nl> - else <nl> - { <nl> - / / / Initialize the result column . <nl> - ColumnPtr null_map = std : : make_shared < ColumnUInt8 > ( block . rows ( ) , 0 ) ; <nl> - dest_col . column = std : : make_shared < ColumnNullable > ( source_col . column , null_map ) ; <nl> + / / / Initialize the result column . <nl> + ColumnPtr null_map = std : : make_shared < ColumnUInt8 > ( block . rows ( ) , 0 ) ; <nl> + dest_col . column = std : : make_shared < ColumnNullable > ( source_col . column , null_map ) ; <nl> <nl> - / / / Deduce the null map of the result from the null maps of the nullable columns . <nl> - createNullMap ( block , args , result ) ; <nl> - } <nl> + / / / Deduce the null map of the result from the null maps of the nullable columns . <nl> + createNullMap ( block , args , result ) ; <nl> <nl> return true ; <nl> } <nl> mmm a / dbms / tests / queries / 0_stateless / 00503_cast_const_nullable . reference <nl> ppp b / dbms / tests / queries / 0_stateless / 00503_cast_const_nullable . reference <nl> <nl> 1 <nl> 1 <nl> + \ N <nl> mmm a / dbms / tests / queries / 0_stateless / 00503_cast_const_nullable . sql <nl> ppp b / dbms / tests / queries / 0_stateless / 00503_cast_const_nullable . sql <nl> <nl> SELECT CAST ( 1 AS Nullable ( UInt8 ) ) AS id WHERE id = CAST ( 1 AS Nullable ( UInt8 ) ) ; <nl> SELECT CAST ( 1 AS Nullable ( UInt8 ) ) AS id WHERE id = 1 ; <nl> + SELECT NULL = = CAST ( toUInt8 ( 0 ) AS Nullable ( UInt8 ) ) ; <nl> | Fixed segfault in compare operators with Nullable types . [ # CLICKHOUSE - 3 ] | ClickHouse/ClickHouse | cd5d1d500974f73c92de1fb190e6458d48262ad7 | 2017-10-27T20:03:12Z |
deleted file mode 100644 <nl> index c89a2101d3 . . 0000000000 <nl> mmm a / change / react - native - windows - 2020 - 04 - 06 - 19 - 51 - 02 - no - specialize . json <nl> ppp / dev / null <nl> <nl> - { <nl> - " type " : " prerelease " , <nl> - " comment " : " Clean up normalization " , <nl> - " packageName " : " react - native - windows " , <nl> - " email " : " ngerlem @ microsoft . com " , <nl> - " dependentChangeType " : " patch " , <nl> - " date " : " 2020 - 04 - 07T02 : 51 : 02 . 164Z " <nl> - } <nl> \ No newline at end of file <nl> mmm a / packages / E2ETest / package . json <nl> ppp b / packages / E2ETest / package . json <nl> <nl> " dependencies " : { <nl> " react " : " 16 . 9 . 0 " , <nl> " react - native " : " 0 . 61 . 5 " , <nl> - " react - native - windows " : " 0 . 0 . 0 - master . 16 " , <nl> + " react - native - windows " : " 0 . 0 . 0 - master . 17 " , <nl> " rnpm - plugin - windows " : " ^ 0 . 6 . 1 " <nl> } , <nl> " devDependencies " : { <nl> mmm a / packages / microsoft - reactnative - sampleapps / package . json <nl> ppp b / packages / microsoft - reactnative - sampleapps / package . json <nl> <nl> " dependencies " : { <nl> " react " : " 16 . 9 . 0 " , <nl> " react - native " : " 0 . 61 . 5 " , <nl> - " react - native - windows " : " 0 . 0 . 0 - master . 16 " , <nl> + " react - native - windows " : " 0 . 0 . 0 - master . 17 " , <nl> " rnpm - plugin - windows " : " ^ 0 . 6 . 1 " <nl> } , <nl> " devDependencies " : { <nl> mmm a / packages / playground / package . json <nl> ppp b / packages / playground / package . json <nl> <nl> " dependencies " : { <nl> " react " : " 16 . 9 . 0 " , <nl> " react - native " : " 0 . 61 . 5 " , <nl> - " react - native - windows " : " 0 . 0 . 0 - master . 16 " , <nl> + " react - native - windows " : " 0 . 0 . 0 - master . 17 " , <nl> " rnpm - plugin - windows " : " ^ 0 . 6 . 1 " <nl> } , <nl> " devDependencies " : { <nl> mmm a / vnext / CHANGELOG . json <nl> ppp b / vnext / CHANGELOG . json <nl> <nl> { <nl> " name " : " react - native - windows " , <nl> " entries " : [ <nl> + { <nl> + " date " : " Tue , 07 Apr 2020 06 : 29 : 14 GMT " , <nl> + " tag " : " react - native - windows_v0 . 0 . 0 - master . 17 " , <nl> + " version " : " 0 . 0 . 0 - master . 17 " , <nl> + " comments " : { <nl> + " prerelease " : [ <nl> + { <nl> + " comment " : " Clean up normalization " , <nl> + " author " : " ngerlem @ microsoft . com " , <nl> + " commit " : " 407c0834ada43cd9d42c24cb6ddfe7c91ddf960a " , <nl> + " package " : " react - native - windows " <nl> + } <nl> + ] <nl> + } <nl> + } , <nl> { <nl> " date " : " Tue , 07 Apr 2020 00 : 48 : 37 GMT " , <nl> " tag " : " react - native - windows_v0 . 0 . 0 - master . 16 " , <nl> mmm a / vnext / CHANGELOG . md <nl> ppp b / vnext / CHANGELOG . md <nl> <nl> # Change Log - react - native - windows <nl> <nl> - This log was last generated on Tue , 07 Apr 2020 00 : 48 : 37 GMT and should not be manually modified . <nl> + This log was last generated on Tue , 07 Apr 2020 06 : 29 : 14 GMT and should not be manually modified . <nl> <nl> < ! - - Start content - - > <nl> <nl> + # # 0 . 0 . 0 - master . 17 <nl> + <nl> + Tue , 07 Apr 2020 06 : 29 : 14 GMT <nl> + <nl> + # # # Changes <nl> + <nl> + - Clean up normalization ( ngerlem @ microsoft . com ) <nl> + <nl> # # 0 . 0 . 0 - master . 16 <nl> <nl> Tue , 07 Apr 2020 00 : 48 : 37 GMT <nl> mmm a / vnext / package . json <nl> ppp b / vnext / package . json <nl> <nl> { <nl> " name " : " react - native - windows " , <nl> - " version " : " 0 . 0 . 0 - master . 
16 " , <nl> + " version " : " 0 . 0 . 0 - master . 17 " , <nl> " license " : " MIT " , <nl> " repository " : { <nl> " type " : " git " , <nl> | applying package updates * * * NO_CI * * * | microsoft/react-native-windows | 6fb385c449753a303c14d34bc6e6730507510dda | 2020-04-07T06:29:15Z |
mmm a / fdbclient / DatabaseConfiguration . cpp <nl> ppp b / fdbclient / DatabaseConfiguration . cpp <nl> StatusObject DatabaseConfiguration : : toJSON ( bool noPolicies ) const { <nl> if ( ! noPolicies ) result [ " log_replication_policy " ] = tLogPolicy - > info ( ) ; <nl> } <nl> <nl> - if ( tLogVersion > TLogVersion : : DEFAULT ) { <nl> + if ( tLogVersion > TLogVersion : : DEFAULT | | isOverridden ( " log_version " ) ) { <nl> result [ " log_version " ] = ( int ) tLogVersion ; <nl> } <nl> <nl> StatusObject DatabaseConfiguration : : toJSON ( bool noPolicies ) const { <nl> result [ " regions " ] = getRegionJSON ( ) ; <nl> } <nl> <nl> - if ( desiredTLogCount ! = - 1 ) { <nl> + if ( desiredTLogCount ! = - 1 | | isOverridden ( " logs " ) ) { <nl> result [ " logs " ] = desiredTLogCount ; <nl> } <nl> - if ( masterProxyCount ! = - 1 ) { <nl> + if ( masterProxyCount ! = - 1 | | isOverridden ( " proxies " ) ) { <nl> result [ " proxies " ] = masterProxyCount ; <nl> } <nl> - if ( resolverCount ! = - 1 ) { <nl> + if ( resolverCount ! = - 1 | | isOverridden ( " resolvers " ) ) { <nl> result [ " resolvers " ] = resolverCount ; <nl> } <nl> - if ( desiredLogRouterCount ! = - 1 ) { <nl> + if ( desiredLogRouterCount ! = - 1 | | isOverridden ( " log_routers " ) ) { <nl> result [ " log_routers " ] = desiredLogRouterCount ; <nl> } <nl> - if ( remoteDesiredTLogCount ! = - 1 ) { <nl> + if ( remoteDesiredTLogCount ! = - 1 | | isOverridden ( " remote_logs " ) ) { <nl> result [ " remote_logs " ] = remoteDesiredTLogCount ; <nl> } <nl> - if ( repopulateRegionAntiQuorum ! = 0 ) { <nl> + if ( repopulateRegionAntiQuorum ! = 0 | | isOverridden ( " repopulate_anti_quorum " ) ) { <nl> result [ " repopulate_anti_quorum " ] = repopulateRegionAntiQuorum ; <nl> } <nl> - if ( autoMasterProxyCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_PROXIES ) { <nl> + if ( autoMasterProxyCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_PROXIES | | isOverridden ( " auto_proxies " ) ) { <nl> result [ " auto_proxies " ] = autoMasterProxyCount ; <nl> } <nl> - if ( autoResolverCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_RESOLVERS ) { <nl> + if ( autoResolverCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_RESOLVERS | | isOverridden ( " auto_resolvers " ) ) { <nl> result [ " auto_resolvers " ] = autoResolverCount ; <nl> } <nl> - if ( autoDesiredTLogCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_LOGS ) { <nl> + if ( autoDesiredTLogCount ! = CLIENT_KNOBS - > DEFAULT_AUTO_LOGS | | isOverridden ( " auto_logs " ) ) { <nl> result [ " auto_logs " ] = autoDesiredTLogCount ; <nl> } <nl> <nl> void DatabaseConfiguration : : makeConfigurationImmutable ( ) { <nl> rawConfiguration [ i + + ] = KeyValueRef ( rawConfiguration . arena ( ) , KeyValueRef ( r - > first , r - > second ) ) ; <nl> mutableConfiguration = Optional < std : : map < std : : string , std : : string > > ( ) ; <nl> } <nl> + <nl> + void DatabaseConfiguration : : fromKeyValues ( Standalone < VectorRef < KeyValueRef > > rawConfig ) { <nl> + resetInternal ( ) ; <nl> + this - > rawConfiguration = rawConfig ; <nl> + for ( auto c = rawConfiguration . begin ( ) ; c ! = rawConfiguration . end ( ) ; + + c ) { <nl> + setInternal ( c - > key , c - > value ) ; <nl> + } <nl> + setDefaultReplicationPolicy ( ) ; <nl> + } <nl> + <nl> + bool DatabaseConfiguration : : isOverridden ( const std : : string & key ) const { <nl> + for ( auto iter = rawConfiguration . begin ( ) ; iter ! = rawConfiguration . end ( ) ; + + iter ) { <nl> + auto confKey = iter - > key . removePrefix ( configKeysPrefix ) . 
toString ( ) ; <nl> + if ( key = = confKey ) { <nl> + return true ; <nl> + } <nl> + } <nl> + <nl> + if ( ! mutableConfiguration . present ( ) ) { <nl> + return false ; <nl> + } <nl> + <nl> + for ( auto iter = mutableConfiguration . get ( ) . begin ( ) ; iter ! = mutableConfiguration . get ( ) . end ( ) ; + + iter ) { <nl> + UNSTOPPABLE_ASSERT ( iter - > first . size ( ) > = configKeysPrefix . size ( ) & & <nl> + iter - > first . substr ( 0 , configKeysPrefix . size ( ) ) = = configKeysPrefix . toString ( ) ) ; <nl> + auto confKey = iter - > first . substr ( configKeysPrefix . size ( ) ) ; <nl> + if ( key = = confKey ) { <nl> + return true ; <nl> + } <nl> + } <nl> + return false ; <nl> + } <nl> mmm a / fdbclient / DatabaseConfiguration . h <nl> ppp b / fdbclient / DatabaseConfiguration . h <nl> struct DatabaseConfiguration { <nl> } <nl> } <nl> <nl> - void fromKeyValues ( Standalone < VectorRef < KeyValueRef > > rawConfig ) { <nl> - resetInternal ( ) ; <nl> - this - > rawConfiguration = rawConfig ; <nl> - for ( auto c = rawConfiguration . begin ( ) ; c ! = rawConfiguration . end ( ) ; + + c ) <nl> - setInternal ( c - > key , c - > value ) ; <nl> - setDefaultReplicationPolicy ( ) ; <nl> - } <nl> + void fromKeyValues ( Standalone < VectorRef < KeyValueRef > > rawConfig ) ; <nl> <nl> private : <nl> Optional < std : : map < std : : string , std : : string > > mutableConfiguration ; / / If present , rawConfiguration is not valid <nl> struct DatabaseConfiguration { <nl> bool setInternal ( KeyRef key , ValueRef value ) ; <nl> void resetInternal ( ) ; <nl> void setDefaultReplicationPolicy ( ) ; <nl> + <nl> + bool isOverridden ( const std : : string & key ) const ; <nl> } ; <nl> <nl> # endif <nl> | When a configuration K / V pair is modified , always output the K / V pair values | apple/foundationdb | 6b8843ad1878c3fa6b5fa824f7153913b86bed4a | 2020-08-09T06:24:30Z |
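The isOverridden() helper added above decides whether a configuration key was explicitly set by scanning the raw key/value pairs after stripping the config-key prefix, so an explicitly written default (e.g. logs set to -1) still appears in the JSON output. A minimal sketch of the same shape follows, under assumed names (kConfigPrefix and the pair vector are illustrative stand-ins, not FoundationDB's types):

// Sketch: a key counts as overridden if it appears under the config prefix
// in the raw configuration, regardless of whether its value equals a default.
#include <iostream>
#include <string>
#include <utility>
#include <vector>

static const std::string kConfigPrefix = "conf/";

bool isOverridden(const std::vector<std::pair<std::string, std::string>>& raw,
                  const std::string& key) {
    for (const auto& kv : raw) {
        // Only keys under the prefix count; compare the stripped suffix.
        if (kv.first.size() >= kConfigPrefix.size() &&
            kv.first.compare(0, kConfigPrefix.size(), kConfigPrefix) == 0 &&
            kv.first.substr(kConfigPrefix.size()) == key) {
            return true;
        }
    }
    return false;
}

int main() {
    std::vector<std::pair<std::string, std::string>> raw = {
        {"conf/logs", "-1"}};  // explicitly set to the default sentinel
    std::cout << isOverridden(raw, "logs") << " "       // 1: emit "logs" anyway
              << isOverridden(raw, "proxies") << "\n";  // 0: stays implicit
}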
mmm a / docs / tutorial / mac - app - store - submission - guide . md <nl> ppp b / docs / tutorial / mac - app - store - submission - guide . md <nl> more information . <nl> # # # Additional Entitlements <nl> <nl> Depending on which Electron APIs your app uses , you may need to add additional <nl> - entitlements to your ` parent . plist ` file to be able to use these certain APIs <nl> - from your app ' s Mac App Store build . <nl> + entitlements to your ` parent . plist ` file to be able to use these APIs from your <nl> + app ' s Mac App Store build . <nl> <nl> # # # # dialog . showOpenDialog <nl> <nl> | Drop the certain | electron/electron | bfde8b02bb0f003ba714675b320a592de662d0c5 | 2016-06-14T22:22:12Z |
mmm a / src / arm / codegen - arm . h <nl> ppp b / src / arm / codegen - arm . h <nl> class CodeGenerator : public AstVisitor { <nl> # endif <nl> <nl> static void SetFunctionInfo ( Handle < JSFunction > fun , <nl> - int length , <nl> - int function_token_position , <nl> - int start_position , <nl> - int end_position , <nl> - bool is_expression , <nl> + FunctionLiteral * lit , <nl> bool is_toplevel , <nl> - Handle < Script > script , <nl> - Handle < String > inferred_name ) ; <nl> + Handle < Script > script ) ; <nl> <nl> / / Accessors <nl> MacroAssembler * masm ( ) { return masm_ ; } <nl> mmm a / src / x64 / codegen - x64 . h <nl> ppp b / src / x64 / codegen - x64 . h <nl> class CodeGenerator : public AstVisitor { <nl> # endif <nl> <nl> static void SetFunctionInfo ( Handle < JSFunction > fun , <nl> - int length , <nl> - int function_token_position , <nl> - int start_position , <nl> - int end_position , <nl> - bool is_expression , <nl> + FunctionLiteral * lit , <nl> bool is_toplevel , <nl> - Handle < Script > script , <nl> - Handle < String > inferred_name ) ; <nl> + Handle < Script > script ) ; <nl> <nl> / / Accessors <nl> MacroAssembler * masm ( ) { return masm_ ; } <nl> | Refactor SetFunctionInfo to reduce long argument list take 2 . | v8/v8 | 409993be387060422bfa5dab74245737933ebf43 | 2009-08-14T11:35:10Z |
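The V8 change above is a classic introduce-parameter-object refactor: SetFunctionInfo now takes the FunctionLiteral that already owns the relevant fields instead of five positional arguments. A hedged sketch with stand-in types (not V8's real classes) shows why this removes the risk of swapping adjacent int arguments at call sites:

// Stand-in types for illustration only; the real FunctionLiteral and
// function-info plumbing live in V8's AST and codegen layers.
struct FunctionLiteral {
    int length;
    int function_token_position;
    int start_position;
    int end_position;
    bool is_expression;
};

struct FunctionInfo {
    int length = 0;
    int start = 0;
    int end = 0;
    bool is_expression = false;
};

// Before: SetFunctionInfo(info, length, token_pos, start, end, is_expr)
// could silently swap adjacent ints at a call site. After: one argument,
// one source of truth for every field.
void SetFunctionInfo(FunctionInfo& info, const FunctionLiteral& lit) {
    info.length = lit.length;
    info.start = lit.start_position;
    info.end = lit.end_position;
    info.is_expression = lit.is_expression;
}

int main() {
    FunctionLiteral lit{3, 9, 10, 42, false};
    FunctionInfo info;
    SetFunctionInfo(info, lit);
    return info.start == 10 ? 0 : 1;
}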
mmm a / . bazelrc <nl> ppp b / . bazelrc <nl> build : monolithic - - define framework_shared_object = false <nl> # opts in to modular op registration support by default . <nl> build - - define framework_shared_object = true <nl> <nl> - # As part of Tensorflow ' s build refactoring , https : / / github . com / tensorflow / community / pull / 179 , <nl> - # we plan on migrating TF to use bazel ' s cc_shared_library . This requires always setting <nl> - # the flag " - - experimental_cc_shared_library " on all builds : https : / / github . com / bazelbuild / rules_cc / blob / 7e650b11fe6d49f70f2ca7a1c4cb8bcc4a1fe239 / examples / experimental_cc_shared_library . bzl # L3 - L5 <nl> - build - - experimental_cc_shared_library <nl> - <nl> # Flags for open source build , always set to be true . <nl> build - - define open_source_build = true <nl> test - - define open_source_build = true <nl> mmm a / tensorflow / BUILD <nl> ppp b / tensorflow / BUILD <nl> bzl_library ( <nl> visibility = [ " / / visibility : public " ] , <nl> deps = [ <nl> " / / tensorflow / core / platform : build_config_root_bzl " , <nl> - " / / tensorflow / core / platform : rules_cc_bzl " , <nl> " / / tensorflow / core / platform / default : cuda_build_defs_bzl " , <nl> " / / third_party / mkl : build_defs_bzl " , <nl> " / / third_party / mkl_dnn : build_defs_bzl " , <nl> mmm a / tensorflow / core / BUILD <nl> ppp b / tensorflow / core / BUILD <nl> load ( <nl> " tf_protos_profiler_impl " , <nl> " tf_pyclif_proto_library " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> load ( <nl> " / / tensorflow / core / platform : build_config_root . bzl " , <nl> " if_dynamic_kernels " , <nl> mmm a / tensorflow / core / framework / BUILD <nl> ppp b / tensorflow / core / framework / BUILD <nl> load ( <nl> " / / tensorflow / core / platform : build_config_root . bzl " , <nl> " if_static " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ <nl> mmm a / tensorflow / core / lib / bfloat16 / BUILD <nl> ppp b / tensorflow / core / lib / bfloat16 / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> " / / tensorflow : __subpackages__ " , <nl> mmm a / tensorflow / core / lib / core / BUILD <nl> ppp b / tensorflow / core / lib / core / BUILD <nl> <nl> load ( " / / tensorflow / core / platform : build_config . bzl " , " tf_proto_library " ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ <nl> mmm a / tensorflow / core / lib / db / BUILD <nl> ppp b / tensorflow / core / lib / db / BUILD <nl> <nl> # Libraries for storing tensors in SQL databases . <nl> <nl> load ( " / / tensorflow : tensorflow . bzl " , " tf_cc_test " , " tf_copts " ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ " / / tensorflow : internal " ] , <nl> mmm a / tensorflow / core / lib / gtl / BUILD <nl> ppp b / tensorflow / core / lib / gtl / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . 
bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / lib / hash / BUILD <nl> ppp b / tensorflow / core / lib / hash / BUILD <nl> load ( <nl> " if_linux_x86_64 " , <nl> " tf_copts " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ <nl> mmm a / tensorflow / core / lib / histogram / BUILD <nl> ppp b / tensorflow / core / lib / histogram / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / lib / io / BUILD <nl> ppp b / tensorflow / core / lib / io / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> " / / tensorflow / c / experimental / filesystem : __pkg__ " , <nl> mmm a / tensorflow / core / lib / math / BUILD <nl> ppp b / tensorflow / core / lib / math / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> " / / tensorflow : __subpackages__ " , <nl> mmm a / tensorflow / core / lib / monitoring / BUILD <nl> ppp b / tensorflow / core / lib / monitoring / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / lib / png / BUILD <nl> ppp b / tensorflow / core / lib / png / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / lib / random / BUILD <nl> ppp b / tensorflow / core / lib / random / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / lib / strings / BUILD <nl> ppp b / tensorflow / core / lib / strings / BUILD <nl> <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> - <nl> package ( <nl> default_visibility = [ <nl> # tensorflow / core : lib effectively exposes all targets under tensorflow / core / lib / * * <nl> mmm a / tensorflow / core / platform / BUILD <nl> ppp b / tensorflow / core / platform / BUILD <nl> load ( <nl> " tf_protobuf_deps " , <nl> " tf_windows_aware_platform_deps " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_binary " , <nl> - " cc_library " , <nl> - ) <nl> load ( <nl> " / / tensorflow : tensorflow . bzl " , <nl> " if_not_android " , <nl> bzl_library ( <nl> name = " build_config_root_bzl " , <nl> srcs = [ <nl> " build_config_root . 
bzl " , <nl> - ] + tf_platform_alias ( " build_config_root . bzl " ) , <nl> - ) <nl> - <nl> - bzl_library ( <nl> - name = " rules_cc_bzl " , <nl> - srcs = [ <nl> - " rules_cc . bzl " , <nl> - ] + tf_platform_alias ( " rules_cc . bzl " ) , <nl> + " / / tensorflow / core / platform / default : build_config_root . bzl " , <nl> + ] , <nl> ) <nl> mmm a / tensorflow / core / platform / default / BUILD <nl> ppp b / tensorflow / core / platform / default / BUILD <nl> <nl> # Tensorflow default + linux implementations of tensorflow / core / platform libraries . <nl> load ( " @ bazel_skylib / / : bzl_library . bzl " , " bzl_library " ) <nl> load ( " / / tensorflow : tensorflow . bzl " , " tf_copts " ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ <nl> mmm a / tensorflow / core / platform / windows / BUILD <nl> ppp b / tensorflow / core / platform / windows / BUILD <nl> load ( <nl> " if_windows " , <nl> " tf_copts " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> <nl> package ( <nl> default_visibility = [ <nl> mmm a / tensorflow / core / util / BUILD <nl> ppp b / tensorflow / core / util / BUILD <nl> load ( <nl> " tf_kernel_tests_linkstatic " , <nl> " tf_proto_library " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_library " , <nl> - ) <nl> load ( <nl> " / / tensorflow : tensorflow . bzl " , <nl> " tf_cc_test " , <nl> mmm a / tensorflow / opensource_only . files <nl> ppp b / tensorflow / opensource_only . files <nl> tensorflow / third_party / pybind11 . BUILD <nl> tensorflow / third_party / python_runtime / BUILD <nl> tensorflow / third_party / repo . bzl <nl> tensorflow / third_party / rules_closure . patch <nl> - tensorflow / third_party / rules_swift . patch <nl> tensorflow / third_party / six . BUILD <nl> tensorflow / third_party / snappy . BUILD <nl> tensorflow / third_party / sqlite . BUILD <nl> mmm a / tensorflow / tensorflow . bzl <nl> ppp b / tensorflow / tensorflow . bzl <nl> load ( <nl> " tf_gpu_tests_tags " , <nl> " tf_sycl_tests_tags " , <nl> ) <nl> - load ( <nl> - " / / tensorflow / core / platform : rules_cc . bzl " , <nl> - " cc_binary " , <nl> - " cc_library " , <nl> - " cc_test " , <nl> - ) <nl> load ( <nl> " @ local_config_tensorrt / / : build_defs . bzl " , <nl> " if_tensorrt " , <nl> def tf_android_core_proto_headers ( core_proto_sources_relative ) : <nl> # Wrapper for portable protos which currently just creates an empty rule . <nl> def tf_portable_proto_library ( name , proto_deps , deps = [ ] , * * kwargs ) : <nl> _ignore = [ kwargs ] <nl> - cc_library ( name = name , deps = deps + [ dep + " _cc " for dep in proto_deps ] ) <nl> + native . cc_library ( name = name , deps = deps + [ dep + " _cc " for dep in proto_deps ] ) <nl> <nl> # Sanitize a dependency so that it works correctly from code that includes <nl> # TensorFlow as a submodule . <nl> def tf_gen_op_libs ( op_lib_names , deps = None , is_external = True ) : <nl> if not deps : <nl> deps = [ ] <nl> for n in op_lib_names : <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = n + " _op_lib " , <nl> copts = tf_copts ( is_external = is_external ) , <nl> srcs = [ " ops / " + n + " . cc " ] , <nl> def tf_cc_shared_object ( <nl> if framework_so ! = [ ] : <nl> data_extra = tf_binary_additional_data_deps ( ) <nl> <nl> - cc_binary ( <nl> + native . 
cc_binary ( <nl> name = name_os_full , <nl> srcs = srcs + framework_so , <nl> deps = deps , <nl> def tf_cc_binary ( <nl> else : <nl> names = [ name ] <nl> for name_os in names : <nl> - cc_binary ( <nl> + native . cc_binary ( <nl> name = name_os , <nl> copts = copts , <nl> srcs = srcs + tf_binary_additional_srcs ( ) , <nl> def tf_native_cc_binary ( <nl> copts = tf_copts ( ) , <nl> linkopts = [ ] , <nl> * * kwargs ) : <nl> - cc_binary ( <nl> + native . cc_binary ( <nl> name = name , <nl> copts = copts , <nl> linkopts = select ( { <nl> def tf_gen_op_wrappers_cc ( <nl> internalsrcs + = [ " ops / " + n + " _internal . cc " ] <nl> internalhdrs + = [ " ops / " + n + " _internal . h " ] <nl> <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = name , <nl> srcs = subsrcs , <nl> hdrs = subhdrs , <nl> def tf_gen_op_wrappers_cc ( <nl> alwayslink = 1 , <nl> visibility = visibility , <nl> ) <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = name + " _internal " , <nl> srcs = internalsrcs , <nl> hdrs = internalhdrs , <nl> def tf_cc_test ( <nl> linkopts = [ ] , <nl> kernels = [ ] , <nl> * * kwargs ) : <nl> - cc_test ( <nl> + native . cc_test ( <nl> name = " % s % s " % ( name , suffix ) , <nl> srcs = srcs + tf_binary_additional_srcs ( ) , <nl> copts = tf_copts ( ) + extra_copts , <nl> def tf_gpu_only_cc_test ( <nl> deps = deps , <nl> testonly = 1 , <nl> ) <nl> - cc_test ( <nl> + native . cc_test ( <nl> name = " % s % s " % ( name , " _gpu " ) , <nl> size = size , <nl> args = args , <nl> def tf_cc_test_mkl ( <nl> disable_header_modules = [ " - use_header_modules " ] <nl> <nl> for src in srcs : <nl> - cc_test ( <nl> + native . cc_test ( <nl> name = src_to_test_name ( src ) , <nl> srcs = if_mkl ( [ src ] ) + tf_binary_additional_srcs ( ) , <nl> copts = tf_copts ( allow_exceptions = True ) + tf_openmp_copts ( ) , <nl> def tf_gpu_library ( deps = None , cuda_deps = None , copts = tf_copts ( ) , * * kwargs ) : <nl> cuda_deps = [ ] <nl> <nl> kwargs [ " features " ] = kwargs . get ( " features " , [ ] ) + [ " - use_header_modules " ] <nl> - cc_library ( <nl> + native . cc_library ( <nl> deps = deps + if_cuda_is_configured_compat ( cuda_deps + [ <nl> clean_dep ( " / / tensorflow / stream_executor / cuda : cudart_stub " ) , <nl> " @ local_config_cuda / / cuda : cuda_headers " , <nl> def tf_mkl_kernel_library ( <nl> # - fno - exceptions in nocopts breaks compilation if header modules are enabled . <nl> disable_header_modules = [ " - use_header_modules " ] <nl> <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = name , <nl> srcs = if_mkl ( srcs ) , <nl> hdrs = hdrs , <nl> def transitive_hdrs ( name , deps = [ ] , * * kwargs ) : <nl> # the libraries in deps . <nl> def cc_header_only_library ( name , deps = [ ] , includes = [ ] , extra_deps = [ ] , * * kwargs ) : <nl> _transitive_hdrs ( name = name + " _gather " , deps = deps ) <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = name , <nl> hdrs = [ " : " + name + " _gather " ] , <nl> includes = includes , <nl> def tf_generate_proto_text_sources ( name , srcs_relative_dir , srcs , protodeps = [ ] <nl> visibility = visibility , <nl> ) <nl> <nl> - cc_library ( <nl> + native . cc_library ( <nl> name = name , <nl> srcs = out_srcs , <nl> hdrs = out_hdrs , <nl> def cc_library_with_android_deps ( <nl> copts = tf_copts ( ) , <nl> * * kwargs ) : <nl> deps = if_not_android ( deps ) + if_android ( android_deps ) + common_deps <nl> - cc_library ( deps = deps , copts = copts , * * kwargs ) <nl> + native . 
cc_library ( deps = deps , copts = copts , * * kwargs ) <nl> <nl> register_extension_info ( <nl> extension_name = " cc_library_with_android_deps " , <nl> def pybind_extension ( <nl> visibility = [ " / / visibility : private " ] , <nl> testonly = testonly , <nl> ) <nl> - cc_binary ( <nl> + native . cc_binary ( <nl> name = so_file , <nl> srcs = srcs + hdrs , <nl> data = data , <nl> mmm a / tensorflow / workspace . bzl <nl> ppp b / tensorflow / workspace . bzl <nl> def tf_repositories ( path_prefix = " " , tf_repo_name = " " ) : <nl> # https : / / github . com / bazelbuild / rules_swift / releases <nl> tf_http_archive ( <nl> name = " build_bazel_rules_swift " , <nl> - patch_file = clean_dep ( " / / third_party : rules_swift . patch " ) , <nl> sha256 = " 18cd4df4e410b0439a4935f9ca035bd979993d42372ba79e7f2d4fafe9596ef0 " , <nl> urls = [ <nl> " http : / / mirror . tensorflow . org / github . com / bazelbuild / rules_swift / releases / download / 0 . 12 . 1 / rules_swift . 0 . 12 . 1 . tar . gz " , <nl> deleted file mode 100644 <nl> index 5e4e24b40cea3 . . 0000000000000 <nl> mmm a / third_party / rules_swift . patch <nl> ppp / dev / null <nl> <nl> - From 4c1a4d676d1633ff9f67bda3540d24ea5fa31c8f Mon Sep 17 00 : 00 : 00 2001 <nl> - From : Brian Zhao < bmzhao @ google . com > <nl> - Date : Tue , 14 Jan 2020 18 : 23 : 34 - 0800 <nl> - Subject : [ PATCH ] Adding linker_inputs flag to create_linking_context , in <nl> - preparation for bazel ' s cc_shared_library rule . Note that this cannot be <nl> - enabled as of now unless - - experimental_cc_shared_library is passed to bazel . <nl> - <nl> mmm - <nl> - swift / internal / utils . bzl | 1 + <nl> - 1 file changed , 1 insertion ( + ) <nl> - <nl> mmmmmm a / swift / internal / utils . bzl <nl> - ppp b / swift / internal / utils . bzl <nl> - def create_cc_info ( <nl> - <nl> - this_cc_info = CcInfo ( <nl> - linking_context = cc_common . create_linking_context ( <nl> - + linker_inputs = None , <nl> - additional_inputs = all_additional_inputs , <nl> - libraries_to_link = libraries_to_link , <nl> - user_link_flags = all_user_link_flags , <nl> mmm <nl> - 2 . 25 . 0 . rc1 . 283 . g88dfdc4193 - goog <nl> | Rolling forward the addition of build flag - - experimental_cc_shared_library to tf / . bazelrc after patching the iOS build failure . This basically is https : / / github . com / tensorflow / tensorflow / commit / e635ec06c606213c01ae6ea9476f9fc8aa6af499 with an additional patch to rules_swift . This change is part of the build refactoring described in https : / / github . com / tensorflow / community / pull / 179 | tensorflow/tensorflow | b7f05ca3e470bbc6f33b4c34c2e6d1609704981b | 2020-01-15T20:10:33Z |
mmm a / src / runtime / vm / translator / hopt / codegen . cpp <nl> ppp b / src / runtime / vm / translator / hopt / codegen . cpp <nl> Address CodeGenerator : : cgAllocActRec6 ( SSATmp * dst , <nl> / / actRec - > m_this <nl> if ( objOrCls - > getType ( ) = = Type : : ClassRef ) { <nl> / / store class <nl> - m_as . store_imm64_disp_reg64 ( uintptr_t ( objOrCls - > getConstValAsClass ( ) ) | 1 , <nl> - actRecAdjustment + AROFF ( m_this ) , <nl> - spReg ) ; <nl> + if ( objOrCls - > isConst ( ) ) { <nl> + m_as . store_imm64_disp_reg64 ( uintptr_t ( objOrCls - > getConstValAsClass ( ) ) | 1 , <nl> + actRecAdjustment + AROFF ( m_this ) , <nl> + spReg ) ; <nl> + } else { <nl> + Reg64 clsPtrReg = objOrCls - > getReg ( ) ; <nl> + m_as . movq ( clsPtrReg , rScratch ) ; <nl> + m_as . orq ( 1 , rScratch ) ; <nl> + m_as . storeq ( rScratch , spReg [ actRecAdjustment + AROFF ( m_this ) ] ) ; <nl> + } <nl> } else if ( objOrCls - > getType ( ) = = Type : : Obj ) { <nl> / / store this pointer <nl> m_as . store_reg64_disp_reg64 ( objOrCls - > getReg ( ) , <nl> static const char * getContextName ( ) { <nl> } <nl> <nl> Address CodeGenerator : : cgLdClsMethod ( IRInstruction * inst ) { <nl> - if ( inst - > getNumSrcs ( ) < 3 ) { <nl> + Address start = m_as . code . frontier ; <nl> + SSATmp * dst = inst - > getDst ( ) ; <nl> + SSATmp * cls = inst - > getSrc ( 0 ) ; <nl> + SSATmp * mSlot = inst - > getSrc ( 1 ) ; <nl> + <nl> + ASSERT ( cls - > getType ( ) = = Type : : ClassRef ) ; <nl> + ASSERT ( mSlot - > isConst ( ) & & mSlot - > getType ( ) = = Type : : Int ) ; <nl> + uint64 mSlotInt64 = mSlot - > getConstValAsRawInt ( ) ; <nl> + / / We ' re going to multiply mSlotVal by sizeof ( Func * ) and use <nl> + / / it as a 32 - bit offset ( methOff ) below . <nl> + if ( mSlotInt64 > ( std : : numeric_limits < uint32_t > : : max ( ) / sizeof ( Func * ) ) ) { <nl> + CG_PUNT ( cgLdClsMethod_large_offset ) ; <nl> + } <nl> + int32 mSlotVal = ( uint32 ) mSlotInt64 ; <nl> + <nl> + Reg64 dstReg = dst - > getReg ( ) ; <nl> + ASSERT ( dstReg ! = InvalidReg ) ; <nl> + <nl> + Reg64 clsReg = cls - > getReg ( ) ; <nl> + if ( clsReg = = InvalidReg ) { <nl> CG_PUNT ( LdClsMethod ) ; <nl> } <nl> + <nl> + Offset vecOff = Class : : getMethodsOffset ( ) + Class : : MethodMap : : vecOff ( ) ; <nl> + int32 methOff = mSlotVal * sizeof ( Func * ) ; <nl> + m_as . loadq ( clsReg [ vecOff ] , dstReg ) ; <nl> + m_as . loadq ( dstReg [ methOff ] , dstReg ) ; <nl> + <nl> + return start ; <nl> + } <nl> + <nl> + Address CodeGenerator : : cgLdClsMethodCache ( IRInstruction * inst ) { <nl> + if ( inst - > getNumSrcs ( ) < 3 ) { <nl> + CG_PUNT ( LdClsMethodCache ) ; <nl> + } <nl> Address start = m_as . code . frontier ; <nl> SSATmp * dst = inst - > getDst ( ) ; <nl> SSATmp * className = inst - > getSrc ( 0 ) ; <nl> mmm a / src / runtime / vm / translator / hopt / hhbctranslator . cpp <nl> ppp b / src / runtime / vm / translator / hopt / hhbctranslator . cpp <nl> void HhbcTranslator : : emitFPushObjMethodD ( int32 numParams , <nl> methodName - > data ( ) , <nl> numParams ) ; <nl> bool magicCall = false ; <nl> + SSATmp * funcTmp = NULL ; <nl> const Func * func = HPHP : : VM : : Transl : : lookupImmutableMethod ( baseClass , <nl> methodName , <nl> magicCall , <nl> / * staticLookup : * / <nl> false ) ; <nl> SSATmp * objOrCls = popC ( ) ; <nl> - if ( func ) { <nl> + <nl> + if ( ! func ) { <nl> + if ( baseClass & & ! 
( baseClass - > attrs ( ) & AttrInterface ) ) { <nl> + MethodLookup : : LookupResult res = <nl> + g_vmContext - > lookupObjMethod ( func , baseClass , methodName , false ) ; <nl> + if ( ( res = = MethodLookup : : MethodFoundWithThis | | <nl> + res = = MethodLookup : : MethodFoundNoThis ) & & <nl> + ! func - > isAbstract ( ) ) { <nl> + / * <nl> + * If we found the func in baseClass , then either : <nl> + * a ) its private , and this is always going to be the <nl> + * called function . This case is handled further down . <nl> + * OR <nl> + * b ) any derived class must have a func that matches in staticness <nl> + * and is at least as accessible ( and in particular , you can ' t <nl> + * override a public / protected method with a private method ) . <nl> + * In this case , we emit code to dynamically lookup the method <nl> + * given the Object and the method slot , which is the same as func ' s . <nl> + * / <nl> + if ( ! ( func - > attrs ( ) & AttrPrivate ) ) { <nl> + SSATmp * clsTmp = m_tb . genLdObjClass ( objOrCls ) ; <nl> + funcTmp = m_tb . genLdClsMethod ( clsTmp , func - > methodSlot ( ) ) ; <nl> + if ( res = = MethodLookup : : MethodFoundNoThis ) { <nl> + m_tb . genDecRef ( objOrCls ) ; <nl> + objOrCls = clsTmp ; <nl> + } <nl> + } <nl> + } else { <nl> + func = NULL ; / / force lookup <nl> + } <nl> + } <nl> + } <nl> + <nl> + if ( func ! = NULL & & funcTmp = = NULL ) { <nl> if ( func - > attrs ( ) & AttrStatic ) { <nl> ASSERT ( baseClass ) ; / / This assert may be too strong , but be aggressive <nl> / / static function : store base class into this slot instead of obj <nl> void HhbcTranslator : : emitFPushObjMethodD ( int32 numParams , <nl> } <nl> } <nl> spillStack ( ) ; <nl> - SSATmp * actRec = m_tb . genAllocActRec ( func , <nl> - objOrCls , <nl> - numParams , <nl> - ( func & & magicCall ? methodName : NULL ) ) ; <nl> + SSATmp * actRec = NULL ; <nl> + if ( funcTmp ) { <nl> + actRec = m_tb . genAllocActRec ( funcTmp , <nl> + objOrCls , <nl> + numParams ) ; <nl> + } else { <nl> + actRec = m_tb . genAllocActRec ( func , <nl> + objOrCls , <nl> + numParams , <nl> + ( func & & magicCall ? methodName : NULL ) ) ; <nl> + } <nl> + <nl> if ( ! func ) { <nl> / / lookup the function <nl> SSATmp * meth = m_tb . genLdObjMethod ( methodName , actRec ) ; <nl> void HhbcTranslator : : emitFPushClsMethodD ( int32 numParams , <nl> / / lookup static method & class in the target cache <nl> Trace * exitTrace = getExitSlowTrace ( ) ; <nl> const StringData * className = np . first ; <nl> - SSATmp * funcClassTmp = m_tb . genLdClsMethod ( <nl> + SSATmp * funcClassTmp = m_tb . genLdClsMethodCache ( <nl> m_tb . genDefConst < const StringData * > ( className ) , <nl> m_tb . genDefConst < const StringData * > ( methodName ) , <nl> / * TODO : NamedEntity * * / m_tb . genDefConst < int64 > ( ( uintptr_t ) np . second ) , <nl> mmm a / src / runtime / vm / translator / hopt / ir . h <nl> ppp b / src / runtime / vm / translator / hopt / ir . 
h <nl> static const TCA kIRDirectGuardActive = ( TCA ) 0x03 ; <nl> OPC ( LdCls , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 1 , 1 , 1 ) \ <nl> / * XXX cg doesn ' t support the version without a label * / \ <nl> OPC ( LdClsCns , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ) \ <nl> - OPC ( LdClsMethod , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 1 , 1 ) \ <nl> + OPC ( LdClsMethodCache , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 1 , 1 ) \ <nl> + OPC ( LdClsMethod , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ) \ <nl> / * XXX TODO Create version of LdClsPropAddr that doesn ' t check * / \ <nl> OPC ( LdPropAddr , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ) \ <nl> OPC ( LdClsPropAddr , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 1 , 1 ) \ <nl> mmm a / src / runtime / vm / translator / hopt / simplifier . cpp <nl> ppp b / src / runtime / vm / translator / hopt / simplifier . cpp <nl> SSATmp * Simplifier : : simplifyInst ( Opcode opc , <nl> extendedSrcs [ 1 ] ) ; <nl> } <nl> return NULL ; <nl> - case LdClsMethod : / / simplify : fall - through and return NULL <nl> + case LdClsMethodCache : / / simplify : fall - through and return NULL <nl> + case LdClsMethod : <nl> case Call : <nl> case SpillStack : <nl> case SpillStackAllocAR : <nl> mmm a / src / runtime / vm / translator / hopt / tracebuilder . cpp <nl> ppp b / src / runtime / vm / translator / hopt / tracebuilder . cpp <nl> SSATmp * TraceBuilder : : genLdFixedFunc ( const StringData * funcName , <nl> actRec ) ; <nl> } <nl> <nl> + SSATmp * TraceBuilder : : genLdClsMethod ( SSATmp * cls , uint32 methodSlot ) { <nl> + return genInstruction ( LdClsMethod , Type : : FuncRef , cls , <nl> + genDefConst < int64 > ( methodSlot ) ) ; <nl> + } <nl> <nl> - SSATmp * TraceBuilder : : genLdClsMethod ( SSATmp * methodName , SSATmp * classRef ) { <nl> - return genInstruction ( LdClsMethod , Type : : FuncClassRef , methodName , classRef ) ; <nl> + SSATmp * TraceBuilder : : genLdClsMethodCache ( SSATmp * methodName , <nl> + SSATmp * classRef ) { <nl> + return genInstruction ( LdClsMethodCache , Type : : FuncClassRef , <nl> + methodName , classRef ) ; <nl> } <nl> <nl> - SSATmp * TraceBuilder : : genLdClsMethod ( SSATmp * className , <nl> - SSATmp * methodName , <nl> - SSATmp * baseClass , <nl> - Trace * exit ) { <nl> + SSATmp * TraceBuilder : : genLdClsMethodCache ( SSATmp * className , <nl> + SSATmp * methodName , <nl> + SSATmp * baseClass , <nl> + Trace * exit ) { <nl> ExtendedInstruction inst ( m_irFactory , <nl> - LdClsMethod , <nl> + LdClsMethodCache , <nl> Type : : FuncClassRef , <nl> className , <nl> methodName , <nl> SSATmp * TraceBuilder : : genLdObjMethod ( const StringData * methodName , <nl> genDefConst < const StringData * > ( methodName ) , <nl> actRec ) ; <nl> } <nl> + <nl> SSATmp * TraceBuilder : : genQueryOp ( Opcode queryOpc , SSATmp * addr ) { <nl> ASSERT ( isQueryOp ( queryOpc ) ) ; <nl> return genInstruction ( queryOpc , Type : : Bool , addr ) ; <nl> mmm a / src / runtime / vm / translator / hopt / tracebuilder . h <nl> ppp b / src / runtime / vm / translator / hopt / tracebuilder . 
h <nl> class TraceBuilder { <nl> SSATmp * genConvToObj ( SSATmp * src ) ; <nl> SSATmp * genLdPropAddr ( SSATmp * obj , SSATmp * prop ) ; <nl> SSATmp * genLdClsPropAddr ( SSATmp * cls , SSATmp * clsName , SSATmp * propName ) ; <nl> - SSATmp * genLdClsMethod ( SSATmp * methodName , SSATmp * classOpnd ) ; <nl> - SSATmp * genLdClsMethod ( SSATmp * className , <nl> - SSATmp * methodName , <nl> - SSATmp * baseClass , <nl> - Trace * slowPathExit ) ; <nl> + SSATmp * genLdClsMethod ( SSATmp * cls , uint32 methodSlot ) ; <nl> + SSATmp * genLdClsMethodCache ( SSATmp * methodName , SSATmp * classOpnd ) ; <nl> + SSATmp * genLdClsMethodCache ( SSATmp * className , <nl> + SSATmp * methodName , <nl> + SSATmp * baseClass , <nl> + Trace * slowPathExit ) ; <nl> SSATmp * genLdObjMethod ( const StringData * methodName , SSATmp * obj ) ; <nl> SSATmp * genLdObjClass ( SSATmp * obj ) ; <nl> SSATmp * genLdFunc ( SSATmp * funcName , SSATmp * actRec ) ; <nl> | Add support to FPushObjMethodD for fast method lookup using known class | facebook/hhvm | f6bdf891c58ddbaab99a106c0958c5f24e665d71 | 2012-12-13T19:07:04Z |
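The HHVM change above relies on method slots: a non-private method found in a known base class occupies the same slot in every derived class's method table, so genLdClsMethod/cgLdClsMethod can fetch the callee with two loads (the class's method vector, then a constant offset) instead of a name-based lookup. A minimal sketch of slot dispatch with illustrative types (not HHVM's real Class/Func):

// Sketch of slot-based dispatch: overriding methods keep the base method's
// slot, so indexing the per-class method vector by a compile-time-known slot
// always yields the correct override.
#include <iostream>
#include <vector>

using Func = const char*;  // stand-in for a function pointer/descriptor

struct Class {
    std::vector<Func> methods;  // indexed by method slot
};

// Equivalent of cgLdClsMethod: load the methods vector, then index by slot.
Func ldClsMethod(const Class* cls, size_t slot) {
    return cls->methods[slot];
}

int main() {
    // Base declares foo at slot 0; Derived overrides it at the same slot.
    Class base{{"Base::foo"}};
    Class derived{{"Derived::foo"}};
    std::cout << ldClsMethod(&base, 0) << "\n"      // Base::foo
              << ldClsMethod(&derived, 0) << "\n";  // Derived::foo
}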
deleted file mode 100644 <nl> index 426c8739ae15 . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater1 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! _isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . / jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : oldVersion } , <nl> - n3 : { binVersion : oldVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : newVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . 
members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . 
eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> deleted file mode 100644 <nl> index bb4f240e2c70 . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater2 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! _isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . 
/ jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : newVersion } , <nl> - n3 : { binVersion : oldVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : oldVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . 
bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . 
runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> deleted file mode 100644 <nl> index 1022cd43886b . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater3 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! _isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . / jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : oldVersion } , <nl> - n3 : { binVersion : newVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : oldVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . 
rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . 
runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . 
err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> deleted file mode 100644 <nl> index b2d6c16ea420 . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater4 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! _isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . / jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : newVersion } , <nl> - n3 : { binVersion : oldVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : newVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . 
server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . 
getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> deleted file mode 100644 <nl> index 55e0379b7047 . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater5 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! 
_isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . / jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : oldVersion } , <nl> - n3 : { binVersion : newVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : newVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . 
eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . 
err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> deleted file mode 100644 <nl> index 54a8062e2740 . . 000000000000 <nl> mmm a / jstests / multiVersion / replset_primary_updater6 . js <nl> ppp / dev / null <nl> <nl> - / * SERVER - 6071 This test ( and the other replset_primary_updater tests ) check cross - compatibility of <nl> - * sync_source_feedback ' s updatePosition command and the OplogReader - based method of updating the <nl> - * primary ' s knowledge of the secondaries ' sync progress . This is done through a modified version of <nl> - * the tags . js replicaset js test because tags . js was the test that helped me discover and resolve <nl> - * the largest number of bugs when creating the updatePosition command . In tags . js , a chain forms <nl> - * running from member 4 to member 1 to member 2 ( nodes n5 , n2 , and n3 , respectively ) . Between the <nl> - * six replset_primary_updater tests , we run tags . js with each possible permutation of new and old <nl> - * nodes along this chain . <nl> - * / <nl> - <nl> - if ( ! _isWindows ( ) ) { <nl> - function myprint ( x ) { <nl> - print ( " tags output : " + x ) ; <nl> - } <nl> - <nl> - <nl> - load ( ' . / jstests / multiVersion / libs / multi_rs . js ' ) <nl> - load ( ' . / jstests / libs / test_background_ops . js ' ) <nl> - <nl> - var oldVersion = " 2 . 4 " <nl> - var newVersion = " latest " <nl> - <nl> - var nodes = { n1 : { binVersion : oldVersion } , <nl> - n2 : { binVersion : newVersion } , <nl> - n3 : { binVersion : newVersion } , <nl> - n4 : { binVersion : oldVersion } , <nl> - n5 : { binVersion : oldVersion } } <nl> - <nl> - / / Wait for a primary node . . . <nl> - <nl> - var num = 5 ; <nl> - var host = getHostName ( ) ; <nl> - var name = " dannentest " ; <nl> - <nl> - var replTest = new ReplSetTest ( { name : name , nodes : nodes , startPort : 31000 } ) ; <nl> - var nodes = replTest . startSet ( ) ; <nl> - var port = replTest . ports ; <nl> - replTest . initiate ( { _id : name , members : <nl> - [ <nl> - { _id : 0 , host : host + " : " + port [ 0 ] , tags : { " server " : " 0 " , " dc " : " ny " , " ny " : " 1 " , " rack " : " ny . 
rk1 " } } , <nl> - { _id : 1 , host : host + " : " + port [ 1 ] , tags : { " server " : " 1 " , " dc " : " ny " , " ny " : " 2 " , " rack " : " ny . rk1 " } } , <nl> - { _id : 2 , host : host + " : " + port [ 2 ] , tags : { " server " : " 2 " , " dc " : " ny " , " ny " : " 3 " , " rack " : " ny . rk2 " , " 2 " : " this " } } , <nl> - { _id : 3 , host : host + " : " + port [ 3 ] , tags : { " server " : " 3 " , " dc " : " sf " , " sf " : " 1 " , " rack " : " sf . rk1 " } } , <nl> - { _id : 4 , host : host + " : " + port [ 4 ] , tags : { " server " : " 4 " , " dc " : " sf " , " sf " : " 2 " , " rack " : " sf . rk2 " } } , <nl> - ] , <nl> - settings : { <nl> - getLastErrorModes : { <nl> - " 2 dc and 3 server " : { " dc " : 2 , " server " : 3 } , <nl> - " 1 and 2 " : { " server " : 1 } <nl> - } <nl> - } } ) ; <nl> - <nl> - var master = replTest . getMaster ( ) ; <nl> - / / make everyone catch up before reconfig <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - var config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - <nl> - printjson ( config ) ; <nl> - var modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - <nl> - config . version + + ; <nl> - config . members [ 1 ] . priority = 1 . 5 ; <nl> - config . members [ 2 ] . priority = 2 ; <nl> - modes [ " 3 or 4 " ] = { " sf " : 1 } ; <nl> - modes [ " 3 and 4 " ] = { " sf " : 2 } ; <nl> - modes [ " 1 and 2 " ] [ " 2 " ] = 1 ; <nl> - modes [ " 2 " ] = { " 2 " : 1 } <nl> - <nl> - try { <nl> - master . getDB ( " admin " ) . runCommand ( { replSetReconfig : config } ) ; <nl> - } <nl> - catch ( e ) { <nl> - myprint ( e ) ; <nl> - } <nl> - <nl> - replTest . awaitReplication ( ) ; <nl> - <nl> - myprint ( " primary should now be 2 " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - config = master . getDB ( " local " ) . system . replset . findOne ( ) ; <nl> - printjson ( config ) ; <nl> - <nl> - modes = config . settings . getLastErrorModes ; <nl> - assert . eq ( typeof modes , " object " ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . dc , 2 ) ; <nl> - assert . eq ( modes [ " 2 dc and 3 server " ] . server , 3 ) ; <nl> - assert . eq ( modes [ " 1 and 2 " ] [ " server " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 or 4 " ] [ " sf " ] , 1 ) ; <nl> - assert . eq ( modes [ " 3 and 4 " ] [ " sf " ] , 2 ) ; <nl> - <nl> - myprint ( " bridging " ) ; <nl> - replTest . bridge ( ) ; <nl> - myprint ( " bridge 1 " ) ; <nl> - replTest . partition ( 0 , 3 ) ; <nl> - myprint ( " bridge 2 " ) ; <nl> - replTest . partition ( 0 , 4 ) ; <nl> - myprint ( " bridge 3 " ) ; <nl> - replTest . partition ( 1 , 3 ) ; <nl> - myprint ( " bridge 4 " ) ; <nl> - replTest . partition ( 1 , 4 ) ; <nl> - myprint ( " bridge 5 " ) ; <nl> - replTest . partition ( 2 , 3 ) ; <nl> - myprint ( " bridge 6 " ) ; <nl> - replTest . partition ( 2 , 4 ) ; <nl> - myprint ( " bridge 7 " ) ; <nl> - replTest . partition ( 3 , 4 ) ; <nl> - myprint ( " done bridging " ) ; <nl> - <nl> - myprint ( " paritions : [ 0 - 1 - 2 - 0 ] [ 3 ] [ 4 ] " ) <nl> - myprint ( " test1 " ) ; <nl> - myprint ( " 2 should be primary " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - printjson ( master . getDB ( " admin " ) . 
runCommand ( { replSetGetStatus : 1 } ) ) ; <nl> - <nl> - var timeout = 20000 ; <nl> - <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - var result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 1 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 or 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " partitions : [ 1 - 4 ] [ 0 - 1 - 2 - 0 ] [ 3 ] " ) ; <nl> - myprint ( " test3 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . unPartition ( 3 , 4 ) ; <nl> - <nl> - myprint ( " partitions : [ 0 - 4 - 3 ] [ 0 - 1 - 2 - 0 ] " ) ; <nl> - myprint ( " 31004 should sync from 31001 ( 31026 ) " ) ; <nl> - myprint ( " 31003 should sync from 31004 ( 31024 ) " ) ; <nl> - myprint ( " test4 " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " non - existent w " ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " blahblah " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . code , 14830 ) ; <nl> - assert . eq ( result . ok , 0 ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test two on the primary " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 1 and 2 " , wtimeout : 0 } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test5 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 dc and 3 server " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - replTest . unPartition ( 1 , 3 ) ; <nl> - <nl> - replTest . partition ( 2 , 0 ) ; <nl> - replTest . partition ( 2 , 1 ) ; <nl> - replTest . stop ( 2 ) ; <nl> - <nl> - myprint ( " 1 must become primary here because otherwise the other members will take too long timing out their old sync threads " ) ; <nl> - master = replTest . getMaster ( ) ; <nl> - <nl> - myprint ( " test6 " ) ; <nl> - master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 3 and 4 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , null ) ; <nl> - <nl> - myprint ( " test mode 2 " ) ; <nl> - master . getDB ( " foo " ) . bar . 
insert ( { x : 1 } ) ; <nl> - result = master . getDB ( " foo " ) . runCommand ( { getLastError : 1 , w : " 2 " , wtimeout : timeout } ) ; <nl> - printjson ( result ) ; <nl> - assert . eq ( result . err , " timeout " ) ; <nl> - <nl> - replTest . stopSet ( ) ; <nl> - myprint ( " \ n \ ntags . js SUCCESS \ n \ n " ) ; <nl> - <nl> - } <nl> mmm a / jstests / replsets / sync2 . js <nl> ppp b / jstests / replsets / sync2 . js <nl> replTest . partition ( 4 , 3 ) ; <nl> <nl> jsTestLog ( " Checking that ops still replicate correctly " ) ; <nl> master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> + replTest . awaitReplication ( ) ; <nl> <nl> - var result = master . getDB ( " admin " ) . runCommand ( { getLastError : 1 , w : 5 , wtimeout : 30000 } ) ; <nl> + var result = master . getDB ( " admin " ) . runCommand ( { getLastError : 1 , w : 5 , wtimeout : 1000 } ) ; <nl> assert . eq ( null , result . err , tojson ( result ) ) ; <nl> <nl> / / 4 is connected to 3 <nl> replTest . partition ( 4 , 2 ) ; <nl> replTest . unPartition ( 4 , 3 ) ; <nl> <nl> master . getDB ( " foo " ) . bar . insert ( { x : 1 } ) ; <nl> + replTest . awaitReplication ( ) ; <nl> <nl> - result = master . getDB ( " admin " ) . runCommand ( { getLastError : 1 , w : 5 , wtimeout : 30000 } ) ; <nl> + result = master . getDB ( " admin " ) . runCommand ( { getLastError : 1 , w : 5 , wtimeout : 1000 } ) ; <nl> assert . eq ( null , result . err , tojson ( result ) ) ; <nl> <nl> - replTest . stopSet ( ) ; <nl> + replTest . stopSet ( ) ; <nl> \ No newline at end of file <nl> mmm a / src / mongo / SConscript <nl> ppp b / src / mongo / SConscript <nl> serverOnlyFiles = [ " db / curop . cpp " , <nl> " db / repl / master_slave . cpp " , <nl> " db / repl / finding_start_cursor . cpp " , <nl> " db / repl / sync . cpp " , <nl> - " db / repl / sync_source_feedback . cpp " , <nl> " db / repl / optime . cpp " , <nl> " db / repl / oplogreader . cpp " , <nl> " db / repl / replication_server_status . cpp " , <nl> mmm a / src / mongo / db / auth / action_types . txt <nl> ppp b / src / mongo / db / auth / action_types . txt <nl> <nl> " replSetReconfig " , <nl> " replSetStepDown " , <nl> " replSetSyncFrom " , <nl> - " replSetUpdatePosition " , <nl> " resync " , <nl> " serverStatus " , <nl> " setParameter " , <nl> mmm a / src / mongo / db / repl / bgsync . cpp <nl> ppp b / src / mongo / db / repl / bgsync . cpp <nl> <nl> # include " mongo / db / repl / bgsync . h " <nl> # include " mongo / db / repl / oplog . h " <nl> # include " mongo / db / repl / rs_sync . h " <nl> - # include " mongo / db / repl / rs . h " <nl> # include " mongo / util / fail_point_service . h " <nl> # include " mongo / base / counter . h " <nl> # include " mongo / db / stats / timer_stats . h " <nl> namespace replset { <nl> _assumingPrimary ( false ) , <nl> _currentSyncTarget ( NULL ) , <nl> _oplogMarkerTarget ( NULL ) , <nl> + _oplogMarker ( true / * doHandshake * / ) , <nl> _consumedOpTime ( 0 , 0 ) { <nl> } <nl> <nl> namespace replset { <nl> void BackgroundSync : : notifierThread ( ) { <nl> Client : : initThread ( " rsSyncNotifier " ) ; <nl> replLocalAuth ( ) ; <nl> - theReplSet - > syncSourceFeedback . go ( ) ; <nl> <nl> while ( ! 
inShutdown ( ) ) { <nl> bool clearTarget = false ; <nl> namespace replset { <nl> } <nl> <nl> void BackgroundSync : : markOplog ( ) { <nl> - LOG ( 3 ) < < " replset markOplog : " < < _consumedOpTime < < " " <nl> - < < theReplSet - > lastOpTimeWritten < < rsLog ; <nl> + LOG ( 3 ) < < " replset markOplog : " < < _consumedOpTime < < " " < < theReplSet - > lastOpTimeWritten < < rsLog ; <nl> <nl> - if ( theReplSet - > syncSourceFeedback . supportsUpdater ( ) ) { <nl> - theReplSet - > syncSourceFeedback . updateSelfInMap ( theReplSet - > lastOpTimeWritten ) ; <nl> - _consumedOpTime = theReplSet - > lastOpTimeWritten ; <nl> + if ( ! hasCursor ( ) ) { <nl> + sleepsecs ( 1 ) ; <nl> + return ; <nl> } <nl> - else { <nl> - if ( ! hasCursor ( ) ) { <nl> - return ; <nl> - } <nl> <nl> - if ( ! theReplSet - > syncSourceFeedback . moreInCurrentBatch ( ) ) { <nl> - theReplSet - > syncSourceFeedback . more ( ) ; <nl> - } <nl> - <nl> - if ( ! theReplSet - > syncSourceFeedback . more ( ) ) { <nl> - theReplSet - > syncSourceFeedback . tailCheck ( ) ; <nl> - return ; <nl> - } <nl> + if ( ! _oplogMarker . moreInCurrentBatch ( ) ) { <nl> + _oplogMarker . more ( ) ; <nl> + } <nl> <nl> - / / if this member has written the op at optime T <nl> - / / we want to nextSafe up to and including T <nl> - while ( _consumedOpTime < theReplSet - > lastOpTimeWritten <nl> - & & theReplSet - > syncSourceFeedback . more ( ) ) { <nl> - BSONObj temp = theReplSet - > syncSourceFeedback . nextSafe ( ) ; <nl> - _consumedOpTime = temp [ " ts " ] . _opTime ( ) ; <nl> - } <nl> + if ( ! _oplogMarker . more ( ) ) { <nl> + _oplogMarker . tailCheck ( ) ; <nl> + sleepsecs ( 1 ) ; <nl> + return ; <nl> + } <nl> <nl> - / / call more ( ) to signal the sync target that we ' ve synced T <nl> - theReplSet - > syncSourceFeedback . more ( ) ; <nl> + / / if this member has written the op at optime T , we want to nextSafe up to and including T <nl> + while ( _consumedOpTime < theReplSet - > lastOpTimeWritten & & _oplogMarker . more ( ) ) { <nl> + BSONObj temp = _oplogMarker . nextSafe ( ) ; <nl> + _consumedOpTime = temp [ " ts " ] . _opTime ( ) ; <nl> } <nl> + <nl> + / / call more ( ) to signal the sync target that we ' ve synced T <nl> + _oplogMarker . more ( ) ; <nl> } <nl> <nl> bool BackgroundSync : : hasCursor ( ) { <nl> { <nl> / / prevent writers from blocking readers during fsync <nl> - SimpleMutex : : scoped_lock fsynclk ( filesLockedFsync ) ; <nl> + SimpleMutex : : scoped_lock fsynclk ( filesLockedFsync ) ; <nl> / / we don ' t need the local write lock yet , but it ' s needed by OplogReader : : connect <nl> / / so we take it preemptively to avoid deadlocking . <nl> Lock : : DBWrite lk ( " local " ) ; <nl> namespace replset { <nl> return false ; <nl> } <nl> <nl> - log ( ) < < " replset setting oplog notifier to " <nl> - < < _currentSyncTarget - > fullName ( ) < < rsLog ; <nl> + log ( ) < < " replset setting oplog notifier to " < < _currentSyncTarget - > fullName ( ) < < rsLog ; <nl> _oplogMarkerTarget = _currentSyncTarget ; <nl> <nl> - if ( ! theReplSet - > syncSourceFeedback . connect ( _oplogMarkerTarget ) ) { <nl> + _oplogMarker . resetConnection ( ) ; <nl> + <nl> + if ( ! _oplogMarker . connect ( _oplogMarkerTarget - > fullName ( ) ) ) { <nl> + LOG ( 1 ) < < " replset could not connect to " < < _oplogMarkerTarget - > fullName ( ) < < rsLog ; <nl> _oplogMarkerTarget = NULL ; <nl> return false ; <nl> } <nl> } <nl> } <nl> - if ( ! theReplSet - > syncSourceFeedback . haveCursor ( ) ) { <nl> + <nl> + if ( ! _oplogMarker . 
haveCursor ( ) ) { <nl> BSONObj fields = BSON ( " ts " < < 1 ) ; <nl> - theReplSet - > syncSourceFeedback . tailingQueryGTE ( rsoplog , <nl> - theReplSet - > lastOpTimeWritten , & fields ) ; <nl> + _oplogMarker . tailingQueryGTE ( rsoplog , theReplSet - > lastOpTimeWritten , & fields ) ; <nl> } <nl> <nl> - return theReplSet - > syncSourceFeedback . haveCursor ( ) ; <nl> + return _oplogMarker . haveCursor ( ) ; <nl> } <nl> <nl> void BackgroundSync : : producerThread ( ) { <nl> namespace replset { <nl> _currentSyncTarget = target ; <nl> } <nl> <nl> - theReplSet - > syncSourceFeedback . connect ( target ) ; <nl> - <nl> return ; <nl> } <nl> <nl> mmm a / src / mongo / db / repl / bgsync . h <nl> ppp b / src / mongo / db / repl / bgsync . h <nl> namespace replset { <nl> boost : : mutex _lastOpMutex ; <nl> <nl> const Member * _oplogMarkerTarget ; <nl> + OplogReader _oplogMarker ; / / not locked , only used by notifier thread <nl> OpTime _consumedOpTime ; / / not locked , only used by notifier thread <nl> <nl> BackgroundSync ( ) ; <nl> mmm a / src / mongo / db / repl / health . cpp <nl> ppp b / src / mongo / db / repl / health . cpp <nl> namespace mongo { <nl> return 0 ; <nl> } <nl> <nl> - Member * ReplSetImpl : : getMutableMember ( unsigned id ) { <nl> - if ( _self & & id = = _self - > id ( ) ) return _self ; <nl> - <nl> - for ( Member * m = head ( ) ; m ; m = m - > next ( ) ) <nl> - if ( m - > id ( ) = = id ) <nl> - return m ; <nl> - return 0 ; <nl> - } <nl> - <nl> Member * ReplSetImpl : : findByName ( const std : : string & hostname ) const { <nl> if ( _self & & hostname = = _self - > fullName ( ) ) { <nl> return _self ; <nl> mmm a / src / mongo / db / repl / oplogreader . cpp <nl> ppp b / src / mongo / db / repl / oplogreader . cpp <nl> namespace mongo { <nl> if ( conn ( ) = = 0 ) { <nl> _conn = shared_ptr < DBClientConnection > ( new DBClientConnection ( false , <nl> 0 , <nl> - tcp_timeout ) ) ; <nl> + 30 / * tcp timeout * / ) ) ; <nl> string errmsg ; <nl> if ( ! _conn - > connect ( hostName . c_str ( ) , errmsg ) | | <nl> ( AuthorizationManager : : isAuthEnabled ( ) & & ! replAuthenticate ( _conn . get ( ) , true ) ) ) { <nl> mmm a / src / mongo / db / repl / oplogreader . h <nl> ppp b / src / mongo / db / repl / oplogreader . h <nl> namespace mongo { <nl> return findOne ( ns , Query ( ) . sort ( reverseNaturalObj ) ) ; <nl> } <nl> <nl> - / * SO_TIMEOUT ( send / recv time out ) for our DBClientConnections * / <nl> - static const int tcp_timeout = 30 ; <nl> - <nl> / * ok to call if already connected * / <nl> bool connect ( const std : : string & hostname ) ; <nl> <nl> mmm a / src / mongo / db / repl / replset_commands . cpp <nl> ppp b / src / mongo / db / repl / replset_commands . cpp <nl> <nl> # include " mongo / db / cmdline . h " <nl> # include " mongo / db / commands . h " <nl> # include " mongo / db / repl / health . h " <nl> - # include " mongo / db / repl / oplog . h " <nl> # include " mongo / db / repl / replication_server_status . h " / / replSettings <nl> # include " mongo / db / repl / rs . h " <nl> # include " mongo / db / repl / rs_config . h " <nl> namespace mongo { <nl> } <nl> } cmdReplSetSyncFrom ; <nl> <nl> - class CmdReplSetUpdatePosition : public ReplSetCommand { <nl> - public : <nl> - virtual void help ( stringstream & help ) const { <nl> - help < < " internal " ; <nl> - } <nl> - virtual void addRequiredPrivileges ( const std : : string & dbname , <nl> - const BSONObj & cmdObj , <nl> - std : : vector < Privilege > * out ) { <nl> - ActionSet actions ; <nl> - actions . 
addAction ( ActionType : : replSetUpdatePosition ) ; <nl> - out - > push_back ( Privilege ( AuthorizationManager : : SERVER_RESOURCE_NAME , actions ) ) ; <nl> - } <nl> - CmdReplSetUpdatePosition ( ) : ReplSetCommand ( " replSetUpdatePosition " ) { } <nl> - virtual bool run ( const string & , BSONObj & cmdObj , int , string & errmsg , <nl> - BSONObjBuilder & result , bool fromRepl ) { <nl> - if ( ! check ( errmsg , result ) ) <nl> - return false ; <nl> - <nl> - if ( cmdObj . hasField ( " handshake " ) ) { <nl> - / / we have received a handshake , not an update message <nl> - / / handshakes are done here to ensure the receiving end supports the update command <nl> - cc ( ) . gotHandshake ( cmdObj [ " handshake " ] . embeddedObject ( ) ) ; <nl> - / / if we aren ' t primary , pass the handshake along <nl> - if ( ! theReplSet - > isPrimary ( ) & & theReplSet - > syncSourceFeedback . supportsUpdater ( ) ) { <nl> - theReplSet - > syncSourceFeedback . forwardSlaveHandshake ( ) ; <nl> - } <nl> - return true ; <nl> - } <nl> - <nl> - uassert ( 16888 , " optimes field should be an array with an object for each secondary " , <nl> - cmdObj [ " optimes " ] . type ( ) = = Array ) ; <nl> - BSONArray newTimes = BSONArray ( cmdObj [ " optimes " ] . Obj ( ) ) ; <nl> - updateSlaveLocations ( newTimes ) ; <nl> - <nl> - return true ; <nl> - } <nl> - } cmdReplSetUpdatePosition ; <nl> - <nl> } <nl> mmm a / src / mongo / db / repl / rs . cpp <nl> ppp b / src / mongo / db / repl / rs . cpp <nl> namespace mongo { <nl> void ReplSetImpl : : registerSlave ( const BSONObj & rid , const int memberId ) { <nl> / / To prevent race conditions with clearing the cache at reconfig time , <nl> / / we lock the replset mutex here . <nl> - { <nl> - lock lk ( this ) ; <nl> - ghost - > associateSlave ( rid , memberId ) ; <nl> - } <nl> - syncSourceFeedback . associateMember ( rid , memberId ) ; <nl> + lock lk ( this ) ; <nl> + ghost - > associateSlave ( rid , memberId ) ; <nl> } <nl> <nl> class ReplIndexPrefetch : public ServerParameter { <nl> mmm a / src / mongo / db / repl / rs . h <nl> ppp b / src / mongo / db / repl / rs . h <nl> <nl> # include " mongo / db / repl / rs_exception . h " <nl> # include " mongo / db / repl / rs_member . h " <nl> # include " mongo / db / repl / rs_sync . h " <nl> - # include " mongo / db / repl / sync_source_feedback . h " <nl> # include " mongo / util / concurrency / list . h " <nl> # include " mongo / util / concurrency / msg . h " <nl> # include " mongo / util / concurrency / thread_pool . h " <nl> namespace mongo { <nl> <nl> StateBox box ; <nl> <nl> - SyncSourceFeedback syncSourceFeedback ; <nl> - <nl> OpTime lastOpTimeWritten ; <nl> long long lastH ; / / hash we use to make sure we are reading the right flow of ops and aren ' t on an out - of - date " fork " <nl> bool forceSyncFrom ( const string & host , string & errmsg , BSONObjBuilder & result ) ; <nl> namespace mongo { <nl> Member * head ( ) const { return _members . head ( ) ; } <nl> public : <nl> const Member * findById ( unsigned id ) const ; <nl> - Member * getMutableMember ( unsigned id ) ; <nl> Member * findByName ( const std : : string & hostname ) const ; <nl> private : <nl> void _getTargets ( list < Target > & , int & configVersion ) ; <nl> deleted file mode 100644 <nl> index 21a7bffd3c48 . . 000000000000 <nl> mmm a / src / mongo / db / repl / sync_source_feedback . cpp <nl> ppp / dev / null <nl> <nl> - / * * <nl> - * Copyright ( C ) 2013 10gen Inc . 
<nl> - * <nl> - * This program is free software : you can redistribute it and / or modify <nl> - * it under the terms of the GNU Affero General Public License , version 3 , <nl> - * as published by the Free Software Foundation . <nl> - * <nl> - * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> - * GNU Affero General Public License for more details . <nl> - * <nl> - * You should have received a copy of the GNU Affero General Public License <nl> - * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> - * / <nl> - <nl> - # include " mongo / db / repl / sync_source_feedback . h " <nl> - <nl> - # include " mongo / client / constants . h " <nl> - # include " mongo / client / dbclientcursor . h " <nl> - # include " mongo / db / auth / authorization_session . h " <nl> - # include " mongo / db / auth / security_key . h " <nl> - # include " mongo / db / dbhelpers . h " <nl> - # include " mongo / db / repl / bgsync . h " <nl> - # include " mongo / db / repl / rs . h " / / theReplSet <nl> - <nl> - namespace mongo { <nl> - <nl> - / / used in replAuthenticate <nl> - static const BSONObj userReplQuery = fromjson ( " { \ " user \ " : \ " repl \ " } " ) ; <nl> - <nl> - void SyncSourceFeedback : : associateMember ( const BSONObj & id , const int memberId ) { <nl> - const OID rid = id [ " _id " ] . OID ( ) ; <nl> - boost : : unique_lock < boost : : mutex > lock ( _mtx ) ; <nl> - _handshakeNeeded = true ; <nl> - _members [ rid ] = theReplSet - > getMutableMember ( memberId ) ; <nl> - _cond . notify_all ( ) ; <nl> - } <nl> - <nl> - bool SyncSourceFeedback : : replAuthenticate ( bool skipAuthCheck ) { <nl> - <nl> - if ( ! AuthorizationManager : : isAuthEnabled ( ) ) { <nl> - return true ; <nl> - } <nl> - if ( ! skipAuthCheck & & ! cc ( ) . getAuthorizationSession ( ) - > hasInternalAuthorization ( ) ) { <nl> - log ( ) < < " replauthenticate : requires internal authorization , failing " < < endl ; <nl> - return false ; <nl> - } <nl> - <nl> - if ( isInternalAuthSet ( ) ) { <nl> - return authenticateInternalUser ( _connection . get ( ) ) ; <nl> - } <nl> - <nl> - BSONObj user ; <nl> - { <nl> - Client : : ReadContext ctxt ( " local . " ) ; <nl> - if ( ! Helpers : : findOne ( " local . system . users " , userReplQuery , user ) | | <nl> - / / try the first user in local <nl> - ! Helpers : : getSingleton ( " local . system . users " , user ) ) { <nl> - log ( ) < < " replauthenticate : no user in local . system . users to use " <nl> - < < " for authentication " < < endl ; <nl> - return false ; <nl> - } <nl> - } <nl> - std : : string u = user . getStringField ( " user " ) ; <nl> - std : : string p = user . getStringField ( " pwd " ) ; <nl> - massert ( 16889 , " bad user object ? [ 1 ] " , ! u . empty ( ) ) ; <nl> - massert ( 16887 , " bad user object ? [ 2 ] " , ! p . empty ( ) ) ; <nl> - <nl> - std : : string err ; <nl> - <nl> - if ( ! _connection - > auth ( " local " , u . c_str ( ) , p . c_str ( ) , err , false ) ) { <nl> - log ( ) < < " replauthenticate : can ' t authenticate to master server , user : " < < u < < endl ; <nl> - return false ; <nl> - } <nl> - <nl> - return true ; <nl> - } <nl> - <nl> - void SyncSourceFeedback : : ensureMe ( ) { <nl> - string myname = getHostName ( ) ; <nl> - { <nl> - Client : : WriteContext ctx ( " local " ) ; <nl> - / / local . 
me is an identifier for a server for getLastError w : 2 + <nl> - if ( ! Helpers : : getSingleton ( " local . me " , _me ) | | <nl> - ! _me . hasField ( " host " ) | | <nl> - _me [ " host " ] . String ( ) ! = myname ) { <nl> - <nl> - / / clean out local . me <nl> - Helpers : : emptyCollection ( " local . me " ) ; <nl> - <nl> - / / repopulate <nl> - BSONObjBuilder b ; <nl> - b . appendOID ( " _id " , 0 , true ) ; <nl> - b . append ( " host " , myname ) ; <nl> - _me = b . obj ( ) ; <nl> - Helpers : : putSingleton ( " local . me " , _me ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - bool SyncSourceFeedback : : replHandshake ( ) { <nl> - ensureMe ( ) ; <nl> - <nl> - / / handshake for us <nl> - BSONObjBuilder cmd ; <nl> - cmd . append ( " replSetUpdatePosition " , 1 ) ; <nl> - BSONObjBuilder sub ( cmd . subobjStart ( " handshake " ) ) ; <nl> - sub . appendAs ( _me [ " _id " ] , " handshake " ) ; <nl> - sub . append ( " member " , theReplSet - > selfId ( ) ) ; <nl> - sub . append ( " config " , theReplSet - > myConfig ( ) . asBson ( ) ) ; <nl> - sub . doneFast ( ) ; <nl> - <nl> - BSONObj res ; <nl> - try { <nl> - if ( ! _connection - > runCommand ( " admin " , cmd . obj ( ) , res ) ) { <nl> - if ( res [ " errmsg " ] . str ( ) . find ( " no such cmd " ) ! = std : : string : : npos ) { <nl> - _supportsUpdater = false ; <nl> - } <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - else { <nl> - _supportsUpdater = true ; <nl> - } <nl> - } <nl> - catch ( const DBException & e ) { <nl> - log ( ) < < " SyncSourceFeedback error sending handshake : " < < e . what ( ) < < endl ; <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - <nl> - / / handshakes for those connected to us <nl> - { <nl> - for ( OIDMemberMap : : iterator itr = _members . begin ( ) ; <nl> - itr ! = _members . end ( ) ; + + itr ) { <nl> - BSONObjBuilder slaveCmd ; <nl> - slaveCmd . append ( " replSetUpdatePosition " , 1 ) ; <nl> - / / outer handshake indicates this is a handshake command <nl> - / / inner is needed as part of the structure to be passed to gotHandshake <nl> - BSONObjBuilder slaveSub ( slaveCmd . subobjStart ( " handshake " ) ) ; <nl> - slaveSub . append ( " handshake " , itr - > first ) ; <nl> - slaveSub . append ( " member " , itr - > second - > id ( ) ) ; <nl> - slaveSub . append ( " config " , itr - > second - > config ( ) . asBson ( ) ) ; <nl> - slaveSub . doneFast ( ) ; <nl> - BSONObj slaveRes ; <nl> - try { <nl> - if ( ! _connection - > runCommand ( " admin " , slaveCmd . obj ( ) , slaveRes ) ) { <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - } <nl> - catch ( const DBException & e ) { <nl> - log ( ) < < " SyncSourceFeedback error sending chained handshakes : " <nl> - < < e . what ( ) < < endl ; <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - } <nl> - } <nl> - return true ; <nl> - } <nl> - <nl> - bool SyncSourceFeedback : : _connect ( const std : : string & hostName ) { <nl> - if ( hasConnection ( ) ) { <nl> - return true ; <nl> - } <nl> - _connection . reset ( new DBClientConnection ( false , 0 , OplogReader : : tcp_timeout ) ) ; <nl> - string errmsg ; <nl> - if ( ! _connection - > connect ( hostName . c_str ( ) , errmsg ) | | <nl> - ( AuthorizationManager : : isAuthEnabled ( ) & & ! replAuthenticate ( true ) ) ) { <nl> - resetConnection ( ) ; <nl> - log ( ) < < " repl : " < < errmsg < < endl ; <nl> - return false ; <nl> - } <nl> - <nl> - if ( ! replHandshake ( ) ) { <nl> - if ( ! 
supportsUpdater ( ) ) { <nl> - return connectOplogReader ( hostName ) ; <nl> - } <nl> - return false ; <nl> - } <nl> - return true ; <nl> - } <nl> - <nl> - bool SyncSourceFeedback : : connect ( const Member * target ) { <nl> - boost : : unique_lock < boost : : mutex > lock ( _connmtx ) ; <nl> - resetConnection ( ) ; <nl> - resetOplogReaderConnection ( ) ; <nl> - _syncTarget = target ; <nl> - if ( _connect ( target - > fullName ( ) ) ) { <nl> - if ( ! supportsUpdater ( ) ) { <nl> - return true ; <nl> - } <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - void SyncSourceFeedback : : forwardSlaveHandshake ( ) { <nl> - boost : : unique_lock < boost : : mutex > lock ( _mtx ) ; <nl> - _handshakeNeeded = true ; <nl> - } <nl> - <nl> - void SyncSourceFeedback : : updateMap ( const mongo : : OID & rid , const OpTime & ot ) { <nl> - boost : : unique_lock < boost : : mutex > lock ( _mtx ) ; <nl> - LOG ( 1 ) < < " replSet last : " < < _slaveMap [ rid ] . toString ( ) < < " to " < < ot . toString ( ) < < endl ; <nl> - / / only update if ot is newer than what we have already <nl> - if ( ot > _slaveMap [ rid ] ) { <nl> - _slaveMap [ rid ] = ot ; <nl> - _positionChanged = true ; <nl> - LOG ( 2 ) < < " now last is " < < _slaveMap [ rid ] . toString ( ) < < endl ; <nl> - _cond . notify_all ( ) ; <nl> - } <nl> - } <nl> - <nl> - bool SyncSourceFeedback : : updateUpstream ( ) { <nl> - if ( theReplSet - > isPrimary ( ) ) { <nl> - / / primary has no one to update to <nl> - return true ; <nl> - } <nl> - BSONObjBuilder cmd ; <nl> - cmd . append ( " replSetUpdatePosition " , 1 ) ; <nl> - / / create an array containing objects each member connected to us and for ourself <nl> - BSONArrayBuilder array ( cmd . subarrayStart ( " optimes " ) ) ; <nl> - OID myID = _me [ " _id " ] . OID ( ) ; <nl> - { <nl> - for ( map < mongo : : OID , OpTime > : : const_iterator itr = _slaveMap . begin ( ) ; <nl> - itr ! = _slaveMap . end ( ) ; + + itr ) { <nl> - BSONObjBuilder entry ( array . subobjStart ( ) ) ; <nl> - entry . append ( " _id " , itr - > first ) ; <nl> - entry . append ( " optime " , itr - > second ) ; <nl> - if ( itr - > first = = myID ) { <nl> - entry . append ( " config " , theReplSet - > myConfig ( ) . asBson ( ) ) ; <nl> - } <nl> - else { <nl> - entry . append ( " config " , _members [ itr - > first ] - > config ( ) . asBson ( ) ) ; <nl> - } <nl> - entry . doneFast ( ) ; <nl> - } <nl> - } <nl> - array . done ( ) ; <nl> - BSONObj res ; <nl> - <nl> - bool ok ; <nl> - try { <nl> - ok = _connection - > runCommand ( " admin " , cmd . obj ( ) , res ) ; <nl> - } <nl> - catch ( const DBException & e ) { <nl> - log ( ) < < " SyncSourceFeedback error sending update : " < < e . what ( ) < < endl ; <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - if ( ! ok ) { <nl> - log ( ) < < " SyncSourceFeedback error sending update , response : " < < res . toString ( ) < < endl ; <nl> - resetConnection ( ) ; <nl> - return false ; <nl> - } <nl> - return true ; <nl> - } <nl> - <nl> - void SyncSourceFeedback : : run ( ) { <nl> - Client : : initThread ( " SyncSourceFeedbackThread " ) ; <nl> - while ( true ) { <nl> - { <nl> - boost : : unique_lock < boost : : mutex > lock ( _mtx ) ; <nl> - while ( ! _positionChanged & & ! _handshakeNeeded ) { <nl> - _cond . wait ( lock ) ; <nl> - } <nl> - boost : : unique_lock < boost : : mutex > conlock ( _connmtx ) ; <nl> - const Member * target = replset : : BackgroundSync : : get ( ) - > getSyncTarget ( ) ; <nl> - if ( _syncTarget ! 
= target ) { <nl> - resetConnection ( ) ; <nl> - _syncTarget = target ; <nl> - } <nl> - if ( ! hasConnection ( ) ) { <nl> - / / fix connection if need be <nl> - if ( ! target ) { <nl> - continue ; <nl> - } <nl> - if ( ! _connect ( target - > fullName ( ) ) ) { <nl> - continue ; <nl> - } <nl> - else if ( ! supportsUpdater ( ) ) { <nl> - _handshakeNeeded = false ; <nl> - _positionChanged = false ; <nl> - continue ; <nl> - } <nl> - } <nl> - if ( _handshakeNeeded ) { <nl> - if ( ! replHandshake ( ) ) { <nl> - _handshakeNeeded = true ; <nl> - continue ; <nl> - } <nl> - else { <nl> - _handshakeNeeded = false ; <nl> - } <nl> - } <nl> - if ( _positionChanged ) { <nl> - if ( ! updateUpstream ( ) ) { <nl> - _positionChanged = true ; <nl> - continue ; <nl> - } <nl> - else { <nl> - _positionChanged = false ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - } <nl> - } <nl> deleted file mode 100644 <nl> index 72073841029f . . 000000000000 <nl> mmm a / src / mongo / db / repl / sync_source_feedback . h <nl> ppp / dev / null <nl> <nl> - / * * <nl> - * Copyright ( C ) 2013 10gen Inc . <nl> - * <nl> - * This program is free software : you can redistribute it and / or modify <nl> - * it under the terms of the GNU Affero General Public License , version 3 , <nl> - * as published by the Free Software Foundation . <nl> - * <nl> - * This program is distributed in the hope that it will be useful , <nl> - * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> - * GNU Affero General Public License for more details . <nl> - * <nl> - * You should have received a copy of the GNU Affero General Public License <nl> - * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> - * / <nl> - <nl> - <nl> - # pragma once <nl> - <nl> - # include " mongo / db / repl / oplogreader . h " <nl> - # include " mongo / util / background . h " <nl> - <nl> - <nl> - namespace mongo { <nl> - <nl> - class Member ; <nl> - <nl> - class SyncSourceFeedback : public BackgroundJob { <nl> - public : <nl> - SyncSourceFeedback ( ) : BackgroundJob ( false / * don ' t selfdelete * / ) , <nl> - _syncTarget ( NULL ) , <nl> - _oplogReader ( new OplogReader ( true ) ) , <nl> - _supportsUpdater ( true ) { } <nl> - <nl> - ~ SyncSourceFeedback ( ) { <nl> - delete _oplogReader ; <nl> - } <nl> - <nl> - / / / Adds an entry to _member for a secondary that has connected to us . <nl> - void associateMember ( const BSONObj & id , const int memberId ) ; <nl> - <nl> - / / / Passes handshake up the replication chain , upon receiving a handshake . <nl> - void forwardSlaveHandshake ( ) ; <nl> - <nl> - void updateSelfInMap ( const OpTime & ot ) { <nl> - ensureMe ( ) ; <nl> - updateMap ( _me [ " _id " ] . OID ( ) , ot ) ; <nl> - } <nl> - <nl> - / / / Connect to sync target and create OplogReader if needed . <nl> - bool connect ( const Member * target ) ; <nl> - <nl> - void resetConnection ( ) { <nl> - _connection . reset ( ) ; <nl> - } <nl> - <nl> - void resetOplogReaderConnection ( ) { <nl> - _oplogReader - > resetConnection ( ) ; <nl> - } <nl> - <nl> - / / / Used extensively in bgsync , to see if we need to use the OplogReader syncing method . <nl> - bool supportsUpdater ( ) const { <nl> - / / oplogReader will be NULL if new updater is supported <nl> - / / boost : : unique_lock < boost : : mutex > lock ( _mtx ) ; <nl> - return _supportsUpdater ; <nl> - } <nl> - <nl> - / / / Updates the _slaveMap to be forwarded to the sync target . 
<nl> - void updateMap ( const mongo : : OID & rid , const OpTime & ot ) ; <nl> - <nl> - std : : string name ( ) const { return " SyncSourceFeedbackThread " ; } <nl> - <nl> - / / / Loops forever , passing updates when they are present . <nl> - void run ( ) ; <nl> - <nl> - / * The below methods just fall through to OplogReader and are only used when our sync target <nl> - * does not support the update command . <nl> - * / <nl> - bool connectOplogReader ( const std : : string & hostName ) { <nl> - return _oplogReader - > connect ( hostName ) ; <nl> - } <nl> - <nl> - bool connect ( const BSONObj & rid , const int from , const string & to ) { <nl> - return _oplogReader - > connect ( rid , from , to ) ; <nl> - } <nl> - <nl> - void ghostQueryGTE ( const char * ns , OpTime t ) { <nl> - _oplogReader - > ghostQueryGTE ( ns , t ) ; <nl> - } <nl> - <nl> - bool haveCursor ( ) { <nl> - return _oplogReader - > haveCursor ( ) ; <nl> - } <nl> - <nl> - bool more ( ) { <nl> - return _oplogReader - > more ( ) ; <nl> - } <nl> - <nl> - bool moreInCurrentBatch ( ) { <nl> - return _oplogReader - > moreInCurrentBatch ( ) ; <nl> - } <nl> - <nl> - BSONObj nextSafe ( ) { <nl> - return _oplogReader - > nextSafe ( ) ; <nl> - } <nl> - <nl> - void tailCheck ( ) { <nl> - _oplogReader - > tailCheck ( ) ; <nl> - } <nl> - <nl> - void tailingQueryGTE ( const char * ns , OpTime t , const BSONObj * fields = 0 ) { <nl> - _oplogReader - > tailingQueryGTE ( ns , t , fields ) ; <nl> - } <nl> - <nl> - private : <nl> - / / / Ensures local . me is populated and populates it if not . <nl> - void ensureMe ( ) ; <nl> - <nl> - / * Generally replAuthenticate will only be called within system threads to fully <nl> - * authenticate connections to other nodes in the cluster that will be used as part of <nl> - * internal operations . If a user - initiated action results in needing to call <nl> - * replAuthenticate , you can call it with skipAuthCheck set to false . Only do this if you <nl> - * are certain that the proper auth checks have already run to ensure that the user is <nl> - * authorized to do everything that this connection will be used for ! <nl> - * / <nl> - bool replAuthenticate ( bool skipAuthCheck ) ; <nl> - <nl> - / * Sends initialization information to our sync target , also determines whether or not they <nl> - * support the updater command . <nl> - * / <nl> - bool replHandshake ( ) ; <nl> - <nl> - / * Inform the sync target of our current position in the oplog , as well as the positions <nl> - * of all secondaries chained through us . <nl> - * / <nl> - bool updateUpstream ( ) ; <nl> - <nl> - bool hasConnection ( ) { <nl> - return _connection . get ( ) ; <nl> - } <nl> - <nl> - / / / Connect to sync target and create OplogReader if needed . 
<nl> - bool _connect ( const std : : string & hostName ) ; <nl> - <nl> - / / stores our OID to be passed along in commands <nl> - BSONObj _me ; <nl> - / / the member we are currently syncing from <nl> - const Member * _syncTarget ; <nl> - / / holds the oplogReader for use when we fall back to old style updates <nl> - OplogReader * _oplogReader ; <nl> - / / our connection to our sync target <nl> - boost : : scoped_ptr < DBClientConnection > _connection ; <nl> - / / tracks whether we are in fallback mode or not <nl> - bool _supportsUpdater ; <nl> - / / protects connection <nl> - boost : : mutex _connmtx ; <nl> - / / protects cond and maps and the indicator bools <nl> - boost : : mutex _mtx ; <nl> - / / contains the most recent optime of each member syncing to us <nl> - map < mongo : : OID , OpTime > _slaveMap ; <nl> - typedef map < mongo : : OID , Member * > OIDMemberMap ; <nl> - / / contains a pointer to each member , which we can look up by oid <nl> - OIDMemberMap _members ; <nl> - / / used to alert our thread of changes which need to be passed up the chain <nl> - boost : : condition _cond ; <nl> - / / used to indicate a position change which has not yet been pushed along <nl> - bool _positionChanged ; <nl> - / / used to indicate a connection change which has not yet been shook on <nl> - bool _handshakeNeeded ; <nl> - } ; <nl> - } <nl> mmm a / src / mongo / db / repl / write_concern . cpp <nl> ppp b / src / mongo / db / repl / write_concern . cpp <nl> namespace mongo { <nl> <nl> scoped_lock mylk ( _mutex ) ; <nl> <nl> - if ( last > _slaves [ ident ] ) { <nl> - _slaves [ ident ] = last ; <nl> - _dirty = true ; <nl> + _slaves [ ident ] = last ; <nl> + _dirty = true ; <nl> <nl> - if ( theReplSet & & theReplSet - > isPrimary ( ) ) { <nl> - theReplSet - > ghost - > updateSlave ( ident . obj [ " _id " ] . OID ( ) , last ) ; <nl> - } <nl> - <nl> - if ( ! _started ) { <nl> - / / start background thread here since we definitely need it <nl> - _started = true ; <nl> - go ( ) ; <nl> - } <nl> + if ( theReplSet & & theReplSet - > isPrimary ( ) ) { <nl> + theReplSet - > ghost - > updateSlave ( ident . obj [ " _id " ] . OID ( ) , last ) ; <nl> + } <nl> <nl> - _threadsWaitingForReplication . notify_all ( ) ; <nl> + if ( ! _started ) { <nl> + / / start background thread here since we definitely need it <nl> + _started = true ; <nl> + go ( ) ; <nl> } <nl> + <nl> + _threadsWaitingForReplication . notify_all ( ) ; <nl> } <nl> <nl> bool opReplicatedEnough ( OpTime op , BSONElement w ) { <nl> namespace mongo { <nl> <nl> const char * SlaveTracking : : NS = " local . slaves " ; <nl> <nl> - / / parse optimes from replUpdatePositionCommand and pass them to SyncSourceFeedback <nl> - void updateSlaveLocations ( BSONArray optimes ) { <nl> - BSONForEach ( elem , optimes ) { <nl> - BSONObj entry = elem . Obj ( ) ; <nl> - BSONObj id = BSON ( " _id " < < entry [ " _id " ] . OID ( ) ) ; <nl> - OpTime ot = entry [ " optime " ] . _opTime ( ) ; <nl> - BSONObj config = entry [ " config " ] . Obj ( ) ; <nl> - <nl> - / / update locally <nl> - slaveTracking . update ( id , config , " local . oplog . rs " , ot ) ; <nl> - if ( theReplSet & & ! theReplSet - > isPrimary ( ) ) { <nl> - / / pass along if we are not primary <nl> - theReplSet - > syncSourceFeedback . updateMap ( entry [ " _id " ] . 
OID ( ) , ot ) ; <nl> - / / for to be backwards compatible <nl> - theReplSet - > ghost - > send ( boost : : bind ( & GhostSync : : percolate , <nl> - theReplSet - > ghost , <nl> - id , <nl> - ot ) ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> void updateSlaveLocation ( CurOp & curop , const char * ns , OpTime lastOp ) { <nl> if ( lastOp . isNull ( ) ) <nl> return ; <nl> mmm a / src / mongo / db / repl / write_concern . h <nl> ppp b / src / mongo / db / repl / write_concern . h <nl> namespace mongo { <nl> <nl> class CurOp ; <nl> <nl> - void updateSlaveLocations ( BSONArray optimes ) ; <nl> - <nl> void updateSlaveLocation ( CurOp & curop , const char * oplog_ns , OpTime lastOp ) ; <nl> <nl> / * * @ return true if op has made it to w servers * / <nl> | Revert " SERVER - 6071 use command on local . slaves instead of cursor " | mongodb/mongo | 6486b4035c5ac52679eb3e1a034c925ccdd20deb | 2013-07-12T19:14:09Z |
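The reverted SyncSourceFeedback::run() above is a classic flag-driven worker loop: the background thread sleeps on a condition variable until _positionChanged or _handshakeNeeded is raised, does the corresponding work, and leaves the flag set when the work fails so the next wakeup retries it. A minimal self-contained C++11 sketch of that pattern follows; the names are illustrative stand-ins, not the mongo sources.

```cpp
#include <condition_variable>
#include <mutex>

class FeedbackLoop {
public:
    // Called from other threads (cf. updateMap / forwardSlaveHandshake above).
    void notifyPositionChanged() {
        std::lock_guard<std::mutex> lk(_mtx);
        _positionChanged = true;
        _cond.notify_all();
    }

    void notifyHandshakeNeeded() {
        std::lock_guard<std::mutex> lk(_mtx);
        _handshakeNeeded = true;
        _cond.notify_all();
    }

    // Body of the background thread (cf. SyncSourceFeedback::run above).
    void run() {
        for (;;) {
            std::unique_lock<std::mutex> lk(_mtx);
            _cond.wait(lk, [this] { return _positionChanged || _handshakeNeeded; });
            // On failure the flag stays raised, so the next pass retries.
            if (_handshakeNeeded)
                _handshakeNeeded = !performHandshake();
            if (_positionChanged)
                _positionChanged = !pushPositionUpstream();
        }
    }

private:
    // Stand-ins for replHandshake() / updateUpstream(); return false to retry.
    bool performHandshake() { return true; }
    bool pushPositionUpstream() { return true; }

    std::mutex _mtx;
    std::condition_variable _cond;
    bool _positionChanged = false;
    bool _handshakeNeeded = false;
};
```

As in the original, the work runs with the mutex held, which keeps the flag handling race-free at the cost of blocking notifiers while an update is in flight.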
mmm a / . travis . yml <nl> ppp b / . travis . yml <nl> os : <nl> - linux <nl> - osx <nl> <nl> - dist : bionic <nl> + dist : focal <nl> osx_image : xcode12 . 2 <nl> <nl> env : <nl> env : <nl> - secure : " OI9CUjj4lTb0HwwIZU5PbECU3hLlAL6KC8KsbwohG8 / O3j5fLcnmDsK4Ad9us5cC39sS11Jcd1kDP2qRcCuST / glVNhLkcjKkiQerOfd5nQ / qL4JYfz / 1mfP5mdpz9jHKzpLUIG + TXkbSTjP6VVmsb5KPT + 3pKEdRFZB + Pu9 + J8 = " <nl> - coverity_branch : coverity_scan <nl> <nl> + jobs : <nl> + include : <nl> + - env : libt_branch = RC_2_0 gui = true build_system = qmake <nl> + os : linux <nl> + <nl> notifications : <nl> email : <nl> on_success : change <nl> addons : <nl> apt : <nl> sources : <nl> # sources list : https : / / github . com / travis - ci / apt - source - safelist / blob / master / ubuntu . json <nl> - - sourceline : ' ppa : qbittorrent - team / qbt - libtorrent - travisci ' <nl> - - sourceline : ' deb https : / / apt . kitware . com / ubuntu / bionic main ' <nl> + - sourceline : ' deb https : / / apt . kitware . com / ubuntu / focal main ' <nl> key_url : ' https : / / apt . kitware . com / keys / kitware - archive - latest . asc ' <nl> packages : <nl> # packages list : https : / / github . com / travis - ci / apt - package - safelist / blob / master / ubuntu - trusty <nl> before_install : <nl> # TravisCI installs its own cmake to another location which ovverides other installations <nl> # if they don ' t call the new binary directly <nl> alias cmake = " / usr / bin / cmake " <nl> + <nl> + export LD_LIBRARY_PATH = " $ LD_LIBRARY_PATH : / usr / local / lib " <nl> fi <nl> - | <nl> if [ " $ TRAVIS_OS_NAME " = " osx " ] ; then <nl> install : <nl> ccache - V & & ccache - - show - stats & & ccache - - zero - stats <nl> fi <nl> - | <nl> - if [ " $ libt_branch " = " RC_1_2 " ] & & [ " $ TRAVIS_OS_NAME " = " linux " ] ; then <nl> - # Will install latest 1 . 2 . x daily build from the PPA <nl> - sudo apt - get - y install libtorrent - rasterbar - dev <nl> - fi <nl> - - | <nl> - if [ " $ libt_branch " = " RC_1_2 " ] & & [ " $ TRAVIS_OS_NAME " = " osx " ] ; then <nl> - # building libtorrent manually should be faster than using the official bottle <nl> - # because the bottle will also pull in a lot of updated dependencies and prolong the overall time <nl> + if [ " $ libt_branch " = " RC_1_2 " ] ; then <nl> pushd " $ HOME " <nl> git clone - - single - branch - - branch RC_1_2 https : / / github . com / arvidn / libtorrent . git <nl> cd libtorrent <nl> git checkout tags / v1 . 2 . 10 <nl> <nl> - cmake - DCMAKE_BUILD_TYPE = Release - DCMAKE_CXX_STANDARD = 14 - Ddeprecated - functions = OFF - DOPENSSL_ROOT_DIR = " $ openssl_root_path " . / <nl> + cmake \ <nl> + - DCMAKE_BUILD_TYPE = Release \ <nl> + - DCMAKE_CXX_STANDARD = 14 \ <nl> + - Ddeprecated - functions = OFF \ <nl> + - DOPENSSL_ROOT_DIR = " $ openssl_root_path " \ <nl> + . / <nl> + make <nl> + sudo make install <nl> + popd <nl> + elif [ " $ libt_branch " = " RC_2_0 " ] ; then <nl> + pushd " $ HOME " <nl> + git clone - - single - branch - - branch RC_2_0 https : / / github . com / arvidn / libtorrent . git <nl> + cd libtorrent <nl> + git checkout tags / v2 . 0 . 1 <nl> + git submodule update - - init - - recursive <nl> + <nl> + cmake \ <nl> + - DCMAKE_BUILD_TYPE = Release \ <nl> + - DCMAKE_CXX_STANDARD = 14 \ <nl> + - Ddeprecated - functions = ON \ <nl> + - DOPENSSL_ROOT_DIR = " $ openssl_root_path " \ <nl> + . / <nl> make <nl> sudo make install <nl> popd <nl> | Add libtorrent 2 . 
0 to TravisCI script | qbittorrent/qBittorrent | 990cc41e80385333ec99b63ed99f3fc30c6107ab | 2020-11-29T04:33:58Z |
mmm a / tensorflow / c / c_api . cc <nl> ppp b / tensorflow / c / c_api . cc <nl> TF_Graph : : TF_Graph ( ) <nl> refiner ( graph . versions ( ) . producer ( ) , graph . op_registry ( ) ) , <nl> delete_requested ( false ) , <nl> parent ( nullptr ) , <nl> - parent_inputs ( nullptr ) { } <nl> + parent_inputs ( nullptr ) { <nl> + / / Tell the shape refiner to also run shape inference on functions . <nl> + refiner . set_function_library_for_shape_inference ( & graph . flib_def ( ) ) ; <nl> + } <nl> <nl> TF_Graph * TF_NewGraph ( ) { return new TF_Graph ; } <nl> <nl> mmm a / tensorflow / core / BUILD <nl> ppp b / tensorflow / core / BUILD <nl> tf_cuda_library ( <nl> " common_runtime / scoped_allocator . cc " , <nl> " common_runtime / scoped_allocator_mgr . cc " , <nl> " common_runtime / shape_refiner . cc " , <nl> - " common_runtime / shape_refiner . h " , <nl> - " framework / versions . h " , <nl> + " common_runtime / graph_optimizer . h " , <nl> " graph / graph_constructor . cc " , # Depends on common_runtime . <nl> " graph / graph_def_builder_util . cc " , # Depends on common_runtime . <nl> " public / session . h " , <nl> " public / session_options . h " , <nl> " public / version . h " , <nl> - ] , <nl> + ] + CORE_CPU_BASE_HDRS , <nl> hdrs = CORE_CPU_BASE_HDRS , <nl> copts = tf_copts ( ) , <nl> deps = [ <nl> mmm a / tensorflow / core / common_runtime / eval_const_tensor . cc <nl> ppp b / tensorflow / core / common_runtime / eval_const_tensor . cc <nl> Status TryToInferTensorOutputFromInputShapes ( const Edge & edge , <nl> const Node * node = edge . src ( ) ; <nl> InferenceContext * c = refiner . GetContext ( node ) ; <nl> if ( c = = nullptr ) { <nl> - return errors : : FailedPrecondition ( " Node does not have context . " ) ; <nl> + / / An input without context is a soft failure ; we sometimes need to break <nl> + / / control flow loops by running shape inference on a node without first <nl> + / / adding its input . <nl> + return Status : : OK ( ) ; <nl> } <nl> <nl> if ( node - > type_string ( ) = = " Shape " ) { <nl> mmm a / tensorflow / core / common_runtime / function . cc <nl> ppp b / tensorflow / core / common_runtime / function . cc <nl> bool RemoveListArrayConverter ( Graph * g ) { <nl> return removed_any ; <nl> } <nl> <nl> - Status InstantiateFunctionCall ( const NodeDef & call_def , <nl> - FunctionLibraryRuntime & flr , <nl> - FunctionLibraryRuntime : : Handle * handle ) { <nl> - const string * func_name ; <nl> - AttrSlice attrs ; <nl> - <nl> - NameAttrList func ; <nl> + Status NameAndAttrsFromFunctionCall ( const NodeDef & call_def , <nl> + NameAttrList * function ) { <nl> if ( call_def . op ( ) = = " PartitionedCall " | | <nl> call_def . op ( ) = = " StatefulPartitionedCall " ) { <nl> - TF_RETURN_IF_ERROR ( GetNodeAttr ( call_def , " f " , & func ) ) ; <nl> - func_name = & func . name ( ) ; <nl> - attrs = AttrSlice ( & func . attr ( ) ) ; <nl> + TF_RETURN_IF_ERROR ( GetNodeAttr ( call_def , " f " , function ) ) ; <nl> } else { <nl> - func_name = & call_def . op ( ) ; <nl> - attrs = AttrSlice ( call_def ) ; <nl> + function - > set_name ( call_def . op ( ) ) ; <nl> + * function - > mutable_attr ( ) = call_def . attr ( ) ; <nl> } <nl> + return Status : : OK ( ) ; <nl> + } <nl> <nl> - return flr . 
Instantiate ( * func_name , attrs , handle ) ; <nl> + Status InstantiateFunctionCall ( const NodeDef & call_def , <nl> + FunctionLibraryRuntime & flr , <nl> + FunctionLibraryRuntime : : Handle * handle ) { <nl> + NameAttrList function ; <nl> + TF_RETURN_IF_ERROR ( NameAndAttrsFromFunctionCall ( call_def , & function ) ) ; <nl> + return flr . Instantiate ( function . name ( ) , AttrSlice ( & function . attr ( ) ) , handle ) ; <nl> } <nl> <nl> namespace { <nl> mmm a / tensorflow / core / common_runtime / function . h <nl> ppp b / tensorflow / core / common_runtime / function . h <nl> inline bool ExpandInlineFunctions ( FunctionLibraryRuntime * lib , Graph * graph ) { <nl> return ExpandInlineFunctions ( lib , graph , ExpandInlineFunctionsOptions ( ) ) ; <nl> } <nl> <nl> + / / Extracts function name and attributes from ` call_def ` <nl> + / / ` call_def ` can be a native function call ( where the op type is the function <nl> + / / name ) or a call through PartitionedCall / StatefulPartitionedCall . <nl> + Status NameAndAttrsFromFunctionCall ( const NodeDef & call_def , <nl> + NameAttrList * function ) ; <nl> + <nl> / / Extracts function name and attributes from ` call_def ` and invokes <nl> / / flr - > Instantiate ( name , attrs , handle ) . <nl> / / ` call_def ` can be a native function call ( where the op type is the function <nl> mmm a / tensorflow / core / common_runtime / shape_refiner . cc <nl> ppp b / tensorflow / core / common_runtime / shape_refiner . cc <nl> limitations under the License . <nl> # include < vector > <nl> <nl> # include " tensorflow / core / common_runtime / eval_const_tensor . h " <nl> + # include " tensorflow / core / common_runtime / function . h " <nl> # include " tensorflow / core / framework / bounds_check . h " <nl> # include " tensorflow / core / framework / common_shape_fns . h " <nl> # include " tensorflow / core / framework / node_def . pb . h " <nl> Status InferShapesForFunctionSubNode ( const Node * node , ShapeRefiner * refiner , <nl> / / Maybe we won ' t support recursive functions at all in TF , because of <nl> / / other maintainability issues . <nl> Status ShapeRefiner : : InferShapesForFunction ( <nl> - const tensorflow : : FunctionDef * function_def , bool keep_nested_shapes , <nl> - ExtendedInferenceContext * outer_context ) { <nl> + const FunctionDef * function_def , AttrSlice attributes , <nl> + bool keep_nested_shapes , ExtendedInferenceContext * outer_context ) { <nl> const Graph * graph ; <nl> auto it = functions_ . find ( function_def ) ; <nl> if ( it ! = functions_ . 
end ( ) ) { <nl> Status ShapeRefiner : : InferShapesForFunction ( <nl> } else { <nl> InstantiationResult result ; <nl> TF_RETURN_IF_ERROR ( InstantiateFunction ( <nl> - * function_def , outer_context - > get_context ( ) - > attrs ( ) , <nl> + * function_def , attributes , <nl> [ this ] ( const string & op , const OpDef * * sig ) { <nl> return this - > function_library_ - > LookUpOpDef ( op , sig ) ; <nl> } , <nl> Status ShapeRefiner : : AddNode ( const Node * node ) { <nl> std : : vector < ShapeHandle > input_shapes ( node - > num_inputs ( ) ) ; <nl> std : : vector < std : : unique_ptr < std : : vector < ShapeAndType > > > <nl> input_handle_shapes_and_types ( node - > num_inputs ( ) ) ; <nl> + std : : vector < bool > inputs_missing_context ( node - > num_inputs ( ) ) ; <nl> for ( const Edge * e : node - > in_edges ( ) ) { <nl> if ( e - > IsControlEdge ( ) ) continue ; <nl> <nl> + if ( e - > dst_input ( ) < 0 ) { <nl> + return tensorflow : : errors : : Internal ( <nl> + " Index " , e - > dst_input ( ) , " is negative but not a control edge . " ) ; <nl> + } <nl> + <nl> const Node * input = e - > src ( ) ; <nl> auto it = node_to_context_ . find ( input ) ; <nl> if ( it = = node_to_context_ . end ( ) ) { <nl> - return errors : : FailedPrecondition ( <nl> - " Input " , e - > dst_input ( ) , " ( ' " , input - > name ( ) , " ' ) for ' " , <nl> - node - > name ( ) , " ' was not previously added to ShapeRefiner . " ) ; <nl> + / / v1 control flow adds loops to the graph ; we have to break them <nl> + / / somewhere , so we ' ll ignore this input and leave its shape undefined . <nl> + input_nodes [ e - > dst_input ( ) ] = input ; <nl> + / / We don ' t have a context yet . We ' ll make one below and use that to <nl> + / / generate an unknown shape . An uninitialized ShapeHandle is already an <nl> + / / unknown shape , but there are debug checks that each input was <nl> + / / explicitly set and satisfying them isn ' t very costly . <nl> + inputs_missing_context [ e - > dst_input ( ) ] = true ; <nl> + continue ; <nl> } <nl> <nl> InferenceContext * c = it - > second - > get_context ( ) ; <nl> - DCHECK_GE ( e - > dst_input ( ) , 0 ) ; <nl> input_nodes [ e - > dst_input ( ) ] = input ; <nl> input_shapes [ e - > dst_input ( ) ] = c - > output ( e - > src_output ( ) ) ; <nl> <nl> Status ShapeRefiner : : AddNode ( const Node * node ) { <nl> return c - > construction_status ( ) ; <nl> } <nl> <nl> + for ( unsigned int i = 0 ; i < input_shapes . size ( ) ; + + i ) { <nl> + if ( inputs_missing_context [ i ] ) { <nl> + c - > SetInput ( i , c - > UnknownShape ( ) ) ; <nl> + } <nl> + } <nl> + <nl> std : : unique_ptr < ExtendedInferenceContext > ec ( <nl> new ExtendedInferenceContext ( std : : move ( c ) , node ) ) ; <nl> <nl> Status ShapeRefiner : : RunShapeFn ( const Node * node , <nl> / / Run the shape inference function , and return if there was an error . <nl> / / Capture as lambda , because we might need to re - run inference later on . <nl> auto run_inference_lambda = [ & ] ( ) { <nl> - if ( function_library_ & & op_reg_data - > is_function_op ) { <nl> - / / Special inference logic for user - defined functions . <nl> - <nl> - auto * func_def = function_library_ - > Find ( op_reg_data - > op_def . name ( ) ) ; <nl> - if ( func_def ) { <nl> - return InferShapesForFunction ( func_def , keep_nested_shape_inferences_ , <nl> - ec ) ; <nl> + if ( function_library_ & & IsFunctionCall ( * function_library_ , * node ) ) { <nl> + bool disable_shape_inference ; <nl> + if ( ! 
GetNodeAttr ( AttrSlice ( node - > def ( ) ) , " _disable_call_shape_inference " , <nl> + & disable_shape_inference ) <nl> + . ok ( ) | | <nl> + ! disable_shape_inference ) { <nl> + / / Special inference logic for user - defined functions . <nl> + NameAttrList function ; <nl> + TF_RETURN_IF_ERROR ( <nl> + NameAndAttrsFromFunctionCall ( node - > def ( ) , & function ) ) ; <nl> + const FunctionDef * function_def = <nl> + function_library_ - > Find ( function . name ( ) ) ; <nl> + if ( function_def ! = nullptr ) { <nl> + / / The constant Tensor map we have for the outside context is not <nl> + / / valid inside the function . We need to push a new clean map while <nl> + / / performing inference on the function body . <nl> + auto const_tensor_map_copy = const_tensor_map_ ; <nl> + const_tensor_map_ . clear ( ) ; <nl> + Status function_inference_status = <nl> + InferShapesForFunction ( function_def , AttrSlice ( & function . attr ( ) ) , <nl> + keep_nested_shape_inferences_ , ec ) ; <nl> + const_tensor_map_ = const_tensor_map_copy ; <nl> + return function_inference_status ; <nl> + } <nl> } <nl> } <nl> <nl> mmm a / tensorflow / core / common_runtime / shape_refiner . h <nl> ppp b / tensorflow / core / common_runtime / shape_refiner . h <nl> class ShapeRefiner { <nl> / / - outer_context will contain output shapes inferred from input shapes <nl> / / - outer_context will contain nested inferences collection , iff <nl> / / keep_nested_shapes is true <nl> - Status InferShapesForFunction ( const tensorflow : : FunctionDef * function_def , <nl> - bool keep_nested_shapes , <nl> + Status InferShapesForFunction ( const FunctionDef * function_def , <nl> + AttrSlice attributes , bool keep_nested_shapes , <nl> ExtendedInferenceContext * outer_context ) ; <nl> <nl> / / Attempts to evaluate the ' dst_idx ' - th input to ' node ' . If the input edge <nl> mmm a / tensorflow / core / common_runtime / shape_refiner_test . cc <nl> ppp b / tensorflow / core / common_runtime / shape_refiner_test . cc <nl> TEST_F ( ShapeRefinerTest , MatMul ) { <nl> EXPECT_SHAPE ( " [ 2 , 2 ] " , m , mm , 0 ) ; <nl> } <nl> <nl> - TEST_F ( ShapeRefinerTest , InvalidOrder ) { <nl> - ShapeRefiner m ( TF_GRAPH_DEF_VERSION , OpRegistry : : Global ( ) ) ; <nl> - Scope root = Scope : : NewRootScope ( ) ; <nl> - auto a = ops : : Const ( root , { { 1 . 0f } , { 2 . 0f } } ) ; <nl> - auto b = ops : : Const ( root , { { 1 . 0f , 2 . 0f } } ) ; <nl> - auto mm = ops : : MatMul ( root , a , b ) ; <nl> - <nl> - Status s = m . AddNode ( mm . node ( ) ) ; <nl> - ASSERT_FALSE ( s . ok ( ) ) ; <nl> - ASSERT_EQ ( <nl> - " Input 0 ( ' Const ' ) for ' MatMul ' was not previously added to " <nl> - " ShapeRefiner . " , <nl> - s . error_message ( ) ) ; <nl> - } <nl> - <nl> TEST_F ( ShapeRefinerTest , BadShapes ) { <nl> ShapeRefiner m ( TF_GRAPH_DEF_VERSION , OpRegistry : : Global ( ) ) ; <nl> Scope root = Scope : : NewRootScope ( ) ; <nl> mmm a / tensorflow / core / ops / functional_ops . cc <nl> ppp b / tensorflow / core / ops / functional_ops . cc <nl> REGISTER_OP ( " For " ) <nl> . Attr ( " body : func " ) <nl> . SetShapeFn ( shape_inference : : UnknownShape ) ; <nl> <nl> + / / While no useful shape function is registered for function call ops directly , <nl> + / / ShapeRefiner is run by default to perform shape inference . <nl> REGISTER_OP ( " PartitionedCall " ) <nl> . Input ( " args : Tin " ) <nl> . Output ( " output : Tout " ) <nl> mmm a / tensorflow / python / eager / function . py <nl> ppp b / tensorflow / python / eager / function . 
py <nl> def call ( self , ctx , args ) : <nl> if executing_eagerly : <nl> return outputs <nl> else : <nl> + # TODO ( b / 128924522 ) : This additional set_shape should not be <nl> + # necessary . ShapeRefiner likely needs to inspect handle_data . Remove this <nl> + # once that ' s done . <nl> for i , shape in enumerate ( self . _output_shapes ) : <nl> outputs [ i ] . set_shape ( shape ) <nl> for i , func_graph_output in enumerate ( self . _func_graph_outputs ) : <nl> mmm a / tensorflow / python / eager / function_test . py <nl> ppp b / tensorflow / python / eager / function_test . py <nl> def f ( ) : <nl> self . assertEqual ( var_t . shape , tensor_shape . TensorShape ( [ 2 , 2 ] ) ) <nl> <nl> def testShapeInferenceForMoreSpecificInput ( self ) : <nl> - self . skipTest ( ' b / 124219898 ' ) <nl> <nl> def f ( a ) : <nl> return array_ops . reshape ( a , [ - 1 , 3 ] ) <nl> def f ( a ) : <nl> signature = [ tensor_spec . TensorSpec ( None , dtypes . float32 ) ] <nl> compiled = def_function . function ( f , input_signature = signature ) <nl> <nl> - with ops . Graph ( ) . as_default ( ) : <nl> + @ def_function . function <nl> + def use_f ( ) : <nl> inputs = array_ops . zeros ( [ 10 , 10 , 3 ] ) <nl> self . assertAllEqual ( f ( inputs ) . shape , compiled ( inputs ) . shape ) <nl> <nl> + use_f ( ) <nl> + <nl> def testFuncListAttr ( self ) : <nl> <nl> @ function . defun <nl> def func ( x ) : <nl> # Tracing more than twice per input doesn ' t make sense . <nl> self . assertLess ( trace_count [ 0 ] , 13 ) <nl> <nl> + @ test_util . run_in_graph_and_eager_modes <nl> + def test_shape_inference_with_symbolic_shapes ( self ) : <nl> + <nl> + @ def_function . function <nl> + def _uses_symbolic_shapes ( w , x , y ) : <nl> + x = array_ops . identity ( x , name = ' name_collision ' ) <nl> + x = array_ops . transpose ( x , [ 1 , 0 , 2 ] ) <nl> + x_batch = array_ops . shape ( x ) [ 0 ] <nl> + y_batch = array_ops . shape ( y ) [ 0 ] <nl> + y * = w <nl> + n = y_batch / / x_batch <nl> + return array_ops . reshape ( y , [ n , x_batch , - 1 ] ) <nl> + <nl> + conc = _uses_symbolic_shapes . get_concrete_function ( <nl> + tensor_spec . TensorSpec ( None , dtypes . float32 ) , <nl> + tensor_spec . TensorSpec ( None , dtypes . float32 ) , <nl> + tensor_spec . TensorSpec ( None , dtypes . float32 ) ) <nl> + <nl> + @ def_function . function <nl> + def _call_concrete ( ) : <nl> + c = constant_op . constant ( 1 . ) <nl> + array_ops . identity ( c , name = ' name_collision ' ) <nl> + output1 = conc ( array_ops . ones ( [ 2 ] ) , <nl> + array_ops . ones ( [ 5 , 4 , 2 ] ) , <nl> + array_ops . ones ( [ 20 , 2 ] ) ) <nl> + self . assertEqual ( [ 5 , 4 , 2 ] , output1 . shape ) <nl> + output2 = conc ( array_ops . ones ( [ 3 ] ) , <nl> + array_ops . ones ( [ 5 , 4 , 3 ] ) , <nl> + array_ops . ones ( [ 40 , 3 ] ) ) <nl> + self . assertEqual ( [ 10 , 4 , 3 ] , output2 . shape ) <nl> + return output1 , output2 <nl> + <nl> + output1 , output2 = _call_concrete ( ) <nl> + self . assertEqual ( ( 5 , 4 , 2 ) , self . evaluate ( output1 ) . shape ) <nl> + self . assertEqual ( ( 10 , 4 , 3 ) , self . evaluate ( output2 ) . shape ) <nl> + <nl> <nl> class MultiDeviceTest ( test . TestCase , parameterized . TestCase ) : <nl> <nl> mmm a / tensorflow / python / framework / function . py <nl> ppp b / tensorflow / python / framework / function . py <nl> def _parse_kwargs_as_attrs ( func_name , * * kwargs ) : <nl> if noinline is not None : <nl> attrs [ " _noinline " ] = attr_value_pb2 . 
AttrValue ( b = bool ( noinline ) ) <nl> <nl> + # For compatibility with previous behavior , Defun does not perform shape <nl> + # inference through its function call operations . <nl> + attrs [ " _disable_call_shape_inference " ] = attr_value_pb2 . AttrValue ( b = True ) <nl> + <nl> compiled = kwargs . pop ( " compiled " , None ) <nl> separate_compiled_gradients = kwargs . pop ( " separate_compiled_gradients " , None ) <nl> if compiled is not None : <nl> mmm a / tensorflow / python / framework / ops_test . py <nl> ppp b / tensorflow / python / framework / ops_test . py <nl> def test ( ) : <nl> new_input1 = constant_op . constant ( 1 . 0 ) <nl> new_input2 = constant_op . constant ( True ) <nl> <nl> + # Clear output shapes to bypass shape checking . <nl> + while_op . _set_shape_list_attr ( " output_shapes " , [ ] ) <nl> while_op . _set_type_list_attr ( " T " , <nl> [ t . dtype for t in while_op . inputs ] + <nl> [ new_input1 . dtype , new_input2 . dtype ] ) <nl> mmm a / tensorflow / python / saved_model / load_test . py <nl> ppp b / tensorflow / python / saved_model / load_test . py <nl> def func ( x ) : <nl> <nl> self . assertEqual ( [ 2 ] , root . f ( [ 2 ] ) . numpy ( ) ) <nl> <nl> + def test_shapes_available ( self , cycles ) : <nl> + <nl> + @ def_function . function ( input_signature = [ <nl> + tensor_spec . TensorSpec ( [ None , 3 ] , dtypes . int32 ) , <nl> + tensor_spec . TensorSpec ( [ None , 2 ] , dtypes . int32 ) <nl> + ] ) <nl> + def func ( x , y ) : <nl> + return array_ops . concat ( [ x , y ] , axis = 1 ) <nl> + <nl> + root = tracking . AutoTrackable ( ) <nl> + root . f = func <nl> + <nl> + root = self . cycle ( root , cycles ) <nl> + <nl> + imported_graph = root . f . get_concrete_function ( ) . graph <nl> + input_x , input_y = imported_graph . inputs <nl> + self . assertEqual ( [ None , 3 ] , input_x . shape . as_list ( ) ) <nl> + self . assertEqual ( [ None , 2 ] , input_y . shape . as_list ( ) ) <nl> + output , = imported_graph . outputs <nl> + self . assertEqual ( [ None , 5 ] , output . shape . as_list ( ) ) <nl> + <nl> def test_dense_features_layer ( self , cycles ) : <nl> columns = [ feature_column_v2 . numeric_column ( " x " ) , <nl> feature_column_v2 . numeric_column ( " y " ) ] <nl> | Perform proper shape inference for call operations | tensorflow/tensorflow | ad36259ac0e923213172176493e306212f163e5d | 2019-03-19T22:38:16Z |
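Two details of the ShapeRefiner change above are worth calling out: function-call nodes now get shape inference run on their function body unless the node carries the _disable_call_shape_inference attribute (which function.py sets for Defun to preserve its old behavior), and the constant-tensor cache is saved, cleared, and restored around the nested inference, because constants folded in the outer graph are not valid inside the function body. Below is a hedged, stand-alone sketch of that save/clear/restore discipline; the types are stand-ins, not the TensorFlow internals.

```cpp
#include <map>
#include <string>
#include <utility>

// Stand-in for tensorflow::Status, just enough for the sketch.
struct Status {
    bool ok;
};

class Refiner {
public:
    Status InferThroughFunctionCall(bool disable_call_shape_inference) {
        if (disable_call_shape_inference)
            return Status{true};  // caller opted out, e.g. Defun graphs
        // Outer-graph constants are meaningless inside the function body, so
        // push a clean map for the recursion and restore it afterwards.
        auto saved = std::move(const_tensor_map_);
        const_tensor_map_.clear();
        Status s = inferFunctionBody();
        const_tensor_map_ = std::move(saved);
        return s;
    }

private:
    Status inferFunctionBody() { return Status{true}; }  // placeholder body
    std::map<std::string, int> const_tensor_map_;  // stand-in value type
};
```

Restoring the saved map even when the nested inference fails keeps the outer context intact, which mirrors how the patch copies const_tensor_map_ before the recursive call and assigns it back afterwards.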
mmm a / tools / deployment / chocolatey / tools / chocolateyinstall . ps1 <nl> ppp b / tools / deployment / chocolatey / tools / chocolateyinstall . ps1 <nl> if ( $ installService ) { <nl> } <nl> <nl> # Add osquery binary path to machines path for ease of use . <nl> - $ oldPath = [ System . Environment ] : : GetEnvironmentVariable ( ' Path ' , ' Machine ' ) <nl> - if ( - not ( $ oldPath - imatch [ regex ] : : escape ( $ targetFolder ) ) ) { <nl> - $ newPath = $ oldPath <nl> - if ( $ oldPath [ - 1 ] - eq ' ; ' ) { <nl> - $ newPath = $ newPath + $ targetFolder <nl> - } else { <nl> - $ newPath = $ newPath + ' ; ' + $ targetFolder <nl> - } <nl> - [ System . Environment ] : : SetEnvironmentVariable ( ' Path ' , $ newPath , ' Machine ' ) <nl> - } <nl> + Add - ToSystemPath $ targetFolder <nl> mmm a / tools / provision . ps1 <nl> ppp b / tools / provision . ps1 <nl> function Main { <nl> } <nl> <nl> $ out = Install - ChocoPackage ' wixtoolset ' ' ' @ ( ' - - version ' , ' 3 . 10 . 3 . 300701 ' ) <nl> + # Add the WiX binary path to the system path for use <nl> + Add - ToSystemPath ' C : \ Program Files ( x86 ) \ WiX Toolset v3 . 10 \ bin ' <nl> <nl> # Convenience variable for accessing Python <nl> [ Environment ] : : SetEnvironmentVariable ( " OSQUERY_PYTHON_PATH " , $ pythonInstall , " Machine " ) <nl> mmm a / tools / provision / chocolatey / osquery_utils . ps1 <nl> ppp b / tools / provision / chocolatey / osquery_utils . ps1 <nl> function Get - OsqueryBuildPath { <nl> return $ ret <nl> } <nl> <nl> + # Helper function to add to the SYSTEM path <nl> + function Add - ToSystemPath { <nl> + param ( <nl> + [ string ] $ targetFolder = ' ' <nl> + ) <nl> + <nl> + $ oldPath = [ System . Environment ] : : GetEnvironmentVariable ( ' Path ' , ' Machine ' ) <nl> + if ( - not ( $ oldPath - imatch [ regex ] : : escape ( $ targetFolder ) ) ) { <nl> + $ newPath = $ oldPath <nl> + if ( $ oldPath [ - 1 ] - eq ' ; ' ) { <nl> + $ newPath = $ newPath + $ targetFolder <nl> + } else { <nl> + $ newPath = $ newPath + ' ; ' + $ targetFolder <nl> + } <nl> + [ System . Environment ] : : SetEnvironmentVariable ( ' Path ' , $ newPath , ' Machine ' ) <nl> + } <nl> + } <nl> + <nl> # A helper function for starting and waiting on processes in powershell <nl> function Start - OsqueryProcess { <nl> param ( <nl> | build : adding helper function to add paths to SYSTEM path ( ) | osquery/osquery | 9726d41c7cbf73a974657b3ef9646b9ec059310e | 2017-12-05T18:43:20Z |
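The Add-ToSystemPath helper extracted above boils down to an idempotent append: skip if the folder is already on PATH, otherwise append it, inserting a ';' separator only when one is missing. The same string logic, sketched in C++ purely for illustration (the real helper is PowerShell, matches case-insensitively with a regex-escaped pattern, and writes the Machine-scope environment value):

```cpp
#include <string>

// Returns `path` with `target` appended once; a no-op if already present.
std::string addToPath(std::string path, const std::string& target) {
    if (path.find(target) != std::string::npos)
        return path;  // already on PATH; avoid duplicate entries
    if (!path.empty() && path.back() != ';')
        path += ';';
    return path + target;
}
```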
mmm a / dbms / src / Compression / CompressionCodecLZ4 . cpp <nl> ppp b / dbms / src / Compression / CompressionCodecLZ4 . cpp <nl> namespace DB <nl> namespace ErrorCodes <nl> { <nl> extern const int CANNOT_COMPRESS ; <nl> - extern const int CANNOT_DECOMPRESS ; <nl> extern const int ILLEGAL_SYNTAX_FOR_CODEC_TYPE ; <nl> extern const int ILLEGAL_CODEC_PARAMETER ; <nl> } <nl> UInt32 CompressionCodecLZ4 : : doCompressData ( const char * source , UInt32 source_si <nl> <nl> void CompressionCodecLZ4 : : doDecompressData ( const char * source , UInt32 source_size , char * dest , UInt32 uncompressed_size ) const <nl> { <nl> - / / if ( LZ4_decompress_safe ( source , dest , source_size , uncompressed_size ) < 0 ) <nl> - / / throw Exception ( " Cannot LZ4_decompress_fast " , ErrorCodes : : CANNOT_DECOMPRESS ) ; <nl> LZ4 : : decompress ( source , dest , source_size , uncompressed_size , lz4_stat ) ; <nl> } <nl> <nl> | Cosmetics | ClickHouse/ClickHouse | 3501f633e44d413135f868f6162e853759c55672 | 2019-04-25T18:35:58Z |
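The comment deleted from doDecompressData above referenced the stock LZ4_decompress_safe path that ClickHouse replaced with its own LZ4::decompress (which collects runtime statistics in lz4_stat); with that fallback gone, the CANNOT_DECOMPRESS error code became unused and is dropped as well. For reference, a minimal sketch of the plain liblz4 call the removed comment alluded to:

```cpp
#include <lz4.h>
#include <stdexcept>

void decompressBlock(const char * src, int compressed_size, char * dst, int dst_capacity)
{
    // LZ4_decompress_safe returns the decompressed byte count, or < 0 on error.
    int n = LZ4_decompress_safe(src, dst, compressed_size, dst_capacity);
    if (n < 0)
        throw std::runtime_error("Cannot decompress LZ4 block");
}
```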
mmm a / . github / workflows / snap . yml <nl> ppp b / . github / workflows / snap . yml <nl> jobs : <nl> <nl> - name : First set up . <nl> run : | <nl> - # Workaround for permanent problems with third - party repository keys <nl> - sudo rm - rf / etc / apt / sources . list . d / * <nl> - <nl> - sudo apt - get update <nl> sudo snap install - - classic snapcraft <nl> <nl> # Workaround for snapcraft <nl> # See https : / / forum . snapcraft . io / t / 13258 <nl> sudo chown root : root / <nl> <nl> + sudo usermod - aG lxd $ USER <nl> + <nl> + sudo snap run lxd init - - auto <nl> + sudo snap run lxd waitready <nl> + <nl> - name : Telegram Desktop snap build . <nl> - run : sudo snap run snapcraft - - destructive - mode <nl> + run : sg lxd - c ' snap run snapcraft - - use - lxd ' <nl> <nl> - name : Move artifact . <nl> if : env . UPLOAD_ARTIFACT = = ' true ' <nl> | Use lxd for snapcraft | telegramdesktop/tdesktop | 3a66d317ee105d48cb371aa119852f272f25cbd1 | 2020-08-19T05:58:24Z |
mmm a / tensorflow / compiler / xla / service / algebraic_simplifier . cc <nl> ppp b / tensorflow / compiler / xla / service / algebraic_simplifier . cc <nl> Status AlgebraicSimplifierVisitor : : HandlePad ( HloInstruction * pad ) { <nl> / / slice instruction should all have the same layout . <nl> TF_RETURN_IF_ERROR ( LayoutUtil : : CopyLayoutBetweenShapes ( <nl> pad - > shape ( ) , nonzero_pad - > mutable_shape ( ) ) ) ; <nl> + simplifier_ - > UpdateLayout ( nonzero_pad - > mutable_shape ( ) ) ; <nl> <nl> / / Second , construct the slice instruction to perform the negative padding . <nl> std : : vector < int64 > start_indices ; <nl> Status AlgebraicSimplifierVisitor : : HandlePad ( HloInstruction * pad ) { <nl> MakeSliceHlo ( nonzero_pad , start_indices , end_indices , strides ) ) ; <nl> TF_RETURN_IF_ERROR ( LayoutUtil : : CopyLayoutBetweenShapes ( <nl> pad - > shape ( ) , slice - > mutable_shape ( ) ) ) ; <nl> + simplifier_ - > UpdateLayout ( slice - > mutable_shape ( ) ) ; <nl> <nl> / / Verify that the slice shape matches the pad shape . <nl> - TF_RET_CHECK ( ShapeUtil : : Equal ( slice - > shape ( ) , pad - > shape ( ) ) ) ; <nl> + auto equal = Shape : : Equal ( ) ; <nl> + if ( ! options_ . is_layout_sensitive ( ) ) { <nl> + equal . IgnoreTilesInLayout ( ) ; <nl> + } <nl> + TF_RET_CHECK ( equal ( slice - > shape ( ) , pad - > shape ( ) ) ) ; <nl> <nl> return ReplaceInstruction ( pad , slice ) ; <nl> } <nl> | [ XLA ] Update layout tiling in some algebraic simplifier cases . | tensorflow/tensorflow | 6ddc7f8d99435c352b55aef79e7674973d6d3512 | 2020-06-09T05:30:48Z |
mmm a / LICENSE <nl> ppp b / LICENSE <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + The MIT License ( MIT ) <nl> + <nl> + Copyright ( c ) 2015 Gabi Melman . <nl> + <nl> + Permission is hereby granted , free of charge , to any person obtaining a copy <nl> + of this software and associated documentation files ( the " Software " ) , to deal <nl> + in the Software without restriction , including without limitation the rights <nl> + to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> + copies of the Software , and to permit persons to whom the Software is <nl> + furnished to do so , subject to the following conditions : <nl> + <nl> + The above copyright notice and this permission notice shall be included in <nl> + all copies or substantial portions of the Software . <nl> + <nl> + THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> + IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> + LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> + THE SOFTWARE . <nl> + <nl> mmm a / bench / boost - bench - mt . cpp <nl> ppp b / bench / boost - bench - mt . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # include < thread > <nl> # include < vector > <nl> # include < atomic > <nl> mmm a / bench / boost - bench . cpp <nl> ppp b / bench / boost - bench . 
cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # include < boost / log / core . hpp > <nl> # include < boost / log / trivial . hpp > <nl> mmm a / bench / easylogging - bench - mt . cpp <nl> ppp b / bench / easylogging - bench - mt . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # include < thread > <nl> # include < vector > <nl> # include < atomic > <nl> mmm a / bench / easylogging - bench . cpp <nl> ppp b / bench / easylogging - bench . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> + <nl> # include " easylogging + + . h " <nl> <nl> _INITIALIZE_EASYLOGGINGPP <nl> mmm a / bench / g2log - async . cpp <nl> ppp b / bench / g2log - async . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # include < thread > <nl> # include < vector > <nl> # include < atomic > <nl> mmm a / bench / glog - bench - mt . cpp <nl> ppp b / bench / glog - bench - mt . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # include < thread > <nl> # include < vector > <nl> # include < atomic > <nl> mmm a / bench / glog - bench . cpp <nl> ppp b / bench / glog - bench . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # include " glog / logging . h " <nl> <nl> mmm a / bench / spdlog - async . cpp <nl> ppp b / bench / spdlog - async . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # include < thread > <nl> # include < vector > <nl> mmm a / bench / spdlog - bench - mt . cpp <nl> ppp b / bench / spdlog - bench - mt . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # include < thread > <nl> # include < vector > <nl> # include < atomic > <nl> mmm a / bench / spdlog - bench . cpp <nl> ppp b / bench / spdlog - bench . cpp <nl> <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # include " spdlog / spdlog . h " <nl> <nl> mmm a / example / bench . cpp <nl> ppp b / example / bench . cpp <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> / / <nl> / / bench . cpp : spdlog benchmarks <nl> mmm a / example / example . cpp <nl> ppp b / example / example . cpp <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> / / <nl> / / spdlog usage example <nl> / / <nl> mmm a / example / utils . h <nl> ppp b / example / utils . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / async_logger . h <nl> ppp b / include / spdlog / async_logger . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / common . h <nl> ppp b / include / spdlog / common . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / details / async_log_helper . h <nl> ppp b / include / spdlog / details / async_log_helper . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> / / async log helper : <nl> / / Process logs asynchronously using a back thread . <nl> inline bool spdlog : : details : : async_log_helper : : process_next_msg ( log_clock : : time_ <nl> inline void spdlog : : details : : async_log_helper : : handle_flush_interval ( log_clock : : time_point & now , log_clock : : time_point & last_flush ) <nl> { <nl> auto should_flush = _flush_requested | | ( _flush_interval_ms ! = std : : chrono : : milliseconds : : zero ( ) & & now - last_flush > = _flush_interval_ms ) ; <nl> - if ( should_flush ) <nl> + if ( should_flush ) <nl> { <nl> for ( auto & s : _sinks ) <nl> s - > flush ( ) ; <nl> mmm a / include / spdlog / details / async_logger_impl . h <nl> ppp b / include / spdlog / details / async_logger_impl . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> + / / Async Logger implementation <nl> + / / Use an async_sink ( queue per logger ) to perform the logging in a worker thread <nl> <nl> # include " . / async_log_helper . h " <nl> <nl> - / / <nl> - / / Async Logger implementation <nl> - / / Use single async_sink ( queue ) to perform the logging in a worker thread <nl> - / / <nl> - <nl> <nl> template < class It > <nl> inline spdlog : : async_logger : : async_logger ( const std : : string & logger_name , <nl> mmm a / include / spdlog / details / file_helper . h <nl> ppp b / include / spdlog / details / file_helper . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . 
* / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / details / line_logger . h <nl> ppp b / include / spdlog / details / line_logger . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> # pragma once <nl> # include < type_traits > <nl> # include " . . / common . h " <nl> mmm a / include / spdlog / details / log_msg . h <nl> ppp b / include / spdlog / details / log_msg . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / details / logger_impl . h <nl> ppp b / include / spdlog / details / logger_impl . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - # pragma once <nl> / / <nl> - / / Logger implementation <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> / / <nl> <nl> - # include " . / line_logger . h " <nl> + # pragma once <nl> <nl> + # include " . / line_logger . h " <nl> <nl> / / create logger with given name , sinks and the default pattern formatter <nl> / / all other ctors will call this one <nl> mmm a / include / spdlog / details / mpmc_bounded_q . h <nl> ppp b / include / spdlog / details / mpmc_bounded_q . h <nl> should not be interpreted as representing official policies , either expressed or <nl> / * <nl> The code in its current form adds the license below : <nl> <nl> - spdlog - an extremely fast and easy to use c + + 11 logging library . <nl> - Copyright ( c ) 2014 Gabi Melman . <nl> - <nl> - Permission is hereby granted , free of charge , to any person obtaining <nl> - a copy of this software and associated documentation files ( the <nl> - " Software " ) , to deal in the Software without restriction , including <nl> - without limitation the rights to use , copy , modify , merge , publish , <nl> - distribute , sublicense , and / or sell copies of the Software , and to <nl> - permit persons to whom the Software is furnished to do so , subject to <nl> - the following conditions : <nl> - <nl> - The above copyright notice and this permission notice shall be <nl> - included in all copies or substantial portions of the Software . <nl> - <nl> - THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , <nl> - EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF <nl> - MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . <nl> - IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY <nl> - CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , <nl> - TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE <nl> - SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . <nl> + Copyright ( c ) 2015 Gabi Melman . <nl> + Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + <nl> * / <nl> <nl> # pragma once <nl> mmm a / include / spdlog / details / null_mutex . h <nl> ppp b / include / spdlog / details / null_mutex . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . 
* / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / details / os . h <nl> ppp b / include / spdlog / details / os . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> # include < string > <nl> mmm a / include / spdlog / details / pattern_formatter_impl . h <nl> ppp b / include / spdlog / details / pattern_formatter_impl . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / details / registry . h <nl> ppp b / include / spdlog / details / registry . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> + <nl> / / Loggers registy of unique name - > logger pointer <nl> / / An attempt to create a logger with an alreasy existing name will be ignored <nl> / / If user requests a non existing logger , nullptr will be returned <nl> mmm a / include / spdlog / details / spdlog_impl . h <nl> ppp b / include / spdlog / details / spdlog_impl . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / formatter . h <nl> ppp b / include / spdlog / formatter . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> # pragma once <nl> <nl> # include " details / log_msg . h " <nl> mmm a / include / spdlog / logger . h <nl> ppp b / include / spdlog / logger . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / android_sink . h <nl> ppp b / include / spdlog / sinks / android_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * Copyright ( c ) 2015 Ruslan Baratov . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / base_sink . h <nl> ppp b / include / spdlog / sinks / base_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> / / <nl> mmm a / include / spdlog / sinks / dist_sink . h <nl> ppp b / include / spdlog / sinks / dist_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2015 David Schury . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 David Schury , Gabi Melman <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / file_sinks . h <nl> ppp b / include / spdlog / sinks / file_sinks . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - # pragma once <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # include < mutex > <nl> # include " base_sink . h " <nl> mmm a / include / spdlog / sinks / null_sink . h <nl> ppp b / include / spdlog / sinks / null_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> # include < mutex > <nl> mmm a / include / spdlog / sinks / ostream_sink . h <nl> ppp b / include / spdlog / sinks / ostream_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / sink . h <nl> ppp b / include / spdlog / sinks / sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> + <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / stdout_sinks . h <nl> ppp b / include / spdlog / sinks / stdout_sinks . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / sinks / syslog_sink . h <nl> ppp b / include / spdlog / sinks / syslog_sink . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . 
* / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / spdlog . h <nl> ppp b / include / spdlog / spdlog . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . * / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> / / spdlog main header file . <nl> - / / see example . cpp for usage example <nl> + / / see example . cpp for usage example <nl> <nl> # pragma once <nl> <nl> mmm a / include / spdlog / tweakme . h <nl> ppp b / include / spdlog / tweakme . h <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - / * spdlog - an extremely fast and easy to use c + + 11 logging library . * / <nl> - / * Copyright ( c ) 2014 Gabi Melman . 
* / <nl> - / * * / <nl> - / * Permission is hereby granted , free of charge , to any person obtaining * / <nl> - / * a copy of this software and associated documentation files ( the * / <nl> - / * " Software " ) , to deal in the Software without restriction , including * / <nl> - / * without limitation the rights to use , copy , modify , merge , publish , * / <nl> - / * distribute , sublicense , and / or sell copies of the Software , and to * / <nl> - / * permit persons to whom the Software is furnished to do so , subject to * / <nl> - / * the following conditions : * / <nl> - / * * / <nl> - / * The above copyright notice and this permission notice shall be * / <nl> - / * included in all copies or substantial portions of the Software . * / <nl> - / * * / <nl> - / * THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , * / <nl> - / * EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * / <nl> - / * MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . * / <nl> - / * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * / <nl> - / * CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , * / <nl> - / * TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE * / <nl> - / * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . * / <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> + / / <nl> + / / Copyright ( c ) 2015 Gabi Melman . <nl> + / / Distributed under the MIT License ( http : / / opensource . org / licenses / MIT ) <nl> + / / <nl> <nl> # pragma once <nl> <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / <nl> / / Edit this file to squeeze every last drop of performance out of spdlog . <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> + / / <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Under Linux , the much faster CLOCK_REALTIME_COARSE clock can be used . <nl> mmm a / tests / file_log . cpp <nl> ppp b / tests / file_log . cpp <nl> <nl> + / * <nl> + * This content is released under the MIT License as specified in https : / / raw . githubusercontent . com / gabime / spdlog / master / LICENSE <nl> + * / <nl> # include " includes . h " <nl> <nl> static std : : string file_contents ( const std : : string & filename ) <nl> | MIT license | gabime/spdlog | 8c38b4ee9e7db076b264437a90e762b2eee6fd43 | 2015-11-28T16:24:20Z |
mmm a / jstests / ssl / initial_sync1_x509 . js <nl> ppp b / jstests / ssl / initial_sync1_x509 . js <nl> load ( " jstests / replsets / initial_sync1 . js " ) ; <nl> / / Mixed clusterAuthMode : sendX509 and sendKeyfile and try adding - - auth <nl> x509_options1 = Object . merge ( common_options , <nl> { sslClusterFile : " jstests / libs / cluster - cert . pem " , <nl> - clusterAuthMode : " x509 " , <nl> + clusterAuthMode : " sendX509 " , <nl> auth : " " } ) ; <nl> x509_options2 = Object . merge ( common_options , { clusterAuthMode : " sendKeyfile " } ) ; <nl> load ( " jstests / replsets / initial_sync1 . js " ) ; <nl> mmm a / src / mongo / shell / servers . js <nl> ppp b / src / mongo / shell / servers . js <nl> MongoRunner . mongoOptions = function ( opts ) { <nl> if ( waitForConnect = = undefined | | waitForConnect = = null ) opts . waitForConnect = true ; <nl> <nl> if ( jsTestOptions ( ) . useSSL ) { <nl> - opts . sslMode = " sslOnly " ; <nl> - opts . sslPEMKeyFile = " jstests / libs / server . pem " ; <nl> - opts . sslCAFile = " jstests / libs / ca . pem " ; <nl> + if ( ! opts . sslMode ) opts . sslMode = " sslOnly " ; <nl> + if ( ! opts . sslPEMKeyFile ) opts . sslPEMKeyFile = " jstests / libs / server . pem " ; <nl> + if ( ! opts . sslCAFile ) opts . sslCAFile = " jstests / libs / ca . pem " ; <nl> opts . sslWeakCertificateValidation = " " ; <nl> } <nl> <nl> - if ( jsTestOptions ( ) . useX509 ) { <nl> + if ( jsTestOptions ( ) . useX509 & & ! opts . clusterAuthMode ) { <nl> opts . clusterAuthMode = " x509 " ; <nl> } <nl> <nl> MongoRunner . mongodOptions = function ( opts ) { <nl> } <nl> <nl> if ( jsTestOptions ( ) . useSSL ) { <nl> - opts . sslMode = " sslOnly " ; <nl> - opts . sslPEMKeyFile = " jstests / libs / server . pem " ; <nl> - opts . sslCAFile = " jstests / libs / ca . pem " ; <nl> + if ( ! opts . sslMode ) opts . sslMode = " sslOnly " ; <nl> + if ( ! opts . sslPEMKeyFile ) opts . sslPEMKeyFile = " jstests / libs / server . pem " ; <nl> + if ( ! opts . sslCAFile ) opts . sslCAFile = " jstests / libs / ca . pem " ; <nl> opts . sslWeakCertificateValidation = " " ; <nl> } <nl> <nl> - if ( jsTestOptions ( ) . useX509 ) { <nl> + if ( jsTestOptions ( ) . useX509 & & ! opts . clusterAuthMode ) { <nl> opts . clusterAuthMode = " x509 " ; <nl> } <nl> <nl> startMongodTest = function ( port , dirname , restart , extraOptions ) { <nl> if ( jsTestOptions ( ) . keyFile & & ( ! extraOptions | | ! extraOptions [ ' keyFile ' ] ) ) options [ ' keyFile ' ] = jsTestOptions ( ) . keyFile <nl> <nl> if ( jsTestOptions ( ) . useSSL ) { <nl> - options [ " sslMode " ] = " sslOnly " ; <nl> - options [ " sslPEMKeyFile " ] = " jstests / libs / server . pem " ; <nl> - options [ " sslCAFile " ] = " jstests / libs / ca . pem " ; <nl> + if ( ! options [ " sslMode " ] ) options [ " sslMode " ] = " sslOnly " ; <nl> + if ( ! options [ " sslPEMKeyFile " ] ) options [ " sslPEMKeyFile " ] = " jstests / libs / server . pem " ; <nl> + if ( ! options [ " sslCAFile " ] ) options [ " sslCAFile " ] = " jstests / libs / ca . pem " ; <nl> options [ " sslWeakCertificateValidation " ] = " " ; <nl> } <nl> <nl> - if ( jsTestOptions ( ) . useX509 ) { <nl> + if ( jsTestOptions ( ) . useX509 & & ! options [ " clusterAuthMode " ] ) { <nl> options [ " clusterAuthMode " ] = " x509 " ; <nl> } <nl> <nl> mmm a / src / mongo / shell / servers_misc . js <nl> ppp b / src / mongo / shell / servers_misc . js <nl> ReplTest . prototype . getOptions = function ( master , extra , putBinaryFirst , norep <nl> } <nl> <nl> if ( jsTestOptions ( ) . 
useSSL ) { <nl> - a . push ( " - - sslMode " ) <nl> - a . push ( " sslOnly " ) <nl> - a . push ( " - - sslPEMKeyFile " ) <nl> - a . push ( " jstests / libs / server . pem " ) <nl> - a . push ( " - - sslCAFile " ) <nl> - a . push ( " jstests / libs / ca . pem " ) <nl> + if ( ! a . contains ( " - - sslMode " ) ) { <nl> + a . push ( " - - sslMode " ) <nl> + a . push ( " sslOnly " ) <nl> + } <nl> + if ( ! a . contains ( " - - sslPEMKeyFile " ) ) { <nl> + a . push ( " - - sslPEMKeyFile " ) <nl> + a . push ( " jstests / libs / server . pem " ) <nl> + } <nl> + if ( ! a . contains ( " - - sslCAFile " ) ) { <nl> + a . push ( " - - sslCAFile " ) <nl> + a . push ( " jstests / libs / ca . pem " ) <nl> + } <nl> a . push ( " - - sslWeakCertificateValidation " ) <nl> } <nl> - if ( jsTestOptions ( ) . useX509 ) { <nl> + if ( jsTestOptions ( ) . useX509 & & ! a . contains ( " - - clusterAuthMode " ) ) { <nl> a . push ( " - - clusterAuthMode " ) <nl> a . push ( " x509 " ) <nl> } <nl> | SERVER - 7455 Improvements to the use - x509 and use - ssl passthroughs | mongodb/mongo | 8cd3c95861417614dfac3a8cbf9f6c7621d40e93 | 2013-10-11T21:14:20Z |
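The mongodb/mongo row above changes the shell test harness from unconditionally overwriting SSL and cluster-auth options to filling them in only when the caller left them unset, which is what lets initial_sync1_x509.js keep its own clusterAuthMode of "sendX509". A minimal C++ sketch of that fill-if-absent pattern, assuming nothing beyond the diff (the Options alias, setDefault helper, and main are hypothetical names, not mongo code; the option names and file paths are taken from the hunks):

    #include <iostream>
    #include <map>
    #include <string>

    using Options = std::map<std::string, std::string>;

    // Fill in a key only if the caller has not already set it, so
    // per-test overrides always win over suite-wide defaults.
    static void setDefault(Options& opts, const std::string& key,
                           const std::string& value) {
        opts.emplace(key, value);  // no-op when the key already exists
    }

    static void applySuiteDefaults(Options& opts, bool useSSL, bool useX509) {
        if (useSSL) {
            setDefault(opts, "sslMode", "sslOnly");
            setDefault(opts, "sslPEMKeyFile", "jstests/libs/server.pem");
            setDefault(opts, "sslCAFile", "jstests/libs/ca.pem");
            opts["sslWeakCertificateValidation"] = "";  // still forced on
        }
        if (useX509) {
            setDefault(opts, "clusterAuthMode", "x509");
        }
    }

    int main() {
        Options opts{{"clusterAuthMode", "sendX509"}};  // test-specific override
        applySuiteDefaults(opts, /*useSSL=*/true, /*useX509=*/true);
        std::cout << opts["clusterAuthMode"] << "\n";  // prints "sendX509"
    }

Here emplace only inserts when the key is absent, mirroring the if (!opts.sslMode) guards the diff adds to the JavaScript.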
mmm a / Doxygen / Scripts / xml2md . py <nl> ppp b / Doxygen / Scripts / xml2md . py <nl> <nl> replDict [ " s_listitem_4 " ] = " < li > " <nl> replDict [ " e_listitem_4 " ] = " < / li > " <nl> <nl> + replDict [ " s_xrefsect " ] = " " <nl> + replDict [ " e_xrefsect " ] = " " <nl> + <nl> + replDict [ " s_xreftitle " ] = " " <nl> + replDict [ " e_xreftitle " ] = " \ n " <nl> + <nl> + replDict [ " s_xrefdescription " ] = " " <nl> + replDict [ " e_xrefdescription " ] = " " <nl> + <nl> replDict [ " s_para " ] = " " <nl> replDict [ " e_para " ] = " \ n \ n " <nl> <nl> <nl> <nl> gencDict [ " verbatim " ] = True <nl> <nl> + gencDict [ " xrefsect " ] = True <nl> + gencDict [ " xreftitle " ] = True <nl> + gencDict [ " xrefdescription " ] = True <nl> + <nl> # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> # # # # @ brief table entry <nl> # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> | fixed xrefsect | arangodb/arangodb | 22db9dd361d352747befab7365f2badf13f9f37c | 2012-04-10T12:10:21Z |
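The arangodb/arangodb fix above works because xml2md.py is table-driven: each Doxygen XML tag t maps to an opening replacement s_t and a closing replacement e_t in replDict, plus an entry in gencDict marking that the tag's contents should be generated. Registering xrefsect, xreftitle, and xrefdescription is therefore just a handful of new table entries. A rough C++ analogue of that lookup scheme, offered as a sketch only (the repl map and main are hypothetical; just the tag names and their replacement strings come from the diff):

    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>

    int main() {
        // Doxygen XML tag -> text emitted when the tag opens / closes,
        // mirroring the s_<tag> / e_<tag> pairs in replDict.
        std::map<std::string, std::pair<std::string, std::string>> repl = {
            {"para",            {"", "\n\n"}},
            {"xrefsect",        {"", ""}},    // newly registered tags
            {"xreftitle",       {"", "\n"}},
            {"xrefdescription", {"", ""}},
        };

        auto open  = [&](const std::string& tag) { std::cout << repl[tag].first;  };
        auto close = [&](const std::string& tag) { std::cout << repl[tag].second; };

        // Render <xrefsect><xreftitle>Deprecated</xreftitle>
        //        <xrefdescription>Use the new API.</xrefdescription></xrefsect>
        open("xrefsect");
        open("xreftitle");  std::cout << "Deprecated";       close("xreftitle");
        open("xrefdescription"); std::cout << "Use the new API."; close("xrefdescription");
        close("xrefsect");
    }

With the table entries present, a previously unknown tag stops being dropped and renders as its title on one line followed by its description, which is all the commit needed.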
mmm a / aten / src / ATen / native / BinaryOps . cpp <nl> ppp b / aten / src / ATen / native / BinaryOps . cpp <nl> Tensor & remainder_ ( Tensor & self , const Tensor & other ) { <nl> return native : : remainder_out ( self , self , other ) ; <nl> } <nl> <nl> + Tensor truncate ( const Tensor & tensor ) { <nl> + if ( tensor . is_floating_point ( ) ) { <nl> + return tensor . trunc ( ) ; <nl> + } <nl> + return tensor ; <nl> + } <nl> + <nl> Tensor & true_divide_out ( Tensor & result , const Tensor & self , const Tensor & divisor ) { <nl> TORCH_CHECK ( ! isIntegralType ( result . scalar_type ( ) , / * includeBool = * / true ) , <nl> " True division requires a floating output type , but got " , <nl> Tensor true_divide ( const Tensor & self , const Tensor & divisor ) { <nl> return iter . output ( ) ; <nl> } <nl> <nl> - Tensor & floor_divide_out ( Tensor & result , const Tensor & self , const Tensor & other ) { <nl> - auto iter = TensorIterator : : binary_op ( result , self , other , <nl> - / * check_mem_overlap = * / true ) ; <nl> - div_stub ( iter . device_type ( ) , iter ) ; <nl> - <nl> - if ( result . is_floating_point ( ) ) { <nl> - result . trunc_ ( ) ; <nl> - } <nl> - <nl> - return result ; <nl> - } <nl> - <nl> - Tensor floor_divide ( const Tensor & self , const Tensor & other ) { <nl> - Tensor result ; <nl> - auto iter = TensorIterator : : binary_op ( result , self , other ) ; <nl> - <nl> - div_stub ( iter . device_type ( ) , iter ) ; <nl> - <nl> - auto out = iter . output ( ) ; <nl> - if ( out . is_floating_point ( ) ) { <nl> - out . trunc_ ( ) ; <nl> - } <nl> - <nl> - return out ; <nl> + Tensor floor_divide ( const Tensor & input , const Tensor & other ) { <nl> + Tensor out = input / other ; <nl> + return truncate ( out ) ; <nl> } <nl> <nl> - Tensor & floor_divide_ ( Tensor & self , const Tensor & other ) { <nl> - return native : : floor_divide_out ( self , self , other ) ; <nl> + Tensor floor_divide ( const Tensor & input , Scalar other ) { <nl> + Tensor out = input / other ; <nl> + return truncate ( out ) ; <nl> } <nl> <nl> Tensor & mul_out ( Tensor & result , const Tensor & self , const Tensor & other ) { <nl> Tensor min ( const Tensor & self , const Tensor & other ) { <nl> <nl> Tensor & min_ ( Tensor & self , const Tensor & other ) { return at : : min_out ( self , self , other ) ; } <nl> <nl> - Tensor floor_divide ( const Tensor & self , Scalar other ) { <nl> - return at : : floor_divide ( self , wrapped_scalar_tensor ( other ) ) ; <nl> - } <nl> - <nl> - Tensor & floor_divide_ ( Tensor & self , Scalar other ) { <nl> - return at : : floor_divide_out ( self , self , wrapped_scalar_tensor ( other ) ) ; <nl> - } <nl> - <nl> Tensor & fmod_out ( Tensor & result , const Tensor & self , const Tensor & other ) { <nl> auto iter = TensorIterator : : binary_op ( result , self , other , <nl> / * check_mem_overlap = * / true ) ; <nl> mmm a / aten / src / ATen / native / native_functions . yaml <nl> ppp b / aten / src / ATen / native / native_functions . yaml <nl> <nl> CPU : floor_out <nl> CUDA : floor_out <nl> <nl> - - func : floor_divide ( Tensor self , Tensor other ) - > Tensor <nl> - variants : function , method <nl> - dispatch : <nl> - CPU : floor_divide <nl> - CUDA : floor_divide <nl> - SparseCPU : floor_divide_sparse <nl> - SparseCUDA : floor_divide_sparse <nl> + - func : floor_divide ( Tensor input , Tensor other ) - > Tensor <nl> supports_named_tensor : True <nl> <nl> - - func : floor_divide_ . Tensor ( Tensor ( a ! ) self , Tensor other ) - > Tensor ( a ! 
) <nl> - variants : method <nl> - dispatch : <nl> - CPU : floor_divide_ <nl> - CUDA : floor_divide_ <nl> - SparseCPU : floor_divide_sparse_ <nl> - SparseCUDA : floor_divide_sparse_ <nl> - supports_named_tensor : True <nl> - <nl> - - func : floor_divide . out ( Tensor self , Tensor other , * , Tensor ( a ! ) out ) - > Tensor ( a ! ) <nl> - dispatch : <nl> - CPU : floor_divide_out <nl> - CUDA : floor_divide_out <nl> - SparseCPU : floor_divide_out_sparse_zerodim <nl> - SparseCUDA : floor_divide_out_sparse_zerodim <nl> - supports_named_tensor : True <nl> - <nl> - - func : floor_divide . Scalar ( Tensor self , Scalar other ) - > Tensor <nl> - variants : function , method <nl> - supports_named_tensor : True <nl> - <nl> - - func : floor_divide_ . Scalar ( Tensor ( a ! ) self , Scalar other ) - > Tensor ( a ! ) <nl> - variants : method <nl> + - func : floor_divide . Scalar ( Tensor input , Scalar other ) - > Tensor <nl> supports_named_tensor : True <nl> <nl> - func : frac ( Tensor self ) - > Tensor <nl> mmm a / aten / src / ATen / native / sparse / SparseTensorMath . cpp <nl> ppp b / aten / src / ATen / native / sparse / SparseTensorMath . cpp <nl> SparseTensor pow_sparse_scalar ( const SparseTensor & t , Scalar value ) { <nl> / / div ( SparseTensor , Scalar ) <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> + SparseTensor & div_out_sparse_zerodim ( SparseTensor & r , const SparseTensor & t , const Tensor & value ) ; <nl> + <nl> + Tensor div_sparse ( const Tensor & self , const Tensor & value ) { <nl> + auto commonDtype = at : : result_type ( self , value ) ; <nl> + Tensor result = at : : empty ( { 0 } , self . options ( ) . dtype ( commonDtype ) ) ; <nl> + return div_out_sparse_zerodim ( result , self , value ) ; <nl> + } <nl> + <nl> + Tensor & div_sparse_ ( Tensor & self , const Tensor & value ) { <nl> + return div_out_sparse_zerodim ( self , self , value ) ; <nl> + } <nl> + <nl> static SparseTensor & coalesce_ ( SparseTensor & tensor ) { <nl> SparseTensor coalesced = tensor . coalesce ( ) ; <nl> tensor . _values ( ) . resize_as_ ( coalesced . _values ( ) ) ; <nl> static SparseTensor & coalesce_ ( SparseTensor & tensor ) { <nl> return tensor ; <nl> } <nl> <nl> - / / Note [ Sparse Floor Division ] <nl> - / / ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ <nl> - / / Uncoalesced sparse tensors cannot be floor divided correctly . Integer <nl> - / / division is considered a special - case of floor division for purposes of <nl> - / / this note . <nl> - / / For example , an integer tensor with values = [ 3 , 3 ] divided by 2 would produce <nl> - / / values = [ 1 , 1 ] , which sum to 2 instead of 3 ( = 6 / 2 ) . <nl> - / / A float tensor with values = [ 3 . , 3 . ] floor divided by 2 would also produce <nl> - / / values = [ 1 . , 1 . ] ( after truncation ) , which sum to 2 . f instead of 3 . f . <nl> - / / To perform floor division the sparse tensor must be coalesced first . <nl> - <nl> SparseTensor & div_out_sparse_zerodim ( SparseTensor & r , const SparseTensor & t , const Tensor & value ) { <nl> - TORCH_CHECK ( value . dim ( ) = = 0 , " Sparse division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got shape " , value . sizes ( ) , " for divisor ) " ) ; <nl> - TORCH_CHECK ( ! value . is_sparse ( ) , " Sparse division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got a sparse divisor ) " ) ; <nl> + TORCH_CHECK ( value . 
dim ( ) = = 0 , " sparse division only supports division by a scalar ( got shape " , <nl> + value . sizes ( ) , " for argument ' other ' ) " ) ; <nl> + TORCH_CHECK ( ! value . is_sparse ( ) , " A Sparse Tensor can only be divided by a scalar or zero - dim dense tensor " ) ; <nl> <nl> AT_ASSERT ( r . is_sparse ( ) ) ; <nl> AT_ASSERT ( t . is_sparse ( ) ) ; <nl> <nl> if ( is_same_tensor ( r , t ) ) { <nl> - / / See note " Sparse Floor Division " <nl> + / / Can ' t divide an uncoalesced integral tensor accurately . e . g . for a sparse int tensor with value 6 <nl> + / / represented as values = [ 3 , 3 ] , integer division by 2 would give values = [ 1 , 1 ] = > 2 instead <nl> + / / of 6 / 2 = > 3 <nl> if ( ! r . is_coalesced ( ) & & isIntegralType ( r . scalar_type ( ) , / * includeBool = * / true ) ) { <nl> coalesce_ ( r ) ; <nl> } <nl> SparseTensor & div_out_sparse_zerodim ( SparseTensor & r , const SparseTensor & t , con <nl> return r ; <nl> } <nl> <nl> - Tensor div_sparse ( const Tensor & self , const Tensor & value ) { <nl> - auto commonDtype = at : : result_type ( self , value ) ; <nl> - Tensor result = at : : empty ( { 0 } , self . options ( ) . dtype ( commonDtype ) ) ; <nl> - return div_out_sparse_zerodim ( result , self , value ) ; <nl> - } <nl> - <nl> - Tensor & div_sparse_ ( Tensor & self , const Tensor & value ) { <nl> - return div_out_sparse_zerodim ( self , self , value ) ; <nl> - } <nl> - <nl> SparseTensor & div_out_sparse_scalar ( SparseTensor & r , const SparseTensor & t , Scalar value ) { <nl> return div_out_sparse_zerodim ( r , t , wrapped_scalar_tensor ( value ) ) ; <nl> } <nl> SparseTensor & div_out_sparse_scalar ( SparseTensor & r , const SparseTensor & t , Scal <nl> / / true_divide ( SparseTensor , Scalar ) <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> + Tensor true_divide_sparse ( const Tensor & self , const Tensor & value ) { <nl> + auto commonDtype = at : : result_type ( self , value ) ; <nl> + <nl> + / / Ensures floating dtype <nl> + if ( isIntegralType ( commonDtype , / * includeBool = * / true ) ) { <nl> + commonDtype = typeMetaToScalarType ( c10 : : get_default_dtype ( ) ) ; <nl> + } <nl> + <nl> + Tensor result = at : : empty ( { 0 } , self . options ( ) . dtype ( commonDtype ) ) ; <nl> + return div_out_sparse_zerodim ( result , self , value ) ; <nl> + } <nl> + <nl> SparseTensor & true_divide_out_sparse_zerodim ( <nl> SparseTensor & result , <nl> const SparseTensor & dividend , <nl> const Tensor & divisor ) { <nl> - TORCH_CHECK ( divisor . dim ( ) = = 0 , " Sparse true division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got shape " , divisor . sizes ( ) , " for divisor ) " ) ; <nl> - TORCH_CHECK ( ! divisor . is_sparse ( ) , " Sparse true division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got a sparse divisor ) " ) ; <nl> + TORCH_CHECK ( divisor . dim ( ) = = 0 , " Sparse true division only supports " , <nl> + " scalar or zero - dim dense tensor divisors ( got shape " , divisor . sizes ( ) ) ; <nl> + TORCH_CHECK ( ! divisor . is_sparse ( ) , " A Sparse Tensor can only be divided by " , <nl> + " a scalar or zero - dim dense tensor divisor , but got a sparse divisor . " ) ; <nl> <nl> AT_ASSERT ( result . is_sparse ( ) ) ; <nl> AT_ASSERT ( dividend . is_sparse ( ) ) ; <nl> SparseTensor & true_divide_out_sparse_zerodim ( <nl> Tensor dividend_tmp = dividend ; <nl> result . resize_as_ ( dividend_tmp ) ; <nl> auto indices = result . _indices ( ) ; <nl> - indices . 
resize_as_ ( dividend_tmp . _indices ( ) ) ; <nl> + indices . resize_as_ ( dividend_tmp . indices ( ) ) ; <nl> indices . copy_ ( dividend_tmp . _indices ( ) ) ; <nl> Tensor result_values = result . _values ( ) ; <nl> at : : true_divide_out ( result_values , dividend_tmp . _values ( ) , divisor ) ; <nl> SparseTensor & true_divide_out_sparse_zerodim ( <nl> return result ; <nl> } <nl> <nl> - Tensor true_divide_sparse ( const Tensor & self , const Tensor & value ) { <nl> - auto commonDtype = at : : result_type ( self , value ) ; <nl> - <nl> - / / Ensures floating dtype <nl> - if ( isIntegralType ( commonDtype , / * includeBool = * / true ) ) { <nl> - commonDtype = typeMetaToScalarType ( c10 : : get_default_dtype ( ) ) ; <nl> - } <nl> - <nl> - Tensor result = at : : empty ( { 0 } , self . options ( ) . dtype ( commonDtype ) ) ; <nl> - return true_divide_out_sparse_zerodim ( result , self , value ) ; <nl> - } <nl> - <nl> SparseTensor & true_divide_out_sparse_scalar ( <nl> SparseTensor & result , <nl> const SparseTensor & dividend , <nl> SparseTensor & true_divide_out_sparse_scalar ( <nl> return true_divide_out_sparse_zerodim ( result , dividend , wrapped_scalar_tensor ( divisor ) ) ; <nl> } <nl> <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - / / floor_divide ( SparseTensor , Scalar ) <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - <nl> - SparseTensor & floor_divide_out_sparse_zerodim ( <nl> - SparseTensor & result , <nl> - const SparseTensor & dividend , <nl> - const Tensor & divisor ) { <nl> - TORCH_CHECK ( divisor . dim ( ) = = 0 , " Sparse floor division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got shape " , divisor . sizes ( ) , " for divisor ) " ) ; <nl> - TORCH_CHECK ( ! divisor . is_sparse ( ) , " Sparse floor division requires a scalar or " , <nl> - " zero - dim dense tensor divisor ( got a sparse divisor ) " ) ; <nl> - <nl> - AT_ASSERT ( result . is_sparse ( ) ) ; <nl> - AT_ASSERT ( dividend . is_sparse ( ) ) ; <nl> - <nl> - / / Case 1 : result and dividend are the same tensor <nl> - / / Performs floor division in - place <nl> - if ( is_same_tensor ( result , dividend ) ) { <nl> - <nl> - / / See note " Sparse Floor Division " <nl> - if ( ! result . is_coalesced ( ) ) { <nl> - coalesce_ ( result ) ; <nl> - } <nl> - <nl> - result . _values ( ) . floor_divide_ ( divisor ) ; <nl> - return result ; <nl> - } <nl> - <nl> - / / Case 2 : result and dividend are different tensors <nl> - Tensor dividend_tmp = dividend ; <nl> - <nl> - / / Ensures dividend_tmp is coalesced ( see note above ) <nl> - if ( ! dividend . is_coalesced ( ) ) { <nl> - dividend_tmp = dividend . coalesce ( ) ; <nl> - } <nl> - <nl> - / / Resizes and indexes result like dividend_tmp <nl> - result . resize_as_ ( dividend_tmp ) ; <nl> - result . _indices ( ) . resize_as_ ( dividend_tmp . _indices ( ) ) ; <nl> - result . _indices ( ) . copy_ ( dividend_tmp . _indices ( ) ) ; <nl> - <nl> - / / Computes result <nl> - Tensor result_values = result . _values ( ) ; <nl> - at : : floor_divide_out ( result_values , dividend_tmp . _values ( ) , divisor ) ; <nl> - get_sparse_impl ( result ) - > set_nnz_and_narrow ( dividend_tmp . _nnz ( ) ) ; <nl> - result . _coalesced_ ( dividend_tmp . 
is_coalesced ( ) ) ; <nl> - return result ; <nl> - } <nl> - <nl> - Tensor floor_divide_sparse ( const Tensor & self , const Tensor & value ) { <nl> - auto commonDtype = at : : result_type ( self , value ) ; <nl> - Tensor result = at : : empty ( { 0 } , self . options ( ) . dtype ( commonDtype ) ) ; <nl> - return floor_divide_out_sparse_zerodim ( result , self , value ) ; <nl> - } <nl> - <nl> - Tensor & floor_divide_sparse_ ( Tensor & self , const Tensor & value ) { <nl> - return floor_divide_out_sparse_zerodim ( self , self , value ) ; <nl> - } <nl> - <nl> - SparseTensor & floor_divide_out_sparse_scalar ( SparseTensor & r , const SparseTensor & t , Scalar value ) { <nl> - return floor_divide_out_sparse_zerodim ( r , t , wrapped_scalar_tensor ( value ) ) ; <nl> - } <nl> - <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> / / norm ( SparseTensor , Scalar ) <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> mmm a / docs / source / tensors . rst <nl> ppp b / docs / source / tensors . rst <nl> view of a storage and defines numeric operations on it . <nl> . . automethod : : float <nl> . . automethod : : floor <nl> . . automethod : : floor_ <nl> - . . automethod : : floor_divide <nl> - . . automethod : : floor_divide_ <nl> . . automethod : : fmod <nl> . . automethod : : fmod_ <nl> . . automethod : : frac <nl> mmm a / test / backward_compatibility / check_backward_compatibility . py <nl> ppp b / test / backward_compatibility / check_backward_compatibility . py <nl> <nl> ( ' aten : : ones_like ' , datetime . date ( 2020 , 3 , 15 ) ) , <nl> ( ' aten : : randint_like ' , datetime . date ( 2020 , 3 , 15 ) ) , <nl> ( ' aten : : zeros_like ' , datetime . date ( 2020 , 3 , 15 ) ) , <nl> - ( ' aten : : floor_divide ' , datetime . date ( 2020 , 4 , 1 ) ) , <nl> ( ' aten : : Bool ' , datetime . date ( 2020 , 4 , 1 ) ) , <nl> ( ' aten : : Float ' , datetime . date ( 2020 , 4 , 1 ) ) , <nl> ( ' aten : : to ' , datetime . date ( 2020 , 4 , 1 ) ) , <nl> mmm a / test / test_sparse . py <nl> ppp b / test / test_sparse . py <nl> def test_shape ( sparse_dims , nnz , with_size ) : <nl> self . assertEqual ( x . _indices ( ) . numel ( ) , 0 ) <nl> self . assertEqual ( x . _values ( ) . numel ( ) , 0 ) <nl> <nl> - def test_coalesce ( self ) : <nl> + def test_coalecce ( self ) : <nl> for empty_i , empty_v , empty_nnz in itertools . product ( [ True , False ] , repeat = 3 ) : <nl> sparse_size = [ ] if empty_i else [ 2 , 1 ] <nl> dense_size = [ 1 , 0 , 2 ] if empty_v else [ 1 , 2 ] <nl> def test_tensor ( x , res ) : <nl> x . to_dense ( ) # Tests triple to_dense for memory corruption <nl> x . to_dense ( ) <nl> x . to_dense ( ) <nl> - # We dont have to_dense for half types , so we don ' t request <nl> + # We dont have to_dense for half types , so we don ' t request <nl> # exact_dtype if res . type is torch . float16 . <nl> dense_x = x . to_dense ( ) <nl> safe_dense_x = self . safeToDense ( x ) <nl> def _test_basic_ops_shape ( self , nnz_x1 , nnz_x2 , shape_i , shape_v = None ) : <nl> self . assertEqual ( self . safeToDense ( y1 ) , expected ) <nl> self . assertEqual ( self . safeToDense ( y2 ) , expected ) <nl> <nl> - # Note : true_divide does not have a method variant <nl> - y1 = torch . true_divide ( x1 , 37 . 5 ) <nl> - y2 = x1 . clone ( ) <nl> - expected = torch . true_divide ( self . safeToDense ( x1 ) , 37 . 5 ) <nl> - self . assertEqual ( self . safeToDense ( y1 ) , expected ) <nl> - <nl> - y1 = x1 / / 37 . 5 <nl> - y2 = x1 . clone ( ) <nl> - y2 . 
floor_divide_ ( 37 . 5 ) <nl> - expected = self . safeToDense ( x1 ) / / 37 . 5 <nl> - self . assertEqual ( self . safeToDense ( y1 ) , expected ) <nl> - self . assertEqual ( self . safeToDense ( y2 ) , expected ) <nl> - <nl> # TODO : add back inplace support <nl> y1 = x1 * * 2 <nl> y2 = x1 . clone ( ) <nl> def test_isnan ( self ) : <nl> self . assertEqual ( torch . isnan ( t ) . int ( ) , t_nan . int ( ) ) <nl> <nl> def test_div_by_sparse_error ( self ) : <nl> - self . assertRaisesRegex ( RuntimeError , ' Sparse division requires ' , <nl> + self . assertRaisesRegex ( RuntimeError , ' A Sparse Tensor can only be divided ' , <nl> lambda : torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) <nl> / torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) ) <nl> <nl> - def test_true_divide_by_sparse_error ( self ) : <nl> - def fn ( ) : <nl> - x = torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) <nl> - y = torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) <nl> - torch . true_divide ( x , y ) <nl> - <nl> - self . assertRaisesRegex ( RuntimeError , ' Sparse true division requires ' , <nl> - fn ) <nl> - <nl> - def test_floor_divide_by_sparse_error ( self ) : <nl> - self . assertRaisesRegex ( RuntimeError , ' Sparse floor division requires ' , <nl> - lambda : torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) <nl> - / / torch . tensor ( 1 . , device = self . device ) . to_sparse ( ) ) <nl> - <nl> @ unittest . skipIf ( not TEST_NUMPY , " Numpy not found " ) <nl> def test_sparse_to_numpy ( self ) : <nl> t = torch . sparse_coo_tensor ( torch . tensor ( ( [ 0 , 0 ] , [ 2 , 0 ] ) ) , torch . tensor ( [ 1 , 4 ] ) ) <nl> mmm a / test / test_torch . py <nl> ppp b / test / test_torch . py <nl> <nl> from torch . testing . _internal . common_device_type import instantiate_device_type_tests , \ <nl> skipCPUIfNoLapack , skipCUDAIfNoMagma , skipCUDAIfRocm , skipCUDAIfNotRocm , onlyCUDA , onlyCPU , \ <nl> dtypes , dtypesIfCUDA , deviceCountAtLeast , skipCUDAIf , precisionOverride , \ <nl> - PYTORCH_CUDA_MEMCHECK , largeCUDATensorTest , onlyOnCPUAndCUDA <nl> + PYTORCH_CUDA_MEMCHECK , largeCUDATensorTest <nl> import torch . backends . quantized <nl> import torch . testing . _internal . data <nl> <nl> def test_div ( self , device , dtype ) : <nl> 0 . 01 ) <nl> self . assertEqual ( a1 . div ( a2 ) , a1 / a2 ) <nl> <nl> - @ dtypesIfCUDA ( * torch . testing . get_all_math_dtypes ( ' cuda ' ) ) <nl> + @ onlyCPU <nl> @ dtypes ( * torch . testing . get_all_math_dtypes ( ' cpu ' ) ) <nl> - def test_floor_divide_tensor ( self , device , dtype ) : <nl> - x = torch . randn ( 10 , device = device ) . mul ( 30 ) . to ( dtype ) <nl> - y = torch . arange ( 1 , 11 , dtype = dtype , device = device ) <nl> - <nl> - z = x / / y <nl> - z_alt = torch . trunc ( x . double ( ) / y . double ( ) ) . to ( dtype ) <nl> - <nl> - self . assertEqual ( z . dtype , x . dtype ) <nl> - self . assertEqual ( z , z_alt ) <nl> + def test_floordiv ( self , device , dtype ) : <nl> + if dtype is torch . float16 : <nl> + return <nl> <nl> - @ dtypesIfCUDA ( * torch . testing . get_all_math_dtypes ( ' cuda ' ) ) <nl> - @ dtypes ( * torch . testing . get_all_math_dtypes ( ' cpu ' ) ) <nl> - def test_floor_divide_scalar ( self , device , dtype ) : <nl> x = torch . randn ( 100 , device = device ) . mul ( 10 ) . to ( dtype ) <nl> - <nl> - z = x / / 3 <nl> - z_alt = torch . tensor ( [ math . trunc ( v . item ( ) / 3 . ) for v in x ] , dtype = x . dtype , device = device ) <nl> - <nl> - self . 
assertEqual ( z . dtype , x . dtype ) <nl> - self . assertEqual ( z , z_alt ) <nl> - <nl> - # Note : this tests fails on XLA <nl> - @ onlyOnCPUAndCUDA <nl> - @ dtypes ( torch . float , torch . long ) <nl> - def test_floor_divide_out ( self , device , dtype ) : <nl> - x = torch . randn ( 10 , device = device ) . mul ( 10 ) . to ( dtype ) <nl> - y = torch . arange ( 1 , 11 , dtype = dtype , device = device ) <nl> - o = torch . empty ( 10 , dtype = dtype , device = device ) <nl> - <nl> - torch . floor_divide ( x , y , out = o ) <nl> - self . assertEqual ( o , x / / y ) <nl> - <nl> - # Tests scalar with out <nl> - torch . floor_divide ( x , 2 , out = o ) <nl> - self . assertEqual ( o , x / / 2 ) <nl> - <nl> - if dtype = = torch . int : <nl> - o = torch . empty ( 10 , dtype = torch . float , device = device ) <nl> - torch . floor_divide ( x , y , out = o ) <nl> - self . assertEqual ( o , torch . floor_divide ( x . float ( ) , y . float ( ) ) ) <nl> + y = x / / 3 <nl> + self . assertEqual ( y . dtype , x . dtype ) <nl> + z = torch . tensor ( [ math . trunc ( v . item ( ) / 3 . ) for v in x ] , dtype = y . dtype , device = device ) <nl> + self . assertEqual ( y , z ) <nl> <nl> @ onlyCPU <nl> @ dtypes ( * torch . testing . get_all_math_dtypes ( ' cpu ' ) ) <nl> def test_split_view ( self , device ) : <nl> torch . uint8 <nl> ] <nl> <nl> - # _types2 adds bfloat16 type to _types only on ROCm . Should eventually be unified <nl> + # _types2 adds bfloat16 type to _types only on ROCm . Should eventually be unified <nl> # with _types when bfloat16 bringup is complete on all platforms . <nl> _types2 = _types + [ torch . bfloat16 ] if TEST_WITH_ROCM else _types <nl> <nl> def inner ( self , device , dtype ) : <nl> ( ' div ' , ' ' , _small_3d , lambda t , d : [ _number ( 3 . 14 , 3 , t ) ] , 1e - 1 ) , <nl> ( ' div ' , ' tensor ' , _small_3d , <nl> lambda t , d : [ _small_3d ( t , d , has_zeros = False ) ] , 1e - 1 ) , <nl> - # Note : precision for floor_divide is 1 since a small ( 1e - 5 , for example ) <nl> - # error in division can lead to an difference of 1 post - truncation <nl> - # ( e . g . . 9999 vs 1 post truncation is 0 vs 1 ) <nl> - ( ' floor_divide ' , ' ' , _small_3d , lambda t , d : [ _number ( 3 . 14 , 3 , t ) ] , 1 , 1e - 5 , 1e - 5 , _types ) , <nl> - ( ' floor_divide ' , ' tensor ' , _small_3d , <nl> - lambda t , d : [ _small_3d ( t , d , has_zeros = False ) ] , 1 , 1e - 5 , 1e - 5 , _types ) , <nl> ( ' pow ' , ' ' , _small_3d , lambda t , d : [ _number ( 3 . 14 , 3 , t ) ] , 1e - 1 , 1e - 5 , 1e - 5 , _float_types ) , <nl> ( ' pow ' , ' 1 ' , _small_3d , lambda t , d : [ _number ( 1 . , 1 , t ) ] , 1e - 1 ) , <nl> ( ' pow ' , ' 2 ' , _small_3d , lambda t , d : [ _number ( 2 . , 2 , t ) ] , 1e - 1 ) , <nl> mmm a / test / test_type_promotion . py <nl> ppp b / test / test_type_promotion . py <nl> def test_true_divide ( self , device , dtype ) : <nl> casting_result = dividend . to ( torch . get_default_dtype ( ) ) / divisor . to ( torch . get_default_dtype ( ) ) <nl> self . assertEqual ( casting_result , torch . true_divide ( dividend , divisor ) ) <nl> <nl> - @ onlyOnCPUAndCUDA <nl> @ dtypes ( torch . bool , torch . short , torch . uint8 , torch . int , torch . long ) <nl> def test_true_divide_out ( self , device , dtype ) : <nl> dividend = torch . randn ( 5 , device = device ) . to ( dtype ) <nl> mmm a / tools / pyi / gen_pyi . py <nl> ppp b / tools / pyi / gen_pyi . 
py <nl> <nl> ' div ' , <nl> ' div_ ' , <nl> ' div_out ' , <nl> - ' floor_divide ' , ' floor_divide_ ' , ' floor_divide_out ' , <nl> ] <nl> <nl> <nl> def arg_to_type_hint ( arg ) : <nl> <nl> <nl> binary_ops = ( ' add ' , ' sub ' , ' mul ' , ' div ' , ' pow ' , ' lshift ' , ' rshift ' , ' mod ' , ' truediv ' , <nl> - ' matmul ' , ' floordiv ' , ' floor_divide ' <nl> + ' matmul ' , ' floordiv ' , <nl> ' radd ' , ' rsub ' , ' rmul ' , ' rtruediv ' , ' rfloordiv ' , ' rpow ' , # reverse arithmetic <nl> ' and ' , ' or ' , ' xor ' , # logic <nl> ' iadd ' , ' iand ' , ' idiv ' , ' ilshift ' , ' imul ' , <nl> def gen_pyi ( declarations_path , out ) : <nl> . format ( FACTORY_PARAMS ) ] , <nl> ' is_grad_enabled ' : [ ' def is_grad_enabled ( ) - > _bool : . . . ' ] <nl> } ) <nl> - for binop in [ ' mul ' , ' div ' , ' floor_divide ' ] : <nl> + for binop in [ ' mul ' , ' div ' ] : <nl> unsorted_function_hints [ binop ] . append ( <nl> ' def { } ( input : Union [ Tensor , Number ] , ' <nl> ' other : Union [ Tensor , Number ] , ' <nl> ' * , out : Optional [ Tensor ] = None ) - > Tensor : . . . ' . format ( binop ) ) <nl> - for binop in [ ' add ' , ' sub ' ] : <nl> + for binop in [ ' add ' , ' sub ' ] : <nl> unsorted_function_hints [ binop ] . append ( <nl> ' def { } ( input : Union [ Tensor , Number ] , ' <nl> ' other : Union [ Tensor , Number ] , ' <nl> def gen_pyi ( declarations_path , out ) : <nl> ] , <nl> ' item ' : [ " def item ( self ) - > Number : . . . " ] , <nl> } ) <nl> - for binop in [ ' mul ' , ' div ' , ' floor_divide ' ] : <nl> + for binop in [ ' mul ' , ' div ' ] : <nl> for inplace in [ False , True ] : <nl> out_suffix = ' , * , out : Optional [ Tensor ] = None ' <nl> if inplace : <nl> def gen_pyi ( declarations_path , out ) : <nl> unsorted_tensor_method_hints [ binop ] . append ( <nl> ' def { } ( self , other : Union [ Tensor , Number ] { } ) ' <nl> ' - > Tensor : . . . ' . format ( binop , out_suffix ) ) <nl> - for binop in [ ' add ' , ' sub ' ] : <nl> - for inplace in [ False , True ] : <nl> + for binop in [ ' add ' , ' sub ' ] : <nl> + for inplace in [ False , True ] : <nl> out_suffix = ' , out : Optional [ Tensor ] = None ' <nl> if inplace : <nl> binop + = ' _ ' <nl> mmm a / torch / _tensor_docs . py <nl> ppp b / torch / _tensor_docs . py <nl> def scale_channels ( input , scale ) : <nl> <nl> " " " ) <nl> <nl> - add_docstr_all ( ' floor_divide ' , <nl> - r " " " <nl> - floor_divide ( value ) - > Tensor <nl> - <nl> - See : func : ` torch . floor_divide ` <nl> - " " " ) <nl> - <nl> - add_docstr_all ( ' floor_divide_ ' , <nl> - r " " " <nl> - floor_divide_ ( value ) - > Tensor <nl> - <nl> - In - place version of : meth : ` ~ Tensor . floor_divide ` <nl> - " " " ) <nl> - <nl> add_docstr_all ( ' digamma ' , <nl> r " " " <nl> digamma ( ) - > Tensor <nl> mmm a / torch / _torch_docs . py <nl> ppp b / torch / _torch_docs . py <nl> def merge_dicts ( * dicts ) : <nl> <nl> add_docstr ( torch . floor_divide , <nl> r " " " <nl> - floor_divide ( input , other , out = None ) - > Tensor <nl> + floor_divide ( input , other ) - > Tensor <nl> <nl> Return the division of the inputs rounded down to the nearest integer . See : func : ` torch . div ` <nl> for type promotion and broadcasting rules . <nl> def merge_dicts ( * dicts ) : <nl> input ( Tensor ) : the numerator tensor <nl> other ( Tensor or Scalar ) : the denominator <nl> <nl> - Keyword args : <nl> - { out } <nl> - <nl> Example : : <nl> <nl> > > > a = torch . tensor ( [ 4 . 0 , 3 . 0 ] ) <nl> mmm a / torch / csrc / utils / python_arg_parser . 
cpp <nl> ppp b / torch / csrc / utils / python_arg_parser . cpp <nl> static bool should_allow_numbers_as_tensors ( const std : : string & name ) { <nl> " div " , " div_ " , " div_out " , <nl> " mul " , " mul_ " , " mul_out " , <nl> " sub " , " sub_ " , " sub_out " , <nl> - " floor_divide " , " floor_divide_ " , " floor_divide_out " <nl> } ; <nl> return allowed . find ( name ) ! = allowed . end ( ) ; <nl> } <nl> | Revert D20497453 : [ pytorch ] [ PR ] Makes floor_divide a method , adds sparse floor division | pytorch/pytorch | a1eaaea288cf51abcd69eb9b0993b1aa9c0ce41f | 2020-03-18T08:48:50Z |
mmm a / buildscripts / evergreen_generate_resmoke_tasks . py <nl> ppp b / buildscripts / evergreen_generate_resmoke_tasks . py <nl> <nl> <nl> LOGGER = structlog . getLogger ( __name__ ) <nl> <nl> + AVG_SETUP_TIME = int ( timedelta ( minutes = 5 ) . total_seconds ( ) ) <nl> DEFAULT_TEST_SUITE_DIR = os . path . join ( " buildscripts " , " resmokeconfig " , " suites " ) <nl> CONFIG_FILE = " . / . evergreen . yml " <nl> MIN_TIMEOUT_SECONDS = int ( timedelta ( minutes = 5 ) . total_seconds ( ) ) <nl> def round_to_minute ( runtime ) : <nl> distance_to_min = 60 - ( runtime % 60 ) <nl> return int ( math . ceil ( runtime + distance_to_min ) ) <nl> <nl> - return max ( MIN_TIMEOUT_SECONDS , round_to_minute ( avg_runtime ) ) * scaling_factor <nl> + return max ( MIN_TIMEOUT_SECONDS , round_to_minute ( avg_runtime ) ) * scaling_factor + AVG_SETUP_TIME <nl> <nl> <nl> def should_tasks_be_generated ( evg_api , task_id ) : <nl> mmm a / buildscripts / tests / test_evergreen_generate_resmoke_tasks . py <nl> ppp b / buildscripts / tests / test_evergreen_generate_resmoke_tasks . py <nl> def test_excluded_files_extended_in_misc ( self ) : <nl> <nl> class CalculateTimeoutTest ( unittest . TestCase ) : <nl> def test_min_timeout ( self ) : <nl> - self . assertEqual ( under_test . MIN_TIMEOUT_SECONDS , under_test . calculate_timeout ( 15 , 1 ) ) <nl> + self . assertEqual ( under_test . MIN_TIMEOUT_SECONDS + under_test . AVG_SETUP_TIME , <nl> + under_test . calculate_timeout ( 15 , 1 ) ) <nl> <nl> def test_over_timeout_by_one_minute ( self ) : <nl> - self . assertEqual ( 360 , under_test . calculate_timeout ( 301 , 1 ) ) <nl> + self . assertEqual ( 660 , under_test . calculate_timeout ( 301 , 1 ) ) <nl> <nl> def test_float_runtimes ( self ) : <nl> - self . assertEqual ( 360 , under_test . calculate_timeout ( 300 . 14 , 1 ) ) <nl> + self . assertEqual ( 660 , under_test . calculate_timeout ( 300 . 14 , 1 ) ) <nl> <nl> def test_scaling_factor ( self ) : <nl> scaling_factor = 10 <nl> - self . assertEqual ( under_test . MIN_TIMEOUT_SECONDS * scaling_factor , <nl> - under_test . calculate_timeout ( 30 , scaling_factor ) ) <nl> + self . assertEqual ( <nl> + under_test . MIN_TIMEOUT_SECONDS * scaling_factor + under_test . AVG_SETUP_TIME , <nl> + under_test . calculate_timeout ( 30 , scaling_factor ) ) <nl> <nl> <nl> class EvergreenConfigGeneratorTest ( unittest . TestCase ) : <nl> | SERVER - 45764 : Generate resmoke tasks need to take setup time into account when setting timeouts | mongodb/mongo | d35374c3a5c61e88340e1db6cb3e13599f6f75c0 | 2020-01-27T18:13:34Z |
mmm a / R - package / R / xgb . plot . tree . R <nl> ppp b / R - package / R / xgb . plot . tree . R <nl> <nl> # ' Plotting only works for boosted tree model ( not linear model ) . <nl> # ' <nl> # ' @ importFrom data . table data . table <nl> - # ' @ importFrom data . table set <nl> - # ' @ importFrom data . table rbindlist <nl> # ' @ importFrom data . table : = <nl> - # ' @ importFrom data . table copy <nl> # ' @ importFrom magrittr % > % <nl> # ' @ param feature_names names of each feature as a character vector . Can be extracted from a sparse matrix ( see example ) . If model dump already contains feature names , this argument should be \ code { NULL } . <nl> # ' @ param filename_dump the path to the text file storing the model . Model dump must include the gain per feature and per tree ( parameter \ code { with . stats = T } in function \ code { xgb . dump } ) . Possible to provide a model directly ( see \ code { model } argument ) . <nl> # ' @ param model generated by the \ code { xgb . train } function . Avoid the creation of a dump file . <nl> # ' @ param n_first_tree limit the plot to the n first trees . If \ code { NULL } , all trees of the model are plotted . Performance can be low for huge models . <nl> - # ' @ param width the width of the diagram in pixels . <nl> + # ' @ param width the width of the diagram in pixels . <nl> # ' @ param height the height of the diagram in pixels . <nl> # ' <nl> # ' @ return A \ code { DiagrammeR } of the model . <nl> xgb . plot . tree < - function ( feature_names = NULL , filename_dump = NULL , model = NU <nl> allTrees < - xgb . model . dt . tree ( feature_names = feature_names , model = model , n_first_tree = n_first_tree ) <nl> } <nl> <nl> - allTrees [ Feature ! = " Leaf " , yesPath : = paste ( ID , " ( " , Feature , " < br / > Cover : " , Cover , " < br / > Gain : " , Quality , " ) - - > | < " , Split , " | " , Yes , " > " , Yes . Feature , " ] " , sep = " " ) ] <nl> - <nl> - allTrees [ Feature ! = " Leaf " , noPath : = paste ( ID , " ( " , Feature , " ) - - > | > = " , Split , " | " , No , " > " , No . Feature , " ] " , sep = " " ) ] <nl> - <nl> allTrees [ , label : = paste0 ( Feature , " \ nCover : " , Cover , " \ nGain : " , Quality ) ] <nl> allTrees [ , shape : = " rectangle " ] [ Feature = = " Leaf " , shape : = " oval " ] <nl> allTrees [ , filledcolor : = " Beige " ] [ Feature = = " Leaf " , filledcolor : = " Khaki " ] <nl> <nl> - nodes < - DiagrammeR : : create_nodes ( nodes = allTrees [ , ID ] , <nl> - label = allTrees [ , label ] , <nl> - # type = c ( " lower " , " lower " , " upper " , " upper " ) , <nl> + # rev is used to put the first tree on top . <nl> + nodes < - DiagrammeR : : create_nodes ( nodes = allTrees [ , ID ] % > % rev , <nl> + label = allTrees [ , label ] % > % rev , <nl> style = " filled " , <nl> color = " DimGray " , <nl> - fillcolor = allTrees [ , filledcolor ] , <nl> - shape = allTrees [ , shape ] , <nl> - data = allTrees [ , Feature ] , <nl> + fillcolor = allTrees [ , filledcolor ] % > % rev , <nl> + shape = allTrees [ , shape ] % > % rev , <nl> + data = allTrees [ , Feature ] % > % rev , <nl> fontname = " Helvetica " <nl> ) <nl> <nl> xgb . plot . tree < - function ( feature_names = NULL , filename_dump = NULL , model = NU <nl> # Avoid error messages during CRAN check . <nl> # The reason is that these variables are never declared <nl> # They are mainly column names inferred by Data . table . . . <nl> - globalVariables ( c ( " Feature " , " ID " , " Cover " , " Quality " , " Split " , " Yes " , " No " , " . 
" , " shape " , " filledcolor " ) ) <nl> + globalVariables ( c ( " Feature " , " ID " , " Cover " , " Quality " , " Split " , " Yes " , " No " , " . " , " shape " , " filledcolor " , " label " ) ) <nl> | Fix some bug + improve display + code clean | dmlc/xgboost | 7cb34e3ad678200d8b2dc47b702d70601b41c6f6 | 2015-11-07T21:24:37Z |
mmm a / src / core / lib / gprpp / ref_counted . h <nl> ppp b / src / core / lib / gprpp / ref_counted . h <nl> namespace grpc_core { <nl> <nl> / / PolymorphicRefCount enforces polymorphic destruction of RefCounted . <nl> class PolymorphicRefCount { <nl> - public : <nl> protected : <nl> GRPC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE <nl> <nl> class PolymorphicRefCount { <nl> / / RefCounted . Please refer to grpc_core : : RefCounted for more details , and <nl> / / when in doubt use PolymorphicRefCount . <nl> class NonPolymorphicRefCount { <nl> - public : <nl> protected : <nl> GRPC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE <nl> <nl> | Remove unnecessary public : | grpc/grpc | 90e5ade73b3b042344a8822268269521766c4d16 | 2019-10-01T23:36:56Z |
similarity index 95 % <nl> rename from tools / extra / parselog . sh <nl> rename to tools / extra / parse_log . sh <nl> mmm a / tools / extra / parselog . sh <nl> ppp b / tools / extra / parse_log . sh <nl> <nl> # ! / bin / bash <nl> - # Usage parselog . sh caffe . log <nl> + # Usage parse_log . sh caffe . log <nl> # It creates two files one caffe . log . test that contains the loss and test accuracy of the test and <nl> # another one caffe . log . loss that contains the loss computed during the training <nl> <nl> DIR = " $ ( readlink - f $ ( dirname " $ 0 " ) ) " <nl> <nl> if [ " $ # " - lt 1 ] <nl> then <nl> - echo " Usage parselog . sh / path / to / your . log " <nl> + echo " Usage parse_log . sh / path / to / your . log " <nl> exit <nl> fi <nl> LOG = ` basename $ 1 ` <nl> mmm a / tools / extra / plot_log . gnuplot . example <nl> ppp b / tools / extra / plot_log . gnuplot . example <nl> <nl> # You had better check the data files before designing your own plots . <nl> <nl> # Please generate the neccessary data files with <nl> - # / path / to / caffe / scripts / parselog . sh before plotting . <nl> + # / path / to / caffe / scripts / parse_log . sh before plotting . <nl> # Example usage : <nl> - # . / parselog . sh mnist . log <nl> + # . / parse_log . sh mnist . log <nl> # Now you have mnist . log . train and mnist . log . test . <nl> # gnuplot mnist . gnuplot <nl> <nl> mmm a / tools / extra / plot_training_log . py . example <nl> ppp b / tools / extra / plot_training_log . py . example <nl> import matplotlib . markers as mks <nl> <nl> def get_log_parsing_script ( ) : <nl> dirname = os . path . dirname ( os . path . abspath ( inspect . getfile ( inspect . currentframe ( ) ) ) ) <nl> - return dirname + ' / parselog . sh ' <nl> + return dirname + ' / parse_log . sh ' <nl> <nl> def get_log_file_suffix ( ) : <nl> return ' . log ' <nl> | File naming convention requires that two words be split by a underscore | BVLC/caffe | 2ac5785561246ed99df3e32137257d412b1815bd | 2014-03-19T19:31:38Z |
mmm a / tensorflow / contrib / cluster_resolver / BUILD <nl> ppp b / tensorflow / contrib / cluster_resolver / BUILD <nl> py_library ( <nl> " python / training / __init__ . py " , <nl> ] , <nl> srcs_version = " PY2AND3 " , <nl> + visibility = [ " / / visibility : public " ] , <nl> deps = [ <nl> " : cluster_resolver_py " , <nl> " : gce_cluster_resolver_py " , <nl> | Internal - only change . | tensorflow/tensorflow | 4f1e771c10d75cc9014662e49b7906e0a16e2fe5 | 2018-02-21T23:24:47Z |
mmm a / tools / python / src / basic . cpp <nl> ppp b / tools / python / src / basic . cpp <nl> void resize ( T & v , unsigned long n ) { v . resize ( n ) ; } <nl> <nl> void bind_basic_types ( ) <nl> { <nl> - class_ < std : : vector < double > > ( " array " , init < > ( ) ) <nl> + class_ < std : : vector < double > > ( " array " , " This object represents a 1D array of floating point numbers . " <nl> + " Moreover , it binds directly to the C + + type std : : vector < double > . " , init < > ( ) <nl> + ) <nl> . def ( vector_indexing_suite < std : : vector < double > > ( ) ) <nl> . def ( " __init__ " , make_constructor ( & array_from_object ) ) <nl> . def ( " __str__ " , array__str__ ) <nl> mmm a / tools / python / src / matrix . cpp <nl> ppp b / tools / python / src / matrix . cpp <nl> void bind_matrix ( ) <nl> . def ( " __setitem__ " , & mat_row__setitem__ ) <nl> . def ( " __getitem__ " , & mat_row__getitem__ ) ; <nl> <nl> - class_ < matrix < double > > ( " matrix " , init < > ( ) ) <nl> + class_ < matrix < double > > ( " matrix " , " This object represents a dense 2D matrix of floating point numbers . " <nl> + " Moreover , it binds directly to the C + + type dlib : : matrix < double > . " , init < > ( ) ) <nl> . def ( " __init__ " , make_constructor ( & make_matrix_from_size ) ) <nl> . def ( " set_size " , & matrix_set_size , ( arg ( " rows " ) , arg ( " cols " ) ) , " Set the size of the matrix to the given number of rows and columns . " ) <nl> . def ( " __init__ " , make_constructor ( & from_object ) ) <nl> mmm a / tools / python / src / sequence_segmenter . cpp <nl> ppp b / tools / python / src / sequence_segmenter . cpp <nl> train_sequence_segmenter ( ) and cross_validate_sequence_segmenter ( ) routines . " <nl> . def ( " __str__ " , & segmenter_params__str__ ) <nl> . def_pickle ( serialize_pickle < segmenter_params > ( ) ) ; <nl> <nl> - class_ < segmenter_type > ( " segmenter_type " ) <nl> + class_ < segmenter_type > ( " segmenter_type " , " This object represents a sequence segmenter and is the type of object " <nl> + " returned by the dlib . train_sequence_segmenter ( ) routine . " ) <nl> . def ( " __call__ " , & segmenter_type : : segment_sequence_dense ) <nl> . def ( " __call__ " , & segmenter_type : : segment_sequence_sparse ) <nl> . def_readonly ( " weights " , & segmenter_type : : get_weights ) <nl> . def_pickle ( serialize_pickle < segmenter_type > ( ) ) ; <nl> <nl> - class_ < segmenter_test > ( " segmenter_test " ) <nl> + class_ < segmenter_test > ( " segmenter_test " , " This object is the output of the dlib . test_sequence_segmenter ( ) and " <nl> + " dlib . cross_validate_sequence_segmenter ( ) routines . " ) <nl> . def_readwrite ( " precision " , & segmenter_test : : precision ) <nl> . def_readwrite ( " recall " , & segmenter_test : : recall ) <nl> . def_readwrite ( " f1 " , & segmenter_test : : f1 ) <nl> | Added more python doc strings | davisking/dlib | a09a8f6a95fdb79a155e19028aa1ebd5094bf26e | 2013-08-09T15:56:06Z |
mmm a / src / mongo / client / connpool . cpp <nl> ppp b / src / mongo / client / connpool . cpp <nl> namespace mongo { <nl> c . conn = NULL ; <nl> } <nl> if ( alive ) { <nl> - c . conn - > clearAuthenticationTable ( ) ; <nl> all . push_back ( c ) ; <nl> } <nl> } <nl> mmm a / src / mongo / client / connpool . h <nl> ppp b / src / mongo / client / connpool . h <nl> namespace mongo { <nl> kill ( ) ; <nl> else <nl> * / <nl> - _conn - > clearAuthenticationTable ( ) ; <nl> pool . release ( _host , _conn ) ; <nl> _conn = 0 ; <nl> } <nl> mmm a / src / mongo / client / dbclient . cpp <nl> ppp b / src / mongo / client / dbclient . cpp <nl> namespace mongo { <nl> return QueryOptions ( 0 ) ; <nl> } <nl> <nl> - void DBClientWithCommands : : setAuthenticationTable ( const AuthenticationTable & auth ) { <nl> - _authTable = auth ; <nl> - _hasAuthentication = true ; <nl> - } <nl> - <nl> - void DBClientWithCommands : : clearAuthenticationTable ( ) { <nl> - _authTable . clearAuth ( ) ; / / This probably isn ' t necessary , but better to be safe . <nl> - _hasAuthentication = false ; <nl> - } <nl> - <nl> inline bool DBClientWithCommands : : runCommand ( const string & dbname , <nl> const BSONObj & cmd , <nl> BSONObj & info , <nl> mmm a / src / mongo / client / dbclientinterface . h <nl> ppp b / src / mongo / client / dbclientinterface . h <nl> namespace mongo { <nl> <nl> DBClientWithCommands ( ) : _logLevel ( 0 ) , <nl> _cachedAvailableOptions ( ( enum QueryOptions ) 0 ) , <nl> - _haveCachedAvailableOptions ( false ) , <nl> - _hasAuthentication ( false ) { } <nl> + _haveCachedAvailableOptions ( false ) { } <nl> <nl> / * * helper function . run a simple command where the command expression is simply <nl> { command : 1 } <nl> namespace mongo { <nl> <nl> bool exists ( const string & ns ) ; <nl> <nl> - virtual void setAuthenticationTable ( const AuthenticationTable & auth ) ; <nl> - virtual void clearAuthenticationTable ( ) ; <nl> - <nl> / * * Create an index if it does not already exist . <nl> ensureIndex calls are remembered so it is safe / fast to call this function many <nl> times in your code . <nl> namespace mongo { <nl> private : <nl> enum QueryOptions _cachedAvailableOptions ; <nl> bool _haveCachedAvailableOptions ; <nl> - AuthenticationTable _authTable ; <nl> - bool _hasAuthentication ; <nl> } ; <nl> <nl> / * * <nl> mmm a / src / mongo / db / security . h <nl> ppp b / src / mongo / db / security . h <nl> namespace mongo { <nl> / / Returns true if this AuthenticationInfo has been auth ' d to use the internal user <nl> bool usingInternalUser ( ) ; <nl> <nl> - const AuthenticationTable getAuthTable ( ) const ; <nl> - <nl> / / When TemporaryAuthReleaser goes out of scope it clears the temporary authentication set <nl> / / in its AuthenticationInfo object , unless that AuthenticationInfo already had temporary <nl> / / auth set at the time that the TemporaryAuthReleaser was initialized . <nl> mmm a / src / mongo / db / security_common . cpp <nl> ppp b / src / mongo / db / security_common . cpp <nl> namespace mongo { <nl> / / createPWDigest should really not be a member func <nl> DBClientConnection conn ; <nl> internalSecurity . pwd = conn . createPasswordDigest ( internalSecurity . user , str ) ; <nl> - AuthenticationTable : : getMutableInternalSecurityAuthenticationTable ( ) . addAuth ( <nl> - " local " , <nl> - internalSecurity . user , <nl> - Auth : : WRITE ) ; <nl> <nl> return true ; <nl> } <nl> namespace mongo { <nl> return authTable . getAuthForDb ( dbname ) . 
level > = level ; <nl> } <nl> <nl> - const AuthenticationTable AuthenticationInfo : : getAuthTable ( ) const { <nl> - scoped_spinlock lk ( _lock ) ; <nl> - return _authTable ; <nl> - } <nl> - <nl> } / / namespace mongo <nl> mmm a / src / mongo / s / shardconnection . cpp <nl> ppp b / src / mongo / s / shardconnection . cpp <nl> namespace mongo { <nl> } <nl> <nl> void release ( const string & addr , DBClientBase * conn ) { <nl> - conn - > clearAuthenticationTable ( ) ; <nl> shardConnectionPool . release ( addr , conn ) ; <nl> } <nl> <nl> | SERVER - 7572 Remove all references to AuthenticationTable outside of AuthenticationInfo | mongodb/mongo | 133677e7186540ac36a3a1502dd46d108bca24ee | 2012-12-19T18:27:45Z |
mmm a / system / include / emscripten / wire . h <nl> ppp b / system / include / emscripten / wire . h <nl> namespace emscripten { <nl> } <nl> <nl> static shared_ptr fromWireType ( WireType wt ) { <nl> - if ( wt ) { <nl> - return shared_ptr ( * wt ) ; <nl> - } else { <nl> - return shared_ptr ( ) ; <nl> - } <nl> + return * wt ; <nl> } <nl> <nl> static void destroy ( WireType p ) { <nl> | checkpoint work towards unifying the binding type for classes and smart pointers . | emscripten-core/emscripten | 3dd168f24a0294e7f306514d0bb71985457ab976 | 2013-04-12T11:25:12Z |
mmm a / src / network / transportsender . cc <nl> ppp b / src / network / transportsender . cc <nl> TransportSender < MyState > : : TransportSender ( Connection * s_connection , MyState & in <nl> ack_num ( 0 ) , <nl> pending_data_ack ( false ) , <nl> SEND_MINDELAY ( 15 ) , <nl> - last_heard ( 0 ) <nl> + last_heard ( 0 ) , <nl> + prng ( ) <nl> { <nl> - srand ( time ( NULL ) ) ; / * for chaff * / <nl> } <nl> <nl> / * Try to send roughly two frames per RTT , bounded by limits on frame rate * / <nl> void TransportSender < MyState > : : rationalize_states ( void ) <nl> template < class MyState > <nl> const string TransportSender < MyState > : : make_chaff ( void ) <nl> { <nl> - const int CHAFF_MAX = 16 ; <nl> + const size_t CHAFF_MAX = 16 ; <nl> + const size_t chaff_len = prng . uint8 ( ) % ( CHAFF_MAX + 1 ) ; <nl> <nl> char chaff [ CHAFF_MAX ] ; <nl> - for ( int i = 0 ; i < CHAFF_MAX ; i + + ) { <nl> - chaff [ i ] = rand ( ) % 256 ; <nl> - } <nl> - int chaff_len = rand ( ) % ( CHAFF_MAX + 1 ) ; <nl> + prng . fill ( chaff , chaff_len ) ; <nl> return string ( chaff , chaff_len ) ; <nl> } <nl> <nl> mmm a / src / network / transportsender . h <nl> ppp b / src / network / transportsender . h <nl> <nl> # include " transportinstruction . pb . h " <nl> # include " transportstate . h " <nl> # include " transportfragment . h " <nl> + # include " prng . h " <nl> <nl> using std : : list ; <nl> using std : : pair ; <nl> namespace Network { <nl> <nl> uint64_t last_heard ; / * last time received new state * / <nl> <nl> - static const string make_chaff ( void ) ; <nl> + / * chaff to disguise instruction length * / <nl> + PRNG prng ; <nl> + const string make_chaff ( void ) ; <nl> <nl> public : <nl> / * constructor * / <nl> | Use the PRNG class for chaff | mobile-shell/mosh | 4eb3cace0bc0ed5b5f09681ceac5097bc9d8f7f5 | 2012-03-20T03:12:07Z |