Dataset columns (with observed string lengths):
  diff: string, 41 to 2.03M characters
  msg:  string, 1 to 1.5k characters
  repo: string, 5 to 40 characters
  sha:  string, 40 characters (fixed)
  time: string, 20 characters (fixed)
--- a/src/heap/memory-reducer.cc
+++ b/src/heap/memory-reducer.cc
 void MemoryReducer::NotifyTimer(const Event& event) {
       PrintIsolate(heap()->isolate(), "Memory reducer: started GC #%d\n",
                    state_.started_gcs);
     }
-    if (heap()->ShouldOptimizeForMemoryUsage()) {
-      // Do full GC if memory usage has higher priority than latency. This is
-      // important for background tabs that do not send idle notifications.
-      heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
-                                "memory reducer");
-    } else {
-      heap()->StartIdleIncrementalMarking();
-    }
+    heap()->StartIdleIncrementalMarking();
   } else if (state_.action == kWait) {
     if (!heap()->incremental_marking()->IsStopped() &&
         heap()->ShouldOptimizeForMemoryUsage()) {
Remove non-incremental GC from memory reducer.
v8/v8
5449c9848aac7ccf79f440a269bcd5d3865b0738
2015-10-22T10:23:58Z
--- a/tensorflow/docs_src/api_guides/python/math_ops.md
+++ b/tensorflow/docs_src/api_guides/python/math_ops.md
 operators to your graph.
 *   `tf.realdiv`
 *   `tf.truncatediv`
 *   `tf.floor_div`
+*   `tf.div_no_nan`
 *   `tf.truncatemod`
 *   `tf.floormod`
 *   `tf.mod`
DOC: add div_no_nan in math module of user_guide
tensorflow/tensorflow
28758916d4e970a4ccd9c6af6dc393c3cdc16c58
2018-08-23T12:17:00Z
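The row above only adds `tf.div_no_nan` to a documentation index. As a quick illustration of what the op does (a minimal sketch assuming TensorFlow 2.x, where it is exposed as `tf.math.divide_no_nan` and returns 0 wherever the divisor is 0):

```python
import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0])
y = tf.constant([2.0, 0.0, 4.0])

# Ordinary division would produce inf/nan for the zero divisor;
# divide_no_nan yields 0 for that element instead.
print(tf.math.divide_no_nan(x, y).numpy())   # [0.5  0.   0.75]
```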
--- a/tensorflow/core/ops/math_ops.cc
+++ b/tensorflow/core/ops/math_ops.cc
 REGISTER_OP("Mod")
     .SetShapeFn(shape_inference::BroadcastBinaryOpShapeFn)
     .Doc(R"doc(
 Returns element-wise remainder of division. This emulates C semantics in that
-the result here is consistent with a truncating divide. E.g. `truncate(x / y) *
-y + truncate_mod(x, y) = x`.
+the result here is consistent with a truncating divide. E.g.
+`tf.truncatediv(x, y) * y + truncate_mod(x, y) = x`.
 
 *NOTE*: `Mod` supports broadcasting. More about broadcasting
 [here](http://docs.scipy.org/doc/numpy/user/basics.broadcasting.html)
Define truncatemod in terms of tf.truncatediv to be explicit.
tensorflow/tensorflow
9954458183ebd8d0ab5f7d06f063c8372dbcf6fb
2017-10-10T19:18:45Z
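The doc change above spells out the truncation identity `tf.truncatediv(x, y) * y + truncate_mod(x, y) = x`. A plain-Python check of the same C-style identity, using `math.trunc` and `math.fmod` rather than TensorFlow, makes the rounding behavior concrete:

```python
import math

def truncate_div(x, y):
    return math.trunc(x / y)   # rounds toward zero, like C integer division

def truncate_mod(x, y):
    return math.fmod(x, y)     # remainder carries the sign of x, like C's %

for x, y in [(7, 2), (-7, 2), (7, -2), (-7, -2)]:
    assert truncate_div(x, y) * y + truncate_mod(x, y) == x
    print(x, y, truncate_div(x, y), truncate_mod(x, y))
```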
--- a/lib/Sema/TypeCheckConstraints.cpp
+++ b/lib/Sema/TypeCheckConstraints.cpp
 namespace {
     llvm::DenseMap<TypeVariableType *, TypeVariableType *> Representatives;
     llvm::DenseMap<TypeVariableType *, Type> FixedTypes;
 
+    // Valid everywhere, for debugging
+    SmallVector<Constraint *, 16> SolvedConstraints;
+
     unsigned assignTypeVariableID() {
       return getTopConstraintSystem().TypeCounter++;
     }
 namespace {
     case SolutionKind::Solved:
       // This constraint has already been solved; there is nothing more
       // to do.
+      if (TC.getLangOpts().DebugConstraintSolver)
+        SolvedConstraints.push_back(constraint);
       return true;
 
     case SolutionKind::Unsolved:
 bool ConstraintSystem::simplify() {
       if (typeVarResolved.count(constraint))
         continue;
 
-      if (addConstraint(constraint))
+      if (addConstraint(constraint)) {
         solvedAny = true;
 
+        if (TC.getLangOpts().DebugConstraintSolver)
+          SolvedConstraints.push_back(constraint);
+      }
+
       if (failedConstraint) {
         return true;
       }
 void ConstraintSystem::dump() {
     }
   }
 
-  out << "\nConstraints:\n";
+  out << "\nUnsolved Constraints:\n";
   for (auto constraint : Constraints) {
     out.indent(2);
     constraint->print(out);
     out << "\n";
   }
 
+  out << "\nSolved Constraints:\n";
+  for (auto constraint : SolvedConstraints) {
+    out.indent(2);
+    constraint->print(out);
+    out << "\n";
+  }
+
   if (!UnresolvedOverloadSets.empty()) {
     out << "\nUnresolved overload sets:\n";
     for (auto overload : UnresolvedOverloadSets) {
--- a/lib/Sema/TypeCheckREPL.cpp
+++ b/lib/Sema/TypeCheckREPL.cpp
 #include "swift/AST/Expr.h"
 #include "swift/AST/NameLookup.h"
 #include "swift/AST/Stmt.h"
+#include "llvm/Support/SaveAndRestore.h"
 
 using namespace swift;
 
 PrintReplExpr(TypeChecker &TC, VarDecl *Arg, CanType T, SourceLoc Loc,
 
 /// Check an expression at the top level in a REPL.
 void TypeChecker::typeCheckTopLevelReplExpr(Expr *&E, TopLevelCodeDecl *TLCD) {
+  // FIXME: Remove this once the constraints-based type checker is the
+  // only type checker.
+  llvm::SaveAndRestore<bool> turnOffConstraints(
+      getLangOpts().UseConstraintSolver,
+      false);
+
   // If the input is an lvalue, force an lvalue-to-rvalue conversion.
   Expr *ConvertedE = convertToMaterializable(E);
   if (!ConvertedE)
 struct PatternBindingPrintLHS : public ASTVisitor<PatternBindingPrintLHS> {
 };
 
 void TypeChecker::REPLCheckPatternBinding(PatternBindingDecl *D) {
+  // FIXME: Remove this once the constraints-based type checker is the
+  // only type checker.
+  llvm::SaveAndRestore<bool> turnOffConstraints(
+      getLangOpts().UseConstraintSolver,
+      false);
   Expr *E = D->getInit();
 
   // FIXME: I'm assuming we don't need to bother printing the pattern binding
Improve debug output of the constraint-based type checker by showing
apple/swift
8764d659b3a20c20a4ac95b3e6a603edb4309ac0
2012-10-08T18:01:51Z
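The diff above relies on `llvm::SaveAndRestore<bool>`, an RAII helper that temporarily overrides a flag and puts the old value back on scope exit. A rough Python sketch of the same idea (hypothetical names, not the LLVM API):

```python
from contextlib import contextmanager

@contextmanager
def save_and_restore(obj, attr, temporary_value):
    """Temporarily override obj.attr, restoring the old value on exit."""
    old = getattr(obj, attr)
    setattr(obj, attr, temporary_value)
    try:
        yield
    finally:
        setattr(obj, attr, old)

class LangOpts:
    use_constraint_solver = True

opts = LangOpts()
with save_and_restore(opts, "use_constraint_solver", False):
    assert opts.use_constraint_solver is False  # solver disabled inside the block
assert opts.use_constraint_solver is True       # restored afterwards
```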
--- a/src/compiler/effect-control-linearizer.cc
+++ b/src/compiler/effect-control-linearizer.cc
 void EffectControlLinearizer::TransitionElementsTo(Node* node, Node* array,
 Node* EffectControlLinearizer::IsElementsKindGreaterThan(
     Node* kind, ElementsKind reference_kind) {
   Node* ref_kind = __ Int32Constant(reference_kind);
-  Node* ret = __ Int32LessThanOrEqual(ref_kind, kind);
+  Node* ret = __ Int32LessThan(ref_kind, kind);
   return ret;
 }
 
 void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
   //   if kind == HOLEY_SMI_ELEMENTS {
   //     if value is heap number {
   //       Transition array to HOLEY_DOUBLE_ELEMENTS
+  //       kind = HOLEY_DOUBLE_ELEMENTS
   //     } else {
   //       Transition array to HOLEY_ELEMENTS
+  //       kind = HOLEY_ELEMENTS
   //     }
   //   } else if kind == HOLEY_DOUBLE_ELEMENTS {
   //     if value is not heap number {
   //       Transition array to HOLEY_ELEMENTS
+  //       kind = HOLEY_ELEMENTS
   //     }
   //   }
   // }
   //
   // -- STORE PHASE ----------------------
+  // [make sure {kind} is up-to-date]
   //   if kind == HOLEY_DOUBLE_ELEMENTS {
   //     if value is smi {
   //       float_value = convert smi to float
 void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
     kind = __ Word32Shr(andit, shift);
   }
 
-  auto do_store = __ MakeLabel<6>();
+  auto do_store = __ MakeLabel<6>(MachineRepresentation::kWord32);
   Node* check1 = ObjectIsSmi(value);
-  __ GotoIf(check1, &do_store);
+  __ GotoIf(check1, &do_store, kind);
   {
     // {value} is a HeapObject.
     Node* check2 = IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS);
 void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
       // {value} is a HeapNumber.
       TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
                            HOLEY_DOUBLE_ELEMENTS);
-      __ Goto(&do_store);
+      __ Goto(&do_store, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS));
     }
     __ Bind(&if_value_not_heap_number);
     {
       TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
-      __ Goto(&do_store);
+      __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
     }
   }
   __ Bind(&if_array_not_fast_smi);
   {
     Node* check3 = IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS);
-    __ GotoUnless(check3, &do_store);
+    __ GotoUnless(check3, &do_store, kind);
     // We have double elements kind.
     Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
     Node* heap_number_map = __ HeapNumberMapConstant();
     Node* check4 = __ WordEqual(value_map, heap_number_map);
-    __ GotoIf(check4, &do_store);
+    __ GotoIf(check4, &do_store, kind);
     // But the value is not a heap number, so we must transition.
     TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
-    __ Goto(&do_store);
+    __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
   }
 }
 
+  // Make sure kind is up-to-date.
   __ Bind(&do_store);
+  kind = do_store.PhiAt(0);
+
   Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
   Node* check2 = IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS);
   auto if_kind_is_double = __ MakeLabel<1>();
new file mode 100644
index 00000000000..3b5116acd0d
--- /dev/null
+++ b/test/mjsunit/regress/regress-747075.js
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+r = [
+  14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+  14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14
+];
+
+
+for (i = -1; i < 100000; i++) {
+  r2 = r.map(function(y) { return y / 64 });
+  assertTrue(r2[0] < 1);
+}
[TurboFan] Array.prototype.map inlining error
v8/v8
d9b98f3d050da5c6fe2182bfc60fdfa3a397e2b8
2017-07-25T04:05:56Z
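The one-character fix above turns a less-than-or-equal test into a strict less-than, because `IsElementsKindGreaterThan(kind, ref)` must be false when the two kinds are equal. A tiny illustrative sketch of the off-by-one, with made-up constants rather than V8's real ElementsKind values:

```python
HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS, HOLEY_DOUBLE_ELEMENTS = 1, 3, 5  # illustrative ordering only

def is_kind_greater_than_buggy(kind, ref):
    return ref <= kind   # also true when kind == ref, i.e. "greater or equal"

def is_kind_greater_than_fixed(kind, ref):
    return ref < kind    # strictly greater, matching the function's name

kind = HOLEY_ELEMENTS
print(is_kind_greater_than_buggy(kind, HOLEY_ELEMENTS))  # True: wrong branch would be taken
print(is_kind_greater_than_fixed(kind, HOLEY_ELEMENTS))  # False
```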
--- a/hphp/hack/src/facts/symbols/sqliteSymbolIndexWriter.ml
+++ b/hphp/hack/src/facts/symbols/sqliteSymbolIndexWriter.ml
 
  *
  *)
 open SearchUtils
+open Core_kernel
 
 (* Some SQL commands we'll need *)
 let sql_begin_transaction =
 let sql_create_indexes =
   "CREATE INDEX IF NOT EXISTS ix_symbols_name ON symbols (name);" ^
   "CREATE INDEX IF NOT EXISTS ix_symbols_kindname ON symbols (kind, name);"
 
-(* Gather a database and prepared statement into a tuple *)
-let prepare_or_reset_statement (db) (stmt_ref) (sql_command_text) =
-  let stmt = match !stmt_ref with
-    | Some s ->
-      let _ = Sqlite3.reset s in
-      s
-    | None ->
-      let s = Sqlite3.prepare db sql_command_text in
-      stmt_ref := Some s;
-      s
-  in
-  stmt
-;;
+(* Capture results from sqlite and crash if database fails *)
+let check_rc (rc: Sqlite3.Rc.t): unit =
+  if rc <> Sqlite3.Rc.OK && rc <> Sqlite3.Rc.DONE
+  then failwith (Printf.sprintf "SQLite operation failed: %s" (Sqlite3.Rc.to_string rc))
 
 (* Begin the work of creating an SQLite index DB *)
 let record_in_db
     (filename: string)
     (symbols: si_results): unit =
 
+  (* If the file exists, remove it before starting over *)
+  if Sys.file_exists filename then begin
+    Unix.unlink filename;
+  end;
+
   (* Open the database and do basic prep *)
   let db = Sqlite3.db_open filename in
-  let _ = Sqlite3.exec db sql_create_symbols_table in
-  let _ = Sqlite3.exec db sql_create_indexes in
-  let _ = Sqlite3.exec db sql_create_kinds_table in
-  let _ = Sqlite3.exec db sql_insert_kinds in
-  let _ = Sqlite3.exec db sql_begin_transaction in
+  Sqlite3.exec db "PRAGMA synchronous = OFF;" |> check_rc;
+  Sqlite3.exec db "PRAGMA journal_mode = MEMORY;" |> check_rc;
+  Sqlite3.exec db sql_create_symbols_table |> check_rc;
+  Sqlite3.exec db sql_create_indexes |> check_rc;
+  Sqlite3.exec db sql_create_kinds_table |> check_rc;
+  Sqlite3.exec db sql_insert_kinds |> check_rc;
+  Sqlite3.exec db sql_begin_transaction |> check_rc;
 
   (* Insert records *)
-  let insert_symbol_stmt = ref None in
-  Core_kernel.List.iter symbols ~f:(fun symbol -> begin
-    let stmt = prepare_or_reset_statement db insert_symbol_stmt sql_insert_symbol in
-    let _ = Sqlite3.bind stmt 1 (Sqlite3.Data.TEXT symbol.si_name) in
-    let _ = Sqlite3.bind stmt 2 (Sqlite3.Data.INT
-      (Int64.of_int (kind_to_int symbol.si_kind))) in
-    let _ = Sqlite3.step stmt in
-    ()
-  end);
+  begin
+    let stmt = Sqlite3.prepare db sql_insert_symbol in
+    List.iter symbols ~f:(fun symbol -> begin
+      Sqlite3.reset stmt |> check_rc;
+      Sqlite3.bind stmt 1 (Sqlite3.Data.TEXT symbol.si_name) |> check_rc;
+      Sqlite3.bind stmt 2 (Sqlite3.Data.INT
+        (Int64.of_int (kind_to_int symbol.si_kind))) |> check_rc;
+      Sqlite3.step stmt |> check_rc;
+    end);
+    Sqlite3.finalize stmt |> check_rc;
+  end;
 
   (* Finish up *)
-  let _ = Sqlite3.exec db sql_commit_transaction in
-  let _ = Sqlite3.db_close db in
-  ()
+  Sqlite3.exec db sql_commit_transaction |> check_rc;
+  if not (Sqlite3.db_close db) then
+    failwith ("Unable to close database " ^ filename)
 ;;
Clean up usage of sqlite in writer
facebook/hhvm
6398b61b02569c162b35b9292ee84987415976be
2019-05-15T01:39:19Z
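The commit above settles on one prepared statement, explicit transactions, PRAGMAs, and return-code checks for every SQLite call. For comparison only (this is not the Hack code), the same write pattern in Python's standard sqlite3 module could look roughly like:

```python
import os
import sqlite3

def record_in_db(filename, symbols):
    """Write (name, kind) symbol rows into a fresh SQLite index file."""
    if os.path.exists(filename):   # start from a clean file, as in the diff
        os.unlink(filename)
    db = sqlite3.connect(filename)
    try:
        db.execute("PRAGMA synchronous = OFF;")
        db.execute("PRAGMA journal_mode = MEMORY;")
        db.execute("CREATE TABLE IF NOT EXISTS symbols (name TEXT, kind INTEGER);")
        db.execute("CREATE INDEX IF NOT EXISTS ix_symbols_name ON symbols (name);")
        with db:   # one transaction around the bulk insert
            db.executemany("INSERT INTO symbols (name, kind) VALUES (?, ?);", symbols)
    finally:
        db.close()

record_in_db("symbols.db", [("Foo", 1), ("bar", 2)])
```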
--- a/modules/planning/optimizer/dp_st_speed/dp_st_graph.cc
+++ b/modules/planning/optimizer/dp_st_speed/dp_st_graph.cc
 Status DpStGraph::GetObjectDecision(const StGraphData& st_graph_data,
        boundary_it != obs_boundaries.end(); ++boundary_it) {
     CHECK_EQ(boundary_it->points().size(), 4);
 
-    if (boundary_it->points().front().x() <= 0) {
-      ObjectDecisionType yield_decision;
-      if (!CreateYieldDecision(*boundary_it, &yield_decision)) {
-        AERROR << "Failed to create yield decision for boundary with id "
-               << boundary_it->id();
-        return Status(ErrorCode::PLANNING_ERROR,
-                      "faind to create yield decision");
-      }
-      if (!path_decision->AddDecision("dp_st_graph", boundary_it->id(),
-                                      yield_decision)) {
-        AERROR << "Failed to add decision to object " << boundary_it->id();
-        return Status(ErrorCode::PLANNING_ERROR, "failed to find object");
-      }
-      continue;
-    }
     double start_t = 0.0;
     double end_t = 0.0;
     boundary_it->GetBoundaryTimeScope(&start_t, &end_t);
planning: remove an incorrect and redundant logic to determing yield decision in dp_st_graph.cc
ApolloAuto/apollo
7713b572b45a0b0993288c3c11f72923ebb433c2
2017-08-12T00:21:19Z
new file mode 100644 <nl> index 0000000000 . . 114309b749 <nl> mmm / dev / null <nl> ppp b / change / react - native - windows - 2019 - 10 - 07 - 13 - 38 - 26 - jsi - packagebump . json <nl> <nl> + { <nl> + " type " : " prerelease " , <nl> + " comment " : " Bump V8 and Hermes JSI package versions " , <nl> + " packageName " : " react - native - windows " , <nl> + " email " : " tudorm @ microsoft . com " , <nl> + " commit " : " 03687c736e10aa54998572a706c7718ca8342f34 " , <nl> + " date " : " 2019 - 10 - 07T20 : 38 : 25 . 953Z " , <nl> + " file " : " D : \ \ React \ \ react - native - windows \ \ change \ \ react - native - windows - 2019 - 10 - 07 - 13 - 38 - 26 - jsi - packagebump . json " <nl> + } <nl> \ No newline at end of file <nl> mmm a / vnext / ReactUWP / ReactUWP . vcxproj <nl> ppp b / vnext / ReactUWP / ReactUWP . vcxproj <nl> <nl> < ImportGroup Label = " ExtensionTargets " > <nl> < Import Project = " $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) " / > <nl> < Import Project = " $ ( SolutionDir ) packages \ boost_date_time - vc141 . 1 . 68 . 0 . 0 \ build \ boost_date_time - vc141 . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ boost_date_time - vc141 . 1 . 68 . 0 . 0 \ build \ boost_date_time - vc141 . targets ' ) " / > <nl> - < Import Project = " $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> - < Import Project = " $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> + < Import Project = " $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> + < Import Project = " $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> < / ImportGroup > <nl> < Target Name = " EnsureNuGetPackageBuildImports " BeforeTargets = " PrepareForBuild " > <nl> < PropertyGroup > <nl> <nl> < / PropertyGroup > <nl> < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) ) " / > <nl> < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ boost_date_time - vc141 . 1 . 68 . 
0 . 0 \ build \ boost_date_time - vc141 . targets ' ) " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ boost_date_time - vc141 . 1 . 68 . 0 . 0 \ build \ boost_date_time - vc141 . targets ' ) ) " / > <nl> - < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) ) " / > <nl> - < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) ) " / > <nl> + < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) ) " / > <nl> + < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) ) " / > <nl> < / Target > <nl> < Target Name = " AfterCppClean " > <nl> < RemoveDir Directories = " $ ( IdlHeaderDirectory ) " ContinueOnError = " true " / > <nl> mmm a / vnext / ReactUWP / packages . config <nl> ppp b / vnext / ReactUWP / packages . config <nl> <nl> < packages > <nl> < package id = " boost " version = " 1 . 68 . 0 . 0 " targetFramework = " native " / > <nl> < package id = " boost_date_time - vc141 " version = " 1 . 68 . 0 . 0 " targetFramework = " native " / > <nl> - < package id = " ReactNative . Hermes . Windows " version = " 0 . 1 . 1 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> - < package id = " ReactNative . V8JSI . Windows " version = " 0 . 1 . 3 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> + < package id = " ReactNative . Hermes . Windows " version = " 0 . 1 . 3 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> + < package id = " ReactNative . V8JSI . Windows " version = " 0 . 1 . 4 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> < / packages > <nl> \ No newline at end of file <nl> mmm a / vnext / ReactWindowsCore / ReactWindowsCore . vcxproj <nl> ppp b / vnext / ReactWindowsCore / ReactWindowsCore . 
vcxproj <nl> <nl> < PreprocessorDefinitions Condition = " ' $ ( ENABLE_JS_SYSTRACE ) ' = = ' true ' AND ' $ ( OSS_RN ) ' ! = ' true ' " > ENABLE_JS_SYSTRACE ; WITH_FBSYSTRACE ; % ( PreprocessorDefinitions ) < / PreprocessorDefinitions > <nl> < PreprocessorDefinitions Condition = " ' $ ( ENABLE_TRACE_POSTPROCESSING ) ' = = ' true ' AND ' $ ( OSS_RN ) ' ! = ' true ' " > ENABLE_TRACE_POSTPROCESSING ; % ( PreprocessorDefinitions ) < / PreprocessorDefinitions > <nl> < AdditionalIncludeDirectories > $ ( ReactNativeWindowsDir ) Shared ; $ ( ReactNativeWindowsDir ) ; $ ( ReactNativeWindowsDir ) include ; $ ( ReactNativeWindowsDir ) include \ ReactWindowsCore ; $ ( ReactNativeDir ) \ ReactCommon ; $ ( ReactNativeDir ) \ ReactCommon \ jsi ; $ ( ReactNativeWindowsDir ) stubs ; $ ( FollyDir ) ; $ ( ReactNativeWindowsDir ) \ ReactWindowsCore \ tracing ; $ ( ProjectDir ) ; $ ( GeneratedFilesDir ) ; $ ( IntDir ) ; % ( AdditionalIncludeDirectories ) < / AdditionalIncludeDirectories > <nl> - < AdditionalIncludeDirectories Condition = " ' $ ( USE_HERMES ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ installed \ $ ( VcpkgTriplet ) \ include \ ; % ( AdditionalIncludeDirectories ) < / AdditionalIncludeDirectories > <nl> - < AdditionalIncludeDirectories Condition = " ' $ ( USE_V8 ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ installed \ $ ( VcpkgTriplet ) \ include \ ; % ( AdditionalIncludeDirectories ) < / AdditionalIncludeDirectories > <nl> + < AdditionalIncludeDirectories Condition = " ' $ ( USE_HERMES ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ installed \ $ ( VcpkgTriplet ) \ include \ ; % ( AdditionalIncludeDirectories ) < / AdditionalIncludeDirectories > <nl> + < AdditionalIncludeDirectories Condition = " ' $ ( USE_V8 ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ installed \ $ ( VcpkgTriplet ) \ include \ ; % ( AdditionalIncludeDirectories ) < / AdditionalIncludeDirectories > <nl> < PreprocessToFile > false < / PreprocessToFile > <nl> < RuntimeTypeInfo Condition = " ' $ ( Configuration ) | $ ( Platform ) ' = = ' Debug | Win32 ' " > false < / RuntimeTypeInfo > <nl> < RuntimeTypeInfo Condition = " ' $ ( Configuration ) | $ ( Platform ) ' = = ' Debug | x64 ' " > false < / RuntimeTypeInfo > <nl> <nl> < GenerateWindowsMetadata > false < / GenerateWindowsMetadata > <nl> < / Link > <nl> < Lib > <nl> - < AdditionalLibraryDirectories Condition = " ' $ ( USE_HERMES ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ installed \ $ ( VcpkgTriplet ) \ lib ; % ( AdditionalLibraryDirectories ) < / AdditionalLibraryDirectories > <nl> - < AdditionalLibraryDirectories Condition = " ' $ ( USE_V8 ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ installed \ $ ( VcpkgTriplet ) \ lib ; % ( AdditionalLibraryDirectories ) < / AdditionalLibraryDirectories > <nl> + < AdditionalLibraryDirectories Condition = " ' $ ( USE_HERMES ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ installed \ $ ( VcpkgTriplet ) \ lib ; % ( AdditionalLibraryDirectories ) < / AdditionalLibraryDirectories > <nl> + < AdditionalLibraryDirectories Condition = " ' $ ( USE_V8 ) ' = = ' true ' " > $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 
4 \ installed \ $ ( VcpkgTriplet ) \ lib ; % ( AdditionalLibraryDirectories ) < / AdditionalLibraryDirectories > <nl> < AdditionalDependencies Condition = " ' $ ( USE_HERMES ) ' = = ' true ' " > hermes . lib ; % ( AdditionalDependencies ) < / AdditionalDependencies > <nl> < AdditionalDependencies Condition = " ' $ ( USE_V8 ) ' = = ' true ' " > v8jsi . lib ; % ( AdditionalDependencies ) < / AdditionalDependencies > <nl> < / Lib > <nl> <nl> < Import Project = " $ ( VCTargetsPath ) \ Microsoft . Cpp . targets " / > <nl> < ImportGroup Label = " ExtensionTargets " > <nl> < Import Project = " $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) " / > <nl> - < Import Project = " $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> - < Import Project = " $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> + < Import Project = " $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> + < Import Project = " $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets " Condition = " Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> < / ImportGroup > <nl> < Target Name = " EnsureNuGetPackageBuildImports " BeforeTargets = " PrepareForBuild " > <nl> < PropertyGroup > <nl> < ErrorText > This project references NuGet package ( s ) that are missing on this computer . Use NuGet Package Restore to download them . For more information , see http : / / go . microsoft . com / fwlink / ? LinkID = 322105 . The missing file is { 0 } . < / ErrorText > <nl> < / PropertyGroup > <nl> < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ boost . 1 . 68 . 0 . 0 \ build \ boost . targets ' ) ) " / > <nl> - < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 1 \ build \ native \ ReactNative . Hermes . Windows . 
targets ' ) ) " / > <nl> - < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) ) " / > <nl> + < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . Hermes . Windows . 0 . 1 . 3 \ build \ native \ ReactNative . Hermes . Windows . targets ' ) ) " / > <nl> + < Error Condition = " ! Exists ( ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) AND ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " Text = " $ ( [ System . String ] : : Format ( ' $ ( ErrorText ) ' , ' $ ( SolutionDir ) packages \ ReactNative . V8JSI . Windows . 0 . 1 . 4 \ build \ native \ ReactNative . V8JSI . Windows . targets ' ) ) " / > <nl> < / Target > <nl> < Target Name = " EnsureReactNativewindowsDir " BeforeTargets = " PrepareForBuild " > <nl> < Error Condition = " ' $ ( MSBuildThisFileDirectory ) ' ! = ' $ ( ReactNativeWindowsDir ) ReactWindowsCore \ ' " Text = " The value of ReactNativeWindowsDir should be the actual location of the package , not a symlink . & # xD ; & # xA ; MSBuildThisFileDirectory : ' $ ( MSBuildThisFileDirectory ) ' - ReactNativeWindowsDir : ' $ ( ReactNativeWindowsDir ) ReactWindowsCore \ ' ' " / > <nl> mmm a / vnext / ReactWindowsCore / packages . config <nl> ppp b / vnext / ReactWindowsCore / packages . config <nl> <nl>  < ? xml version = " 1 . 0 " encoding = " utf - 8 " ? > <nl> < packages > <nl> < package id = " boost " version = " 1 . 68 . 0 . 0 " targetFramework = " native " / > <nl> - < package id = " ReactNative . Hermes . Windows " version = " 0 . 1 . 1 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> - < package id = " ReactNative . V8JSI . Windows " version = " 0 . 1 . 3 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> + < package id = " ReactNative . Hermes . Windows " version = " 0 . 1 . 3 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_HERMES ) ' = = ' true ' " / > <nl> + < package id = " ReactNative . V8JSI . Windows " version = " 0 . 1 . 4 " targetFramework = " native " Condition = " ' $ ( OSS_RN ) ' ! = ' true ' AND ' $ ( USE_V8 ) ' = = ' true ' " / > <nl> < / packages > <nl> \ No newline at end of file <nl> mmm a / vnext / Shared / OInstance . cpp <nl> ppp b / vnext / Shared / OInstance . 
cpp <nl> InstanceImpl : : InstanceImpl ( <nl> m_devSettings - > jsiRuntimeHolder = <nl> std : : make_shared < facebook : : react : : V8JSIRuntimeHolder > ( <nl> m_devSettings , <nl> - jsQueue , <nl> + m_jsThread , <nl> std : : move ( scriptStore ) , <nl> std : : move ( preparedScriptStore ) ) ; <nl> break ; <nl> InstanceImpl : : InstanceImpl ( <nl> default : / / TODO : Add other engines once supported <nl> m_devSettings - > jsiRuntimeHolder = <nl> std : : make_shared < Microsoft : : JSI : : ChakraRuntimeHolder > ( <nl> - m_devSettings , jsQueue , nullptr , nullptr ) ; <nl> + m_devSettings , m_jsThread , nullptr , nullptr ) ; <nl> break ; <nl> } <nl> jsef = std : : make_shared < OJSIExecutorFactory > ( <nl>
Bump V8 and Hermes package versions ()
microsoft/react-native-windows
f205027a004708b2bcfbbdd5a7f782775ba672a8
2019-10-07T21:41:38Z
--- a/aten/src/ATen/core/Error.cpp
+++ b/aten/src/ATen/core/Error.cpp
 
 #include <ATen/core/Backtrace.h>
 
 #include <iostream>
+#include <numeric>
 #include <string>
 
 namespace at {
+
+namespace detail {
+
+std::string StripBasename(const std::string& full_path) {
+  const char kSeparator = '/';
+  size_t pos = full_path.rfind(kSeparator);
+  if (pos != std::string::npos) {
+    return full_path.substr(pos + 1, std::string::npos);
+  } else {
+    return full_path;
+  }
+}
+
+} // namespace detail
+
 std::ostream& operator<<(std::ostream& out, const SourceLocation& loc) {
   out << loc.function << " at " << loc.file << ":" << loc.line;
   return out;
 }
 
-Error::Error(SourceLocation source_location, std::string err)
-    : what_without_backtrace_(err),
-      what_(
-          str(err,
-              " (",
+Error::Error(
+    const std::string& new_msg,
+    const std::string& backtrace,
+    const void* caller)
+    : msg_stack_{new_msg}, backtrace_(backtrace), caller_(caller) {
+  msg_ = msg();
+  msg_without_backtrace_ = msg_without_backtrace();
+}
+
+// PyTorch-style error message
+Error::Error(SourceLocation source_location, const std::string& msg)
+    : Error(
+          msg,
+          str(" (",
              source_location,
              ")\n",
              get_backtrace(/*frames_to_skip=*/2))) {}
 
+// Caffe2-style error message
+Error::Error(
+    const char* file,
+    const int line,
+    const char* condition,
+    const std::string& msg,
+    const void* caller)
+    : Error(
+          str("[enforce fail at ",
+              detail::StripBasename(file),
+              ":",
+              line,
+              "] ",
+              condition,
+              ". ",
+              msg),
+          str("\n", get_backtrace(/*frames_to_skip=*/2)),
+          caller) {}
+
+std::string Error::msg() const {
+  return std::accumulate(
+             msg_stack_.begin(), msg_stack_.end(), std::string("")) +
+      backtrace_;
+}
+
+std::string Error::msg_without_backtrace() const {
+  return std::accumulate(msg_stack_.begin(), msg_stack_.end(), std::string(""));
+}
+
+void Error::AppendMessage(const std::string& new_msg) {
+  msg_stack_.push_back(new_msg);
+  // Refresh the cache
+  // TODO: Calling AppendMessage O(n) times has O(n^2) cost. We can fix
+  // this perf problem by populating the fields lazily... if this ever
+  // actually is a problem.
+  msg_ = msg();
+  msg_without_backtrace_ = msg_without_backtrace();
+}
+
 void Warning::warn(SourceLocation source_location, std::string msg) {
   warning_handler_(source_location, msg.c_str());
 }
--- a/aten/src/ATen/core/Error.h
+++ b/aten/src/ATen/core/Error.h
 
 #include <ostream>
 #include <sstream>
 #include <string>
+#include <vector>
 
 #if defined(_MSC_VER) && _MSC_VER <= 1900
 #define __func__ __FUNCTION__
 namespace at {
 
 namespace detail {
 
+// Obtains the base name from a full path.
+std::string StripBasename(const std::string& full_path);
+
 inline std::ostream& _str(std::ostream& ss) {
   return ss;
 }
 std::ostream& operator<<(std::ostream& out, const SourceLocation& loc);
 /// NB: at::Error is handled specially by the default torch to suppress the
 /// backtrace, see torch/csrc/Exceptions.h
 class AT_CORE_API Error : public std::exception {
-  std::string what_without_backtrace_;
-  std::string what_;
+  std::vector<std::string> msg_stack_;
+  std::string backtrace_;
+
+  // These two are derived fields from msg_stack_ and backtrace_, but we need
+  // fields for the strings so that we can return a const char* (as the
+  // signature of std::exception requires).
+  std::string msg_;
+  std::string msg_without_backtrace_;
+
+  // This is a little debugging trick: you can stash a relevant pointer
+  // in caller, and then when you catch the exception, you can compare
+  // against pointers you have on hand to get more information about
+  // where the exception came from. In Caffe2, this is used to figure
+  // out which operator raised an exception.
+  const void* caller_;
 
  public:
-  Error(SourceLocation source_location, std::string err);
+  Error(
+      const std::string& msg,
+      const std::string& backtrace,
+      const void* caller = nullptr);
+  Error(SourceLocation source_location, const std::string& msg);
+  Error(
+      const char* file,
+      const int line,
+      const char* condition,
+      const std::string& msg,
+      const void* caller);
+
+  void AppendMessage(const std::string& msg);
+
+  // Compute the full message from msg_ and msg_without_backtrace_
+  // TODO: Maybe this should be private
+  std::string msg() const;
+  std::string msg_without_backtrace() const;
+
+  const std::vector<std::string>& msg_stack() const {
+    return msg_stack_;
+  }
 
   /// Returns the complete error message, including the source location.
   const char* what() const noexcept override {
-    return what_.c_str();
+    return msg_.c_str();
+  }
+
+  const void* caller() const noexcept {
+    return caller_;
   }
 
   /// Returns only the error message string, without source location.
   const char* what_without_backtrace() const noexcept {
-    return what_without_backtrace_.c_str();
+    return msg_without_backtrace_.c_str();
   }
 };
 
--- a/caffe2/core/logging.cc
+++ b/caffe2/core/logging.cc
 CAFFE2_DEFINE_bool(caffe2_use_fatal_for_enforce, false,
     "of throwing an exception.");
 
 namespace caffe2 {
-std::string StripBasename(const std::string& full_path) {
-  const char kSeparator = '/';
-  size_t pos = full_path.rfind(kSeparator);
-  if (pos != std::string::npos) {
-    return full_path.substr(pos + 1, std::string::npos);
-  } else {
-    return full_path;
-  }
-}
 
 size_t ReplaceAll(string& s, const char* from, const char* to) {
   CAFFE_ENFORCE(from && *from);
 EnforceNotMet::EnforceNotMet(
     const void* caller)
     : msg_stack_{MakeString(
           "[enforce fail at ",
-          StripBasename(std::string(file)),
+          at::detail::StripBasename(std::string(file)),
           ":",
           line,
           "] ",
 MessageLogger::MessageLogger(const char* file, int line, int severity)
   //          << ":" << std::setw(2) << timeinfo->tm_min
   //          << ":" << std::setw(2) << timeinfo->tm_sec
   //          << "." << std::setw(9) << ns.count() % 1000000000
-           << " " << StripBasename(std::string(file)) << ":" << line << "] ";
+           << " " << at::detail::StripBasename(std::string(file)) << ":" << line << "] ";
 }
 
 // Output the contents of the stream to the proper channel on destruction.
--- a/caffe2/core/logging.h
+++ b/caffe2/core/logging.h
 
 #include <limits>
 #include <sstream>
 
+#include <ATen/core/Error.h>
 #include "caffe2/core/flags.h"
 #include "caffe2/proto/caffe2.pb.h"
 
 inline string Join(const string& delimiter, const Container& v) {
   return s.str();
 }
 
-// Obtains the base name from a full path.
-string StripBasename(const std::string& full_path);
-
 // Replace all occurrences of "from" substring to "to" string.
 // Returns number of replacements
 size_t ReplaceAll(string& s, const char* from, const char* to);
Make at::Error look more like caffe2::EnforceNotMet ()
pytorch/pytorch
e2976ea5196731674ea9281dc63353e939b7ba47
2018-08-04T02:25:03Z
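The refactor above gives `at::Error` a message stack plus `AppendMessage`, so callers can attach context while the exception propagates. A loose Python analogue of that design, illustrative only and not the PyTorch API:

```python
class StackedError(Exception):
    """Exception that accumulates context messages, similar in spirit to a message stack."""

    def __init__(self, msg, backtrace=""):
        self.msg_stack = [msg]
        self.backtrace = backtrace
        super().__init__(self._full_message())

    def _full_message(self):
        return "".join(self.msg_stack) + self.backtrace

    def append_message(self, msg):
        self.msg_stack.append(msg)
        self.args = (self._full_message(),)   # refresh the cached message

try:
    try:
        raise StackedError("[enforce fail] tensor shape mismatch. ")
    except StackedError as e:
        e.append_message("(while running operator Conv2d)")
        raise
except StackedError as e:
    print(e)   # [enforce fail] tensor shape mismatch. (while running operator Conv2d)
```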
--- a/lib/IRGen/GenClangType.cpp
+++ b/lib/IRGen/GenClangType.cpp
 GenClangType::visitBoundGenericType(CanBoundGenericType type) {
   // The first two are structs; the last is an enum.
   if (auto underlyingTy = type->getAnyOptionalObjectType()) {
     assert((underlyingTy->is<FunctionType>() ||
+            underlyingTy->is<DynamicSelfType>() ||
             underlyingTy->getClassOrBoundGenericClass() ||
             underlyingTy->isClassExistentialType()) &&
            "Unexpected optional type in Clang type generation!");
When looking through optional types in clang-type IRGen,
apple/swift
e77c9e835d44211766ad570ebacf7dca0824ce1d
2014-03-01T00:26:39Z
mmm a / src / compiler / wasm - compiler . cc <nl> ppp b / src / compiler / wasm - compiler . cc <nl> Node * WasmGraphBuilder : : TableSet ( uint32_t table_index , Node * index , Node * val , <nl> } <nl> <nl> Node * WasmGraphBuilder : : CheckBoundsAndAlignment ( <nl> - uint8_t access_size , Node * index , uint32_t offset , <nl> + int8_t access_size , Node * index , uint32_t offset , <nl> wasm : : WasmCodePosition position ) { <nl> / / Atomic operations need bounds checks until the backend can emit protected <nl> / / loads . <nl> Node * WasmGraphBuilder : : Simd8x16ShuffleOp ( const uint8_t shuffle [ 16 ] , <nl> inputs [ 0 ] , inputs [ 1 ] ) ; <nl> } <nl> <nl> - # define ATOMIC_BINOP_LIST ( V ) \ <nl> - V ( I32AtomicAdd , Add , Uint32 , Word32 ) \ <nl> - V ( I64AtomicAdd , Add , Uint64 , Word64 ) \ <nl> - V ( I32AtomicAdd8U , Add , Uint8 , Word32 ) \ <nl> - V ( I32AtomicAdd16U , Add , Uint16 , Word32 ) \ <nl> - V ( I64AtomicAdd8U , Add , Uint8 , Word64 ) \ <nl> - V ( I64AtomicAdd16U , Add , Uint16 , Word64 ) \ <nl> - V ( I64AtomicAdd32U , Add , Uint32 , Word64 ) \ <nl> - V ( I32AtomicSub , Sub , Uint32 , Word32 ) \ <nl> - V ( I64AtomicSub , Sub , Uint64 , Word64 ) \ <nl> - V ( I32AtomicSub8U , Sub , Uint8 , Word32 ) \ <nl> - V ( I32AtomicSub16U , Sub , Uint16 , Word32 ) \ <nl> - V ( I64AtomicSub8U , Sub , Uint8 , Word64 ) \ <nl> - V ( I64AtomicSub16U , Sub , Uint16 , Word64 ) \ <nl> - V ( I64AtomicSub32U , Sub , Uint32 , Word64 ) \ <nl> - V ( I32AtomicAnd , And , Uint32 , Word32 ) \ <nl> - V ( I64AtomicAnd , And , Uint64 , Word64 ) \ <nl> - V ( I32AtomicAnd8U , And , Uint8 , Word32 ) \ <nl> - V ( I64AtomicAnd16U , And , Uint16 , Word64 ) \ <nl> - V ( I32AtomicAnd16U , And , Uint16 , Word32 ) \ <nl> - V ( I64AtomicAnd8U , And , Uint8 , Word64 ) \ <nl> - V ( I64AtomicAnd32U , And , Uint32 , Word64 ) \ <nl> - V ( I32AtomicOr , Or , Uint32 , Word32 ) \ <nl> - V ( I64AtomicOr , Or , Uint64 , Word64 ) \ <nl> - V ( I32AtomicOr8U , Or , Uint8 , Word32 ) \ <nl> - V ( I32AtomicOr16U , Or , Uint16 , Word32 ) \ <nl> - V ( I64AtomicOr8U , Or , Uint8 , Word64 ) \ <nl> - V ( I64AtomicOr16U , Or , Uint16 , Word64 ) \ <nl> - V ( I64AtomicOr32U , Or , Uint32 , Word64 ) \ <nl> - V ( I32AtomicXor , Xor , Uint32 , Word32 ) \ <nl> - V ( I64AtomicXor , Xor , Uint64 , Word64 ) \ <nl> - V ( I32AtomicXor8U , Xor , Uint8 , Word32 ) \ <nl> - V ( I32AtomicXor16U , Xor , Uint16 , Word32 ) \ <nl> - V ( I64AtomicXor8U , Xor , Uint8 , Word64 ) \ <nl> - V ( I64AtomicXor16U , Xor , Uint16 , Word64 ) \ <nl> - V ( I64AtomicXor32U , Xor , Uint32 , Word64 ) \ <nl> - V ( I32AtomicExchange , Exchange , Uint32 , Word32 ) \ <nl> - V ( I64AtomicExchange , Exchange , Uint64 , Word64 ) \ <nl> - V ( I32AtomicExchange8U , Exchange , Uint8 , Word32 ) \ <nl> - V ( I32AtomicExchange16U , Exchange , Uint16 , Word32 ) \ <nl> - V ( I64AtomicExchange8U , Exchange , Uint8 , Word64 ) \ <nl> - V ( I64AtomicExchange16U , Exchange , Uint16 , Word64 ) \ <nl> - V ( I64AtomicExchange32U , Exchange , Uint32 , Word64 ) <nl> - <nl> - # define ATOMIC_CMP_EXCHG_LIST ( V ) \ <nl> - V ( I32AtomicCompareExchange , Uint32 , Word32 ) \ <nl> - V ( I64AtomicCompareExchange , Uint64 , Word64 ) \ <nl> - V ( I32AtomicCompareExchange8U , Uint8 , Word32 ) \ <nl> - V ( I32AtomicCompareExchange16U , Uint16 , Word32 ) \ <nl> - V ( I64AtomicCompareExchange8U , Uint8 , Word64 ) \ <nl> - V ( I64AtomicCompareExchange16U , Uint16 , Word64 ) \ <nl> - V ( I64AtomicCompareExchange32U , Uint32 , Word64 ) <nl> - <nl> - # define ATOMIC_LOAD_LIST ( V ) \ <nl> - V ( I32AtomicLoad , 
Uint32 , Word32 ) \ <nl> - V ( I64AtomicLoad , Uint64 , Word64 ) \ <nl> - V ( I32AtomicLoad8U , Uint8 , Word32 ) \ <nl> - V ( I32AtomicLoad16U , Uint16 , Word32 ) \ <nl> - V ( I64AtomicLoad8U , Uint8 , Word64 ) \ <nl> - V ( I64AtomicLoad16U , Uint16 , Word64 ) \ <nl> - V ( I64AtomicLoad32U , Uint32 , Word64 ) <nl> - <nl> - # define ATOMIC_STORE_LIST ( V ) \ <nl> - V ( I32AtomicStore , Uint32 , kWord32 , Word32 ) \ <nl> - V ( I64AtomicStore , Uint64 , kWord64 , Word64 ) \ <nl> - V ( I32AtomicStore8U , Uint8 , kWord8 , Word32 ) \ <nl> - V ( I32AtomicStore16U , Uint16 , kWord16 , Word32 ) \ <nl> - V ( I64AtomicStore8U , Uint8 , kWord8 , Word64 ) \ <nl> - V ( I64AtomicStore16U , Uint16 , kWord16 , Word64 ) \ <nl> - V ( I64AtomicStore32U , Uint32 , kWord32 , Word64 ) <nl> - <nl> Node * WasmGraphBuilder : : AtomicOp ( wasm : : WasmOpcode opcode , Node * const * inputs , <nl> uint32_t alignment , uint32_t offset , <nl> wasm : : WasmCodePosition position ) { <nl> - Node * node ; <nl> + struct AtomicOpInfo { <nl> + enum Type : int8_t { <nl> + kNoInput = 0 , <nl> + kOneInput = 1 , <nl> + kTwoInputs = 2 , <nl> + kSpecial <nl> + } ; <nl> + <nl> + using OperatorByType = <nl> + const Operator * ( MachineOperatorBuilder : : * ) ( MachineType ) ; <nl> + using OperatorByRep = <nl> + const Operator * ( MachineOperatorBuilder : : * ) ( MachineRepresentation ) ; <nl> + <nl> + const Type type ; <nl> + const MachineType machine_type ; <nl> + const OperatorByType operator_by_type = nullptr ; <nl> + const OperatorByRep operator_by_rep = nullptr ; <nl> + <nl> + constexpr AtomicOpInfo ( Type t , MachineType m , OperatorByType o ) <nl> + : type ( t ) , machine_type ( m ) , operator_by_type ( o ) { } <nl> + constexpr AtomicOpInfo ( Type t , MachineType m , OperatorByRep o ) <nl> + : type ( t ) , machine_type ( m ) , operator_by_rep ( o ) { } <nl> + <nl> + / / Constexpr , hence just a table lookup in most compilers . <nl> + static constexpr AtomicOpInfo Get ( wasm : : WasmOpcode opcode ) { <nl> + switch ( opcode ) { <nl> + # define CASE ( Name , Type , MachType , Op ) \ <nl> + case wasm : : kExpr # # Name : \ <nl> + return { Type , MachineType : : MachType ( ) , & MachineOperatorBuilder : : Op } ; <nl> + <nl> + / / Binops . 
<nl> + CASE ( I32AtomicAdd , kOneInput , Uint32 , Word32AtomicAdd ) <nl> + CASE ( I64AtomicAdd , kOneInput , Uint64 , Word64AtomicAdd ) <nl> + CASE ( I32AtomicAdd8U , kOneInput , Uint8 , Word32AtomicAdd ) <nl> + CASE ( I32AtomicAdd16U , kOneInput , Uint16 , Word32AtomicAdd ) <nl> + CASE ( I64AtomicAdd8U , kOneInput , Uint8 , Word64AtomicAdd ) <nl> + CASE ( I64AtomicAdd16U , kOneInput , Uint16 , Word64AtomicAdd ) <nl> + CASE ( I64AtomicAdd32U , kOneInput , Uint32 , Word64AtomicAdd ) <nl> + CASE ( I32AtomicSub , kOneInput , Uint32 , Word32AtomicSub ) <nl> + CASE ( I64AtomicSub , kOneInput , Uint64 , Word64AtomicSub ) <nl> + CASE ( I32AtomicSub8U , kOneInput , Uint8 , Word32AtomicSub ) <nl> + CASE ( I32AtomicSub16U , kOneInput , Uint16 , Word32AtomicSub ) <nl> + CASE ( I64AtomicSub8U , kOneInput , Uint8 , Word64AtomicSub ) <nl> + CASE ( I64AtomicSub16U , kOneInput , Uint16 , Word64AtomicSub ) <nl> + CASE ( I64AtomicSub32U , kOneInput , Uint32 , Word64AtomicSub ) <nl> + CASE ( I32AtomicAnd , kOneInput , Uint32 , Word32AtomicAnd ) <nl> + CASE ( I64AtomicAnd , kOneInput , Uint64 , Word64AtomicAnd ) <nl> + CASE ( I32AtomicAnd8U , kOneInput , Uint8 , Word32AtomicAnd ) <nl> + CASE ( I32AtomicAnd16U , kOneInput , Uint16 , Word32AtomicAnd ) <nl> + CASE ( I64AtomicAnd8U , kOneInput , Uint8 , Word64AtomicAnd ) <nl> + CASE ( I64AtomicAnd16U , kOneInput , Uint16 , Word64AtomicAnd ) <nl> + CASE ( I64AtomicAnd32U , kOneInput , Uint32 , Word64AtomicAnd ) <nl> + CASE ( I32AtomicOr , kOneInput , Uint32 , Word32AtomicOr ) <nl> + CASE ( I64AtomicOr , kOneInput , Uint64 , Word64AtomicOr ) <nl> + CASE ( I32AtomicOr8U , kOneInput , Uint8 , Word32AtomicOr ) <nl> + CASE ( I32AtomicOr16U , kOneInput , Uint16 , Word32AtomicOr ) <nl> + CASE ( I64AtomicOr8U , kOneInput , Uint8 , Word64AtomicOr ) <nl> + CASE ( I64AtomicOr16U , kOneInput , Uint16 , Word64AtomicOr ) <nl> + CASE ( I64AtomicOr32U , kOneInput , Uint32 , Word64AtomicOr ) <nl> + CASE ( I32AtomicXor , kOneInput , Uint32 , Word32AtomicXor ) <nl> + CASE ( I64AtomicXor , kOneInput , Uint64 , Word64AtomicXor ) <nl> + CASE ( I32AtomicXor8U , kOneInput , Uint8 , Word32AtomicXor ) <nl> + CASE ( I32AtomicXor16U , kOneInput , Uint16 , Word32AtomicXor ) <nl> + CASE ( I64AtomicXor8U , kOneInput , Uint8 , Word64AtomicXor ) <nl> + CASE ( I64AtomicXor16U , kOneInput , Uint16 , Word64AtomicXor ) <nl> + CASE ( I64AtomicXor32U , kOneInput , Uint32 , Word64AtomicXor ) <nl> + CASE ( I32AtomicExchange , kOneInput , Uint32 , Word32AtomicExchange ) <nl> + CASE ( I64AtomicExchange , kOneInput , Uint64 , Word64AtomicExchange ) <nl> + CASE ( I32AtomicExchange8U , kOneInput , Uint8 , Word32AtomicExchange ) <nl> + CASE ( I32AtomicExchange16U , kOneInput , Uint16 , Word32AtomicExchange ) <nl> + CASE ( I64AtomicExchange8U , kOneInput , Uint8 , Word64AtomicExchange ) <nl> + CASE ( I64AtomicExchange16U , kOneInput , Uint16 , Word64AtomicExchange ) <nl> + CASE ( I64AtomicExchange32U , kOneInput , Uint32 , Word64AtomicExchange ) <nl> + <nl> + / / Compare - exchange . 
<nl> + CASE ( I32AtomicCompareExchange , kTwoInputs , Uint32 , <nl> + Word32AtomicCompareExchange ) <nl> + CASE ( I64AtomicCompareExchange , kTwoInputs , Uint64 , <nl> + Word64AtomicCompareExchange ) <nl> + CASE ( I32AtomicCompareExchange8U , kTwoInputs , Uint8 , <nl> + Word32AtomicCompareExchange ) <nl> + CASE ( I32AtomicCompareExchange16U , kTwoInputs , Uint16 , <nl> + Word32AtomicCompareExchange ) <nl> + CASE ( I64AtomicCompareExchange8U , kTwoInputs , Uint8 , <nl> + Word64AtomicCompareExchange ) <nl> + CASE ( I64AtomicCompareExchange16U , kTwoInputs , Uint16 , <nl> + Word64AtomicCompareExchange ) <nl> + CASE ( I64AtomicCompareExchange32U , kTwoInputs , Uint32 , <nl> + Word64AtomicCompareExchange ) <nl> + <nl> + / / Load . <nl> + CASE ( I32AtomicLoad , kNoInput , Uint32 , Word32AtomicLoad ) <nl> + CASE ( I64AtomicLoad , kNoInput , Uint64 , Word64AtomicLoad ) <nl> + CASE ( I32AtomicLoad8U , kNoInput , Uint8 , Word32AtomicLoad ) <nl> + CASE ( I32AtomicLoad16U , kNoInput , Uint16 , Word32AtomicLoad ) <nl> + CASE ( I64AtomicLoad8U , kNoInput , Uint8 , Word64AtomicLoad ) <nl> + CASE ( I64AtomicLoad16U , kNoInput , Uint16 , Word64AtomicLoad ) <nl> + CASE ( I64AtomicLoad32U , kNoInput , Uint32 , Word64AtomicLoad ) <nl> + <nl> + / / Store . <nl> + CASE ( I32AtomicStore , kOneInput , Uint32 , Word32AtomicStore ) <nl> + CASE ( I64AtomicStore , kOneInput , Uint64 , Word64AtomicStore ) <nl> + CASE ( I32AtomicStore8U , kOneInput , Uint8 , Word32AtomicStore ) <nl> + CASE ( I32AtomicStore16U , kOneInput , Uint16 , Word32AtomicStore ) <nl> + CASE ( I64AtomicStore8U , kOneInput , Uint8 , Word64AtomicStore ) <nl> + CASE ( I64AtomicStore16U , kOneInput , Uint16 , Word64AtomicStore ) <nl> + CASE ( I64AtomicStore32U , kOneInput , Uint32 , Word64AtomicStore ) <nl> + <nl> + # undef CASE <nl> + <nl> + case wasm : : kExprAtomicNotify : <nl> + return { kSpecial , MachineType : : Int32 ( ) , OperatorByType { nullptr } } ; <nl> + case wasm : : kExprI32AtomicWait : <nl> + return { kSpecial , MachineType : : Int32 ( ) , OperatorByType { nullptr } } ; <nl> + case wasm : : kExprI64AtomicWait : <nl> + return { kSpecial , MachineType : : Int64 ( ) , OperatorByType { nullptr } } ; <nl> + default : <nl> + # if V8_HAS_CXX14_CONSTEXPR <nl> + UNREACHABLE ( ) ; <nl> + # else <nl> + / / Return something for older GCC . <nl> + return { kSpecial , MachineType : : Int64 ( ) , OperatorByType { nullptr } } ; <nl> + # endif <nl> + } <nl> + } <nl> + } ; <nl> + <nl> + AtomicOpInfo info = AtomicOpInfo : : Get ( opcode ) ; <nl> + <nl> + Node * index = CheckBoundsAndAlignment ( info . machine_type . MemSize ( ) , inputs [ 0 ] , <nl> + offset , position ) ; <nl> + <nl> + if ( info . type ! = AtomicOpInfo : : kSpecial ) { <nl> + const Operator * op = <nl> + info . operator_by_type <nl> + ? ( mcgraph ( ) - > machine ( ) - > * info . operator_by_type ) ( info . machine_type ) <nl> + : ( mcgraph ( ) - > machine ( ) - > * info . operator_by_rep ) ( <nl> + info . machine_type . representation ( ) ) ; <nl> + <nl> + Node * input_nodes [ 6 ] = { MemBuffer ( offset ) , index } ; <nl> + int num_actual_inputs = info . type ; <nl> + std : : copy_n ( inputs + 1 , num_actual_inputs , input_nodes + 2 ) ; <nl> + input_nodes [ num_actual_inputs + 2 ] = effect ( ) ; <nl> + input_nodes [ num_actual_inputs + 3 ] = control ( ) ; <nl> + return gasm_ - > AddNode ( <nl> + graph ( ) - > NewNode ( op , num_actual_inputs + 4 , input_nodes ) ) ; <nl> + } <nl> + <nl> + / / After we ' ve bounds - checked , compute the effective address . 
<nl> + Node * address = graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > Int32Add ( ) , <nl> + Uint32Constant ( offset ) , index ) ; <nl> + <nl> switch ( opcode ) { <nl> - # define BUILD_ATOMIC_BINOP ( Name , Operation , Type , Prefix ) \ <nl> - case wasm : : kExpr # # Name : { \ <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Type ( ) . MemSize ( ) , \ <nl> - inputs [ 0 ] , offset , position ) ; \ <nl> - node = graph ( ) - > NewNode ( \ <nl> - mcgraph ( ) - > machine ( ) - > Prefix # # Atomic # # Operation ( MachineType : : Type ( ) ) , \ <nl> - MemBuffer ( offset ) , index , inputs [ 1 ] , effect ( ) , control ( ) ) ; \ <nl> - break ; \ <nl> - } <nl> - ATOMIC_BINOP_LIST ( BUILD_ATOMIC_BINOP ) <nl> - # undef BUILD_ATOMIC_BINOP <nl> - <nl> - # define BUILD_ATOMIC_CMP_EXCHG ( Name , Type , Prefix ) \ <nl> - case wasm : : kExpr # # Name : { \ <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Type ( ) . MemSize ( ) , \ <nl> - inputs [ 0 ] , offset , position ) ; \ <nl> - node = graph ( ) - > NewNode ( \ <nl> - mcgraph ( ) - > machine ( ) - > Prefix # # AtomicCompareExchange ( \ <nl> - MachineType : : Type ( ) ) , \ <nl> - MemBuffer ( offset ) , index , inputs [ 1 ] , inputs [ 2 ] , effect ( ) , control ( ) ) ; \ <nl> - break ; \ <nl> - } <nl> - ATOMIC_CMP_EXCHG_LIST ( BUILD_ATOMIC_CMP_EXCHG ) <nl> - # undef BUILD_ATOMIC_CMP_EXCHG <nl> - <nl> - # define BUILD_ATOMIC_LOAD_OP ( Name , Type , Prefix ) \ <nl> - case wasm : : kExpr # # Name : { \ <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Type ( ) . MemSize ( ) , \ <nl> - inputs [ 0 ] , offset , position ) ; \ <nl> - node = graph ( ) - > NewNode ( \ <nl> - mcgraph ( ) - > machine ( ) - > Prefix # # AtomicLoad ( MachineType : : Type ( ) ) , \ <nl> - MemBuffer ( offset ) , index , effect ( ) , control ( ) ) ; \ <nl> - break ; \ <nl> - } <nl> - ATOMIC_LOAD_LIST ( BUILD_ATOMIC_LOAD_OP ) <nl> - # undef BUILD_ATOMIC_LOAD_OP <nl> - <nl> - # define BUILD_ATOMIC_STORE_OP ( Name , Type , Rep , Prefix ) \ <nl> - case wasm : : kExpr # # Name : { \ <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Type ( ) . MemSize ( ) , \ <nl> - inputs [ 0 ] , offset , position ) ; \ <nl> - node = graph ( ) - > NewNode ( \ <nl> - mcgraph ( ) - > machine ( ) - > Prefix # # AtomicStore ( MachineRepresentation : : Rep ) , \ <nl> - MemBuffer ( offset ) , index , inputs [ 1 ] , effect ( ) , control ( ) ) ; \ <nl> - break ; \ <nl> - } <nl> - ATOMIC_STORE_LIST ( BUILD_ATOMIC_STORE_OP ) <nl> - # undef BUILD_ATOMIC_STORE_OP <nl> case wasm : : kExprAtomicNotify : { <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Uint32 ( ) . MemSize ( ) , <nl> - inputs [ 0 ] , offset , position ) ; <nl> - / / Now that we ' ve bounds - checked , compute the effective address . <nl> - Node * address = graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > Int32Add ( ) , <nl> - Uint32Constant ( offset ) , index ) ; <nl> - WasmAtomicNotifyDescriptor interface_descriptor ; <nl> - auto call_descriptor = Linkage : : GetStubCallDescriptor ( <nl> - mcgraph ( ) - > zone ( ) , interface_descriptor , <nl> - interface_descriptor . 
GetStackParameterCount ( ) , <nl> - CallDescriptor : : kNoFlags , Operator : : kNoProperties , <nl> - StubCallMode : : kCallWasmRuntimeStub ) ; <nl> + auto * call_descriptor = <nl> + GetBuiltinCallDescriptor < WasmAtomicNotifyDescriptor > ( <nl> + this , StubCallMode : : kCallWasmRuntimeStub ) ; <nl> Node * call_target = mcgraph ( ) - > RelocatableIntPtrConstant ( <nl> wasm : : WasmCode : : kWasmAtomicNotify , RelocInfo : : WASM_STUB_CALL ) ; <nl> - node = graph ( ) - > NewNode ( mcgraph ( ) - > common ( ) - > Call ( call_descriptor ) , <nl> - call_target , address , inputs [ 1 ] , effect ( ) , <nl> - control ( ) ) ; <nl> - break ; <nl> + return gasm_ - > Call ( call_descriptor , call_target , address , inputs [ 1 ] ) ; <nl> } <nl> <nl> case wasm : : kExprI32AtomicWait : { <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Uint32 ( ) . MemSize ( ) , <nl> - inputs [ 0 ] , offset , position ) ; <nl> - / / Now that we ' ve bounds - checked , compute the effective address . <nl> - Node * address = graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > Int32Add ( ) , <nl> - Uint32Constant ( offset ) , index ) ; <nl> - <nl> - auto call_descriptor = GetI32AtomicWaitCallDescriptor ( ) ; <nl> + auto * call_descriptor = GetI32AtomicWaitCallDescriptor ( ) ; <nl> <nl> intptr_t target = mcgraph ( ) - > machine ( ) - > Is64 ( ) <nl> ? wasm : : WasmCode : : kWasmI32AtomicWait64 <nl> Node * WasmGraphBuilder : : AtomicOp ( wasm : : WasmOpcode opcode , Node * const * inputs , <nl> Node * call_target = mcgraph ( ) - > RelocatableIntPtrConstant ( <nl> target , RelocInfo : : WASM_STUB_CALL ) ; <nl> <nl> - node = graph ( ) - > NewNode ( mcgraph ( ) - > common ( ) - > Call ( call_descriptor ) , <nl> - call_target , address , inputs [ 1 ] , inputs [ 2 ] , <nl> - effect ( ) , control ( ) ) ; <nl> - break ; <nl> + return gasm_ - > Call ( call_descriptor , call_target , address , inputs [ 1 ] , <nl> + inputs [ 2 ] ) ; <nl> } <nl> <nl> case wasm : : kExprI64AtomicWait : { <nl> - Node * index = CheckBoundsAndAlignment ( MachineType : : Uint64 ( ) . MemSize ( ) , <nl> - inputs [ 0 ] , offset , position ) ; <nl> - / / Now that we ' ve bounds - checked , compute the effective address . <nl> - Node * address = graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > Int32Add ( ) , <nl> - Uint32Constant ( offset ) , index ) ; <nl> - <nl> - CallDescriptor * call_descriptor = GetI64AtomicWaitCallDescriptor ( ) ; <nl> + auto * call_descriptor = GetI64AtomicWaitCallDescriptor ( ) ; <nl> <nl> intptr_t target = mcgraph ( ) - > machine ( ) - > Is64 ( ) <nl> ? 
wasm : : WasmCode : : kWasmI64AtomicWait64 <nl> Node * WasmGraphBuilder : : AtomicOp ( wasm : : WasmOpcode opcode , Node * const * inputs , <nl> Node * call_target = mcgraph ( ) - > RelocatableIntPtrConstant ( <nl> target , RelocInfo : : WASM_STUB_CALL ) ; <nl> <nl> - node = graph ( ) - > NewNode ( mcgraph ( ) - > common ( ) - > Call ( call_descriptor ) , <nl> - call_target , address , inputs [ 1 ] , inputs [ 2 ] , <nl> - effect ( ) , control ( ) ) ; <nl> - break ; <nl> + return gasm_ - > Call ( call_descriptor , call_target , address , inputs [ 1 ] , <nl> + inputs [ 2 ] ) ; <nl> } <nl> <nl> default : <nl> FATAL_UNSUPPORTED_OPCODE ( opcode ) ; <nl> } <nl> - return SetEffect ( node ) ; <nl> } <nl> <nl> Node * WasmGraphBuilder : : AtomicFence ( ) { <nl> Node * WasmGraphBuilder : : AtomicFence ( ) { <nl> effect ( ) , control ( ) ) ) ; <nl> } <nl> <nl> - # undef ATOMIC_BINOP_LIST <nl> - # undef ATOMIC_CMP_EXCHG_LIST <nl> - # undef ATOMIC_LOAD_LIST <nl> - # undef ATOMIC_STORE_LIST <nl> - <nl> Node * WasmGraphBuilder : : MemoryInit ( uint32_t data_segment_index , Node * dst , <nl> Node * src , Node * size , <nl> wasm : : WasmCodePosition position ) { <nl> mmm a / src / compiler / wasm - compiler . h <nl> ppp b / src / compiler / wasm - compiler . h <nl> class WasmGraphBuilder { <nl> / / partially out - of - bounds , traps if it is completely out - of - bounds . <nl> Node * BoundsCheckMemRange ( Node * * start , Node * * size , wasm : : WasmCodePosition ) ; <nl> <nl> - Node * CheckBoundsAndAlignment ( uint8_t access_size , Node * index , <nl> + Node * CheckBoundsAndAlignment ( int8_t access_size , Node * index , <nl> uint32_t offset , wasm : : WasmCodePosition ) ; <nl> <nl> Node * Uint32ToUintptr ( Node * ) ; <nl>
[ wasm ] Refactor generation of atomic instructions
v8/v8
179f7f435b5956c7bcf25781fc5a7482783a8dc0
2020-10-01T16:37:54Z
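Editor's note: the refactor above replaces one macro case per atomic opcode with a small table (AtomicOpInfo) that maps each opcode to a pointer-to-member function on the machine-operator builder, and then dispatches through that pointer. The stand-alone C++ sketch below shows just that dispatch pattern; the class, opcode, and field names are invented for illustration and are not V8's.

#include <cstdio>

struct MachineBuilder {
  int AtomicAdd(int width)      { std::printf("atomic add, %d bits\n", width); return 1; }
  int AtomicExchange(int width) { std::printf("atomic xchg, %d bits\n", width); return 2; }
};

// Pointer-to-member type: which builder method produces the operator.
using OperatorFn = int (MachineBuilder::*)(int);

struct OpInfo {
  const char* name;
  int width;
  OperatorFn op;
};

// One table row per opcode replaces one macro case per opcode.
constexpr OpInfo kOps[] = {
    {"I32AtomicAdd", 32, &MachineBuilder::AtomicAdd},
    {"I64AtomicExchange", 64, &MachineBuilder::AtomicExchange},
};

int main() {
  MachineBuilder builder;
  for (const OpInfo& info : kOps) {
    (builder.*info.op)(info.width);  // dispatch through the table entry
  }
}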
mmm a / BUILD . gn <nl> ppp b / BUILD . gn <nl> v8_source_set ( " v8_base " ) { <nl> " src / builtins / builtins - api . cc " , <nl> " src / builtins / builtins - array . cc " , <nl> " src / builtins / builtins - arraybuffer . cc " , <nl> + " src / builtins / builtins - bigint . cc " , <nl> " src / builtins / builtins - boolean . cc " , <nl> " src / builtins / builtins - call . cc " , <nl> " src / builtins / builtins - callsite . cc " , <nl> mmm a / src / bootstrapper . cc <nl> ppp b / src / bootstrapper . cc <nl> EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( harmony_dynamic_import ) <nl> EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( harmony_template_escapes ) <nl> EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( harmony_restrict_constructor_return ) <nl> EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( harmony_strict_legacy_accessor_builtins ) <nl> - EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( harmony_bigint ) <nl> <nl> void InstallPublicSymbol ( Factory * factory , Handle < Context > native_context , <nl> const char * name , Handle < Symbol > value ) { <nl> void Genesis : : InitializeGlobal_harmony_regexp_dotall ( ) { <nl> native_context ( ) - > set_regexp_prototype_map ( * prototype_map ) ; <nl> } <nl> <nl> + void Genesis : : InitializeGlobal_harmony_bigint ( ) { <nl> + if ( ! FLAG_harmony_bigint ) return ; <nl> + <nl> + Handle < JSGlobalObject > global ( native_context ( ) - > global_object ( ) ) ; <nl> + Handle < JSFunction > bigint_fun = InstallFunction ( <nl> + global , " BigInt " , JS_VALUE_TYPE , JSValue : : kSize , <nl> + isolate ( ) - > factory ( ) - > the_hole_value ( ) , Builtins : : kBigIntConstructor ) ; <nl> + bigint_fun - > shared ( ) - > DontAdaptArguments ( ) ; <nl> + bigint_fun - > shared ( ) - > SetConstructStub ( <nl> + * BUILTIN_CODE ( isolate ( ) , BigIntConstructor_ConstructStub ) ) ; <nl> + bigint_fun - > shared ( ) - > set_length ( 1 ) ; <nl> + InstallWithIntrinsicDefaultProto ( isolate ( ) , bigint_fun , <nl> + Context : : BIGINT_FUNCTION_INDEX ) ; <nl> + heap ( ) - > bigint_map ( ) - > SetConstructorFunctionIndex ( <nl> + Context : : BIGINT_FUNCTION_INDEX ) ; <nl> + <nl> + / / Install the properties of the BigInt constructor . <nl> + / / parseInt ( string , radix ) <nl> + SimpleInstallFunction ( bigint_fun , " parseInt " , Builtins : : kBigIntParseInt , 2 , <nl> + false ) ; <nl> + / / asUintN ( bits , bigint ) <nl> + SimpleInstallFunction ( bigint_fun , " asUintN " , Builtins : : kBigIntAsUintN , 2 , <nl> + false ) ; <nl> + / / asIntN ( bits , bigint ) <nl> + SimpleInstallFunction ( bigint_fun , " asIntN " , Builtins : : kBigIntAsIntN , 2 , <nl> + false ) ; <nl> + <nl> + / / Set up the % BigIntPrototype % . <nl> + Handle < JSObject > prototype ( JSObject : : cast ( bigint_fun - > instance_prototype ( ) ) ) ; <nl> + JSFunction : : SetPrototype ( bigint_fun , prototype ) ; <nl> + <nl> + / / Install the properties of the BigInt . prototype . <nl> + / / " constructor " is created implicitly by InstallFunction ( ) above . 
<nl> + / / toLocaleString ( [ reserved1 [ , reserved2 ] ] ) <nl> + SimpleInstallFunction ( prototype , " toLocaleString " , <nl> + Builtins : : kBigIntPrototypeToLocaleString , 0 , false ) ; <nl> + / / toString ( [ radix ] ) <nl> + SimpleInstallFunction ( prototype , " toString " , <nl> + Builtins : : kBigIntPrototypeToString , 0 , false ) ; <nl> + / / valueOf ( ) <nl> + SimpleInstallFunction ( prototype , " valueOf " , Builtins : : kBigIntPrototypeValueOf , <nl> + 0 , false ) ; <nl> + } <nl> + <nl> # ifdef V8_INTL_SUPPORT <nl> <nl> void Genesis : : InitializeGlobal_harmony_number_format_to_parts ( ) { <nl> new file mode 100644 <nl> index 00000000000 . . e80b88c38ba <nl> mmm / dev / null <nl> ppp b / src / builtins / builtins - bigint . cc <nl> <nl> + / / Copyright 2017 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + # include " src / builtins / builtins - utils . h " <nl> + # include " src / builtins / builtins . h " <nl> + # include " src / counters . h " <nl> + # include " src / objects - inl . h " <nl> + <nl> + namespace v8 { <nl> + namespace internal { <nl> + <nl> + BUILTIN ( BigIntConstructor ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > value = args . atOrUndefined ( isolate , 1 ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement properly . <nl> + <nl> + / / Dummy implementation only takes Smi args . <nl> + if ( ! value - > IsSmi ( ) ) return isolate - > heap ( ) - > undefined_value ( ) ; <nl> + int num = Smi : : ToInt ( * value ) ; <nl> + if ( num = = 0 ) return * isolate - > factory ( ) - > NewBigInt ( 0 ) ; <nl> + Handle < BigInt > result = isolate - > factory ( ) - > NewBigIntRaw ( 1 ) ; <nl> + result - > set_value ( num ) ; <nl> + return * result ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntConstructor_ConstructStub ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > value = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < JSFunction > target = args . target ( ) ; <nl> + Handle < JSReceiver > new_target = Handle < JSReceiver > : : cast ( args . new_target ( ) ) ; <nl> + DCHECK ( * target = = target - > native_context ( ) - > bigint_function ( ) ) ; <nl> + Handle < JSObject > result ; <nl> + ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( isolate , result , <nl> + JSObject : : New ( target , new_target ) ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement . <nl> + USE ( value ) ; <nl> + USE ( result ) ; <nl> + <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntParseInt ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > string_obj = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < Object > radix_obj = args . atOrUndefined ( isolate , 2 ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement . <nl> + USE ( string_obj ) ; <nl> + USE ( radix_obj ) ; <nl> + <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntAsUintN ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > bits_obj = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < Object > bigint_obj = args . atOrUndefined ( isolate , 2 ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement . <nl> + USE ( bits_obj ) ; <nl> + USE ( bigint_obj ) ; <nl> + <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntAsIntN ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > bits_obj = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < Object > bigint_obj = args . 
atOrUndefined ( isolate , 2 ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement . <nl> + USE ( bits_obj ) ; <nl> + USE ( bigint_obj ) ; <nl> + <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntPrototypeToLocaleString ) { <nl> + HandleScope scope ( isolate ) ; <nl> + <nl> + / / TODO ( jkummerow ) : Implement . <nl> + <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + <nl> + namespace { <nl> + <nl> + MaybeHandle < BigInt > ThisBigIntValue ( Isolate * isolate , Handle < Object > value , <nl> + const char * caller ) { <nl> + / / 1 . If Type ( value ) is BigInt , return value . <nl> + if ( value - > IsBigInt ( ) ) return Handle < BigInt > : : cast ( value ) ; <nl> + / / 2 . If Type ( value ) is Object and value has a [ [ BigIntData ] ] internal slot : <nl> + if ( value - > IsJSValue ( ) ) { <nl> + / / 2a . Assert : value . [ [ BigIntData ] ] is a BigInt value . <nl> + / / 2b . Return value . [ [ BigIntData ] ] . <nl> + Object * data = JSValue : : cast ( * value ) - > value ( ) ; <nl> + if ( data - > IsBigInt ( ) ) return handle ( BigInt : : cast ( data ) , isolate ) ; <nl> + } <nl> + / / 3 . Throw a TypeError exception . <nl> + THROW_NEW_ERROR ( <nl> + isolate , <nl> + NewTypeError ( MessageTemplate : : kNotGeneric , <nl> + isolate - > factory ( ) - > NewStringFromAsciiChecked ( caller ) , <nl> + isolate - > factory ( ) - > NewStringFromStaticChars ( " BigInt " ) ) , <nl> + BigInt ) ; <nl> + } <nl> + <nl> + } / / namespace <nl> + <nl> + BUILTIN ( BigIntPrototypeToString ) { <nl> + HandleScope scope ( isolate ) ; <nl> + / / 1 . Let x be ? thisBigIntValue ( this value ) . <nl> + Handle < BigInt > x ; <nl> + ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( <nl> + isolate , x , <nl> + ThisBigIntValue ( isolate , args . receiver ( ) , " BigInt . prototype . toString " ) ) ; <nl> + / / 2 . If radix is not present , let radixNumber be 10 . <nl> + / / 3 . Else if radix is undefined , let radixNumber be 10 . <nl> + Handle < Object > radix = args . atOrUndefined ( isolate , 1 ) ; <nl> + int radix_number ; <nl> + if ( radix - > IsUndefined ( isolate ) ) { <nl> + radix_number = 10 ; <nl> + } else { <nl> + / / 4 . Else , let radixNumber be ? ToInteger ( radix ) . <nl> + ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( isolate , radix , <nl> + Object : : ToInteger ( isolate , radix ) ) ; <nl> + radix_number = static_cast < int > ( radix - > Number ( ) ) ; <nl> + } <nl> + / / 5 . If radixNumber < 2 or radixNumber > 36 , throw a RangeError exception . <nl> + if ( radix_number < 2 | | radix_number > 36 ) { <nl> + THROW_NEW_ERROR_RETURN_FAILURE ( <nl> + isolate , NewRangeError ( MessageTemplate : : kToRadixFormatRange ) ) ; <nl> + } <nl> + / / Return the String representation of this Number value using the radix <nl> + / / specified by radixNumber . <nl> + RETURN_RESULT_OR_FAILURE ( isolate , BigInt : : ToString ( x , radix_number ) ) ; <nl> + } <nl> + <nl> + BUILTIN ( BigIntPrototypeValueOf ) { <nl> + HandleScope scope ( isolate ) ; <nl> + RETURN_RESULT_OR_FAILURE ( <nl> + isolate , <nl> + ThisBigIntValue ( isolate , args . receiver ( ) , " BigInt . prototype . valueOf " ) ) ; <nl> + } <nl> + <nl> + } / / namespace internal <nl> + } / / namespace v8 <nl> mmm a / src / builtins / builtins - definitions . h <nl> ppp b / src / builtins / builtins - definitions . 
h <nl> namespace internal { <nl> TFJ ( AsyncFunctionPromiseCreate , 0 ) \ <nl> TFJ ( AsyncFunctionPromiseRelease , 1 , kPromise ) \ <nl> \ <nl> + / * BigInt * / \ <nl> + CPP ( BigIntConstructor ) \ <nl> + CPP ( BigIntConstructor_ConstructStub ) \ <nl> + CPP ( BigIntParseInt ) \ <nl> + CPP ( BigIntAsUintN ) \ <nl> + CPP ( BigIntAsIntN ) \ <nl> + CPP ( BigIntPrototypeToLocaleString ) \ <nl> + CPP ( BigIntPrototypeToString ) \ <nl> + CPP ( BigIntPrototypeValueOf ) \ <nl> + \ <nl> / * Boolean * / \ <nl> CPP ( BooleanConstructor ) \ <nl> CPP ( BooleanConstructor_ConstructStub ) \ <nl> mmm a / src / contexts . h <nl> ppp b / src / contexts . h <nl> enum ContextLookupFlags { <nl> V ( ASYNC_GENERATOR_RETURN_CLOSED_REJECT_SHARED_FUN , SharedFunctionInfo , \ <nl> async_generator_return_closed_reject_shared_fun ) \ <nl> V ( ATOMICS_OBJECT , JSObject , atomics_object ) \ <nl> + V ( BIGINT_FUNCTION_INDEX , JSFunction , bigint_function ) \ <nl> V ( BOOLEAN_FUNCTION_INDEX , JSFunction , boolean_function ) \ <nl> V ( BOUND_FUNCTION_WITH_CONSTRUCTOR_MAP_INDEX , Map , \ <nl> bound_function_with_constructor_map ) \ <nl> mmm a / src / objects / bigint . cc <nl> ppp b / src / objects / bigint . cc <nl> Handle < BigInt > BigInt : : BitwiseOr ( Handle < BigInt > x , Handle < BigInt > y ) { <nl> UNIMPLEMENTED ( ) ; / / TODO ( jkummerow ) : Implement . <nl> } <nl> <nl> - Handle < String > BigInt : : ToString ( Handle < BigInt > bigint , int radix ) { <nl> + MaybeHandle < String > BigInt : : ToString ( Handle < BigInt > bigint , int radix ) { <nl> / / TODO ( jkummerow ) : Support non - power - of - two radixes . <nl> if ( ! base : : bits : : IsPowerOfTwo ( radix ) ) radix = 16 ; <nl> return ToStringBasePowerOfTwo ( bigint , radix ) ; <nl> void BigInt : : RightTrim ( ) { <nl> <nl> static const char kConversionChars [ ] = " 0123456789abcdefghijklmnopqrstuvwxyz " ; <nl> <nl> - / / TODO ( jkummerow ) : Add more tests for this when it is exposed on <nl> - / / BigInt . prototype . <nl> - Handle < String > BigInt : : ToStringBasePowerOfTwo ( Handle < BigInt > x , int radix ) { <nl> + / / TODO ( jkummerow ) : Add more tests for this when we have a way to construct <nl> + / / multi - digit BigInts . <nl> + MaybeHandle < String > BigInt : : ToStringBasePowerOfTwo ( Handle < BigInt > x , <nl> + int radix ) { <nl> STATIC_ASSERT ( base : : bits : : IsPowerOfTwo ( kDigitBits ) ) ; <nl> DCHECK ( base : : bits : : IsPowerOfTwo ( radix ) ) ; <nl> DCHECK ( radix > = 2 & & radix < = 32 ) ; <nl> mmm a / src / objects / bigint . h <nl> ppp b / src / objects / bigint . h <nl> class BigInt : public HeapObject { <nl> } <nl> void Initialize ( int length , bool zero_initialize ) ; <nl> <nl> - static Handle < String > ToString ( Handle < BigInt > bigint , int radix ) ; <nl> + static MaybeHandle < String > ToString ( Handle < BigInt > bigint , int radix ) ; <nl> <nl> / / Temporarily exposed helper , pending proper initialization . <nl> void set_value ( int value ) { <nl> class BigInt : public HeapObject { <nl> / / abs ( x ) < abs ( y ) , or zero if abs ( x ) = = abs ( y ) . <nl> static int AbsoluteCompare ( Handle < BigInt > x , Handle < BigInt > y ) ; <nl> <nl> - static Handle < String > ToStringBasePowerOfTwo ( Handle < BigInt > x , int radix ) ; <nl> + static MaybeHandle < String > ToStringBasePowerOfTwo ( Handle < BigInt > x , <nl> + int radix ) ; <nl> <nl> / / Digit arithmetic helpers . <nl> static inline digit_t digit_add ( digit_t a , digit_t b , digit_t * carry ) ; <nl> mmm a / src / runtime / runtime - bigint . 
cc <nl> ppp b / src / runtime / runtime - bigint . cc <nl> <nl> namespace v8 { <nl> namespace internal { <nl> <nl> - RUNTIME_FUNCTION ( Runtime_BigInt ) { <nl> - HandleScope scope ( isolate ) ; <nl> - DCHECK_EQ ( 1 , args . length ( ) ) ; <nl> - CONVERT_SMI_ARG_CHECKED ( value , 0 ) ; <nl> - <nl> - / / For the moment , this is the only way to create a BigInt . <nl> - <nl> - / / Since we currently don ' t want ClusterFuzz to generate BigInts , we always <nl> - / / throw here if the - - harmony - bigint flag is disabled . ( All - - harmony - * flags <nl> - / / are blacklisted for ClusterFuzz . ) <nl> - if ( ! FLAG_harmony_bigint ) { <nl> - THROW_NEW_ERROR_RETURN_FAILURE ( isolate , <nl> - NewTypeError ( MessageTemplate : : kUnsupported ) ) ; <nl> - } <nl> - <nl> - if ( value = = 0 ) return * isolate - > factory ( ) - > NewBigInt ( 0 ) ; <nl> - <nl> - Handle < BigInt > result = isolate - > factory ( ) - > NewBigInt ( 1 ) ; <nl> - result - > set_value ( value ) ; <nl> - return * result ; <nl> - } <nl> - <nl> RUNTIME_FUNCTION ( Runtime_BigIntEqual ) { <nl> SealHandleScope shs ( isolate ) ; <nl> DCHECK_EQ ( 2 , args . length ( ) ) ; <nl> mmm a / src / runtime / runtime . h <nl> ppp b / src / runtime / runtime . h <nl> namespace internal { <nl> F ( SetAllowAtomicsWait , 1 , 1 ) <nl> <nl> # define FOR_EACH_INTRINSIC_BIGINT ( F ) \ <nl> - F ( BigInt , 1 , 1 ) \ <nl> F ( BigIntEqual , 2 , 1 ) \ <nl> F ( BigIntToBoolean , 1 , 1 ) <nl> <nl> mmm a / src / v8 . gyp <nl> ppp b / src / v8 . gyp <nl> <nl> ' builtins / builtins - api . cc ' , <nl> ' builtins / builtins - arraybuffer . cc ' , <nl> ' builtins / builtins - array . cc ' , <nl> + ' builtins / builtins - bigint . cc ' , <nl> ' builtins / builtins - boolean . cc ' , <nl> ' builtins / builtins - call . cc ' , <nl> ' builtins / builtins - callsite . cc ' , <nl> mmm a / test / mjsunit / harmony / bigint . js <nl> ppp b / test / mjsunit / harmony / bigint . js <nl> <nl> <nl> ' use strict ' <nl> <nl> - const zero = % BigInt ( 0 ) ; <nl> - const another_zero = % BigInt ( 0 ) ; <nl> - const one = % BigInt ( 1 ) ; <nl> - const another_one = % BigInt ( 1 ) ; <nl> + const zero = BigInt ( 0 ) ; <nl> + const another_zero = BigInt ( 0 ) ; <nl> + const one = BigInt ( 1 ) ; <nl> + const another_one = BigInt ( 1 ) ; <nl> + <nl> + / / BigInt <nl> + { <nl> + assertSame ( BigInt , BigInt . prototype . constructor ) <nl> + } <nl> <nl> / / typeof <nl> { <nl> const another_one = % BigInt ( 1 ) ; <nl> assertEquals ( String ( one ) , " 1 " ) ; <nl> } <nl> <nl> + / / . toString ( radix ) <nl> + { <nl> + / / assertEquals ( expected , BigInt ( input ) . toString ( n ) ) is generated by : <nl> + / / input = $ ( python - c " print ( int ( ' expected ' , n ) ) " ) <nl> + assertEquals ( " hello " , BigInt ( 18306744 ) . toString ( 32 ) ) ; <nl> + assertEquals ( " - hello " , BigInt ( - 18306744 ) . toString ( 32 ) ) ; <nl> + assertEquals ( " abcde " , BigInt ( 0xabcde ) . toString ( 16 ) ) ; <nl> + assertEquals ( " - abcde " , BigInt ( - 0xabcde ) . toString ( 16 ) ) ; <nl> + assertEquals ( " 1234567 " , BigInt ( 342391 ) . toString ( 8 ) ) ; <nl> + assertEquals ( " - 1234567 " , BigInt ( - 342391 ) . toString ( 8 ) ) ; <nl> + assertEquals ( " 1230123 " , BigInt ( 6939 ) . toString ( 4 ) ) ; <nl> + assertEquals ( " - 1230123 " , BigInt ( - 6939 ) . toString ( 4 ) ) ; <nl> + assertEquals ( " 1011001110001 " , BigInt ( 5745 ) . toString ( 2 ) ) ; <nl> + assertEquals ( " - 1011001110001 " , BigInt ( - 5745 ) . toString ( 2 ) ) ; <nl> + } <nl> + <nl> + / / . 
valueOf <nl> + { <nl> + assertEquals ( Object ( zero ) . valueOf ( ) , another_zero ) ; <nl> + assertThrows ( ( ) = > { return BigInt . prototype . valueOf . call ( " string " ) ; } , <nl> + TypeError ) ; <nl> + / / TODO ( jkummerow ) : Add tests for ( new BigInt ( . . . ) ) . valueOf ( ) when we <nl> + / / can construct BigInt wrappers . <nl> + } <nl> + <nl> / / ToBoolean <nl> { <nl> assertTrue ( ! zero ) ; <nl>
[ bigint ] Expose BigInt on the global object
v8/v8
b361ed5135764240b5c0b05417f127c8685a62e2
2017-09-20T17:52:01Z
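Editor's note: BigIntPrototypeToString above validates the radix (default 10, RangeError outside 2..36) and then delegates to ToStringBasePowerOfTwo, which formats using the kConversionChars digit table. As a rough, stand-alone illustration of that power-of-two formatting step, applied here to a plain 64-bit value rather than a multi-digit BigInt, with names chosen for this sketch only:

#include <cstdint>
#include <cstdio>
#include <string>

std::string ToStringBasePowerOfTwo(uint64_t value, int radix) {
  static const char kConversionChars[] = "0123456789abcdefghijklmnopqrstuvwxyz";
  // Assumes radix is a power of two in [2, 32]; count the bits per character.
  int bits_per_char = 0;
  for (int r = radix; r > 1; r >>= 1) ++bits_per_char;
  if (value == 0) return "0";
  std::string out;
  while (value != 0) {
    out.insert(out.begin(), kConversionChars[value & (radix - 1)]);
    value >>= bits_per_char;
  }
  return out;
}

int main() {
  // Mirrors the test expectation above: 0xabcde formats as "abcde" in base 16.
  std::printf("%s\n", ToStringBasePowerOfTwo(0xabcde, 16).c_str());
}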
mmm a / dbms / src / Columns / ColumnArray . h <nl> ppp b / dbms / src / Columns / ColumnArray . h <nl> class ColumnArray final : public COWPtrHelper < IColumn , ColumnArray > <nl> ColumnPtr data ; <nl> ColumnPtr offsets ; <nl> <nl> - size_t ALWAYS_INLINE offsetAt ( size_t i ) const { return getOffsets ( ) [ i - 1 ] ; } <nl> - size_t ALWAYS_INLINE sizeAt ( size_t i ) const { return getOffsets ( ) [ i ] - getOffsets ( ) [ i - 1 ] ; } <nl> + size_t ALWAYS_INLINE offsetAt ( ssize_t i ) const { return getOffsets ( ) [ i - 1 ] ; } <nl> + size_t ALWAYS_INLINE sizeAt ( ssize_t i ) const { return getOffsets ( ) [ i ] - getOffsets ( ) [ i - 1 ] ; } <nl> <nl> <nl> / / / Multiply values if the nested column is ColumnVector < T > . <nl> mmm a / dbms / src / Columns / ColumnString . h <nl> ppp b / dbms / src / Columns / ColumnString . h <nl> class ColumnString final : public COWPtrHelper < IColumn , ColumnString > <nl> / / / For convenience , every string ends with terminating zero byte . Note that strings could contain zero bytes in the middle . <nl> Chars chars ; <nl> <nl> - size_t ALWAYS_INLINE offsetAt ( size_t i ) const { return offsets [ i - 1 ] ; } <nl> + size_t ALWAYS_INLINE offsetAt ( ssize_t i ) const { return offsets [ i - 1 ] ; } <nl> <nl> / / / Size of i - th element , including terminating zero . <nl> - size_t ALWAYS_INLINE sizeAt ( size_t i ) const { return offsets [ i ] - offsets [ i - 1 ] ; } <nl> + size_t ALWAYS_INLINE sizeAt ( ssize_t i ) const { return offsets [ i ] - offsets [ i - 1 ] ; } <nl> <nl> template < bool positive > <nl> struct less ; <nl> mmm a / dbms / src / Common / PODArray . h <nl> ppp b / dbms / src / Common / PODArray . h <nl> class PODArray : public PODArrayBase < sizeof ( T ) , INITIAL_SIZE , TAllocator , pad_ri <nl> T * data ( ) { return t_start ( ) ; } <nl> const T * data ( ) const { return t_start ( ) ; } <nl> <nl> - T & operator [ ] ( size_t n ) { return t_start ( ) [ n ] ; } <nl> - const T & operator [ ] ( size_t n ) const { return t_start ( ) [ n ] ; } <nl> + / / / The index is signed to access - 1th element without pointer overflow . <nl> + T & operator [ ] ( ssize_t n ) { return t_start ( ) [ n ] ; } <nl> + const T & operator [ ] ( ssize_t n ) const { return t_start ( ) [ n ] ; } <nl> <nl> T & front ( ) { return t_start ( ) [ 0 ] ; } <nl> T & back ( ) { return t_end ( ) [ - 1 ] ; } <nl>
Fixed UB
ClickHouse/ClickHouse
5abb2d02b9d35ee2238cf2611bbb6320e4eabc5d
2018-12-26T05:34:25Z
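Editor's note: the fix above makes the index type signed because PODArray deliberately allows reading the element one slot before the data pointer (offsetAt(0) reads offsets[-1]); with an unsigned index that access wraps to SIZE_MAX and the pointer arithmetic overflows, which is undefined behaviour. A minimal sketch of the idea, using std::ptrdiff_t in place of ClickHouse's ssize_t and invented names:

#include <cassert>
#include <cstddef>
#include <cstdint>

struct PaddedArray {
  // One padding slot in front of the visible elements, as PODArray's
  // left padding provides, so element [-1] is real, owned memory.
  int64_t storage[1 + 4] = {0, 10, 20, 30, 40};
  int64_t* data() { return storage + 1; }

  // Signed index: data() + (-1) stays inside `storage`, which is well defined.
  // With size_t, n == -1 would wrap to SIZE_MAX and overflow the pointer.
  int64_t& operator[](std::ptrdiff_t n) { return data()[n]; }
};

int64_t sizeAt(PaddedArray& offsets, std::ptrdiff_t i) {
  // For i == 0 this reads offsets[-1], the zero-filled padding slot.
  return offsets[i] - offsets[i - 1];
}

int main() {
  PaddedArray offsets;
  assert(sizeAt(offsets, 0) == 10);
  assert(sizeAt(offsets, 2) == 10);
}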
mmm a / test / cpp / qps / server . cc <nl> ppp b / test / cpp / qps / server . cc <nl> static void RunServer ( ) { <nl> builder . AddPort ( server_address ) ; <nl> builder . RegisterService ( service . service ( ) ) ; <nl> <nl> - ThreadPool * pool = new ThreadPool ( FLAGS_server_threads ) ; <nl> - builder . SetThreadPool ( pool ) ; <nl> + std : : unique_ptr < ThreadPool > pool ( new ThreadPool ( FLAGS_server_threads ) ) ; <nl> + builder . SetThreadPool ( pool . get ( ) ) ; <nl> <nl> std : : unique_ptr < Server > server ( builder . BuildAndStart ( ) ) ; <nl> gpr_log ( GPR_INFO , " Server listening on % s \ n " , server_address ) ; <nl> static void RunServer ( ) { <nl> <nl> grpc_profiler_stop ( ) ; <nl> <nl> - delete pool ; <nl> gpr_free ( server_address ) ; <nl> } <nl> <nl>
Change pointer format
grpc/grpc
4ca479c3a1ccb11e025ca68be91f8bd9eabc5e85
2015-02-10T18:55:53Z
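Editor's note: the change above swaps manual new/delete for std::unique_ptr ownership, so the pool is released on every exit path while the builder still receives a borrowed raw pointer via get(). A reduced sketch of that pattern, with placeholder types standing in for the gRPC classes:

#include <memory>

struct ThreadPool {
  explicit ThreadPool(int threads) { (void)threads; }
};
struct ServerBuilder {
  void SetThreadPool(ThreadPool* pool) { (void)pool; }
};

void RunServer(int num_threads) {
  std::unique_ptr<ThreadPool> pool(new ThreadPool(num_threads));
  ServerBuilder builder;
  builder.SetThreadPool(pool.get());  // the builder only borrows the pool
  // ... build and run the server ...
}  // pool is destroyed on every exit path; the explicit delete goes away

int main() { RunServer(4); }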
mmm a / src / gui / mainwindow . cpp <nl> ppp b / src / gui / mainwindow . cpp <nl> void MainWindow : : displayRSSTab ( bool enable ) <nl> if ( ! m_rssWidget ) { <nl> m_rssWidget = new RSSWidget ( m_tabs ) ; <nl> connect ( m_rssWidget . data ( ) , & RSSWidget : : unreadCountUpdated , this , & MainWindow : : handleRSSUnreadCountUpdated ) ; <nl> - # ifndef Q_OS_MAC <nl> + # ifdef Q_OS_MAC <nl> + m_tabs - > addTab ( m_rssWidget , tr ( " RSS ( % 1 ) " ) . arg ( RSS : : Session : : instance ( ) - > rootFolder ( ) - > unreadCount ( ) ) ) ; <nl> + # else <nl> const int indexTab = m_tabs - > addTab ( m_rssWidget , tr ( " RSS ( % 1 ) " ) . arg ( RSS : : Session : : instance ( ) - > rootFolder ( ) - > unreadCount ( ) ) ) ; <nl> m_tabs - > setTabIcon ( indexTab , GuiIconProvider : : instance ( ) - > getIcon ( " application - rss + xml " ) ) ; <nl> # endif <nl> void MainWindow : : on_actionExecutionLogs_triggered ( bool checked ) <nl> if ( checked ) { <nl> Q_ASSERT ( ! m_executionLog ) ; <nl> m_executionLog = new ExecutionLog ( m_tabs , static_cast < Log : : MsgType > ( executionLogMsgTypes ( ) ) ) ; <nl> - # ifndef Q_OS_MAC <nl> + # ifdef Q_OS_MAC <nl> + m_tabs - > addTab ( m_executionLog , tr ( " Execution Log " ) ) ; <nl> + # else <nl> const int indexTab = m_tabs - > addTab ( m_executionLog , tr ( " Execution Log " ) ) ; <nl> m_tabs - > setTabIcon ( indexTab , GuiIconProvider : : instance ( ) - > getIcon ( " view - calendar - journal " ) ) ; <nl> # endif <nl>
Merge pull request from sledgehammer999 / fix_tabs
qbittorrent/qBittorrent
591cde53cf94a23ce9c37321f5b2dec7e202db79
2018-05-04T20:59:07Z
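Editor's note: before this change the #ifndef Q_OS_MAC guard skipped the addTab() call entirely on macOS; the fix adds the tab on every platform and keeps only the tab icon conditional. A compressed, stand-alone sketch of the corrected structure, with free functions standing in for the Qt widgets:

#include <cstdio>

int addTab(const char* title) { std::printf("tab: %s\n", title); return 0; }
void setTabIcon(int index, const char* icon) { std::printf("icon %d: %s\n", index, icon); }

void displayRSSTab() {
#ifdef Q_OS_MAC
  addTab("RSS");                       // macOS: add the tab, no per-tab icon
#else
  const int indexTab = addTab("RSS");  // other platforms: tab plus icon
  setTabIcon(indexTab, "application-rss+xml");
#endif
}

int main() { displayRSSTab(); }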
mmm a / hphp / runtime / vm / jit / func - prologues - arm . cpp <nl> ppp b / hphp / runtime / vm / jit / func - prologues - arm . cpp <nl> constexpr auto kLocalsToInitializeInline = 9 ; <nl> SrcKey emitPrologueWork ( Func * func , int nPassed ) { <nl> vixl : : MacroAssembler a { mcg - > code . main ( ) } ; <nl> <nl> - if ( tx - > mode ( ) = = TransProflogue ) { <nl> + if ( tx - > mode ( ) = = TransKind : : Proflogue ) { <nl> not_implemented ( ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / func - prologues - x64 . cpp <nl> ppp b / hphp / runtime / vm / jit / func - prologues - x64 . cpp <nl> SrcKey emitPrologueWork ( Func * func , int nPassed ) { <nl> <nl> Asm a { mcg - > code . main ( ) } ; <nl> <nl> - if ( tx - > mode ( ) = = TransProflogue ) { <nl> + if ( tx - > mode ( ) = = TransKind : : Proflogue ) { <nl> assert ( func - > shouldPGO ( ) ) ; <nl> TransID transId = tx - > profData ( ) - > curTransID ( ) ; <nl> auto counterAddr = tx - > profData ( ) - > transCounterAddr ( transId ) ; <nl> mmm a / hphp / runtime / vm / jit / ir - builder . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - builder . cpp <nl> static bool isMainExit ( const Block * b ) { <nl> * / <nl> std : : vector < RegionDesc : : TypePred > IRBuilder : : getKnownTypes ( ) { <nl> / / This function is only correct when given a single - exit region , as <nl> - / / in TransProfile . Furthermore , its output is only used to guide <nl> - / / formation of profile - driven regions . <nl> - assert ( tx - > mode ( ) = = TransProfile ) ; <nl> + / / in TransKind : : Profile . Furthermore , its output is only used to <nl> + / / guide formation of profile - driven regions . <nl> + assert ( tx - > mode ( ) = = TransKind : : Profile ) ; <nl> <nl> / / We want the state for the last block on the " main trace " . Figure <nl> / / out which that is . <nl> mmm a / hphp / runtime / vm / jit / ir - translator . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - translator . cpp <nl> void IRTranslator : : translateInstr ( const NormalizedInstruction & ni ) { <nl> ni . offset ( ) , ni . toString ( ) , <nl> ht . showStack ( ) ) ) ; <nl> / / When profiling , we disable type predictions to avoid side exits <nl> - assert ( IMPLIES ( JIT : : tx - > mode ( ) = = TransProfile , ! ni . outputPredicted ) ) ; <nl> + assert ( IMPLIES ( JIT : : tx - > mode ( ) = = TransKind : : Profile , ! ni . outputPredicted ) ) ; <nl> <nl> if ( ni . guardedThis ) { <nl> / / Task # 2067635 : This should really generate an AssertThis <nl> mmm a / hphp / runtime / vm / jit / mc - generator . cpp <nl> ppp b / hphp / runtime / vm / jit / mc - generator . cpp <nl> TCA MCGenerator : : retranslate ( const TranslArgs & args ) { <nl> } <nl> SKTRACE ( 1 , args . m_sk , " retranslate \ n " ) ; <nl> <nl> - m_tx . setMode ( profileSrcKey ( args . m_sk ) ? TransProfile : TransLive ) ; <nl> - SCOPE_EXIT { m_tx . setMode ( TransInvalid ) ; } ; <nl> + m_tx . setMode ( profileSrcKey ( args . m_sk ) ? TransKind : : Profile : TransKind : : Live ) ; <nl> + SCOPE_EXIT { m_tx . setMode ( TransKind : : Invalid ) ; } ; <nl> <nl> return translate ( args ) ; <nl> } <nl> TCA MCGenerator : : retranslateOpt ( TransID transId , bool align ) { <nl> <nl> TRACE ( 1 , " retranslateOpt : transId = % u \ n " , transId ) ; <nl> <nl> - SCOPE_EXIT { m_tx . setMode ( TransInvalid ) ; } ; <nl> + SCOPE_EXIT { m_tx . setMode ( TransKind : : Invalid ) ; } ; <nl> <nl> always_assert ( m_tx . profData ( ) - > transRegion ( transId ) ! 
= nullptr ) ; <nl> <nl> TCA MCGenerator : : retranslateOpt ( TransID transId , bool align ) { <nl> regionizeFunc ( func , this , regions ) ; <nl> <nl> for ( auto region : regions ) { <nl> - m_tx . setMode ( TransOptimize ) ; <nl> + m_tx . setMode ( TransKind : : Optimize ) ; <nl> always_assert ( region - > blocks . size ( ) > 0 ) ; <nl> SrcKey regionSk = region - > blocks [ 0 ] - > start ( ) ; <nl> auto translArgs = TranslArgs ( regionSk , align ) . region ( region ) ; <nl> MCGenerator : : createTranslation ( const TranslArgs & args ) { <nl> size_t stubsize = code . stubs ( ) . frontier ( ) - stubstart ; <nl> assert ( asize = = 0 ) ; <nl> if ( stubsize & & RuntimeOption : : EvalDumpTCAnchors ) { <nl> - TransRec tr ( sk , sk . unit ( ) - > md5 ( ) , TransAnchor , <nl> + TransRec tr ( sk , sk . unit ( ) - > md5 ( ) , TransKind : : Anchor , <nl> astart , asize , stubstart , stubsize ) ; <nl> m_tx . addTranslation ( tr ) ; <nl> if ( RuntimeOption : : EvalJitUseVtuneAPI ) { <nl> MCGenerator : : createTranslation ( const TranslArgs & args ) { <nl> } <nl> <nl> if ( m_tx . profData ( ) ) { <nl> - m_tx . profData ( ) - > addTransNonProf ( TransAnchor , sk ) ; <nl> + m_tx . profData ( ) - > addTransNonProf ( TransKind : : Anchor , sk ) ; <nl> } <nl> assert ( ! m_tx . isTransDBEnabled ( ) | | <nl> - m_tx . getTransRec ( stubstart ) - > kind = = TransAnchor ) ; <nl> + m_tx . getTransRec ( stubstart ) - > kind = = TransKind : : Anchor ) ; <nl> } <nl> <nl> return retranslate ( args ) ; <nl> MCGenerator : : translate ( const TranslArgs & args ) { <nl> <nl> assert ( ( ( uintptr_t ) vmsp ( ) & ( sizeof ( Cell ) - 1 ) ) = = 0 ) ; <nl> assert ( ( ( uintptr_t ) vmfp ( ) & ( sizeof ( Cell ) - 1 ) ) = = 0 ) ; <nl> - assert ( m_tx . mode ( ) ! = TransInvalid ) ; <nl> - SCOPE_EXIT { m_tx . setMode ( TransInvalid ) ; } ; <nl> + assert ( m_tx . mode ( ) ! = TransKind : : Invalid ) ; <nl> + SCOPE_EXIT { m_tx . setMode ( TransKind : : Invalid ) ; } ; <nl> <nl> if ( ! args . m_interp ) { <nl> if ( m_numHHIRTrans = = RuntimeOption : : EvalJitGlobalTranslationLimit ) { <nl> MCGenerator : : translate ( const TranslArgs & args ) { <nl> <nl> Func * func = const_cast < Func * > ( args . m_sk . func ( ) ) ; <nl> CodeCache : : Selector cbSel ( CodeCache : : Selector : : Args ( code ) <nl> - . profile ( m_tx . mode ( ) = = TransProfile ) <nl> + . profile ( m_tx . mode ( ) = = TransKind : : Profile ) <nl> . hot ( ( func - > attrs ( ) & AttrHot ) & & m_tx . useAHot ( ) ) ) ; <nl> <nl> if ( args . m_align ) { <nl> MCGenerator : : translate ( const TranslArgs & args ) { <nl> TCA start = code . main ( ) . frontier ( ) ; <nl> <nl> if ( RuntimeOption : : EvalJitDryRuns & & <nl> - ( m_tx . mode ( ) = = TransLive | | m_tx . mode ( ) = = TransProfile ) ) { <nl> + ( m_tx . mode ( ) = = TransKind : : Live | | m_tx . mode ( ) = = TransKind : : Profile ) ) { <nl> auto const useRegion = <nl> RuntimeOption : : EvalJitRegionSelector = = " tracelet " ; <nl> always_assert ( useRegion | | <nl> MCGenerator : : getFuncPrologue ( Func * func , int nPassed , ActRec * ar ) { <nl> <nl> / / We ' re comming from a BIND_CALL service request , so enable <nl> / / profiling if we haven ' t optimized the function entry yet . <nl> - assert ( m_tx . mode ( ) = = TransInvalid | | m_tx . mode ( ) = = TransPrologue ) ; <nl> - if ( m_tx . mode ( ) = = TransInvalid & & profilePrologue ( funcBody ) ) { <nl> - m_tx . setMode ( TransProflogue ) ; <nl> + assert ( m_tx . mode ( ) = = TransKind : : Invalid | | <nl> + m_tx . 
mode ( ) = = TransKind : : Prologue ) ; <nl> + if ( m_tx . mode ( ) = = TransKind : : Invalid & & profilePrologue ( funcBody ) ) { <nl> + m_tx . setMode ( TransKind : : Proflogue ) ; <nl> } else { <nl> - m_tx . setMode ( TransPrologue ) ; <nl> + m_tx . setMode ( TransKind : : Prologue ) ; <nl> } <nl> - SCOPE_EXIT { m_tx . setMode ( TransInvalid ) ; } ; <nl> + SCOPE_EXIT { m_tx . setMode ( TransKind : : Invalid ) ; } ; <nl> <nl> CodeCache : : Selector cbSel ( CodeCache : : Selector : : Args ( code ) <nl> - . profile ( m_tx . mode ( ) = = TransProflogue ) <nl> + . profile ( m_tx . mode ( ) = = TransKind : : Proflogue ) <nl> . hot ( ( func - > attrs ( ) & AttrHot ) & & m_tx . useAHot ( ) ) ) ; <nl> <nl> / / If we ' re close to a cache line boundary , just burn some space to <nl> MCGenerator : : getFuncPrologue ( Func * func , int nPassed , ActRec * ar ) { <nl> assert ( isValidCodeAddress ( start ) ) ; <nl> func - > setPrologue ( paramIndex , start ) ; <nl> <nl> - assert ( m_tx . mode ( ) = = TransPrologue | | m_tx . mode ( ) = = TransProflogue ) ; <nl> + assert ( m_tx . mode ( ) = = TransKind : : Prologue | | <nl> + m_tx . mode ( ) = = TransKind : : Proflogue ) ; <nl> TransRec tr ( skFuncBody , func - > unit ( ) - > md5 ( ) , <nl> m_tx . mode ( ) , aStart , code . main ( ) . frontier ( ) - aStart , <nl> stubStart , code . stubs ( ) . frontier ( ) - stubStart ) ; <nl> TCA MCGenerator : : regeneratePrologue ( TransID prologueTransId , <nl> <nl> / / Regenerate the prologue . <nl> func - > resetPrologue ( nArgs ) ; <nl> - m_tx . setMode ( TransPrologue ) ; <nl> - SCOPE_EXIT { m_tx . setMode ( TransInvalid ) ; } ; <nl> + m_tx . setMode ( TransKind : : Prologue ) ; <nl> + SCOPE_EXIT { m_tx . setMode ( TransKind : : Invalid ) ; } ; <nl> TCA start = getFuncPrologue ( func , nArgs ) ; <nl> func - > setPrologue ( nArgs , start ) ; <nl> <nl> TCA MCGenerator : : regeneratePrologue ( TransID prologueTransId , <nl> if ( nArgs < func - > numNonVariadicParams ( ) & & ! func - > isClonedClosure ( ) ) { <nl> auto paramInfo = func - > params ( ) [ nArgs ] ; <nl> if ( paramInfo . hasDefaultValue ( ) ) { <nl> - m_tx . setMode ( TransOptimize ) ; <nl> + m_tx . setMode ( TransKind : : Optimize ) ; <nl> SrcKey funcletSK ( func , paramInfo . funcletOff ( ) , false ) ; <nl> TransID funcletTransId = m_tx . profData ( ) - > dvFuncletTransId ( func , nArgs ) ; <nl> if ( funcletTransId ! = InvalidID ) { <nl> MCGenerator : : reachedTranslationLimit ( SrcKey sk , <nl> if ( tns [ i ] = = topTrans ) { <nl> SKTRACE ( 2 , sk , " % zd : * Top * \ n " , i ) ; <nl> } <nl> - if ( rec - > kind = = TransAnchor ) { <nl> + if ( rec - > kind = = TransKind : : Anchor ) { <nl> SKTRACE ( 2 , sk , " % zd : Anchor \ n " , i ) ; <nl> } else { <nl> SKTRACE ( 2 , sk , " % zd : guards { \ n " , i ) ; <nl> MCGenerator : : emitGuardChecks ( SrcKey sk , <nl> } <nl> <nl> m_tx . irTrans ( ) - > hhbcTrans ( ) . emitRB ( RBTypeTraceletGuards , sk ) ; <nl> - bool checkOuterTypeOnly = m_tx . mode ( ) ! = TransProfile ; <nl> + bool checkOuterTypeOnly = m_tx . mode ( ) ! = TransKind : : Profile ; <nl> for ( auto const & dep : dependencies ) { <nl> / * <nl> * In some cases , we may have relaxed a guard to be the same as <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> TCA start = code . main ( ) . frontier ( ) ; <nl> TCA stubStart = code . stubs ( ) . frontier ( ) ; <nl> SrcRec & srcRec = * m_tx . 
getSrcRec ( sk ) ; <nl> - TransKind transKind = TransInterp ; <nl> + TransKind transKind = TransKind : : Interp ; <nl> UndoMarker undoA ( code . main ( ) ) ; <nl> UndoMarker undoAstubs ( code . stubs ( ) ) ; <nl> UndoMarker undoGlobalData ( code . data ( ) ) ; <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> RegionDescPtr region ; <nl> if ( ! args . m_interp & & ! reachedTranslationLimit ( sk , srcRec ) ) { <nl> / / Attempt to create a region at this SrcKey <nl> - if ( m_tx . mode ( ) = = TransOptimize ) { <nl> + if ( m_tx . mode ( ) = = TransKind : : Optimize ) { <nl> assert ( RuntimeOption : : EvalJitPGO ) ; <nl> region = args . m_region ; <nl> if ( region ) { <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> if ( region & & region - > blocks . size ( ) = = 0 ) region = nullptr ; <nl> } <nl> } else { <nl> - assert ( m_tx . mode ( ) = = TransProfile | | m_tx . mode ( ) = = TransLive ) ; <nl> + assert ( m_tx . mode ( ) = = TransKind : : Profile | | <nl> + m_tx . mode ( ) = = TransKind : : Live ) ; <nl> tp = m_tx . analyze ( sk ) ; <nl> / / TODO ( # 4150507 ) : use sk . resumed ( ) instead of liveResumed ( ) ? <nl> RegionContext rContext { sk . func ( ) , sk . offset ( ) , liveSpOff ( ) , <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> <nl> / / If we ' re profiling , grab the postconditions so we can <nl> / / use them in region selection whenever we decide to retranslate . <nl> - if ( m_tx . mode ( ) = = TransProfile & & result = = Translator : : Success & & <nl> + if ( m_tx . mode ( ) = = TransKind : : Profile & & <nl> + result = = Translator : : Success & & <nl> RuntimeOption : : EvalJitPGOUsePostConditions ) { <nl> pconds = m_tx . irTrans ( ) - > hhbcTrans ( ) . irBuilder ( ) . getKnownTypes ( ) ; <nl> } <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> / / translation , it ' s OK to do a Live translation for the <nl> / / function entry . We lazily create the tracelet here in this <nl> / / case . <nl> - if ( m_tx . mode ( ) = = TransOptimize ) { <nl> + if ( m_tx . mode ( ) = = TransKind : : Optimize ) { <nl> if ( sk . getFuncId ( ) = = liveFunc ( ) - > getFuncId ( ) & & <nl> liveUnit ( ) - > contains ( vmpc ( ) ) & & <nl> sk . offset ( ) = = liveUnit ( ) - > offsetOf ( vmpc ( ) ) ) { <nl> - m_tx . setMode ( TransLive ) ; <nl> + m_tx . setMode ( TransKind : : Live ) ; <nl> tp = m_tx . analyze ( sk ) ; <nl> } else { <nl> - m_tx . setMode ( TransInterp ) ; <nl> + m_tx . setMode ( TransKind : : Interp ) ; <nl> m_tx . traceFree ( ) ; <nl> break ; <nl> } <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> / / If we ' re profiling , grab the postconditions so we can <nl> / / use them in region selection whenever we decide to <nl> / / retranslate . <nl> - if ( m_tx . mode ( ) = = TransProfile & & result = = Translator : : Success & & <nl> + if ( m_tx . mode ( ) = = TransKind : : Profile & & <nl> + result = = Translator : : Success & & <nl> RuntimeOption : : EvalJitPGOUsePostConditions ) { <nl> pconds = m_tx . irTrans ( ) - > hhbcTrans ( ) . irBuilder ( ) . getKnownTypes ( ) ; <nl> } <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> } <nl> <nl> if ( result = = Translator : : Success ) { <nl> - assert ( m_tx . mode ( ) = = TransLive | | <nl> - m_tx . mode ( ) = = TransProfile | | <nl> - m_tx . mode ( ) = = TransOptimize ) ; <nl> + assert ( m_tx . mode ( ) = = TransKind : : Live | | <nl> + m_tx . mode ( ) = = TransKind : : Profile | | <nl> + m_tx . 
mode ( ) = = TransKind : : Optimize ) ; <nl> transKind = m_tx . mode ( ) ; <nl> } <nl> } <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> return ; <nl> } <nl> <nl> - if ( transKind = = TransInterp ) { <nl> + if ( transKind = = TransKind : : Interp ) { <nl> assertCleanState ( ) ; <nl> auto interpOps = tp ? tp - > m_numOpcodes : 1 ; <nl> FTRACE ( 1 , " emitting { } - instr interp request for failed translation \ n " , <nl> MCGenerator : : translateWork ( const TranslArgs & args ) { <nl> recordGdbTranslation ( sk , sk . func ( ) , code . stubs ( ) , stubStart , <nl> false , false ) ; <nl> if ( RuntimeOption : : EvalJitPGO ) { <nl> - if ( transKind = = TransProfile ) { <nl> + if ( transKind = = TransKind : : Profile ) { <nl> if ( ! region ) { <nl> assert ( tp ) ; <nl> region = selectTraceletLegacy ( liveSpOff ( ) , * tp ) ; <nl> MCGenerator : : translateTracelet ( Tracelet & t ) { <nl> ht . emitIncTransCounter ( ) ; <nl> } <nl> <nl> - if ( m_tx . mode ( ) = = TransProfile ) { <nl> + if ( m_tx . mode ( ) = = TransKind : : Profile ) { <nl> if ( t . func ( ) - > isEntry ( sk . offset ( ) ) ) { <nl> ht . emitCheckCold ( m_tx . profData ( ) - > curTransID ( ) ) ; <nl> profilingFunc = true ; <nl> MCGenerator : : translateTracelet ( Tracelet & t ) { <nl> try { <nl> SKTRACE ( 1 , ni - > source , " HHIR : translateInstr \ n " ) ; <nl> assert ( ! ( m_tx . mode ( ) = = <nl> - TransProfile & & ni - > outputPredicted & & ni - > next ) ) ; <nl> + TransKind : : Profile & & ni - > outputPredicted & & ni - > next ) ) ; <nl> m_tx . irTrans ( ) - > translateInstr ( * ni ) ; <nl> } catch ( FailedIRGen & fcg ) { <nl> always_assert ( ! ni - > interp ) ; <nl> void MCGenerator : : invalidateSrcKey ( SrcKey sk ) { <nl> } <nl> <nl> void MCGenerator : : setJmpTransID ( TCA jmp ) { <nl> - if ( m_tx . mode ( ) ! = TransProfile ) return ; <nl> + if ( m_tx . mode ( ) ! = TransKind : : Profile ) return ; <nl> <nl> TransID transId = m_tx . profData ( ) - > curTransID ( ) ; <nl> FTRACE ( 5 , " setJmpTransID : adding { } = > { } \ n " , jmp , transId ) ; <nl> mmm a / hphp / runtime / vm / jit / opt . cpp <nl> ppp b / hphp / runtime / vm / jit / opt . cpp <nl> void optimize ( IRUnit & unit , IRBuilder & irBuilder , TransKind kind ) { <nl> * PostCond : Loc0 : Uncounted / / post - conds are accurate <nl> * B2 : TypePred : Loc0 : Int / / this will always fail <nl> * / <nl> - const bool relax = kind ! = TransProfile | | <nl> + const bool relax = kind ! = TransKind : : Profile | | <nl> RuntimeOption : : EvalJitRegionSelector = = " tracelet " ; <nl> if ( relax ) { <nl> Timer _t ( Timer : : optimize_relaxGuards ) ; <nl> - const bool simple = kind = = TransProfile & & <nl> + const bool simple = kind = = TransKind : : Profile & & <nl> RuntimeOption : : EvalJitRegionSelector = = " tracelet " ; <nl> auto changed = relaxGuards ( unit , * irBuilder . guards ( ) , simple ) ; <nl> if ( changed ) finishPass ( " guard relaxation " ) ; <nl> mmm a / hphp / runtime / vm / jit / prof - data . cpp <nl> ppp b / hphp / runtime / vm / jit / prof - data . 
cpp <nl> ProfTransRec : : ProfTransRec ( TransID id , <nl> , m_lastBcOff ( - 1 ) <nl> , m_region ( nullptr ) <nl> , m_sk ( sk ) { <nl> - assert ( kind = = TransAnchor | | kind = = TransOptimize | | <nl> - kind = = TransInterp | | kind = = TransLive ) ; <nl> + assert ( kind = = TransKind : : Anchor | | kind = = TransKind : : Optimize | | <nl> + kind = = TransKind : : Interp | | kind = = TransKind : : Live ) ; <nl> } <nl> <nl> ProfTransRec : : ProfTransRec ( TransID id , <nl> ProfTransRec : : ProfTransRec ( TransID id , <nl> , m_prologueArgs ( nArgs ) <nl> , m_region ( nullptr ) <nl> , m_sk ( sk ) { <nl> - assert ( kind = = TransPrologue | | kind = = TransProflogue ) ; <nl> - if ( kind = = TransProflogue ) { <nl> + assert ( kind = = TransKind : : Prologue | | kind = = TransKind : : Proflogue ) ; <nl> + if ( kind = = TransKind : : Proflogue ) { <nl> / / we only need to keep track of the callers for Proflogues <nl> m_prologueCallers = folly : : make_unique < PrologueCallersRec > ( ) ; <nl> } <nl> Offset ProfTransRec : : startBcOff ( ) const { <nl> } <nl> <nl> Offset ProfTransRec : : lastBcOff ( ) const { <nl> - assert ( m_kind = = TransProfile ) ; <nl> + assert ( m_kind = = TransKind : : Profile ) ; <nl> return m_lastBcOff ; <nl> } <nl> <nl> int ProfTransRec : : prologueArgs ( ) const { <nl> - assert ( m_kind = = TransProflogue ) ; <nl> + assert ( m_kind = = TransKind : : Proflogue ) ; <nl> return m_prologueArgs ; <nl> } <nl> <nl> FuncId ProfTransRec : : funcId ( ) const { <nl> } <nl> <nl> RegionDescPtr ProfTransRec : : region ( ) const { <nl> - assert ( kind ( ) = = TransProfile ) ; <nl> + assert ( kind ( ) = = TransKind : : Profile ) ; <nl> return m_region ; <nl> } <nl> <nl> PrologueCallersRec * ProfTransRec : : prologueCallers ( ) const { <nl> - assert ( kind ( ) = = TransProflogue ) ; <nl> + assert ( kind ( ) = = TransKind : : Proflogue ) ; <nl> return m_prologueCallers . get ( ) ; <nl> } <nl> <nl> TransID ProfData : : addTransProfile ( const RegionDescPtr & region , <nl> <nl> region - > blocks . back ( ) - > setPostConditions ( pconds ) ; <nl> auto const startSk = region - > blocks . front ( ) - > start ( ) ; <nl> - m_transRecs . emplace_back ( new ProfTransRec ( transId , TransProfile , lastBcOff , <nl> - startSk , region ) ) ; <nl> + m_transRecs . emplace_back ( new ProfTransRec ( transId , <nl> + TransKind : : Profile , <nl> + lastBcOff , <nl> + startSk , <nl> + region ) ) ; <nl> <nl> / / If the translation corresponds to a DV Funclet , then add an entry <nl> / / into dvFuncletDB . <nl> TransID ProfData : : addTransProfile ( const RegionDescPtr & region , <nl> <nl> TransID ProfData : : addTransPrologue ( TransKind kind , const SrcKey & sk , <nl> int nArgs ) { <nl> - assert ( kind = = TransPrologue | | kind = = TransProflogue ) ; <nl> + assert ( kind = = TransKind : : Prologue | | kind = = TransKind : : Proflogue ) ; <nl> TransID transId = m_numTrans + + ; <nl> m_transRecs . emplace_back ( new ProfTransRec ( transId , kind , sk , nArgs ) ) ; <nl> - if ( kind = = TransProflogue ) { <nl> + if ( kind = = TransKind : : Proflogue ) { <nl> / / only Proflogue translations need an entry in the m_prologueDB <nl> m_prologueDB . add ( sk . 
getFuncId ( ) , nArgs , transId ) ; <nl> } <nl> TransID ProfData : : addTransPrologue ( TransKind kind , const SrcKey & sk , <nl> } <nl> <nl> TransID ProfData : : addTransNonProf ( TransKind kind , const SrcKey & sk ) { <nl> - assert ( kind = = TransAnchor | | kind = = TransInterp | | <nl> - kind = = TransLive | | kind = = TransOptimize ) ; <nl> + assert ( kind = = TransKind : : Anchor | | kind = = TransKind : : Interp | | <nl> + kind = = TransKind : : Live | | kind = = TransKind : : Optimize ) ; <nl> TransID transId = m_numTrans + + ; <nl> m_transRecs . emplace_back ( new ProfTransRec ( transId , kind , sk ) ) ; <nl> return transId ; <nl> PrologueCallersRec * ProfData : : findPrologueCallersRec ( const Func * func , <nl> assert ( RuntimeOption : : EvalJitPGOHotOnly & & ! ( func - > attrs ( ) & AttrHot ) ) ; <nl> return nullptr ; <nl> } <nl> - assert ( transKind ( tid ) = = TransProflogue ) ; <nl> + assert ( transKind ( tid ) = = TransKind : : Proflogue ) ; <nl> PrologueCallersRec * prologueCallers = m_transRecs [ tid ] - > prologueCallers ( ) ; <nl> assert ( prologueCallers ) ; <nl> return prologueCallers ; <nl> mmm a / hphp / runtime / vm / jit / region - selection . cpp <nl> ppp b / hphp / runtime / vm / jit / region - selection . cpp <nl> RegionDescPtr selectRegion ( const RegionContext & context , <nl> auto region = [ & ] { <nl> try { <nl> switch ( mode ) { <nl> - case RegionMode : : None : return RegionDescPtr { nullptr } ; <nl> - case RegionMode : : Method : return selectMethod ( context ) ; <nl> - case RegionMode : : Tracelet : return selectTracelet ( context , 0 , <nl> - kind = = TransProfile ) ; <nl> + case RegionMode : : None : <nl> + return RegionDescPtr { nullptr } ; <nl> + case RegionMode : : Method : <nl> + return selectMethod ( context ) ; <nl> + case RegionMode : : Tracelet : <nl> + return selectTracelet ( context , 0 , kind = = TransKind : : Profile ) ; <nl> case RegionMode : : Legacy : <nl> - always_assert ( t ) ; return selectTraceletLegacy ( context . spOffset , <nl> - * t ) ; <nl> + always_assert ( t ) ; <nl> + return selectTraceletLegacy ( context . spOffset , * t ) ; <nl> } <nl> not_reached ( ) ; <nl> } catch ( const std : : exception & e ) { <nl> mmm a / hphp / runtime / vm / jit / translator . cpp <nl> ppp b / hphp / runtime / vm / jit / translator . cpp <nl> getDynLocType ( const SrcKey startSk , <nl> } / / Fall through <nl> case OutPred : { <nl> / / In TransProfile mode , disable type prediction to avoid side exits . <nl> - auto dt = mode = = TransProfile ? KindOfAny : predictOutputs ( ni ) ; <nl> + auto dt = mode = = TransKind : : Profile ? KindOfAny : predictOutputs ( ni ) ; <nl> if ( dt ! = KindOfAny ) ni - > outputPredicted = true ; <nl> return RuntimeType ( dt , dt = = KindOfRef ? KindOfAny : KindOfNone ) ; <nl> } <nl> DynLocation * TraceletContext : : recordRead ( const InputInfo & ii , <nl> m_resolvedDeps [ l ] = dl ; <nl> } <nl> } else { <nl> - const bool specialize = tx - > mode ( ) = = TransLive & & <nl> + const bool specialize = tx - > mode ( ) = = TransKind : : Live & & <nl> ( RuntimeOption : : EvalHHBCRelaxGuards | | <nl> RuntimeOption : : EvalHHIRRelaxGuards ) ; <nl> <nl> std : : unique_ptr < Tracelet > Translator : : analyze ( SrcKey sk , <nl> throwUnknownInput ( ) ; <nl> } <nl> } <nl> - if ( ( m_mode = = TransProfile | | m_mode = = TransOptimize ) & & <nl> + if ( ( m_mode = = TransKind : : Profile | | m_mode = = TransKind : : Optimize ) & & <nl> t . 
m_numOpcodes > 0 ) { <nl> / / We want to break blocks at every instrution that consumes a ref , <nl> / / so that we avoid side exits . Therefore , instructions consume ref <nl> std : : unique_ptr < Tracelet > Translator : : analyze ( SrcKey sk , <nl> tas . recordDelete ( l ) ; <nl> } <nl> <nl> - if ( m_mode = = TransProfile & & instrBreaksProfileBB ( ni ) ) { <nl> + if ( m_mode = = TransKind : : Profile & & instrBreaksProfileBB ( ni ) ) { <nl> SKTRACE ( 1 , sk , " Profiling BB broken \ n " ) ; <nl> sk . advance ( unit ) ; <nl> goto breakBB ; <nl> std : : unique_ptr < Tracelet > Translator : : analyze ( SrcKey sk , <nl> if ( isUnconditionalJmp ( ni - > op ( ) ) & & <nl> ni - > imm [ 0 ] . u_BA > 0 & & <nl> tas . m_numJmps < MaxJmpsTracedThrough & & <nl> - m_mode ! = TransProfile ) { <nl> + m_mode ! = TransKind : : Profile ) { <nl> / / Continue tracing through jumps . To prevent pathologies , only trace <nl> / / through a finite number of forward jumps . <nl> SKTRACE ( 1 , sk , " greedily continuing through % dth jmp + % d \ n " , <nl> std : : unique_ptr < Tracelet > Translator : : analyze ( SrcKey sk , <nl> } <nl> } <nl> <nl> - if ( RuntimeOption : : EvalHHBCRelaxGuards & & m_mode = = TransLive ) { <nl> + if ( RuntimeOption : : EvalHHBCRelaxGuards & & m_mode = = TransKind : : Live ) { <nl> relaxDeps ( t , tas ) ; <nl> } else { <nl> FTRACE ( 3 , " Not relaxing deps . HHBCRelax : { } , mode : { } \ n " , <nl> std : : unique_ptr < Tracelet > Translator : : analyze ( SrcKey sk , <nl> Translator : : Translator ( ) <nl> : uniqueStubs { } <nl> , m_createdTime ( HPHP : : Timer : : GetCurrentTimeMicros ( ) ) <nl> - , m_mode ( TransInvalid ) <nl> + , m_mode ( TransKind : : Invalid ) <nl> , m_profData ( nullptr ) <nl> , m_analysisDepth ( 0 ) <nl> , m_useAHot ( RuntimeOption : : RepoAuthoritative & & <nl> Translator : : translateRegion ( const RegionDesc & region , <nl> assert ( loc . tag ( ) = = RegionDesc : : Location : : Tag : : Stack ) ; <nl> ht . assertType ( loc , type ) ; <nl> } else if ( isFirstRegionInstr ) { <nl> - bool checkOuterTypeOnly = m_mode ! = TransProfile ; <nl> + bool checkOuterTypeOnly = m_mode ! = TransKind : : Profile ; <nl> ht . guardTypeLocation ( loc , type , checkOuterTypeOnly ) ; <nl> } else { <nl> ht . checkType ( loc , type , sk . offset ( ) ) ; <nl> Translator : : translateRegion ( const RegionDesc & region , <nl> ht . emitIncTransCounter ( ) ; <nl> } <nl> <nl> - if ( m_mode = = TransProfile ) { <nl> + if ( m_mode = = TransKind : : Profile ) { <nl> if ( block - > func ( ) - > isEntry ( block - > start ( ) . offset ( ) ) ) { <nl> ht . emitCheckCold ( m_profData - > curTransID ( ) ) ; <nl> profilingFunc = true ; <nl> Translator : : translateRegion ( const RegionDesc & region , <nl> inst . preppedByRef = byRefs . next ( ) ; <nl> } <nl> <nl> - / / Check for a type prediction . Put it in the NormalizedInstruction so <nl> - / / the emit * method can use it if needed . <nl> - / / In PGO mode , we don ' t really need the values coming from the <nl> - / / interpreter type profiler . TransProfile translations end whenever <nl> - / / there ' s a side - exit , and type predictions incur side - exits . And when <nl> - / / we stitch multiple TransProfile translations together to form a <nl> - / / larger region ( in TransOptimize mode ) , the guard for the top of the <nl> - / / stack essentially does the role of type prediction . And , if the value <nl> - / / is also inferred , then the guard is omitted . 
<nl> - auto const doPrediction = mode ( ) = = TransLive & & outputIsPredicted ( inst ) ; <nl> + / * <nl> + * Check for a type prediction . Put it in the <nl> + * NormalizedInstruction so the emit * method can use it if <nl> + * needed . In PGO mode , we don ' t really need the values coming <nl> + * from the interpreter type profiler . TransKind : : Profile <nl> + * translations end whenever there ' s a side - exit , and type <nl> + * predictions incur side - exits . And when we stitch multiple <nl> + * TransKind : : Profile translations together to form a larger <nl> + * region ( in TransKind : : Optimize mode ) , the guard for the top <nl> + * of the stack essentially does the role of type prediction . <nl> + * And , if the value is also inferred , then the guard is <nl> + * omitted . <nl> + * / <nl> + auto const doPrediction = mode ( ) = = TransKind : : Live & & <nl> + outputIsPredicted ( inst ) ; <nl> <nl> / / If this block ends with an inlined FCall , we don ' t emit anything for <nl> / / the FCall and instead set up HhbcTranslator for inlining . Blocks from <nl> struct DeferredPathInvalidate : public DeferredWorkItem { <nl> <nl> } <nl> <nl> - static const char * transKindStr [ ] = { <nl> - # define DO ( KIND ) # KIND , <nl> - TRANS_KINDS <nl> - # undef DO <nl> - } ; <nl> - <nl> - const char * getTransKindName ( TransKind kind ) { <nl> - assert ( kind > = 0 & & kind < TransInvalid ) ; <nl> - return transKindStr [ kind ] ; <nl> - } <nl> - <nl> TransRec : : TransRec ( SrcKey s , <nl> MD5 _md5 , <nl> TransKind _kind , <nl> TransRec : : print ( uint64_t profCount ) const { <nl> " aLen = { : # x } \ n " <nl> " stubStart = { } \ n " <nl> " stubLen = { : # x } \ n " , <nl> - static_cast < uint32_t > ( kind ) , getTransKindName ( kind ) , <nl> + static_cast < uint32_t > ( kind ) , show ( kind ) , <nl> aStart , aLen , astubsStart , astubsLen ) . str ( ) ; <nl> <nl> ret + = folly : : format ( <nl> mmm a / hphp / runtime / vm / jit / translator . h <nl> ppp b / hphp / runtime / vm / jit / translator . h <nl> typedef hphp_hash_map < RegionDesc : : BlockId , Block * > BlockIdToIRBlockMap ; <nl> typedef hphp_hash_map < RegionDesc : : BlockId , <nl> RegionDesc : : Block * > BlockIdToRegionBlockMap ; <nl> <nl> - <nl> - <nl> - const char * getTransKindName ( TransKind kind ) ; <nl> - <nl> / * <nl> * Used to maintain a mapping from the bytecode to its corresponding x86 . <nl> * / <nl> mmm a / hphp / runtime / vm / jit / types . h <nl> ppp b / hphp / runtime / vm / jit / types . h <nl> const TransID InvalidID = - 1LL ; <nl> DO ( Proflogue ) \ <nl> DO ( Invalid ) \ <nl> <nl> - enum TransKind { <nl> - # define DO ( KIND ) Trans # # KIND , <nl> + enum class TransKind { <nl> + # define DO ( KIND ) KIND , <nl> TRANS_KINDS <nl> # undef DO <nl> } ; <nl> constexpr size_t NumTransKinds = <nl> ; <nl> <nl> inline std : : string show ( TransKind k ) { <nl> - switch ( k ) { <nl> - # define DO ( name ) case Trans # # name : return " Trans " # name ; <nl> - TRANS_KINDS <nl> - # undef DO <nl> - } <nl> + # define DO ( name ) case TransKind : : name : return " Trans " # name ; <nl> + switch ( k ) { TRANS_KINDS } <nl> + # undef DO <nl> not_reached ( ) ; <nl> } <nl> <nl>
Change TransKind into an enum class
facebook/hhvm
b59a2a686c842b3393ddbfc1bca2ad1ec061a264
2014-05-08T04:56:08Z
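The commit above converts HHVM's unscoped `TransKind` enum into a scoped `enum class` and keeps the printable-name table in sync through the existing X-macro. A minimal standalone sketch of that pattern follows; the names (`Kind`, `KIND_LIST`, `show`) and the shortened enumerator list are invented for illustration, so this is not the actual HHVM type.

```cpp
#include <iostream>
#include <string>

// One list of enumerators, expanded in two places via an X-macro,
// mirroring the TRANS_KINDS / DO(...) pattern in the diff above.
#define KIND_LIST \
  DO(Live)        \
  DO(Profile)     \
  DO(Optimize)    \
  DO(Invalid)

// Scoped enum: values are written Kind::Profile, never convert
// implicitly to int, and cannot collide with other identifiers,
// so a "Trans" prefix on each enumerator is no longer needed.
enum class Kind {
#define DO(name) name,
  KIND_LIST
#undef DO
};

inline std::string show(Kind k) {
#define DO(name) case Kind::name: return #name;
  switch (k) { KIND_LIST }
#undef DO
  return "Unknown";
}

int main() {
  Kind mode = Kind::Profile;        // the old spelling was TransProfile
  std::cout << show(mode) << "\n";  // prints "Profile"
}
```

The practical effect visible throughout the diff is that comparisons such as `m_mode == TransProfile` must be rewritten as `m_mode == TransKind::Profile`, and any place that wants an integer (for example the trace-record printer) has to ask for it explicitly with `static_cast<uint32_t>`.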
mmm a / src / test / test_ext_network . cpp <nl> ppp b / src / test / test_ext_network . cpp <nl> bool TestExtNetwork : : test_socket_get_status ( ) { <nl> <nl> bool TestExtNetwork : : test_socket_set_blocking ( ) { <nl> Variant f = f_fsockopen ( " facebook . com " , 80 ) ; <nl> + VERIFY ( ! same ( f , false ) ) ; <nl> f_socket_set_blocking ( f , 0 ) ; <nl> return Count ( true ) ; <nl> } <nl> <nl> bool TestExtNetwork : : test_socket_set_timeout ( ) { <nl> Variant f = f_fsockopen ( " facebook . com " , 80 ) ; <nl> + VERIFY ( ! same ( f , false ) ) ; <nl> f_socket_set_timeout ( f , 0 ) ; <nl> return Count ( true ) ; <nl> } <nl>
[ Fix ] Don't abort in test_ext_network on socket errors
facebook/hhvm
c46f527ac984c1fa2c0d40e1f1bf12da75079ed0
2010-09-10T18:26:51Z
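The fix above verifies that `f_fsockopen` did not return `false` before the handle is passed to the socket setters, so a transient network failure fails the test instead of aborting the whole run. A small sketch of the same check-before-use guard, with invented names (`open_socket`, `set_blocking`) rather than the HPHP test API:

```cpp
#include <optional>
#include <string>

struct Socket { int fd; };

// Stand-in for an fsockopen-style call: it may fail and return "no value"
// (the PHP API signals the same condition by returning false).
std::optional<Socket> open_socket(const std::string& host, int port) {
  if (host.empty() || port <= 0) return std::nullopt;  // pretend failure path
  return Socket{42};
}

void set_blocking(Socket& s, bool blocking) { (void)s; (void)blocking; }

bool test_set_blocking() {
  auto s = open_socket("example.com", 80);
  if (!s) return false;      // the VERIFY step: report failure, don't crash
  set_blocking(*s, false);   // only touch the handle once we know it exists
  return true;
}

int main() { return test_set_blocking() ? 0 : 1; }
```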
mmm a / modules / planning / math / hermite_spline . h <nl> ppp b / modules / planning / math / hermite_spline . h <nl> <nl> # ifndef MODULES_PLANNING_MATH_HERMITE_SPLINE_H_ <nl> # define MODULES_PLANNING_MATH_HERMITE_SPLINE_H_ <nl> <nl> + # include < cstring > <nl> + # include < typeinfo > <nl> # include < utility > <nl> <nl> # include " modules / common / log . h " <nl> class HermiteSpline { <nl> virtual T Evaluate ( const std : : uint32_t order , const double z ) const ; <nl> <nl> private : <nl> - std : : array < T , ( N + 1 ) / 2 > _x0 ; <nl> + std : : array < T , ( N + 1 ) / 2 > x0_ ; <nl> <nl> - std : : array < T , ( N + 1 ) / 2 > _x1 ; <nl> + std : : array < T , ( N + 1 ) / 2 > x1_ ; <nl> <nl> - double _z0 = 0 . 0 ; <nl> + double z0_ = 0 . 0 ; <nl> <nl> double delta_z_ = 0 . 0 ; <nl> } ; <nl> template < typename T , std : : uint32_t N > <nl> inline HermiteSpline < T , N > : : HermiteSpline ( std : : array < T , ( N + 1 ) / 2 > x0 , <nl> std : : array < T , ( N + 1 ) / 2 > x1 , <nl> const double z0 , const double z1 ) <nl> - : _x0 ( std : : move ( x0 ) ) , _x1 ( std : : move ( x1 ) ) , _z0 ( z0 ) , delta_z_ ( z1 - z0 ) { <nl> + : x0_ ( std : : move ( x0 ) ) , x1_ ( std : : move ( x1 ) ) , z0_ ( z0 ) , delta_z_ ( z1 - z0 ) { <nl> CHECK ( N = = 3 | | N = = 5 ) <nl> < < " Error : currently we only support cubic and quintic hermite splines ! " ; <nl> } <nl> inline HermiteSpline < T , N > : : HermiteSpline ( std : : array < T , ( N + 1 ) / 2 > x0 , <nl> template < typename T , std : : uint32_t N > <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> const double z ) const { <nl> - CHECK_LE ( _z0 , z ) ; <nl> - CHECK_LE ( z , _z0 + delta_z_ ) ; <nl> + CHECK_LE ( z0_ , z ) ; <nl> + CHECK_LE ( z , z0_ + delta_z_ ) ; <nl> <nl> / / if N = = 3 , cubic hermite spline , N = = 5 , qunitic hermite spline <nl> if ( N = = 3 ) { <nl> - double p0 = _x0 [ 0 ] ; <nl> - double v0 = _x0 [ 1 ] ; <nl> - double p1 = _x1 [ 0 ] ; <nl> - double v1 = _x1 [ 1 ] ; <nl> + double p0 = x0_ [ 0 ] ; <nl> + double v0 = x0_ [ 1 ] ; <nl> + double p1 = x1_ [ 0 ] ; <nl> + double v1 = x1_ [ 1 ] ; <nl> switch ( order ) { <nl> case 0 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> double t3 = t2 * t ; <nl> <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> ( - 2 . 0 * t3 + 3 . 0 * t2 ) * p1 + ( t3 - t2 ) * v1 ; <nl> } <nl> case 1 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> <nl> return ( 6 . 0 * t2 - 6 . 0 * t ) * p0 + ( 3 . 0 * t2 - 4 * t + 1 . 0 ) * v0 + <nl> ( - 6 . 0 * t2 + 6 . 0 * t ) * p1 + ( 3 . 0 * t2 - 2 . 0 * t ) * v1 ; <nl> } <nl> case 2 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> return ( 12 . 0 * t - 6 . 0 ) * p0 + ( 6 . 0 * t - 4 . 0 ) * v0 + <nl> ( - 12 . 0 * t + 6 . 0 ) * p1 + ( 6 . 0 * t - 2 . 
0 ) * v1 ; <nl> } <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> } <nl> } else { <nl> CHECK_EQ ( 5 , N ) ; <nl> - double p0 = _x0 [ 0 ] ; <nl> - double v0 = _x0 [ 1 ] ; <nl> - double a0 = _x0 [ 2 ] ; <nl> - double p1 = _x1 [ 0 ] ; <nl> - double v1 = _x1 [ 1 ] ; <nl> - double a1 = _x1 [ 2 ] ; <nl> + double p0 = x0_ [ 0 ] ; <nl> + double v0 = x0_ [ 1 ] ; <nl> + double a0 = x0_ [ 2 ] ; <nl> + double p1 = x1_ [ 0 ] ; <nl> + double v1 = x1_ [ 1 ] ; <nl> + double a1 = x1_ [ 2 ] ; <nl> <nl> switch ( order ) { <nl> case 0 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> double t3 = t * t2 ; <nl> double t4 = t2 * t2 ; <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> return h0 * p0 + h1 * v0 + h2 * a0 + h3 * p1 + h4 * v1 + h5 * a1 ; <nl> } <nl> case 1 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> double t3 = t * t2 ; <nl> double t4 = t2 * t2 ; <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> return dh0 * p0 + dh1 * v0 + dh2 * a0 + dh3 * p1 + dh4 * v1 + dh5 * a1 ; <nl> } <nl> case 2 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> double t3 = t * t2 ; <nl> double det0 = t - t2 ; <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> ddh5 * a1 ; <nl> } <nl> case 3 : { <nl> - double t = ( z - _z0 ) / delta_z_ ; <nl> + double t = ( z - z0_ ) / delta_z_ ; <nl> double t2 = t * t ; <nl> double det = t - t2 ; <nl> double dddh0 = - 60 . 0 + 360 . 0 * det ; <nl> inline T HermiteSpline < T , N > : : Evaluate ( const std : : uint32_t order , <nl> return dddh0 * p0 + dddh1 * v0 + dddh2 * a0 + dddh3 * p1 + dddh4 * v1 + <nl> dddh5 * a1 ; <nl> } <nl> - / / TODO ( fanhaoyang ) : the derive higher order derivative for <nl> - / / quintic hermite spline <nl> default : { break ; } <nl> } <nl> } <nl> - AFATAL < < " Error : unsupported order of spline or derivative ! " ; <nl> + / / Check the type is " double " or " float " <nl> + if ( std : : strcmp ( typeid ( x0_ ) . name ( ) , " d " ) = = 0 | | std : : strcmp ( typeid ( x0_ ) . name ( ) , " f " ) = = 0 ) { <nl> + return 0 . 0 ; <nl> + } <nl> T t ; <nl> return t ; <nl> } <nl>
planning : formatted HermiteSpline class and added type checks
ApolloAuto/apollo
1d9eaaddc8a254a90e69d2b22a72e040fcdd00f1
2017-08-14T18:08:06Z
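The diff above renames the members of Apollo's `HermiteSpline` to the trailing-underscore style and evaluates cubic and quintic Hermite bases over `[z0_, z0_ + delta_z_]`. As a worked illustration of the cubic case only, here is a self-contained sketch using the same basis polynomials that appear in the patch; the function name and test values are invented:

```cpp
#include <cassert>
#include <cstdio>

// Cubic Hermite interpolation on [z0, z1] from endpoint values p0, p1 and
// endpoint tangents v0, v1, parameterized by t = (z - z0) / (z1 - z0).
double cubic_hermite(double p0, double v0, double p1, double v1,
                     double z0, double z1, double z) {
  assert(z0 <= z && z <= z1);
  const double t  = (z - z0) / (z1 - z0);
  const double t2 = t * t;
  const double t3 = t2 * t;
  return (2.0 * t3 - 3.0 * t2 + 1.0) * p0 +
         (t3 - 2.0 * t2 + t) * v0 +
         (-2.0 * t3 + 3.0 * t2) * p1 +
         (t3 - t2) * v1;
}

int main() {
  // Sanity check: endpoints (0, value 0, slope 1) and (1, value 1, slope 1)
  // on a unit interval reduce the spline to the straight line value = z.
  for (double z = 0.0; z <= 1.0; z += 0.25)
    std::printf("%.2f -> %.4f\n", z,
                cubic_hermite(0.0, 1.0, 1.0, 1.0, 0.0, 1.0, z));
}
```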
mmm a / plugins / chain_api_plugin / chain_api_plugin . cpp <nl> ppp b / plugins / chain_api_plugin / chain_api_plugin . cpp <nl> void chain_api_plugin : : plugin_startup ( ) { <nl> CHAIN_RO_CALL ( abi_json_to_bin ) , <nl> CHAIN_RO_CALL ( abi_bin_to_json ) , <nl> CHAIN_RW_CALL ( push_block ) , <nl> - CHAIN_RW_CALL ( push_transaction ) <nl> + CHAIN_RW_CALL ( push_transaction ) , <nl> + CHAIN_RW_CALL ( push_transactions ) <nl> } ) ; <nl> } <nl> <nl> mmm a / plugins / chain_plugin / chain_plugin . cpp <nl> ppp b / plugins / chain_plugin / chain_plugin . cpp <nl> read_write : : push_block_results read_write : : push_block ( const read_write : : push_blo <nl> } <nl> <nl> read_write : : push_transaction_results read_write : : push_transaction ( const read_write : : push_transaction_params & params ) { <nl> - auto ptrx = db . push_transaction ( params , skip_flags ) ; <nl> + auto pretty_input = db . transaction_from_variant ( params ) ; <nl> + auto ptrx = db . push_transaction ( pretty_input , skip_flags ) ; <nl> auto pretty_trx = db . transaction_to_variant ( ptrx ) ; <nl> - return read_write : : push_transaction_results { params . id ( ) , pretty_trx } ; <nl> + return read_write : : push_transaction_results { pretty_input . id ( ) , pretty_trx } ; <nl> + } <nl> + <nl> + read_write : : push_transactions_results read_write : : push_transactions ( const vector < read_write : : push_transaction_params > & params ) { <nl> + push_transactions_results result ; <nl> + for ( const auto & item : params ) { <nl> + try { <nl> + result . push_back ( push_transaction ( item ) ) ; <nl> + } catch ( const fc : : exception & e ) { <nl> + result . push_back ( read_write : : push_transaction_results { chain : : transaction_id_type ( ) , <nl> + fc : : mutable_variant_object ( " error " , e . to_detail_string ( ) ) } ) ; <nl> + } <nl> + } <nl> + return result ; <nl> } <nl> <nl> read_only : : get_account_results read_only : : get_account ( const get_account_params & params ) const { <nl> mmm a / plugins / chain_plugin / include / eos / chain_plugin / chain_plugin . hpp <nl> ppp b / plugins / chain_plugin / include / eos / chain_plugin / chain_plugin . hpp <nl> class read_write { <nl> using push_block_results = empty ; <nl> push_block_results push_block ( const push_block_params & params ) ; <nl> <nl> - using push_transaction_params = chain : : SignedTransaction ; <nl> + using push_transaction_params = fc : : variant_object ; <nl> struct push_transaction_results { <nl> chain : : transaction_id_type transaction_id ; <nl> fc : : variant processed ; <nl> } ; <nl> push_transaction_results push_transaction ( const push_transaction_params & params ) ; <nl> + <nl> + <nl> + using push_transactions_params = vector < push_transaction_params > ; <nl> + using push_transactions_results = vector < push_transaction_results > ; <nl> + push_transactions_results push_transactions ( const push_transactions_params & params ) ; <nl> } ; <nl> } / / namespace chain_apis <nl> <nl>
implementing push_transactions api
EOSIO/eos
c23a8f05c13404ec781f65907a553bc81ba7c666
2017-08-29T20:19:23Z
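The new `push_transactions` endpoint above applies the single-transaction handler to each element of the input and turns a per-item exception into an error entry instead of failing the whole batch. A generic sketch of that pattern (plain C++ with invented names, not the eos chain plugin API):

```cpp
#include <functional>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

struct ItemResult {
  bool ok;
  std::string value;  // handler result on success, error text on failure
};

// Apply `handler` to every input; a failure on one item becomes an error
// entry in the output instead of aborting the whole batch.
std::vector<ItemResult> push_all(
    const std::vector<std::string>& inputs,
    const std::function<std::string(const std::string&)>& handler) {
  std::vector<ItemResult> out;
  out.reserve(inputs.size());
  for (const auto& in : inputs) {
    try {
      out.push_back({true, handler(in)});
    } catch (const std::exception& e) {
      out.push_back({false, e.what()});
    }
  }
  return out;
}

int main() {
  auto results = push_all({"tx1", "boom", "tx2"}, [](const std::string& tx) {
    if (tx == "boom") throw std::runtime_error("invalid transaction");
    return "pushed " + tx;
  });
  for (const auto& r : results)
    std::cout << (r.ok ? "ok:    " : "error: ") << r.value << "\n";
}
```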
mmm a / src / Access / EnabledSettings . cpp <nl> ppp b / src / Access / EnabledSettings . cpp <nl> <nl> # include < Access / EnabledSettings . h > <nl> + # include < Common / SettingsChanges . h > <nl> <nl> <nl> namespace DB <nl> mmm a / src / Access / EnabledSettings . h <nl> ppp b / src / Access / EnabledSettings . h <nl> <nl> <nl> # include < Core / Types . h > <nl> # include < Core / UUID . h > <nl> - # include < Common / SettingsChanges . h > <nl> # include < Access / SettingsConstraints . h > <nl> # include < Access / SettingsProfileElement . h > <nl> # include < boost / container / flat_set . hpp > <nl> mmm a / src / Access / SettingsConstraints . cpp <nl> ppp b / src / Access / SettingsConstraints . cpp <nl> <nl> # include < Access / SettingsConstraints . h > <nl> # include < Core / Settings . h > <nl> # include < Common / FieldVisitors . h > <nl> + # include < Common / SettingsChanges . h > <nl> # include < IO / WriteHelpers . h > <nl> # include < Poco / Util / AbstractConfiguration . h > <nl> <nl> mmm a / src / Access / SettingsConstraints . h <nl> ppp b / src / Access / SettingsConstraints . h <nl> <nl> # pragma once <nl> <nl> # include < Core / Field . h > <nl> - # include < Common / SettingsChanges . h > <nl> # include < common / StringRef . h > <nl> # include < unordered_map > <nl> <nl> namespace Util <nl> namespace DB <nl> { <nl> struct Settings ; <nl> + struct SettingChange ; <nl> + class SettingsChanges ; <nl> <nl> / * * Checks if specified changes of settings are allowed or not . <nl> * If the changes are not allowed ( i . e . violates some constraints ) this class throws an exception . <nl> mmm a / src / Access / SettingsProfileElement . cpp <nl> ppp b / src / Access / SettingsProfileElement . cpp <nl> <nl> # include < Access / SettingsProfile . h > <nl> # include < Parsers / ASTSettingsProfileElement . h > <nl> # include < Core / Settings . h > <nl> + # include < Common / SettingsChanges . h > <nl> # include < IO / ReadHelpers . h > <nl> # include < IO / WriteHelpers . h > <nl> <nl> mmm a / src / Access / SettingsProfileElement . h <nl> ppp b / src / Access / SettingsProfileElement . h <nl> <nl> namespace DB <nl> { <nl> struct Settings ; <nl> - struct SettingChange ; <nl> - using SettingsChanges = std : : vector < SettingChange > ; <nl> + class SettingsChanges ; <nl> class SettingsConstraints ; <nl> class ASTSettingsProfileElement ; <nl> class ASTSettingsProfileElements ; <nl> mmm a / src / Access / SettingsProfilesCache . cpp <nl> ppp b / src / Access / SettingsProfilesCache . cpp <nl> <nl> # include < Access / AccessControlManager . h > <nl> # include < Access / SettingsProfile . h > <nl> # include < Core / Settings . h > <nl> + # include < Common / SettingsChanges . h > <nl> # include < Common / quoteString . h > <nl> # include < boost / range / adaptor / map . hpp > <nl> # include < boost / range / algorithm_ext / push_back . hpp > <nl> new file mode 100644 <nl> index 00000000000 . . e7c769ad55a <nl> mmm / dev / null <nl> ppp b / src / Common / SettingsChanges . cpp <nl> <nl> + # include < Common / SettingsChanges . h > <nl> + <nl> + <nl> + namespace DB <nl> + { <nl> + namespace <nl> + { <nl> + SettingChange * find ( SettingsChanges & changes , const std : : string_view & name ) <nl> + { <nl> + auto it = std : : find_if ( changes . begin ( ) , changes . end ( ) , [ & name ] ( const SettingChange & change ) { return change . name = = name ; } ) ; <nl> + if ( it = = changes . 
end ( ) ) <nl> + return nullptr ; <nl> + return & * it ; <nl> + } <nl> + <nl> + const SettingChange * find ( const SettingsChanges & changes , const std : : string_view & name ) <nl> + { <nl> + auto it = std : : find_if ( changes . begin ( ) , changes . end ( ) , [ & name ] ( const SettingChange & change ) { return change . name = = name ; } ) ; <nl> + if ( it = = changes . end ( ) ) <nl> + return nullptr ; <nl> + return & * it ; <nl> + } <nl> + } <nl> + <nl> + bool SettingsChanges : : tryGet ( const std : : string_view & name , Field & out_value ) const <nl> + { <nl> + const auto * change = find ( * this , name ) ; <nl> + if ( ! change ) <nl> + return false ; <nl> + out_value = change - > value ; <nl> + return true ; <nl> + } <nl> + <nl> + const Field * SettingsChanges : : tryGet ( const std : : string_view & name ) const <nl> + { <nl> + const auto * change = find ( * this , name ) ; <nl> + if ( ! change ) <nl> + return nullptr ; <nl> + return & change - > value ; <nl> + } <nl> + <nl> + Field * SettingsChanges : : tryGet ( const std : : string_view & name ) <nl> + { <nl> + auto * change = find ( * this , name ) ; <nl> + if ( ! change ) <nl> + return nullptr ; <nl> + return & change - > value ; <nl> + } <nl> + <nl> + } <nl> mmm a / src / Common / SettingsChanges . h <nl> ppp b / src / Common / SettingsChanges . h <nl> <nl> <nl> namespace DB <nl> { <nl> - <nl> struct SettingChange <nl> { <nl> String name ; <nl> Field value ; <nl> - SettingChange ( ) { } <nl> <nl> - SettingChange ( const String & name_ , const Field value_ ) <nl> - : name ( name_ ) <nl> - , value ( value_ ) { } <nl> + SettingChange ( ) { } <nl> + SettingChange ( const std : : string_view & name_ , const Field & value_ ) : name ( name_ ) , value ( value_ ) { } <nl> + SettingChange ( const std : : string_view & name_ , Field & & value_ ) : name ( name_ ) , value ( std : : move ( value_ ) ) { } <nl> <nl> friend bool operator = = ( const SettingChange & lhs , const SettingChange & rhs ) { return ( lhs . name = = rhs . name ) & & ( lhs . value = = rhs . value ) ; } <nl> friend bool operator ! = ( const SettingChange & lhs , const SettingChange & rhs ) { return ! ( lhs = = rhs ) ; } <nl> } ; <nl> <nl> - using SettingsChanges = std : : vector < SettingChange > ; <nl> + <nl> + class SettingsChanges : public std : : vector < SettingChange > <nl> + { <nl> + public : <nl> + using std : : vector < SettingChange > : : vector ; <nl> + <nl> + bool tryGet ( const std : : string_view & name , Field & out_value ) const ; <nl> + const Field * tryGet ( const std : : string_view & name ) const ; <nl> + Field * tryGet ( const std : : string_view & name ) ; <nl> + } ; <nl> <nl> } <nl> mmm a / src / Common / ya . make <nl> ppp b / src / Common / ya . make <nl> SRCS ( <nl> RWLock . cpp <nl> SensitiveDataMasker . cpp <nl> setThreadName . cpp <nl> + SettingsChanges . cpp <nl> SharedLibrary . cpp <nl> ShellCommand . cpp <nl> StackTrace . cpp <nl> mmm a / src / Core / SettingsCollection . h <nl> ppp b / src / Core / SettingsCollection . h <nl> namespace DB <nl> <nl> class Field ; <nl> struct SettingChange ; <nl> - using SettingsChanges = std : : vector < SettingChange > ; <nl> + class SettingsChanges ; <nl> class ReadBuffer ; <nl> class WriteBuffer ; <nl> enum class SettingsBinaryFormat ; <nl> mmm a / src / Parsers / QueryWithOutputSettingsPushDownVisitor . h <nl> ppp b / src / Parsers / QueryWithOutputSettingsPushDownVisitor . 
h <nl> namespace DB <nl> <nl> class ASTSelectQuery ; <nl> struct SettingChange ; <nl> - using SettingsChanges = std : : vector < SettingChange > ; <nl> + class SettingsChanges ; <nl> <nl> / / / Pushdown SETTINGS clause that goes after FORMAT to the SELECT query : <nl> / / / ( since settings after FORMAT parsed separatelly not in the ParserSelectQuery but in ParserQueryWithOutput ) <nl> mmm a / src / Storages / IStorage . h <nl> ppp b / src / Storages / IStorage . h <nl> using StorageActionBlockType = size_t ; <nl> class ASTCreateQuery ; <nl> <nl> struct Settings ; <nl> - struct SettingChange ; <nl> - using SettingsChanges = std : : vector < SettingChange > ; <nl> <nl> class AlterCommands ; <nl> class MutationCommands ; <nl> mmm a / src / Storages / Kafka / StorageKafka . h <nl> ppp b / src / Storages / Kafka / StorageKafka . h <nl> <nl> # include < Storages / Kafka / Buffer_fwd . h > <nl> # include < Storages / Kafka / KafkaSettings . h > <nl> # include < Interpreters / Context . h > <nl> + # include < Common / SettingsChanges . h > <nl> <nl> # include < Poco / Semaphore . h > <nl> # include < ext / shared_ptr_helper . h > <nl> mmm a / src / Storages / MergeTree / MergeTreeData . cpp <nl> ppp b / src / Storages / MergeTree / MergeTreeData . cpp <nl> void MergeTreeData : : checkAlterIsPossible ( const AlterCommands & commands , const S <nl> const auto & new_changes = new_metadata . settings_changes - > as < const ASTSetQuery & > ( ) . changes ; <nl> for ( const auto & changed_setting : new_changes ) <nl> { <nl> - if ( MergeTreeSettings : : findIndex ( changed_setting . name ) = = MergeTreeSettings : : npos ) <nl> - throw Exception { " Storage ' " + getName ( ) + " ' doesn ' t have setting ' " + changed_setting . name + " ' " , <nl> + const auto & setting_name = changed_setting . name ; <nl> + const auto & new_value = changed_setting . value ; <nl> + if ( MergeTreeSettings : : findIndex ( setting_name ) = = MergeTreeSettings : : npos ) <nl> + throw Exception { " Storage ' " + getName ( ) + " ' doesn ' t have setting ' " + setting_name + " ' " , <nl> ErrorCodes : : UNKNOWN_SETTING } ; <nl> <nl> - auto comparator = [ & changed_setting ] ( const auto & change ) { return change . name = = changed_setting . name ; } ; <nl> + const Field * current_value = current_changes . tryGet ( setting_name ) ; <nl> <nl> - auto current_setting_it <nl> - = std : : find_if ( current_changes . begin ( ) , current_changes . end ( ) , comparator ) ; <nl> - <nl> - if ( ( current_setting_it = = current_changes . end ( ) | | * current_setting_it ! = changed_setting ) <nl> - & & MergeTreeSettings : : isReadonlySetting ( changed_setting . name ) ) <nl> + if ( ( ! current_value | | * current_value ! = new_value ) <nl> + & & MergeTreeSettings : : isReadonlySetting ( setting_name ) ) <nl> { <nl> - throw Exception { " Setting ' " + changed_setting . name + " ' is readonly for storage ' " + getName ( ) + " ' " , <nl> + throw Exception { " Setting ' " + setting_name + " ' is readonly for storage ' " + getName ( ) + " ' " , <nl> ErrorCodes : : READONLY_SETTING } ; <nl> } <nl> <nl> - if ( current_setting_it = = current_changes . end ( ) <nl> - & & MergeTreeSettings : : isPartFormatSetting ( changed_setting . name ) ) <nl> + if ( ! current_value & & MergeTreeSettings : : isPartFormatSetting ( setting_name ) ) <nl> { <nl> MergeTreeSettings copy = * getSettings ( ) ; <nl> copy . 
applyChange ( changed_setting ) ; <nl> void MergeTreeData : : checkAlterIsPossible ( const AlterCommands & commands , const S <nl> throw Exception ( " Can ' t change settings . Reason : " + reason , ErrorCodes : : NOT_IMPLEMENTED ) ; <nl> } <nl> <nl> - if ( changed_setting . name = = " storage_policy " ) <nl> - checkStoragePolicy ( global_context . getStoragePolicy ( changed_setting . value . safeGet < String > ( ) ) ) ; <nl> + if ( setting_name = = " storage_policy " ) <nl> + checkStoragePolicy ( global_context . getStoragePolicy ( new_value . safeGet < String > ( ) ) ) ; <nl> } <nl> } <nl> <nl> mmm a / src / Storages / MergeTree / MergeTreeSettings . h <nl> ppp b / src / Storages / MergeTree / MergeTreeSettings . h <nl> <nl> <nl> # include < Core / Defines . h > <nl> # include < Core / SettingsCollection . h > <nl> - # include < Common / SettingsChanges . h > <nl> <nl> <nl> namespace Poco <nl>
Make SettingsChanges a class .
ClickHouse/ClickHouse
90602b869a37c66ec5ffea4f99522638a9e52704
2020-07-31T16:11:27Z
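The commit above replaces the `using SettingsChanges = std::vector<SettingChange>` alias with a thin class derived from the vector so that lookup helpers such as `tryGet` can live on the container itself. A stripped-down sketch of the same idea, with simplified value types standing in for ClickHouse's `SettingChange` and `Field`:

```cpp
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Change {
  std::string name;
  std::string value;
};

// Deriving from std::vector keeps the whole container interface
// (push_back, iteration, etc.) and layers name-based lookup on top,
// which is what the tryGet helpers in the commit provide.
class Changes : public std::vector<Change> {
public:
  using std::vector<Change>::vector;

  const std::string* tryGet(const std::string& name) const {
    auto it = std::find_if(begin(), end(),
                           [&](const Change& c) { return c.name == name; });
    return it == end() ? nullptr : &it->value;
  }
};

int main() {
  Changes changes;
  changes.push_back({"max_threads", "8"});
  changes.push_back({"storage_policy", "default"});

  if (const std::string* v = changes.tryGet("storage_policy"))
    std::cout << "storage_policy = " << *v << "\n";
  if (!changes.tryGet("missing"))
    std::cout << "missing is not set\n";
}
```

Publicly inheriting from std::vector is a deliberate trade-off here: the new type stays drop-in compatible with the old alias, but it must never be destroyed through a base-class pointer, since std::vector has no virtual destructor.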
mmm a / tensorflow / examples / tutorials / mnist / mnist . py <nl> ppp b / tensorflow / examples / tutorials / mnist / mnist . py <nl> def evaluation ( logits , labels ) : <nl> " " " <nl> # For a classifier model , we can use the in_top_k Op . <nl> # It returns a bool tensor with shape [ batch_size ] that is true for <nl> - # the examples where the label ' s is was in the top k ( here k = 1 ) <nl> + # the examples where the label is in the top k ( here k = 1 ) <nl> # of all logits for that example . <nl> correct = tf . nn . in_top_k ( logits , labels , 1 ) <nl> # Return the number of true entries . <nl>
Fix typo
tensorflow/tensorflow
410ef4f3b097d3ff47d2bc342bb3ac5bc9aedf72
2016-02-18T02:56:10Z
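The docstring being corrected describes `tf.nn.in_top_k`, which marks an example as correct when its true label ranks among the k largest logits. For context, a tiny sketch of that check outside TensorFlow (plain C++, illustrative only; the real op's tie-breaking semantics differ slightly):

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

// True if logits[label] is among the k largest entries, i.e. fewer than k
// other classes score strictly higher (ties resolved in the label's favour).
bool in_top_k(const std::vector<double>& logits, std::size_t label, std::size_t k) {
  std::size_t strictly_higher = 0;
  for (std::size_t c = 0; c < logits.size(); ++c)
    if (c != label && logits[c] > logits[label]) ++strictly_higher;
  return strictly_higher < k;
}

int main() {
  std::vector<double> logits = {0.1, 2.3, 0.7, 1.9};  // one example, 4 classes
  std::cout << in_top_k(logits, 1, 1) << "\n";  // 1: class 1 has the largest logit
  std::cout << in_top_k(logits, 3, 1) << "\n";  // 0: class 3 is only second
  std::cout << in_top_k(logits, 3, 2) << "\n";  // 1: class 3 is within the top 2
}
```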
mmm a / admin / static / coffee / models . coffee <nl> ppp b / admin / static / coffee / models . coffee <nl> class Namespace extends Backbone . Model <nl> compute_shards : = > <nl> @ . set ' computed_shards ' , new DataUtils . Shards [ ] , @ <nl> <nl> - interval : 0 <nl> - set_interval_key_distr : = > <nl> - @ set_interval = setInterval @ load_key_distr , @ interval <nl> - <nl> - clear_interval_key_distr : - > <nl> - if @ set_interval ? <nl> - clearInterval @ set_interval <nl> - @ interval = 0 <nl> - <nl> - <nl> compare_keys : ( a , b ) - > <nl> pattern = / ^ ( % 22 ) . * ( % 22 ) $ / <nl> if pattern . test ( a ) is true <nl> class Namespace extends Backbone . Model <nl> return 1 <nl> return 0 <nl> <nl> - # Cache key distribution info . <nl> - load_key_distr : = > <nl> - $ . ajax <nl> - processData : false <nl> - url : " / ajax / distribution ? namespace = # { @ get ( ' id ' ) } & depth = 2 " <nl> - type : ' GET ' <nl> - contentType : ' application / json ' <nl> - success : ( distr_data ) = > <nl> - # Cache the data <nl> - # Sort the keys and cache that too <nl> - distr_keys = [ ] <nl> - for key , count of distr_data <nl> - distr_keys . push ( key ) <nl> - distr_keys . sort ( @ compare_keys ) <nl> - <nl> - @ set ( ' key_distr_sorted ' , distr_keys ) <nl> - @ set ( ' key_distr ' , distr_data ) <nl> - if @ interval isnt 5000 <nl> - @ clear_interval_key_distr ( ) <nl> - @ interval = 5000 <nl> - @ set_interval_key_distr ( ) <nl> - <nl> - error : = > <nl> - if @ interval isnt 1000 <nl> - @ clear_interval_key_distr ( ) <nl> - @ interval = 1000 <nl> - @ set_interval_key_distr ( ) <nl> - <nl> load_key_distr_once : = > <nl> $ . ajax <nl> processData : false <nl> - url : " / ajax / distribution ? namespace = # { @ get ( ' id ' ) } & depth = 1 " <nl> + url : " / ajax / distribution ? namespace = # { @ get ( ' id ' ) } & depth = 2 " <nl> type : ' GET ' <nl> contentType : ' application / json ' <nl> success : ( distr_data ) = > <nl> mmm a / admin / static / coffee / namespaces / namespace . coffee <nl> ppp b / admin / static / coffee / namespaces / namespace . coffee <nl> module ' NamespaceView ' , - > <nl> initialize : - > <nl> log_initial ' ( initializing ) namespace view : container ' <nl> <nl> - @ model . load_key_distr ( ) <nl> + @ model . load_key_distr_once ( ) <nl> <nl> # Panels for namespace view <nl> @ title = new NamespaceView . Title ( model : @ model ) <nl> module ' NamespaceView ' , - > <nl> <nl> <nl> destroy : = > <nl> - @ model . clear_interval_key_distr ( ) <nl> - <nl> @ title . destroy ( ) <nl> @ profile . destroy ( ) <nl> @ replicas . destroy ( ) <nl> mmm a / admin / static / coffee / namespaces / shards . coffee <nl> ppp b / admin / static / coffee / namespaces / shards . coffee <nl> module ' NamespaceView ' , - > <nl> need_update = true <nl> break <nl> <nl> - if need_update <nl> + if need_update and @ model . get ( ' key_distr ' ) ? <nl> @ . $ ( ' . data_repartition - container ' ) . html @ data_repartition_template json <nl> <nl> @ . $ ( ' . loading_text - diagram ' ) . css ' display ' , ' none ' <nl> module ' NamespaceView ' , - > <nl> class @ ChangeShardsModal extends UIComponents . AbstractModal <nl> template : Handlebars . compile $ ( ' # change_shards - modal - template ' ) . html ( ) <nl> change_shards_success_alert_template : Handlebars . compile $ ( ' # change_shards - success - alert - template ' ) . html ( ) <nl> - class : ' modify - replicas ' <nl> + class : ' change_shards ' <nl> <nl> initialize : ( data ) = > <nl> @ model = data . 
model <nl> module ' NamespaceView ' , - > <nl> <nl> <nl> render : = > <nl> - log_render ' ( rendering ) modify replicas dialog ( outer ) ' <nl> + @ model . load_key_distr_once ( ) <nl> + <nl> super <nl> modal_title : " Change the number of shards " <nl> btn_primary_text : " Shard " <nl>
Handle the case when key_distr is not ready for render_data_repartition ( )
rethinkdb/rethinkdb
870fc2540c01a7c67d33d2b9ebc09221ea13f863
2012-09-19T15:49:37Z
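The change above drops the interval-based polling of the key distribution and renders the repartition chart only when `key_distr` is actually present on the model. A small sketch of that guard-before-render idea, using `std::optional` as a stand-in for the model attribute; none of these names are RethinkDB's actual admin code:

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <string>

using KeyDistribution = std::map<std::string, int>;

struct NamespaceModel {
  std::optional<KeyDistribution> key_distr;  // unset until the one-shot fetch returns

  void load_key_distr_once() {               // stand-in for the AJAX callback
    key_distr = KeyDistribution{{"a", 120}, {"m", 340}, {"z", 90}};
  }
};

void render_data_repartition(const NamespaceModel& model) {
  if (!model.key_distr) {                    // data not ready: keep the loading state
    std::cout << "loading...\n";
    return;
  }
  for (const auto& [key, count] : *model.key_distr)
    std::cout << key << ": " << count << " docs\n";
}

int main() {
  NamespaceModel model;
  render_data_repartition(model);  // before the fetch: prints "loading..."
  model.load_key_distr_once();
  render_data_repartition(model);  // after: prints the distribution
}
```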
mmm a / tools / db_crashtest . py <nl> ppp b / tools / db_crashtest . py <nl> <nl> " checksum_type " : lambda : random . choice ( [ " kCRC32c " , " kxxHash " , " kxxHash64 " ] ) , <nl> " compression_max_dict_bytes " : lambda : 16384 * random . randint ( 0 , 1 ) , <nl> " compression_zstd_max_train_bytes " : lambda : 65536 * random . randint ( 0 , 1 ) , <nl> - " compression_parallel_threads " : lambda : random . choice ( [ 1 ] * 9 + [ 4 ] ) , <nl> + # Disabled compression_parallel_threads as the feature is not stable <nl> + # lambda : random . choice ( [ 1 ] * 9 + [ 4 ] ) <nl> + " compression_parallel_threads " : 1 , <nl> " clear_column_family_one_in " : 0 , <nl> " compact_files_one_in " : 1000000 , <nl> " compact_range_one_in " : 1000000 , <nl>
Disable " compression_parallel_threads " in crash test for now ( )
facebook/rocksdb
12825894a2d2e55e00aaa02c35b389cd66dfb540
2020-05-08T03:52:29Z
mmm a / src / mongo / db / geo / SConscript <nl> ppp b / src / mongo / db / geo / SConscript <nl> Import ( " env " ) <nl> # Core geometry shape libraries <nl> env . Library ( " geometry " , [ " hash . cpp " , <nl> " shapes . cpp " , <nl> + " big_polygon . cpp " , <nl> " r2_region_coverer . cpp " ] , <nl> LIBDEPS = [ " $ BUILD_DIR / mongo / bson " , <nl> " $ BUILD_DIR / third_party / s2 / s2 " ] ) <nl> env . CppUnitTest ( " r2_region_coverer_test " , [ " r2_region_coverer_test . cpp " ] , <nl> " geoparser " , <nl> " $ BUILD_DIR / mongo / db / common " ] ) # db / common needed for field parsing <nl> <nl> + env . CppUnitTest ( " big_polygon_test " , [ " big_polygon_test . cpp " ] , <nl> + LIBDEPS = [ " geometry " , <nl> + " $ BUILD_DIR / mongo / db / common " ] ) # db / common needed for field parsing <nl> + <nl> new file mode 100644 <nl> index 000000000000 . . 2f7c20871c66 <nl> mmm / dev / null <nl> ppp b / src / mongo / db / geo / big_polygon . cpp <nl> <nl> + / * * <nl> + * Copyright ( C ) 2014 10gen Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the GNU Affero General Public License , version 3 , <nl> + * as published by the Free Software Foundation . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU Affero General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU Affero General Public License <nl> + * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the GNU Affero General Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # include " mongo / db / geo / big_polygon . h " <nl> + <nl> + # include < map > <nl> + <nl> + # include " mongo / base / owned_pointer_vector . h " <nl> + # include " mongo / util / assert_util . h " <nl> + <nl> + namespace mongo { <nl> + <nl> + using boost : : scoped_ptr ; <nl> + using std : : auto_ptr ; <nl> + using std : : vector ; <nl> + <nl> + <nl> + BigSimplePolygon : : BigSimplePolygon ( ) { <nl> + } <nl> + <nl> + / / Caller should ensure loop is valid . <nl> + BigSimplePolygon : : BigSimplePolygon ( S2Loop * loop ) : <nl> + _loop ( loop ) , _isNormalized ( loop - > IsNormalized ( ) ) { <nl> + } <nl> + <nl> + BigSimplePolygon : : ~ BigSimplePolygon ( ) { <nl> + } <nl> + <nl> + void BigSimplePolygon : : Init ( S2Loop * loop ) { <nl> + _loop . reset ( loop ) ; <nl> + _isNormalized = loop - > IsNormalized ( ) ; <nl> + _borderLine . reset ( ) ; <nl> + _borderPoly . 
reset ( ) ; <nl> + } <nl> + <nl> + double BigSimplePolygon : : GetArea ( ) const { <nl> + return _loop - > GetArea ( ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Contains ( const S2Polygon & polygon ) const { <nl> + const S2Polygon & polyBorder = GetPolygonBorder ( ) ; <nl> + <nl> + if ( _isNormalized ) { <nl> + / / Polygon border is the same as the loop <nl> + return polyBorder . Contains ( & polygon ) ; <nl> + } <nl> + <nl> + / / Polygon border is the complement of the loop <nl> + / / <nl> + / / Return true iff big polygon ' s complement ( polyBorder ) doesn ' t intersect with polygon . <nl> + / / We don ' t guarantee whether the points on border are contained or not . <nl> + return ! polyBorder . Intersects ( & polygon ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Contains ( const S2Polyline & line ) const { <nl> + / / <nl> + / / A line is contained within a loop if the result of subtracting the loop from the line is <nl> + / / nothing . <nl> + / / <nl> + / / Also , a line is contained within a loop if the result of clipping the line to the <nl> + / / complement of the loop is nothing . <nl> + / / <nl> + / / If we can ' t subtract the loop itself using S2 , we clip ( intersect ) to the inverse . Every <nl> + / / point in S2 is contained in exactly one of these loops . <nl> + / / <nl> + / / TODO : Polygon borders are actually kind of weird , and this is somewhat inconsistent with <nl> + / / Intersects ( ) . A point might Intersect ( ) a boundary exactly , but not be Contain ( ) ed <nl> + / / within the Polygon . Think the right thing to do here is custom intersection functions . <nl> + / / <nl> + const S2Polygon & polyBorder = GetPolygonBorder ( ) ; <nl> + <nl> + OwnedPointerVector < S2Polyline > clippedOwned ; <nl> + vector < S2Polyline * > & clipped = clippedOwned . mutableVector ( ) ; <nl> + <nl> + if ( _isNormalized ) { <nl> + / / Polygon border is the same as the loop <nl> + polyBorder . SubtractFromPolyline ( & line , & clipped ) ; <nl> + return clipped . size ( ) = = 0 ; <nl> + } <nl> + else { <nl> + / / Polygon border is the complement of the loop <nl> + polyBorder . IntersectWithPolyline ( & line , & clipped ) ; <nl> + return clipped . size ( ) = = 0 ; <nl> + } <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Contains ( S2Point const & point ) const { <nl> + return _loop - > Contains ( point ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Intersects ( const S2Polygon & polygon ) const { <nl> + / / If the loop area is at most 2 * Pi , treat it as a simple Polygon . <nl> + if ( _isNormalized ) { <nl> + const S2Polygon & polyBorder = GetPolygonBorder ( ) ; <nl> + return polyBorder . Intersects ( & polygon ) ; <nl> + } <nl> + <nl> + / / The loop area is greater than 2 * Pi , so it intersects a polygon ( even with holes ) if it <nl> + / / intersects any of the top - level polygon loops , since any valid polygon is less than <nl> + / / a hemisphere . <nl> + / / <nl> + / / Intersecting a polygon hole requires that the loop must have intersected the containing <nl> + / / loop - topology ftw . <nl> + / / <nl> + / / Another approach is to check polyBorder doesn ' t contain polygon , but the following <nl> + / / approach is cheaper . <nl> + <nl> + / / Iterate over all the top - level polygon loops <nl> + for ( int i = 0 ; i < polygon . num_loops ( ) ; i = polygon . GetLastDescendant ( i ) + 1 ) { <nl> + const S2Loop * polyLoop = polygon . 
loop ( i ) ; <nl> + if ( _loop - > Intersects ( polyLoop ) ) <nl> + return true ; <nl> + } <nl> + <nl> + return false ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Intersects ( const S2Polyline & line ) const { <nl> + / / <nl> + / / A loop intersects a line if line intersects the loop border or , if it doesn ' t , either <nl> + / / line is contained in the loop , or line is disjoint with the loop . So checking any <nl> + / / vertex of the line is sufficient . <nl> + / / <nl> + / / TODO : Make a general Polygon / Line relation tester which uses S2 primitives <nl> + / / <nl> + return GetLineBorder ( ) . Intersects ( & line ) | | _loop - > Contains ( line . vertex ( 0 ) ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Intersects ( S2Point const & point ) const { <nl> + return Contains ( point ) ; <nl> + } <nl> + <nl> + void BigSimplePolygon : : Invert ( ) { <nl> + _loop - > Invert ( ) ; <nl> + _isNormalized = _loop - > IsNormalized ( ) ; <nl> + } <nl> + <nl> + const S2Polygon & BigSimplePolygon : : GetPolygonBorder ( ) const { <nl> + if ( _borderPoly ) <nl> + return * _borderPoly ; <nl> + <nl> + auto_ptr < S2Loop > cloned ( _loop - > Clone ( ) ) ; <nl> + <nl> + / / Any loop in polygon should be than a hemisphere ( 2 * Pi ) . <nl> + cloned - > Normalize ( ) ; <nl> + <nl> + OwnedPointerVector < S2Loop > loops ; <nl> + loops . mutableVector ( ) . push_back ( cloned . release ( ) ) ; <nl> + _borderPoly . reset ( new S2Polygon ( & loops . mutableVector ( ) ) ) ; <nl> + return * _borderPoly ; <nl> + } <nl> + <nl> + const S2Polyline & BigSimplePolygon : : GetLineBorder ( ) const { <nl> + if ( _borderLine ) <nl> + return * _borderLine ; <nl> + <nl> + vector < S2Point > points ; <nl> + int numVertices = _loop - > num_vertices ( ) ; <nl> + for ( int i = 0 ; i < = numVertices ; + + i ) { <nl> + / / vertex ( ) maps " numVertices " to 0 internally , so we don ' t have to deal with <nl> + / / the index out of range . <nl> + points . push_back ( _loop - > vertex ( i ) ) ; <nl> + } <nl> + <nl> + _borderLine . reset ( new S2Polyline ( points ) ) ; <nl> + <nl> + return * _borderLine ; <nl> + } <nl> + <nl> + BigSimplePolygon * BigSimplePolygon : : Clone ( ) const { <nl> + return new BigSimplePolygon ( _loop - > Clone ( ) ) ; <nl> + } <nl> + <nl> + S2Cap BigSimplePolygon : : GetCapBound ( ) const { <nl> + return _loop - > GetCapBound ( ) ; <nl> + } <nl> + <nl> + S2LatLngRect BigSimplePolygon : : GetRectBound ( ) const { <nl> + return _loop - > GetRectBound ( ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Contains ( const S2Cell & cell ) const { <nl> + return _loop - > Contains ( cell ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : MayIntersect ( const S2Cell & cell ) const { <nl> + return _loop - > MayIntersect ( cell ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : VirtualContainsPoint ( const S2Point & p ) const { <nl> + return _loop - > VirtualContainsPoint ( p ) ; <nl> + } <nl> + <nl> + void BigSimplePolygon : : Encode ( Encoder * const encoder ) const { <nl> + invariant ( false ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : Decode ( Decoder * const decoder ) { <nl> + invariant ( false ) ; <nl> + } <nl> + <nl> + bool BigSimplePolygon : : DecodeWithinScope ( Decoder * const decoder ) { <nl> + invariant ( false ) ; <nl> + } <nl> + <nl> + } <nl> + <nl> new file mode 100644 <nl> index 000000000000 . . 7125da35f355 <nl> mmm / dev / null <nl> ppp b / src / mongo / db / geo / big_polygon . h <nl> <nl> + / * * <nl> + * Copyright ( C ) 2014 10gen Inc . 
<nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the GNU Affero General Public License , version 3 , <nl> + * as published by the Free Software Foundation . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU Affero General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU Affero General Public License <nl> + * along with this program . If not , see < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the GNU Affero General Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # pragma once <nl> + <nl> + # include < boost / scoped_ptr . hpp > <nl> + # include < vector > <nl> + <nl> + # include " mongo / db / geo / s2 . h " <nl> + # include " third_party / s2 / s2cap . h " <nl> + # include " third_party / s2 / s2cell . h " <nl> + # include " third_party / s2 / s2loop . h " <nl> + # include " third_party / s2 / s2polygon . h " <nl> + # include " third_party / s2 / s2polyline . h " <nl> + # include " third_party / s2 / s2region . h " <nl> + <nl> + namespace mongo { <nl> + <nl> + / / Simple GeoJSON polygon with a custom CRS identifier as having a strict winding order . <nl> + / / The winding order will determine unambiguously the inside / outside of the polygon even <nl> + / / if larger than one hemisphere . 
<nl> + / / <nl> + / / BigSimplePolygon uses S2Loop internally , which follows a left - foot rule ( inside to the <nl> + / / left when walking the edge of the polygon , counter - clockwise ) <nl> + class BigSimplePolygon : public S2Region { <nl> + public : <nl> + <nl> + BigSimplePolygon ( ) ; <nl> + <nl> + BigSimplePolygon ( S2Loop * loop ) ; <nl> + <nl> + virtual ~ BigSimplePolygon ( ) ; <nl> + <nl> + void Init ( S2Loop * loop ) ; <nl> + <nl> + double GetArea ( ) const ; <nl> + <nl> + bool Contains ( const S2Polygon & polygon ) const ; <nl> + <nl> + bool Contains ( const S2Polyline & line ) const ; <nl> + <nl> + / / Needs to be this way for S2 compatibility <nl> + bool Contains ( S2Point const & point ) const ; <nl> + <nl> + bool Intersects ( const S2Polygon & polygon ) const ; <nl> + <nl> + bool Intersects ( const S2Polyline & line ) const ; <nl> + <nl> + bool Intersects ( S2Point const & point ) const ; <nl> + <nl> + / / Only used in tests <nl> + void Invert ( ) ; <nl> + <nl> + const S2Polygon & GetPolygonBorder ( ) const ; <nl> + <nl> + const S2Polyline & GetLineBorder ( ) const ; <nl> + <nl> + / / <nl> + / / S2Region interface <nl> + / / <nl> + <nl> + BigSimplePolygon * Clone ( ) const ; <nl> + <nl> + S2Cap GetCapBound ( ) const ; <nl> + <nl> + S2LatLngRect GetRectBound ( ) const ; <nl> + <nl> + bool Contains ( S2Cell const & cell ) const ; <nl> + <nl> + bool MayIntersect ( S2Cell const & cell ) const ; <nl> + <nl> + bool VirtualContainsPoint ( S2Point const & p ) const ; <nl> + <nl> + void Encode ( Encoder * const encoder ) const ; <nl> + <nl> + bool Decode ( Decoder * const decoder ) ; <nl> + <nl> + bool DecodeWithinScope ( Decoder * const decoder ) ; <nl> + <nl> + private : <nl> + <nl> + boost : : scoped_ptr < S2Loop > _loop ; <nl> + <nl> + / / Cache whether the loop area is at most 2 * Pi ( the area of hemisphere ) . <nl> + / / <nl> + / / S2 guarantees that any loop in a valid ( normalized ) polygon , no matter a hole <nl> + / / or a shell , has to be less than 2 * Pi . So if the loop is normalized , it ' s the same <nl> + / / with the border polygon , otherwise , the border polygon is its complement . <nl> + bool _isNormalized ; <nl> + <nl> + / / Cached to do Intersects ( ) and Contains ( ) with S2Polylines . <nl> + mutable boost : : scoped_ptr < S2Polyline > _borderLine ; <nl> + mutable boost : : scoped_ptr < S2Polygon > _borderPoly ; <nl> + } ; <nl> + <nl> + } <nl> + <nl> new file mode 100644 <nl> index 000000000000 . . a8cde9bc909f <nl> mmm / dev / null <nl> ppp b / src / mongo / db / geo / big_polygon_test . cpp <nl> <nl> + / * * <nl> + * Copyright ( C ) 2014 10gen Inc . <nl> + * <nl> + * This program is free software : you can redistribute it and / or modify <nl> + * it under the terms of the GNU Affero General Public License , version 3 , <nl> + * as published by the Free Software Foundation . <nl> + * <nl> + * This program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU Affero General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU Affero General Public License <nl> + * along with this program . If not , see < http : / / www . gnu . org / licenses / > . 
<nl> + * <nl> + * As a special exception , the copyright holders give permission to link the <nl> + * code of portions of this program with the OpenSSL library under certain <nl> + * conditions as described in each individual source file and distribute <nl> + * linked combinations including the program with the OpenSSL library . You <nl> + * must comply with the GNU Affero General Public License in all respects for <nl> + * all of the code used other than as permitted herein . If you modify file ( s ) <nl> + * with this exception , you may extend this exception to your version of the <nl> + * file ( s ) , but you are not obligated to do so . If you do not wish to do so , <nl> + * delete this exception statement from your version . If you delete this <nl> + * exception statement from all source files in the program , then also delete <nl> + * it in the license file . <nl> + * / <nl> + <nl> + # include " mongo / db / geo / big_polygon . h " <nl> + <nl> + # include " mongo / bson / util / builder . h " <nl> + # include " mongo / bson / bsonobjbuilder . h " <nl> + # include " mongo / unittest / unittest . h " <nl> + <nl> + namespace { <nl> + <nl> + using namespace mongo ; <nl> + using std : : auto_ptr ; <nl> + using std : : string ; <nl> + using std : : vector ; <nl> + <nl> + / / Helper to build a vector of S2Point <nl> + struct PointBuilder { <nl> + <nl> + vector < S2Point > points ; <nl> + <nl> + PointBuilder & operator < < ( const S2LatLng & LatLng ) { <nl> + points . push_back ( LatLng . ToPoint ( ) ) ; <nl> + return * this ; <nl> + } <nl> + } ; <nl> + <nl> + vector < S2Point > pointVec ( const PointBuilder & builder ) { <nl> + vector < S2Point > points ( builder . points . begin ( ) , builder . points . end ( ) ) ; <nl> + return points ; <nl> + } <nl> + <nl> + S2Loop * loop ( const PointBuilder & builder ) { <nl> + return new S2Loop ( builder . points ) ; <nl> + } <nl> + <nl> + vector < S2Loop * > * loopVec ( const PointBuilder & builder ) { <nl> + static vector < S2Loop * > loops ; <nl> + loops . clear ( ) ; <nl> + loops . push_back ( loop ( builder ) ) ; <nl> + return & loops ; <nl> + } <nl> + <nl> + S2LatLng LatLng ( double lat , double lng ) { <nl> + return S2LatLng : : FromDegrees ( lat , lng ) ; <nl> + } <nl> + <nl> + / / Syntax sugar for PointBuilder , which can be used to construct <nl> + / / - vector < S2Point > pointVec ( ) <nl> + / / - S2Loop * loop ( ) <nl> + / / - vector < S2Loop * > * loopVec ( ) <nl> + / / <nl> + / / e . g . points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) < < LatLng ( 0 . 0 , 0 . 0 ) ) <nl> + typedef PointBuilder points ; <nl> + <nl> + TEST ( BigSimplePolygon , Basic ) { <nl> + <nl> + / / A 20x20 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly20 ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + / / A 10x10 square centered at [ 0 , 0 ] <nl> + S2Polygon poly10 ( loopVec ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigPoly20 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_LESS_THAN ( poly10 . GetArea ( ) , bigPoly20 . GetArea ( ) ) ; <nl> + ASSERT ( bigPoly20 . Contains ( poly10 ) ) ; <nl> + ASSERT ( bigPoly20 . 
Intersects ( poly10 ) ) ; <nl> + <nl> + / / A 20x20 square centered at [ 0 , 20 ] <nl> + BigSimplePolygon bigPoly20Offset ( loop ( points ( ) < < LatLng ( 10 . 0 , 30 . 0 ) < < LatLng ( 10 . 0 , 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , 10 . 0 ) < < LatLng ( - 10 . 0 , 30 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigPoly20Offset . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_LESS_THAN ( poly10 . GetArea ( ) , bigPoly20Offset . GetArea ( ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Offset . Contains ( poly10 ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Offset . Intersects ( poly10 ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicWithHole ) { <nl> + / / A 30x30 square centered at [ 0 , 0 ] with a 20X20 hole <nl> + vector < S2Loop * > loops ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + S2Polygon holePoly ( & loops ) ; <nl> + <nl> + / / A 16X16 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly16 ( loop ( points ( ) < < LatLng ( 8 . 0 , 8 . 0 ) < < LatLng ( 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , - 8 . 0 ) < < LatLng ( - 8 . 0 , 8 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigPoly16 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly16 . Contains ( holePoly ) ) ; <nl> + ASSERT_FALSE ( bigPoly16 . Intersects ( holePoly ) ) ; <nl> + <nl> + / / A big polygon bigger than the hole . <nl> + BigSimplePolygon bigPoly24 ( loop ( points ( ) < < LatLng ( 12 . 0 , 12 . 0 ) < < LatLng ( 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 0 , - 12 . 0 ) < < LatLng ( - 12 . 0 , 12 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly24 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly24 . Contains ( holePoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly24 . Intersects ( holePoly ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicWithHoleAndShell ) { <nl> + / / A 30x30 square centered at [ 0 , 0 ] with a 20X20 hole and 10X10 shell <nl> + vector < S2Loop * > loops ; <nl> + / / Border <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + / / Hole <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + / / Shell <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 0 ) ) ) ; <nl> + S2Polygon shellPoly ( & loops ) ; <nl> + <nl> + / / A 16X16 square centered at [ 0 , 0 ] containing the shell <nl> + BigSimplePolygon bigPoly16 ( loop ( points ( ) < < LatLng ( 8 . 0 , 8 . 0 ) < < LatLng ( 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , - 8 . 0 ) < < LatLng ( - 8 . 0 , 8 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly16 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly16 . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly16 . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / Try a big polygon bigger than the hole . <nl> + BigSimplePolygon bigPoly24 ( loop ( points ( ) < < LatLng ( 12 . 0 , 12 . 0 ) < < LatLng ( 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 
0 , - 12 . 0 ) < < LatLng ( - 12 . 0 , 12 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly24 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly24 . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly24 . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / Try a big polygon smaller than the shell . <nl> + BigSimplePolygon bigPoly8 ( loop ( points ( ) < < LatLng ( 4 . 0 , 4 . 0 ) < < LatLng ( 4 . 0 , - 4 . 0 ) <nl> + < < LatLng ( - 4 . 0 , - 4 . 0 ) < < LatLng ( - 4 . 0 , 4 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly8 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly8 . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly8 . Intersects ( shellPoly ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicComplement ) { <nl> + <nl> + / / Everything * not * in a 20x20 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly20Comp ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + bigPoly20Comp . Invert ( ) ; <nl> + <nl> + / / A 10x10 square centered at [ 0 , 0 ] <nl> + S2Polygon poly10 ( loopVec ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 0 ) ) ) ; <nl> + <nl> + ASSERT_GREATER_THAN ( bigPoly20Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Contains ( poly10 ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Intersects ( poly10 ) ) ; <nl> + <nl> + / / A 10x10 square centered at [ 0 , 20 ] , contained by bigPoly20Comp <nl> + S2Polygon poly10Contained ( loopVec ( points ( ) < < LatLng ( 25 . 0 , 25 . 0 ) < < LatLng ( 25 . 0 , 15 . 0 ) <nl> + < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , 25 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( poly10Contained . GetArea ( ) , bigPoly20Comp . GetArea ( ) ) ; <nl> + ASSERT ( bigPoly20Comp . Contains ( poly10Contained ) ) ; <nl> + ASSERT ( bigPoly20Comp . Intersects ( poly10Contained ) ) ; <nl> + <nl> + / / A 30x30 square centered at [ 0 , 0 ] , so that bigPoly20Comp contains its complement entirely , <nl> + / / which is not allowed by S2 . <nl> + S2Polygon poly30 ( loopVec ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( poly30 . GetArea ( ) , bigPoly20Comp . GetArea ( ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Contains ( poly30 ) ) ; <nl> + ASSERT_TRUE ( bigPoly20Comp . Intersects ( poly30 ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicIntersects ) { <nl> + <nl> + / / Everything * not * in a 20x20 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly20 ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + bigPoly20 . Invert ( ) ; <nl> + <nl> + / / A 10x10 square centered at [ 10 , 10 ] ( partial overlap ) <nl> + S2Polygon poly10 ( loopVec ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , 5 . 0 ) <nl> + < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , 15 . 0 ) ) ) ; <nl> + <nl> + ASSERT_FALSE ( bigPoly20 . Contains ( poly10 ) ) ; <nl> + ASSERT ( bigPoly20 . Intersects ( poly10 ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicComplementWithHole ) { <nl> + / / A 30x30 square centered at [ 0 , 0 ] with a 20X20 hole <nl> + vector < S2Loop * > loops ; <nl> + loops . 
push_back ( loop ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + S2Polygon holePoly ( & loops ) ; <nl> + <nl> + / / 1 . BigPolygon doesn ' t touch holePoly <nl> + / / Everything * not * in a 40x40 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly40Comp ( loop ( points ( ) < < LatLng ( 20 . 0 , 20 . 0 ) < < LatLng ( 20 . 0 , - 20 . 0 ) <nl> + < < LatLng ( - 20 . 0 , - 20 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 20 . 0 ) ) ) ; <nl> + bigPoly40Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly40Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly40Comp . Contains ( holePoly ) ) ; <nl> + ASSERT_FALSE ( bigPoly40Comp . Intersects ( holePoly ) ) ; <nl> + <nl> + / / 2 . BigPolygon intersects holePoly <nl> + / / Everything * not * in a 24X24 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly24Comp ( loop ( points ( ) < < LatLng ( 12 . 0 , 12 . 0 ) < < LatLng ( 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 0 , 12 . 0 ) ) ) ; <nl> + bigPoly24Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly24Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly24Comp . Contains ( holePoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly24Comp . Intersects ( holePoly ) ) ; <nl> + <nl> + / / 3 . BigPolygon contains holePoly <nl> + / / Everything * not * in a 16X16 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly16Comp ( loop ( points ( ) < < LatLng ( 8 . 0 , 8 . 0 ) < < LatLng ( 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , 8 . 0 ) ) ) ; <nl> + bigPoly16Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly16Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_TRUE ( bigPoly16Comp . Contains ( holePoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly16Comp . Intersects ( holePoly ) ) ; <nl> + <nl> + / / 4 . BigPolygon contains the right half of holePoly <nl> + / / Everything * not * in a 40x40 square centered at [ 0 , 20 ] <nl> + BigSimplePolygon bigPoly40CompOffset ( loop ( points ( ) < < LatLng ( 20 . 0 , 40 . 0 ) <nl> + < < LatLng ( 20 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 40 . 0 ) ) ) ; <nl> + bigPoly40CompOffset . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly40CompOffset . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly40CompOffset . Contains ( holePoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly40CompOffset . Intersects ( holePoly ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicComplementWithHoleAndShell ) { <nl> + / / A 30x30 square centered at [ 0 , 0 ] with a 20X20 hole and 10X10 shell <nl> + vector < S2Loop * > loops ; <nl> + / / Border <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + / / Hole <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + / / Shell <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 
0 ) ) ) ; <nl> + S2Polygon shellPoly ( & loops ) ; <nl> + <nl> + / / 1 . BigPolygon doesn ' t touch shellPoly <nl> + / / Everything * not * in a 40x40 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly40Comp ( loop ( points ( ) < < LatLng ( 20 . 0 , 20 . 0 ) < < LatLng ( 20 . 0 , - 20 . 0 ) <nl> + < < LatLng ( - 20 . 0 , - 20 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 20 . 0 ) ) ) ; <nl> + bigPoly40Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly40Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly40Comp . Contains ( shellPoly ) ) ; <nl> + ASSERT_FALSE ( bigPoly40Comp . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / 2 . BigPolygon intersects shellPoly <nl> + / / Everything * not * in a 24X24 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly24Comp ( loop ( points ( ) < < LatLng ( 12 . 0 , 12 . 0 ) < < LatLng ( 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 0 , - 12 . 0 ) <nl> + < < LatLng ( - 12 . 0 , 12 . 0 ) ) ) ; <nl> + bigPoly24Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly24Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly24Comp . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly24Comp . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / 3 . BigPolygon contains shellPoly ' s outer ring <nl> + / / Everything * not * in a 16X16 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly16Comp ( loop ( points ( ) < < LatLng ( 8 . 0 , 8 . 0 ) < < LatLng ( 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , - 8 . 0 ) <nl> + < < LatLng ( - 8 . 0 , 8 . 0 ) ) ) ; <nl> + bigPoly16Comp . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly16Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly16Comp . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly16Comp . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / 4 . BigPolygon contains the right half of shellPoly <nl> + / / Everything * not * in a 40x40 square centered at [ 0 , 20 ] <nl> + BigSimplePolygon bigPoly40CompOffset ( loop ( points ( ) < < LatLng ( 20 . 0 , 40 . 0 ) <nl> + < < LatLng ( 20 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 20 . 0 , 40 . 0 ) ) ) ; <nl> + bigPoly40CompOffset . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly40CompOffset . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly40CompOffset . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPoly40CompOffset . Intersects ( shellPoly ) ) ; <nl> + <nl> + / / 5 . BigPolygon contain shellPoly ( CW ) <nl> + BigSimplePolygon bigPolyCompOffset ( loop ( points ( ) < < LatLng ( 6 . 0 , 6 . 0 ) <nl> + < < LatLng ( 6 . 0 , 8 . 0 ) <nl> + < < LatLng ( - 6 . 0 , 8 . 0 ) <nl> + < < LatLng ( - 6 . 0 , 6 . 0 ) ) ) ; <nl> + ASSERT_GREATER_THAN ( bigPolyCompOffset . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_TRUE ( bigPolyCompOffset . Contains ( shellPoly ) ) ; <nl> + ASSERT_TRUE ( bigPolyCompOffset . Intersects ( shellPoly ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , BasicWinding ) { <nl> + <nl> + / / A 20x20 square centered at [ 0 , 0 ] ( CCW ) <nl> + BigSimplePolygon bigPoly20 ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + / / Everything * not * in a 20x20 square centered at [ 0 , 0 ] ( CW ) <nl> + BigSimplePolygon bigPoly20Comp ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( 10 . 0 , - 10 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigPoly20 . 
GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly20Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , LineRelations ) { <nl> + <nl> + / / A 20x20 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly20 ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + / / A 10x10 line circling [ 0 , 0 ] <nl> + S2Polyline line10 ( pointVec ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigPoly20 . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT ( bigPoly20 . Contains ( line10 ) ) ; <nl> + ASSERT ( bigPoly20 . Intersects ( line10 ) ) ; <nl> + <nl> + / / Line segment disjoint from big polygon <nl> + S2Polyline lineDisjoint ( pointVec ( points ( ) < < LatLng ( 15 . 0 , 5 . 0 ) < < LatLng ( 15 . 0 , - 5 . 0 ) ) ) ; <nl> + ASSERT_FALSE ( bigPoly20 . Contains ( lineDisjoint ) ) ; <nl> + ASSERT_FALSE ( bigPoly20 . Intersects ( lineDisjoint ) ) ; <nl> + <nl> + / / Line segment intersects big polygon <nl> + S2Polyline lineIntersect ( pointVec ( points ( ) < < LatLng ( 0 . 0 , 0 . 0 ) < < LatLng ( 15 . 0 , 0 . 0 ) ) ) ; <nl> + ASSERT_FALSE ( bigPoly20 . Contains ( lineIntersect ) ) ; <nl> + ASSERT_TRUE ( bigPoly20 . Intersects ( lineIntersect ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , LineRelationsComplement ) { <nl> + <nl> + / / A 20x20 square centered at [ 0 , 0 ] <nl> + BigSimplePolygon bigPoly20Comp ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , 10 . 0 ) ) ) ; <nl> + bigPoly20Comp . Invert ( ) ; <nl> + <nl> + / / A 10x10 line circling [ 0 , 0 ] <nl> + S2Polyline line10 ( pointVec ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 . 0 ) ) ) ; <nl> + <nl> + ASSERT_GREATER_THAN ( bigPoly20Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Contains ( line10 ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Intersects ( line10 ) ) ; <nl> + <nl> + / / Line segment ( 0 , 0 ) - > ( 0 , 15 ) <nl> + S2Polyline lineIntersect ( pointVec ( points ( ) < < LatLng ( 0 . 0 , 0 . 0 ) < < LatLng ( 0 . 0 , 15 . 0 ) ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Contains ( lineIntersect ) ) ; <nl> + ASSERT_TRUE ( bigPoly20Comp . Intersects ( lineIntersect ) ) ; <nl> + <nl> + / / A 30x30 line circling [ 0 , 0 ] <nl> + S2Polyline line30 ( pointVec ( points ( ) < < LatLng ( 15 . 0 , 15 . 0 ) < < LatLng ( 15 . 0 , - 15 . 0 ) <nl> + < < LatLng ( - 15 . 0 , - 15 . 0 ) < < LatLng ( - 15 . 0 , 15 . 0 ) ) ) ; <nl> + ASSERT_TRUE ( bigPoly20Comp . Contains ( line30 ) ) ; <nl> + ASSERT_TRUE ( bigPoly20Comp . Intersects ( line30 ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , LineRelationsWinding ) { <nl> + <nl> + / / Everything * not * in a 20x20 square centered at [ 0 , 0 ] ( CW winding ) <nl> + BigSimplePolygon bigPoly20Comp ( loop ( points ( ) < < LatLng ( 10 . 0 , 10 . 0 ) < < LatLng ( - 10 . 0 , 10 . 0 ) <nl> + < < LatLng ( - 10 . 0 , - 10 . 0 ) <nl> + < < LatLng ( 10 . 0 , - 10 . 0 ) ) ) ; <nl> + <nl> + / / A 10x10 line circling [ 0 , 0 ] <nl> + S2Polyline line10 ( pointVec ( points ( ) < < LatLng ( 5 . 0 , 5 . 0 ) < < LatLng ( 5 . 0 , - 5 . 0 ) <nl> + < < LatLng ( - 5 . 0 , - 5 . 0 ) < < LatLng ( - 5 . 0 , 5 .
0 ) ) ) ; <nl> + <nl> + ASSERT_GREATER_THAN ( bigPoly20Comp . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Contains ( line10 ) ) ; <nl> + ASSERT_FALSE ( bigPoly20Comp . Intersects ( line10 ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , PolarContains ) { <nl> + <nl> + / / Square 10 degrees from the north pole [ 90 , 0 ] <nl> + BigSimplePolygon bigNorthPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( 80 . 0 , 90 . 0 ) <nl> + < < LatLng ( 80 . 0 , 180 . 0 ) < < LatLng ( 80 . 0 , - 90 . 0 ) ) ) ; <nl> + <nl> + / / Square 5 degrees from the north pole [ 90 , 0 ] <nl> + S2Polygon northPoly ( loopVec ( points ( ) < < LatLng ( 85 . 0 , 0 . 0 ) < < LatLng ( 85 . 0 , 90 . 0 ) <nl> + < < LatLng ( 85 . 0 , 180 . 0 ) < < LatLng ( 85 . 0 , - 90 . 0 ) ) ) ; <nl> + <nl> + ASSERT_LESS_THAN ( bigNorthPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + ASSERT_LESS_THAN ( northPoly . GetArea ( ) , bigNorthPoly . GetArea ( ) ) ; <nl> + ASSERT ( bigNorthPoly . Contains ( northPoly ) ) ; <nl> + ASSERT ( bigNorthPoly . Intersects ( northPoly ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , PolarContainsWithHoles ) { <nl> + <nl> + / / Square 10 degrees from the north pole [ 90 , 0 ] <nl> + BigSimplePolygon bigNorthPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( 80 . 0 , 90 . 0 ) <nl> + < < LatLng ( 80 . 0 , 180 . 0 ) < < LatLng ( 80 . 0 , - 90 . 0 ) ) ) ; <nl> + <nl> + / / Square 5 degrees from the north pole [ 90 , 0 ] with a concentric hole 1 degree from the <nl> + / / north pole <nl> + vector < S2Loop * > loops ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 85 . 0 , 0 . 0 ) < < LatLng ( 85 . 0 , 90 . 0 ) <nl> + < < LatLng ( 85 . 0 , 180 . 0 ) < < LatLng ( 85 . 0 , - 90 . 0 ) ) ) ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 89 . 0 , 0 . 0 ) < < LatLng ( 89 . 0 , 90 . 0 ) <nl> + < < LatLng ( 89 . 0 , 180 . 0 ) < < LatLng ( 89 . 0 , - 90 . 0 ) ) ) ; <nl> + S2Polygon northPolyHole ( & loops ) ; <nl> + <nl> + ASSERT_LESS_THAN ( northPolyHole . GetArea ( ) , bigNorthPoly . GetArea ( ) ) ; <nl> + ASSERT ( bigNorthPoly . Contains ( northPolyHole ) ) ; <nl> + ASSERT ( bigNorthPoly . Intersects ( northPolyHole ) ) ; <nl> + } <nl> + <nl> + TEST ( BigSimplePolygon , PolarIntersectsWithHoles ) { <nl> + <nl> + / / Square 10 degrees from the north pole [ 90 , 0 ] <nl> + BigSimplePolygon bigNorthPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( 80 . 0 , 90 . 0 ) <nl> + < < LatLng ( 80 . 0 , 180 . 0 ) < < LatLng ( 80 . 0 , - 90 . 0 ) ) ) ; <nl> + <nl> + / / 5 - degree square with 1 - degree - wide concentric hole , centered on [ 80 . 0 , 0 . 0 ] <nl> + vector < S2Loop * > loops ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 85 . 0 , 5 . 0 ) < < LatLng ( 85 . 0 , - 5 . 0 ) <nl> + < < LatLng ( 75 . 0 , - 5 . 0 ) < < LatLng ( 75 . 0 , 5 . 0 ) ) ) ; <nl> + loops . push_back ( loop ( points ( ) < < LatLng ( 81 . 0 , 1 . 0 ) < < LatLng ( 81 . 0 , - 1 . 0 ) <nl> + < < LatLng ( 79 . 0 , - 1 . 0 ) < < LatLng ( 79 . 0 , 1 . 0 ) ) ) ; <nl> + S2Polygon northPolyHole ( & loops ) ; <nl> + <nl> + ASSERT_LESS_THAN ( northPolyHole . GetArea ( ) , bigNorthPoly . GetArea ( ) ) ; <nl> + ASSERT_FALSE ( bigNorthPoly . Contains ( northPolyHole ) ) ; <nl> + ASSERT ( bigNorthPoly . Intersects ( northPolyHole ) ) ; <nl> + } <nl> + <nl> + / / Edge cases <nl> + / / <nl> + / / No promise in terms of points on border - they may be inside or outside the big polygon . 
<nl> + / / But we need to ensure the result is consistent : <nl> + / / 1 . If a polygon / line is contained by a big polygon , they must intersect with each other . <nl> + / / 2 . Relation doesn ' t change as long as the touch point doesn ' t change , no matter whether the big <nl> + / / polygon is larger or smaller than a hemisphere . <nl> + / / 3 . Relations for big polygons less than a hemisphere are consistent with ordinary ( simple ) <nl> + / / polygon results . <nl> + <nl> + template < typename TShape > <nl> + void checkConsistency ( const BigSimplePolygon & bigPoly , <nl> + const BigSimplePolygon & expandedBigPoly , <nl> + const TShape & shape ) { <nl> + / / Contain ( ) = > Intersects ( ) <nl> + if ( bigPoly . Contains ( shape ) ) ASSERT ( bigPoly . Intersects ( shape ) ) ; <nl> + if ( expandedBigPoly . Contains ( shape ) ) ASSERT ( expandedBigPoly . Intersects ( shape ) ) ; <nl> + / / Relation doesn ' t change <nl> + ASSERT_EQUALS ( bigPoly . Contains ( shape ) , expandedBigPoly . Contains ( shape ) ) ; <nl> + ASSERT_EQUALS ( bigPoly . Intersects ( shape ) , expandedBigPoly . Intersects ( shape ) ) ; <nl> + } <nl> + <nl> + / / Polygon shares big polygon ' s edge ( disjoint ) <nl> + TEST ( BigSimplePolygon , ShareEdgeDisjoint ) { <nl> + / / Big polygon smaller than a hemisphere . <nl> + BigSimplePolygon bigPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 90 . 0 ) < < LatLng ( 80 . 0 , 90 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + / / Vertex point and collinear point <nl> + S2Point point = LatLng ( 80 . 0 , 0 . 0 ) . ToPoint ( ) ; <nl> + S2Point collinearPoint = LatLng ( 0 . 0 , 0 . 0 ) . ToPoint ( ) ; <nl> + <nl> + / / Polygon shares one edge <nl> + S2Polygon poly ( loopVec ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , - 10 . 0 ) < < LatLng ( 80 . 0 , - 10 . 0 ) ) ) ; <nl> + / / Polygon shares a segment of one edge <nl> + S2Polygon collinearPoly ( loopVec ( points ( ) < < LatLng ( 50 . 0 , 0 . 0 ) < < LatLng ( - 50 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 50 . 0 , - 10 . 0 ) < < LatLng ( 50 . 0 , - 10 . 0 ) ) ) ; <nl> + <nl> + / / Line <nl> + S2Polyline line ( pointVec ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , - 10 . 0 ) ) ) ; <nl> + / / Line shares a segment of one edge <nl> + S2Polyline collinearLine ( pointVec ( points ( ) < < LatLng ( 50 . 0 , 0 . 0 ) < < LatLng ( - 50 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 50 . 0 , - 10 . 0 ) ) ) ; <nl> + <nl> + / / Big polygon larger than a hemisphere . <nl> + BigSimplePolygon expandedBigPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 90 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 180 . 0 ) <nl> + < < LatLng ( - 80 . 0 , - 90 . 0 ) <nl> + < < LatLng ( 80 . 0 , - 90 . 0 ) < < LatLng ( 80 . 0 , 180 . 0 ) <nl> + < < LatLng ( 80 . 0 , 90 . 0 ) ) ) ; <nl> + ASSERT_GREATER_THAN ( expandedBigPoly .
GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + checkConsistency ( bigPoly , expandedBigPoly , point ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoint ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , poly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , line ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearLine ) ; <nl> + <nl> + / / Check the complement of big polygon <nl> + bigPoly . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + expandedBigPoly . Invert ( ) ; <nl> + ASSERT_LESS_THAN ( expandedBigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + checkConsistency ( bigPoly , expandedBigPoly , point ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoint ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , poly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , line ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearLine ) ; <nl> + } <nl> + <nl> + / / Polygon / line shares big polygon ' s edge ( contained by big polygon ) <nl> + TEST ( BigSimplePolygon , ShareEdgeContained ) { <nl> + / / Big polygon smaller than a hemisphere . <nl> + BigSimplePolygon bigPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 90 . 0 ) < < LatLng ( 80 . 0 , 90 . 0 ) ) ) ; <nl> + ASSERT_LESS_THAN ( bigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + / / Polygon <nl> + S2Polygon poly ( loopVec ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 10 . 0 ) < < LatLng ( 80 . 0 , 10 . 0 ) ) ) ; <nl> + / / Polygon shares a segment of one edge <nl> + S2Polygon collinearPoly ( loopVec ( points ( ) < < LatLng ( 50 . 0 , 0 . 0 ) < < LatLng ( - 50 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 50 . 0 , 10 . 0 ) < < LatLng ( 50 . 0 , 10 . 0 ) ) ) ; <nl> + / / Line <nl> + S2Polyline line ( pointVec ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( 0 . 0 , 10 . 0 ) ) ) ; <nl> + / / Line shares a segment of one edge <nl> + S2Polyline collinearLine ( pointVec ( points ( ) < < LatLng ( 50 . 0 , 0 . 0 ) < < LatLng ( - 50 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 50 . 0 , 10 . 0 ) ) ) ; <nl> + <nl> + / / Big polygon larger than a hemisphere . <nl> + BigSimplePolygon expandedBigPoly ( loop ( points ( ) < < LatLng ( 80 . 0 , 0 . 0 ) < < LatLng ( - 80 . 0 , 0 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 90 . 0 ) <nl> + < < LatLng ( - 80 . 0 , 180 . 0 ) <nl> + < < LatLng ( - 80 . 0 , - 90 . 0 ) <nl> + < < LatLng ( 80 . 0 , - 90 . 0 ) < < LatLng ( 80 . 0 , 180 . 0 ) <nl> + < < LatLng ( 80 . 0 , 90 . 0 ) ) ) ; <nl> + ASSERT_GREATER_THAN ( expandedBigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + checkConsistency ( bigPoly , expandedBigPoly , poly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , line ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearLine ) ; <nl> + <nl> + / / Check the complement of big polygon <nl> + bigPoly . Invert ( ) ; <nl> + ASSERT_GREATER_THAN ( bigPoly . GetArea ( ) , 2 * M_PI ) ; <nl> + expandedBigPoly . Invert ( ) ; <nl> + ASSERT_LESS_THAN ( expandedBigPoly . 
GetArea ( ) , 2 * M_PI ) ; <nl> + <nl> + checkConsistency ( bigPoly , expandedBigPoly , poly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearPoly ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , line ) ; <nl> + checkConsistency ( bigPoly , expandedBigPoly , collinearLine ) ; <nl> + } <nl> + <nl> + } <nl>
SERVER - 14510 Custom CRS for strict winding order enforcement
mongodb/mongo
712768556e72d1756995c6a7b020b875fb9d6ea9
2014-08-12T21:04:08Z
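The BigSimplePolygon tests above hinge on one invariant: a CCW loop bounds the "small" region of the sphere (spherical area below 2*pi), a CW loop bounds its complement, a polygon larger than a hemisphere (area above 2*pi), and Invert() switches between the two. The following is a minimal sketch of that classification rule in plain C++ with illustrative names (LoopInfo is not the S2/MongoDB type); it only demonstrates the area threshold the assertions rely on, not the real containment logic.

#include <cassert>
#include <cmath>

// Illustrative sketch, not the S2/MongoDB implementation: the unit sphere
// has total area 4*pi, so a loop enclosing more than 2*pi must be the
// complement of the "small" region, i.e. a big polygon with CW winding.
struct LoopInfo {
    double area;                                 // spherical area enclosed by the loop
    bool isBigPolygon() const { return area > 2.0 * M_PI; }
    void invert() { area = 4.0 * M_PI - area; }  // take the complement on the sphere
};

int main() {
    LoopInfo square20{0.12};          // small CCW 20x20-degree square, area well below 2*pi
    assert(!square20.isBigPolygon());

    square20.invert();                // everything *not* in the square
    assert(square20.isBigPolygon());
    return 0;
}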
mmm a / servers / visual / rasterizer_rd / shader_compiler_rd . cpp <nl> ppp b / servers / visual / rasterizer_rd / shader_compiler_rd . cpp <nl> String ShaderCompilerRD : : _dump_node_code ( const SL : : Node * p_node , int p_level , Ge <nl> } break ; <nl> case SL : : OP_SELECT_IF : { <nl> <nl> + code + = " ( " ; <nl> code + = _dump_node_code ( onode - > arguments [ 0 ] , p_level , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> code + = " ? " ; <nl> code + = _dump_node_code ( onode - > arguments [ 1 ] , p_level , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> code + = " : " ; <nl> code + = _dump_node_code ( onode - > arguments [ 2 ] , p_level , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> + code + = " ) " ; <nl> <nl> } break ; <nl> <nl>
Merge pull request from Chaosus / shader_bug_vk2
godotengine/godot
86d0d88b4206e2eb58e01c0a13e2fb1f9b6cab12
2020-02-11T11:00:16Z
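The one-line fix above wraps the emitted ternary in parentheses. The generated fragment gets spliced into larger expression strings, and because ?: binds more loosely than almost every other operator, an unparenthesized a ? b : c can regroup once text is concatenated around it. Below is a small stand-alone C++ sketch of the idea, with illustrative names (emit_select) rather than the actual shader-compiler API.

#include <iostream>
#include <string>

// Sketch of a code generator emitting a select expression as text.
// Parenthesizing the whole conditional lets the caller embed it anywhere
// without changing how the result parses.
static std::string emit_select(const std::string &cond,
                               const std::string &a,
                               const std::string &b) {
    return "(" + cond + " ? " + a + " : " + b + ")";
}

int main() {
    // Without the wrapping parentheses, pasting "c > 0.5 ? x : y" after
    // "2.0 * " yields "2.0 * c > 0.5 ? x : y", which parses as
    // "((2.0 * c) > 0.5) ? x : y" instead of "2.0 * (c > 0.5 ? x : y)".
    std::string select = emit_select("c > 0.5", "x", "y");
    std::cout << "2.0 * " << select << "\n";  // prints: 2.0 * (c > 0.5 ? x : y)
    return 0;
}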
mmm a / tools / stat . cpp <nl> ppp b / tools / stat . cpp <nl> namespace mongo { <nl> { <nl> string orig = getParam ( " host " ) ; <nl> if ( orig = = " " ) <nl> - orig = " localhost : 27017 " ; <nl> + orig = " localhost " ; <nl> + <nl> + if ( orig . find ( " : " ) = = string : : npos ) { <nl> + if ( hasParam ( " port " ) ) <nl> + orig + = " : " + _params [ " port " ] . as < string > ( ) ; <nl> + else <nl> + orig + = " : 27017 " ; <nl> + } <nl> + <nl> StringSplitter ss ( orig . c_str ( ) , " , " ) ; <nl> while ( ss . more ( ) ) { <nl> string host = ss . next ( ) ; <nl>
fix - - port - - discover for mongostat SERVER - 2457
mongodb/mongo
a8b01fa52171de773633a63a8f48ed6168c8178f
2011-02-02T06:00:38Z
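The patch above makes mongostat honor --port when the host string carries no explicit port: an explicit host:port wins, then --port, then the 27017 default. The following stand-alone sketch shows that resolution order with a hypothetical helper (resolveHost), not the actual tool code.

#include <iostream>
#include <string>

// Hypothetical helper mirroring the resolution order in the patch:
// explicit "host:port" is used as-is; otherwise append --port if given,
// otherwise fall back to the default 27017.
static std::string resolveHost(std::string host, const std::string &portParam) {
    if (host.empty())
        host = "localhost";
    if (host.find(':') == std::string::npos)
        host += ":" + (portParam.empty() ? std::string("27017") : portParam);
    return host;
}

int main() {
    std::cout << resolveHost("", "") << "\n";               // localhost:27017
    std::cout << resolveHost("", "30000") << "\n";          // localhost:30000
    std::cout << resolveHost("db1:40000", "30000") << "\n"; // db1:40000 (explicit port wins)
    return 0;
}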
mmm a / bin / import_voxforge . py <nl> ppp b / bin / import_voxforge . py <nl> <nl> import codecs <nl> import os <nl> import re <nl> + import sys <nl> import tarfile <nl> import threading <nl> import unicodedata <nl>
Add missing sys import to import_voxforge . py
mozilla/DeepSpeech
af7c4e90df5f4d1a616e6fd728ee3428b69be155
2020-10-23T09:09:49Z
mmm a / hphp / doc / ir . specification <nl> ppp b / hphp / doc / ir . specification <nl> Instruction set <nl> <nl> 1 . Checks and Asserts <nl> <nl> - Note : Instructions that check boxed types only check that the operand is boxed , <nl> - and they ignore the type of the value inside the box ( the inner type ) . The <nl> - inner type is normally checked when the value within the box is about to be <nl> - loaded , using a separate CheckRefInner instruction . <nl> - <nl> - | CheckType < T > , DRefineS ( 0 ) , S ( Gen , Cls ) , B | P <nl> + | CheckType < T > , DRefineS ( 0 ) , S ( Cell ) , B | P <nl> <nl> Check that the type of the src S0 is T , and if so copy it to D , and <nl> fallthrough . If S0 cannot be proven to be T , branch to block B . Note that <nl> loaded , using a separate CheckRefInner instruction . <nl> If S0 is not a null pointer , branch to block B . This is used to check the <nl> return value of a native helper that returns a potentially null StringData * . <nl> <nl> - | AssertType , DRefineS ( 0 ) , S ( Gen , MemToGen ) , P <nl> + | AssertType , DRefineS ( 0 ) , S ( Cell , MemToCell ) , P <nl> <nl> Assert that the type of S0 is T , copying it to D . <nl> <nl> - | CheckTypeMem < T > , ND , S ( MemToGen ) , B <nl> + | CheckTypeMem < T > , ND , S ( MemToCell ) , B <nl> <nl> If the value pointed to by S0 is not type T , branch to the block B . <nl> <nl> loaded , using a separate CheckRefInner instruction . <nl> Check that S0 is a darray , and if so copy it to D , and fallthrough . If S0 is <nl> not a darray , branch to block B . <nl> <nl> - | HintLocInner < T , localId > , ND , S ( FramePtr ) , NF <nl> - <nl> - Hint that the inner type of a BoxedCell in localId is likely type T , where T <nl> - is a subtype of BoxedCell . The type must be guarded on before it is known to <nl> - be true ( via CheckRefInner ) . <nl> - <nl> - | HintStkInner < T , offset > , ND , S ( StkPtr ) , NF <nl> - <nl> - Hint that the inner type of the BoxedInitCell on the stack pointed to by S0 <nl> - at offset ( in cells ) is T . The type must be guarded on before it is known to <nl> - be true ( via CheckRefInner ) . <nl> - <nl> - | HintMBaseInner < T > , ND , NA , NF <nl> - <nl> - Hint that the inner type of the BoxedInitCell pointed to by the member base <nl> - register is T . The type must be guarded on before it is known to be true <nl> - ( via CheckRefInner ) . <nl> - <nl> | CheckIter < iterId , iterType > , ND , S ( FramePtr ) , B <nl> <nl> Check that specialization type of the given iterator ` iterId ` on the frame S0 <nl> loaded , using a separate CheckRefInner instruction . <nl> Check that the type of the cell on the stack pointed to by S0 at offset ( in <nl> cells ) is T ; if not , branch to block B . <nl> <nl> - | CheckMBase < T > , ND , S ( LvalToGen ) , B <nl> + | CheckMBase < T > , ND , S ( LvalToCell ) , B <nl> <nl> Check that the value pointed to by the member base register S0 has type T ; if <nl> not , branch to block B . This is functionally the same as CheckTypeMem . <nl> loaded , using a separate CheckRefInner instruction . <nl> failed type coercion . They are encoded along with callee Func , fn , and the <nl> integer position of the argument , argNum , being coerced . <nl> <nl> - | CheckInit , ND , S ( Gen ) , B <nl> + | CheckInit , ND , S ( Cell ) , B <nl> <nl> If S0 ' s type is Uninit , branch to block B . <nl> <nl> - | CheckInitMem , ND , S ( MemToGen ) , B <nl> + | CheckInitMem , ND , S ( MemToCell ) , B <nl> <nl> If the value pointed to by S0 has type Uninit , branch to block B . 
<nl> <nl> loaded , using a separate CheckRefInner instruction . <nl> A no - op at runtime , this instruction serves to mark the end of the initial <nl> sequence of guards in a trace . <nl> <nl> - | CheckNonNull , DSubtract ( 0 , Nullptr ) , S ( Nullptr , Func , PtrToGen , TCA , Cls , Obj , Str ) , B <nl> + | CheckNonNull , DSubtract ( 0 , Nullptr ) , S ( Nullptr , Func , PtrToCell , TCA , Cls , Obj , Str ) , B <nl> <nl> If the value in S0 is Nullptr , branch to block B . If S0 cannot be Nullptr , or <nl> always is Nullptr , this check may be optimized away . <nl> To string conversions : <nl> If S0 is a string representing an integer value ( same criteria as array key <nl> conversion ) , return that value as an integer . Otherwise return S0 . <nl> <nl> - | ConvPtrToLval , DLvalOfPtr , S ( PtrToGen ) , NF <nl> + | ConvPtrToLval , DLvalOfPtr , S ( PtrToCell ) , NF <nl> <nl> Convert S0 to an equivalent lval . <nl> <nl> To string conversions : <nl> <nl> Returns whether the object S0 has a toString method . <nl> <nl> - | IsType < T > , D ( Bool ) , S ( Gen ) , NF <nl> + | IsType < T > , D ( Bool ) , S ( Cell ) , NF <nl> <nl> Sets D to true iff S0 holds a value that is of type T . T must not be a <nl> specialized type . <nl> <nl> - | IsNType < T > , D ( Bool ) , S ( Gen ) , NF <nl> + | IsNType < T > , D ( Bool ) , S ( Cell ) , NF <nl> <nl> Sets D to true iff S0 holds a value that is not of type T . T must not be a <nl> specialized type . <nl> <nl> - | IsTypeMem < T > , D ( Bool ) , S ( PtrToGen ) , NF <nl> + | IsTypeMem < T > , D ( Bool ) , S ( PtrToCell ) , NF <nl> <nl> Sets D to true iff the value referenced by S0 is of type T . T must not be a <nl> specialized type . <nl> To string conversions : <nl> The value in S0 must not be a pointer into the evaluation stack or frame <nl> locals . <nl> <nl> - | IsNTypeMem < T > , D ( Bool ) , S ( PtrToGen ) , NF <nl> + | IsNTypeMem < T > , D ( Bool ) , S ( PtrToCell ) , NF <nl> <nl> Sets D to true iff the value referenced by S0 is not of type T . T must not be <nl> a specialized type . <nl> To string conversions : <nl> Generate exceptions based on surprise flags on a per request basis . <nl> Make sure CheckSurpriseFlags is true before calling HandleRequestSurprise . <nl> <nl> - | ReturnHook , ND , S ( FramePtr ) S ( Gen ) , NF <nl> + | ReturnHook , ND , S ( FramePtr ) S ( Cell ) , NF <nl> <nl> Surprise flag hook for function returns . <nl> <nl> To string conversions : <nl> If S0 is true / non - zero , return S1 , otherwise return S2 . <nl> <nl> <nl> - 6 . Reference manipulation <nl> - <nl> - | Box , D ( BoxedInitCell ) , S ( Cell ) , CRc | PRc <nl> - <nl> - Box S0 and put the resulting BoxedInitCell in D . If S0 is Uninit , then <nl> - InitNull will be boxed instead . <nl> + 6 . Loads <nl> <nl> - | UnboxPtr , DUnboxPtr , S ( MemToGen ) , NF <nl> - <nl> - If S0 points to a cell that is KindOfRef , dereference the pointer in the <nl> - TypedValue and return a pointer to the inner - cell in D . <nl> - <nl> - | BoxPtr , DBoxPtr , S ( MemToGen ) , NF <nl> - <nl> - Boxes the TypeValue that S0 points to if it is not boxed . The result D points <nl> - to the same TypedValue as S0 but has a more refined type . <nl> - <nl> - S0 may not already point into a RefData ( due to VM invariants ) , although the <nl> - IR type system does not enforce it . <nl> - <nl> - <nl> - 7 . 
Loads <nl> - <nl> - | LdStk < T , offset > , DParamMayRelax ( Gen ) , S ( StkPtr ) , NF <nl> + | LdStk < T , offset > , DParamMayRelax ( Cell ) , S ( StkPtr ) , NF <nl> <nl> Loads from S0 at offset ( in cells ) , and puts the value in D as type T . <nl> <nl> - | LdLoc < T , localId > , DParamMayRelax ( Gen ) , S ( FramePtr ) , NF <nl> + | LdLoc < T , localId > , DParamMayRelax ( Cell ) , S ( FramePtr ) , NF <nl> <nl> Loads local slot localId from the frame S0 and puts the value in D as type T . <nl> <nl> - | LdLocPseudoMain < T , localId > , DParam ( Gen ) , S ( FramePtr ) , B <nl> + | LdLocPseudoMain < T , localId > , DParam ( Cell ) , S ( FramePtr ) , B <nl> <nl> Loads local number localId from frame S0 and puts the value in D if the <nl> local ' s type is a subtype of T . If the local ' s type is not a subtype of T , <nl> To string conversions : <nl> instruction is used for loading locals in pseudo - mains , where they can alias <nl> globals . <nl> <nl> - | LdStkAddr < T , offset > , D ( PtrToStkGen ) , S ( StkPtr ) , NF <nl> + | LdStkAddr < T , offset > , D ( PtrToStkCell ) , S ( StkPtr ) , NF <nl> <nl> Loads the address of the stack slot given by the pointer in S0 at the offset <nl> - ( in cells ) . T must be a subtype of PtrToStkGen . <nl> + ( in cells ) . T must be a subtype of PtrToStkCell . <nl> <nl> - | LdLocAddr < localId > , D ( PtrToFrameGen ) , S ( FramePtr ) , NF <nl> + | LdLocAddr < localId > , D ( PtrToFrameCell ) , S ( FramePtr ) , NF <nl> <nl> Loads the address of the local slot localId from the frame S0 into D . <nl> <nl> - | LdRDSAddr < T , RDSHandle > , DParam ( PtrToGen ) , NA , NF <nl> + | LdRDSAddr < T , RDSHandle > , DParam ( PtrToCell ) , NA , NF <nl> <nl> - Load the address of a Gen that lives at the specified RDS handle . The type <nl> - param must be a subtype of PtrToGen . <nl> + Load the address of a Cell that lives at the specified RDS handle . The type <nl> + param must be a subtype of PtrToCell . <nl> <nl> - | LdInitRDSAddr < T , RDSHandle > , DParam ( PtrToInitGen ) , NA , B <nl> + | LdInitRDSAddr < T , RDSHandle > , DParam ( PtrToInitCell ) , NA , B <nl> <nl> - Load the address of a Gen that lives at the specified RDS handle . Branch if <nl> + Load the address of a Cell that lives at the specified RDS handle . Branch if <nl> the value at that address is Uninit . The type param must be a subtype of <nl> - PtrToInitGen . <nl> + PtrToInitCell . <nl> <nl> | LdVectorBase , D ( PtrToMembCell ) , S ( Obj ) , NF <nl> <nl> To string conversions : <nl> Loads the base pointer to an array of Cells from the given collection <nl> instance in S0 . <nl> <nl> - | LdMem < T > , DParam ( Gen ) , S ( MemToGen ) , NF <nl> + | LdMem < T > , DParam ( Cell ) , S ( MemToCell ) , NF <nl> <nl> Loads from S0 and puts the value in D . <nl> <nl> - | LdContField < T > , DParam ( Gen ) , S ( Obj ) C ( Int ) , NF <nl> + | LdContField < T > , DParam ( Cell ) , S ( Obj ) C ( Int ) , NF <nl> <nl> Loads a property from the object referenced by S0 at the offset given by S1 <nl> and puts the value in D . S0 must be a Generator . <nl> To string conversions : <nl> Map , Set , ImmMap , or ImmSet , and that specific object type must be known at <nl> compile time . <nl> <nl> - | CheckRefInner < T > , ND , S ( BoxedCell ) , B <nl> - <nl> - TODO ( # 2939547 ) : this should take BoxedInitCell <nl> - <nl> - Check that the inner type of the boxed cell in S0 is T , and if not take the <nl> - branch to B . 
<nl> - <nl> | LdIterBase < T , iterId > , DParam ( ArrLike | Obj ) , S ( FramePtr ) , NF <nl> <nl> Load the base of the iterator with type ` T ` at ` iterId ` . ` T ` must be a valid <nl> To string conversions : <nl> the iter is known to have some specialized type ( via CheckIter ) , then we can <nl> use that information to do a cheaper specialized load . <nl> <nl> - | LdIterPos < T , iterId > , DParam ( Int | PtrToElemGen ) , S ( FramePtr ) , NF <nl> + | LdIterPos < T , iterId > , DParam ( Int | PtrToElemCell ) , S ( FramePtr ) , NF <nl> <nl> - | LdIterEnd < T , iterId > , DParam ( Int | PtrToElemGen ) , S ( FramePtr ) , NF <nl> + | LdIterEnd < T , iterId > , DParam ( Int | PtrToElemCell ) , S ( FramePtr ) , NF <nl> <nl> Load the specified field of the iterator at ` iterId ` . These ops should only <nl> be generated for iterators known to have a specialized type ( via CheckIter ) . <nl> The type param ` T ` should be compatible with this type - i . e . ` T ` should be <nl> either an int or a pointer based on whether it ' s an index or pointer iter . <nl> <nl> - | LdRef < T > , DParam ( Cell ) , S ( BoxedCell ) , NF <nl> - <nl> - TODO ( # 2939547 ) : this should take BoxedInitCell <nl> - <nl> - Loads the value held in the box referenced by S0 and puts the value in D . The <nl> - inner type of S0 must be a subtype of T ( usually ensured with a previous <nl> - CheckRefInner ) . <nl> - <nl> | LdFrameThis , DParam ( Obj ) , S ( FramePtr ) , NF <nl> <nl> Loads into D the value of m_this from S0 . <nl> To string conversions : <nl> Define a value of type T . This instruction aborts at runtime ; it is meant to <nl> be used in tests or code that is known to be unreachable . <nl> <nl> - | ConjureUse , ND , S ( Gen ) , NF <nl> + | ConjureUse , ND , S ( Cell ) , NF <nl> <nl> Define a " use " of S0 effectively keeping the value alive . As with Conjure it <nl> should not appear in reachable code . <nl> To string conversions : <nl> constant . This should only be executed if LdCns on the same constant has <nl> failed . <nl> <nl> - | LdClsCns < className , constantName > , D ( PtrToGen ) , NA , B <nl> + | LdClsCns < className , constantName > , D ( PtrToCell ) , NA , B <nl> <nl> Load the address of the constant ' constantName ' for the class ' className ' in <nl> RDS . If not initialized , branch to B . <nl> <nl> - | LdSubClsCns < constantName , slot > , D ( PtrToGen ) , S ( Cls ) , NF <nl> + | LdSubClsCns < constantName , slot > , D ( PtrToCell ) , S ( Cls ) , NF <nl> <nl> Load the address of the constant ' constantName ' for the class S0 . The <nl> constant is known to be in the given slot . If the returned TypedValue is not <nl> To string conversions : <nl> raises an error if no such constant could be found , or if S0 : : S1 is not a <nl> type constant . <nl> <nl> - | ProfileSubClsCns < constantName , handle > , D ( PtrToGen ) , S ( Cls ) , NF <nl> + | ProfileSubClsCns < constantName , handle > , D ( PtrToCell ) , S ( Cls ) , NF <nl> <nl> Load the address of the constant ' constantName ' for the class S0 , profiling <nl> the observed slots . If the returned TypedValue is not UncountedInit , its <nl> To string conversions : <nl> method . Returns nullptr if it is an instance method defined in S2 ' s class <nl> hierarchy , indicating that this legacy call should be handled by interpreter . 
<nl> <nl> - | LdPropAddr < T , offset > , DParam ( LvalToPropGen ) , S ( Obj ) , NF <nl> + | LdPropAddr < T , offset > , DParam ( LvalToPropCell ) , S ( Obj ) , NF <nl> <nl> Load the address of the object property for S0 + ` offset ' ( in bytes ) . T must <nl> - be a subtype of PtrToPropGen . <nl> + be a subtype of PtrToPropCell . <nl> <nl> - | LdInitPropAddr < T , offset > , DParam ( LvalToPropGen ) , S ( Obj ) , B <nl> + | LdInitPropAddr < T , offset > , DParam ( LvalToPropCell ) , S ( Obj ) , B <nl> <nl> Load the address of the object property for S0 + ` offset ' ( in bytes ) . Branch <nl> if the value at that address is Uninit . T must be a subtype of <nl> - PtrToPropInitGen . <nl> + PtrToPropInitCell . <nl> <nl> - | LdGblAddr , D ( PtrToGblGen ) , S ( Str ) , B <nl> + | LdGblAddr , D ( PtrToGblCell ) , S ( Str ) , B <nl> <nl> Loads a pointer to a global . S0 is the global ' s name . Branches to B if the <nl> global is not defined . <nl> <nl> - | LdGblAddrDef , D ( PtrToGblGen ) , S ( Str ) , NF <nl> + | LdGblAddrDef , D ( PtrToGblCell ) , S ( Str ) , NF <nl> <nl> Loads a pointer to a global . S0 is the global ' s name . Defines the global if <nl> it is not already defined . <nl> <nl> - | LdClsPropAddrOrNull , D ( PtrToSPropGen | Nullptr ) , <nl> + | LdClsPropAddrOrNull , D ( PtrToSPropCell | Nullptr ) , <nl> | S ( Cls ) S ( Str ) C ( Cls ) C ( Bool ) C ( Bool ) , <nl> | NF <nl> <nl> To string conversions : <nl> Uninit , unless S3 is true . An exception is also thrown if S4 is true , <nl> and the property is constant . <nl> <nl> - | LdClsPropAddrOrRaise , D ( PtrToSPropGen ) , S ( Cls ) S ( Str ) C ( Cls ) C ( Bool ) C ( Bool ) , <nl> + | LdClsPropAddrOrRaise , D ( PtrToSPropCell ) , S ( Cls ) S ( Str ) C ( Cls ) C ( Bool ) C ( Bool ) , <nl> | NF <nl> <nl> Loads a pointer to a static class property . S0 points to the class , S1 is the <nl> To string conversions : <nl> <nl> Load the Func * of the ClsMethDataRef in S0 . <nl> <nl> - 8 . Allocation <nl> + 7 . Allocation <nl> <nl> | AllocObj , DAllocObj , S ( Cls ) , PRc <nl> <nl> To string conversions : <nl> <nl> Allocate a new FunctionCredential <nl> <nl> - 9 . Call & Return <nl> + 8 . Call & Return <nl> <nl> | BeginInlining < offset > , ND , S ( StkPtr ) , NF <nl> <nl> To string conversions : <nl> Execute a call to the native builtin specified by the current function . S0 <nl> and S1 should be the current vmfp and vmsp , respectively . <nl> <nl> - | CallBuiltin , DBuiltin , S ( FramePtr ) S ( StkPtr ) SVar ( PtrToGen , Gen , Cls , Nullptr ) , PRc <nl> + | CallBuiltin , DBuiltin , S ( FramePtr ) S ( StkPtr ) SVar ( PtrToCell , Cell , Nullptr ) , PRc <nl> <nl> Call builtin function with N arguments . S0 and S1 should be the current vmfp <nl> and vmsp , respectively . 
<nl> To string conversions : <nl> const String & PtrToStr source <nl> const Array & PtrToArr source <nl> const Object & PtrToObj source <nl> - const Variant & PtrToGen source <nl> - Variant & PtrToGen source ( ref param ) <nl> + const Variant & PtrToCell source <nl> + Variant & PtrToCell source ( ref param ) <nl> String { Str | InitNull } destination <nl> Array { Arr | InitNull } destination <nl> Object { Obj | InitNull } destination <nl> - Variant { Gen - UninitNull } destination <nl> + Variant { Cell - UninitNull } destination <nl> <nl> - | RetCtrl < spOff , suspendingResumed > , ND , S ( StkPtr ) S ( FramePtr ) S ( Gen ) , T <nl> + | RetCtrl < spOff , suspendingResumed > , ND , S ( StkPtr ) S ( FramePtr ) S ( Cell ) , T <nl> <nl> Ensure that S0 + ` spOff ' ( in cells ) is stored in rvmsp and that S1 ' s saved <nl> frame pointer is stored in rvmfp , then return to the saved return address in <nl> To string conversions : <nl> return control to the asio scheduler ( slow path ) . As with AsyncRetFast , the <nl> stack must contain exactly one cell containing uninitialied garbage . <nl> <nl> - | LdRetVal < T > , DParam ( Gen ) , S ( FramePtr ) , NF <nl> + | LdRetVal < T > , DParam ( Cell ) , S ( FramePtr ) , NF <nl> <nl> Load the return value from the already - returned - from ActRec pointed to by S0 <nl> into the dest . This is used by NativeImpl . TODO ( # 7150575 ) : We want to make <nl> To string conversions : <nl> call . <nl> <nl> <nl> - 10 . Stores <nl> + 9 . Stores <nl> <nl> - | StMem , ND , S ( MemToGen ) S ( Gen ) , NF <nl> + | StMem , ND , S ( MemToCell ) S ( Cell ) , NF <nl> <nl> Store S1 into the location pointed to by S0 . <nl> <nl> To string conversions : <nl> Store S2 into the location given by the index S1 from base pointer S0 . The <nl> index in S1 is the number of bytes from the base in S0 . <nl> <nl> - | StLoc < localId > , ND , S ( FramePtr ) S ( Gen ) , NF <nl> + | StLoc < localId > , ND , S ( FramePtr ) S ( Cell ) , NF <nl> <nl> Store S1 to local number localId on the frame pointed to by S0 . <nl> <nl> - | StLocPseudoMain < localId > , ND , S ( FramePtr ) S ( Gen ) , NF <nl> + | StLocPseudoMain < localId > , ND , S ( FramePtr ) S ( Cell ) , NF <nl> <nl> Behaves just like StLoc , except the hard requirement that it is only emitted <nl> for pseudo - mains . We don ' t optimize StGbl the same way as StLoc , as we need <nl> intraprocedural analysis to know whether the store is truly dead . <nl> <nl> - | StLocRange < localIds > , ND , S ( FramePtr ) S ( Gen ) , NF <nl> + | StLocRange < localIds > , ND , S ( FramePtr ) S ( Cell ) , NF <nl> <nl> Store S1 to the local variables corresponding to localIds , on the frame <nl> pointed to by S0 . <nl> To string conversions : <nl> Sets the type of the iterator at ` iterId ` to ` iterType ` . This type must be a <nl> specialized type . Also sets auxiliary fields ( like next helper index ) . <nl> <nl> - | StIterPos < iterId > , ND , S ( FramePtr ) S ( Int | PtrToElemGen ) , NF <nl> + | StIterPos < iterId > , ND , S ( FramePtr ) S ( Int | PtrToElemCell ) , NF <nl> <nl> - | StIterEnd < iterId > , ND , S ( FramePtr ) S ( Int | PtrToElemGen ) , NF <nl> + | StIterEnd < iterId > , ND , S ( FramePtr ) S ( Int | PtrToElemCell ) , NF <nl> <nl> Store S1 to the given field of the iterator at ` iterId ` . S1 must be an int if <nl> we ' re doing index iteration and a pointer if we ' re doing pointer iteration . <nl> <nl> - | StRef , ND , S ( BoxedCell ) S ( Cell ) , NF <nl> - <nl> - Store the value in S1 into the RefData pointed to by S0 . 
Stores the <nl> - RefData : : m_type also . <nl> - <nl> - | StStk < offset > , ND , S ( StkPtr ) S ( Gen ) , NF <nl> + | StStk < offset > , ND , S ( StkPtr ) S ( Cell ) , NF <nl> <nl> Store S1 to the stack pointed to by S0 , at a given offset ( in cells ) . <nl> <nl> - | StOutValue < index > , ND , S ( FramePtr ) S ( Gen ) , NF <nl> + | StOutValue < index > , ND , S ( FramePtr ) S ( Cell ) , NF <nl> <nl> Store S1 in a caller allocated out - value vm stack cell index cells above <nl> S0 on the stack . <nl> To string conversions : <nl> slots starting at the offset ( in cells ) , and going toward higher memory <nl> addresses . <nl> <nl> - | DbgTrashMem , ND , S ( MemToGen ) , NF <nl> + | DbgTrashMem , ND , S ( MemToCell ) , NF <nl> <nl> For debugging purposes . Store kTVTrashJITHeap to a heap slot pointed to by <nl> S0 . <nl> To string conversions : <nl> static <nl> <nl> <nl> - 11 . Trace exits <nl> + 10 . Trace exits <nl> <nl> | EagerSyncVMRegs , ND , S ( FramePtr ) S ( StkPtr ) , NF <nl> <nl> To string conversions : <nl> traces that trigger profile - guided optimizations . <nl> <nl> <nl> - 12 . Refcounting and copies <nl> + 11 . Refcounting and copies <nl> <nl> | Mov , DofS ( 0 ) , S ( Top ) , P <nl> <nl> Defines D as S0 . May imply register - to - register moves at code generation <nl> time . Does not imply an incref or any other manipulation of S0 . <nl> <nl> - | IncRef , ND , S ( Gen ) , NF <nl> + | IncRef , ND , S ( Cell ) , NF <nl> <nl> If S0 is a refcounted type , increment its refcount . <nl> <nl> - | DecRef < locId > , ND , S ( Gen ) , CRc <nl> + | DecRef < locId > , ND , S ( Cell ) , CRc <nl> <nl> Decrease the reference count of S0 by one , and call a destructor for types <nl> that require it if it goes to zero . <nl> <nl> - Note that although DecRef takes a Gen , we don ' t allow it to use information <nl> - about the inner types of a BoxedCell . This is because we don ' t guard on the <nl> - inner types of a BoxedCell except when doing LdRef . For any S0 that is a <nl> - strict subtype of BoxedCell , the DecRef must just decref it as if it were a <nl> - BoxedCell . <nl> - <nl> The locId is just a hint to the runtime indicating which local variable is <nl> being DecRef ' d , if any . <nl> <nl> - | DecRefNZ < locId > , ND , S ( Gen ) , CRc <nl> + | DecRefNZ < locId > , ND , S ( Cell ) , CRc <nl> <nl> Decrease the reference count of S0 by one , do not check if it goes to zero . <nl> This instruction can be used for more efficient code when it is provable that <nl> the reference count cannot go to zero . <nl> <nl> - | ProfileDecRef < locId > , ND , S ( Gen ) , NF <nl> + | ProfileDecRef < locId > , ND , S ( Cell ) , NF <nl> <nl> Update the DecRefProfile for the given input as if it were dec - ref - ed , but do <nl> not actually dec - ref it . We can use this op for e . g . iterator output locals , <nl> because we don ' t specialize iterators in profiling translations . <nl> <nl> <nl> - 13 . Misc <nl> + 12 . Misc <nl> <nl> | DefFP , D ( FramePtr ) , NA , NF <nl> <nl> To string conversions : <nl> as specialized code for an iterator init or next which we may or may not use . <nl> If it survives irgen , it should be eliminated in the first DCE pass . <nl> <nl> - 14 . Runtime helpers <nl> + 13 . Runtime helpers <nl> <nl> | VerifyParamCls , ND , S ( Cls ) S ( Cls | Nullptr ) C ( Int ) C ( Int ) , NF <nl> <nl> To string conversions : <nl> this instruction will raise a recoverable fatal error describing the type <nl> mismatch for parameter S3 . 
<nl> <nl> - | VerifyParamCallable , ND , S ( Gen ) C ( Int ) , NF <nl> + | VerifyParamCallable , ND , S ( Cell ) C ( Int ) , NF <nl> <nl> If S0 is not callable , as defined by the php function is_callable , this <nl> instruction will raise a recoverable fatal error describing the type <nl> To string conversions : <nl> statically prove that this failure will result in a fatal error rather than a <nl> type coercion . <nl> <nl> - | VerifyRetCallable , ND , S ( Gen ) , NF <nl> + | VerifyRetCallable , ND , S ( Cell ) , NF <nl> <nl> Verify a return type hint . <nl> <nl> To string conversions : <nl> <nl> Verify a return type hint for a record . <nl> <nl> - | VerifyRetFail , ND , S ( PtrToGen ) , NF <nl> + | VerifyRetFail , ND , S ( PtrToCell ) , NF <nl> <nl> Failure to verify a return type hint . <nl> <nl> - | VerifyRetFailHard , ND , S ( PtrToGen ) , T <nl> + | VerifyRetFailHard , ND , S ( PtrToCell ) , T <nl> <nl> Terminal version of VerifyRetFail , to be used when the compiler can prove <nl> that this failure will result in a fatal error . <nl> To string conversions : <nl> [ S1 : S1 + S2 ) , and if so return the value S1 - ( Int ) S0 . Else , they return the <nl> target of the default target , S2 + 1 . <nl> <nl> - | LdSSwitchDestFast , D ( TCA ) , S ( Gen ) , NF <nl> + | LdSSwitchDestFast , D ( TCA ) , S ( Cell ) , NF <nl> <nl> - | LdSSwitchDestSlow , D ( TCA ) , S ( Gen ) , NF <nl> + | LdSSwitchDestSlow , D ( TCA ) , S ( Cell ) , NF <nl> <nl> Load string switch destinations ( two different compilation strategies ) . <nl> <nl> To string conversions : <nl> that this cannot be used for CLOCK_THREAD_CPUTIME_ID , as HHVM provides <nl> different semantics for that counter . <nl> <nl> - 15 . Generators & Closures <nl> + 14 . Generators & Closures <nl> <nl> | LdClosureCls , DParam ( Cls ) , S ( Obj ) , NF <nl> <nl> To string conversions : <nl> <nl> Loads ' key ' from the Generator object ActRec of which is S0 . <nl> <nl> - | StContArKey , ND , S ( FramePtr ) S ( Gen ) , CRc <nl> + | StContArKey , ND , S ( FramePtr ) S ( Cell ) , CRc <nl> <nl> Stores ' key ' into the Generator object ActRec of which is S0 . S1 is the <nl> new value . <nl> To string conversions : <nl> handle states in ext_asio . h . This instruction has undefined behavior if S0 is <nl> not a WaitHandle . <nl> <nl> - | LdWHResult , DParam ( Gen ) , S ( Obj ) , NF <nl> + | LdWHResult , DParam ( Cell ) , S ( Obj ) , NF <nl> <nl> Loads the result of the WaitHandle in S0 . This instruction has undefined <nl> behavior if S0 is not a WaitHandle , or if S0 is not finished . <nl> To string conversions : <nl> AsyncFunctionWaitHandle object in S0 . <nl> <nl> <nl> - 16 . Debugging , instrumentation , and profiling <nl> + 15 . Debugging , instrumentation , and profiling <nl> <nl> | IncStat , ND , C ( Int ) , NF <nl> <nl> To string conversions : <nl> <nl> Increment the profiling counter associated with translation TransID . <nl> <nl> - | DbgAssertRefCount < AssertReason > , ND , S ( Gen ) , NF <nl> + | DbgAssertRefCount < AssertReason > , ND , S ( Cell ) , NF <nl> <nl> Assert that S0 has a valid refcount . If S0 has a reference counted type and <nl> its count is implausible then execute a hardware trap instruction . <nl> To string conversions : <nl> operator ( @ foo ( ) ) . <nl> <nl> <nl> - 17 . Iterators <nl> + 16 . 
Iterators <nl> <nl> | IterInit < IterData > , D ( Bool ) , S ( ArrLike , Obj ) S ( FramePtr ) , CRc <nl> <nl> To string conversions : <nl> not need to be a valid array position ; for example , it may equal the size of <nl> the array ( so that the " elm " returned is the pointer - iteration end for S0 ) . <nl> <nl> - | AdvanceMixedPtrIter < offset > , DPtrIter , S ( PtrToElemGen ) , NF <nl> + | AdvanceMixedPtrIter < offset > , DPtrIter , S ( PtrToElemCell ) , NF <nl> <nl> - | AdvancePackedPtrIter < offset > , DPtrIter , S ( PtrToElemGen ) , NF <nl> + | AdvancePackedPtrIter < offset > , DPtrIter , S ( PtrToElemCell ) , NF <nl> <nl> Increments the pointer S0 to the array element with the given layout ` offset ` <nl> positions forward . ` offset ` is allowed to be negative . <nl> <nl> - | LdPtrIterKey < T > , DParam ( Int | Str ) , S ( PtrToElemGen ) , NF <nl> + | LdPtrIterKey < T > , DParam ( Int | Str ) , S ( PtrToElemCell ) , NF <nl> <nl> - | LdPtrIterVal < T > , DPtrIterVal , S ( PtrToElemGen ) , NF <nl> + | LdPtrIterVal < T > , DPtrIterVal , S ( PtrToElemCell ) , NF <nl> <nl> Loads the key or val from the array element pointed to by S0 . S0 must be a <nl> valid elm ; that is , it can ' t point to the end of the array data . LdPtrIterKey <nl> To string conversions : <nl> skip doing a check on the type of the elm . For LdPtrIterVal , it ' s only used <nl> to constrain the memory effects of the op . <nl> <nl> - | EqPtrIter , D ( Bool ) , S ( PtrToElemGen ) S ( PtrToElemGen ) , NF <nl> + | EqPtrIter , D ( Bool ) , S ( PtrToElemCell ) S ( PtrToElemCell ) , NF <nl> <nl> Compares two pointer iterators for equality . <nl> <nl> <nl> - 18 . Member instruction support <nl> + 17 . Member instruction support <nl> <nl> - | LdMIStateAddr , D ( PtrToMISGen ) , C ( Int ) , NF <nl> + | LdMIStateAddr , D ( PtrToMISCell ) , C ( Int ) , NF <nl> <nl> Load an MInstrState address . Returns a pointer to offset S0 within the <nl> current MInstrState . <nl> <nl> - | LdMBase , DParam ( LvalToGen ) , NA , NF <nl> + | LdMBase , DParam ( LvalToCell ) , NA , NF <nl> <nl> Load the current value of the member base register . <nl> <nl> - | StMBase , ND , S ( LvalToGen ) , NF <nl> + | StMBase , ND , S ( LvalToCell ) , NF <nl> <nl> Store a new value to the member base register . It is illegal for any <nl> instruction other than StMBase or InterpOne ( when interpreting a member <nl> SetProp will fail if the base is not a subtype of { Obj | Null } . <nl> Any instructions that take a pointer to an MInstrState struct use the various <nl> fields of that struct for holding intermediate values . <nl> <nl> - | BaseG , D ( LvalToRMembCell ) , S ( Str ) , NF <nl> - <nl> - Get a base from global named S0 . <nl> + | BaseG , D ( LvalToMembCell ) , S ( Str ) , NF <nl> <nl> - NB : BaseG returns either a PtrToGblGen , OR a pointer to a ref that is rooted <nl> - in a GblGen . ( I . e . the unbox happens in the C + + helper that this instruction <nl> - calls . ) If it is not a defining BaseG it can also return the <nl> - init_null_variant , so for now it returns a PtrToRMembCell . <nl> + Get a base from global named S0 . If it is not a defining BaseG it can also <nl> + return the init_null_variant , so for now it returns a PtrToMembCell . <nl> <nl> - | PropX , D ( LvalToMembGen ) , S ( Obj , LvalToGen ) S ( Cell ) S ( PtrToMISGen ) , NF <nl> + | PropX , D ( LvalToMembCell ) , S ( Obj , LvalToCell ) S ( Cell ) S ( PtrToMISCell ) , NF <nl> <nl> Lookup intermediate property in S0 , with key S1 . 
<nl> <nl> - | PropQ , D ( LvalToMembGen ) , S ( Obj , LvalToGen ) S ( StaticStr ) S ( PtrToMISGen ) , NF <nl> + | PropQ , D ( LvalToMembCell ) , S ( Obj , LvalToCell ) S ( StaticStr ) S ( PtrToMISCell ) , NF <nl> <nl> A nullsafe version of PropX , returns null if the base S0 is null . <nl> <nl> - | PropDX , D ( LvalToMembGen ) , S ( Obj , LvalToGen ) S ( Cell ) S ( PtrToMISGen ) S ( MIPropSPtr , Nullptr ) , MProp <nl> + | PropDX , D ( LvalToMembCell ) , S ( Obj , LvalToCell ) S ( Cell ) S ( PtrToMISCell ) S ( MIPropSPtr , Nullptr ) , MProp <nl> <nl> Like PropX , but used for intermediate element lookups that may modify the <nl> base . <nl> <nl> - | CGetProp , D ( Cell ) , S ( Obj , LvalToGen ) S ( Cell ) , PRc <nl> + | CGetProp , D ( Cell ) , S ( Obj , LvalToCell ) S ( Cell ) , PRc <nl> <nl> Get property with key S1 from S0 . <nl> <nl> - | CGetPropQ , D ( Cell ) , S ( Obj , LvalToGen ) S ( StaticStr ) , PRc <nl> + | CGetPropQ , D ( Cell ) , S ( Obj , LvalToCell ) S ( StaticStr ) , PRc <nl> <nl> A nullsafe version of CGetProp , returns null if the base S0 is null . <nl> <nl> - | SetProp , ND , S ( Obj , LvalToGen ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MProp <nl> + | SetProp , ND , S ( Obj , LvalToCell ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MProp <nl> <nl> Set property with key S1 in S0 to S2 . <nl> <nl> - | UnsetProp , ND , S ( Obj , LvalToGen ) S ( Cell ) , NF <nl> + | UnsetProp , ND , S ( Obj , LvalToCell ) S ( Cell ) , NF <nl> <nl> Unset an object property . <nl> <nl> | SetOpProp < op > , D ( Cell ) , <nl> - | S ( Obj , LvalToGen ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> + | S ( Obj , LvalToCell ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> | MProp | PRc <nl> <nl> Set op propery with key S1 in base S0 , using S2 as the right hand side . <nl> <nl> | IncDecProp < op > , D ( Cell ) , <nl> - | S ( Obj , LvalToGen ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> + | S ( Obj , LvalToCell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> | MProp | PRc <nl> <nl> Increment / decrement property with key S1 in base S0 . <nl> <nl> - | EmptyProp , D ( Bool ) , S ( Obj , LvalToGen ) S ( Cell ) , NF <nl> + | EmptyProp , D ( Bool ) , S ( Obj , LvalToCell ) S ( Cell ) , NF <nl> <nl> Returns true iff the property with key S1 in base S0 is empty . <nl> <nl> - | IssetProp , D ( Bool ) , S ( Obj , LvalToGen ) S ( Cell ) , NF <nl> + | IssetProp , D ( Bool ) , S ( Obj , LvalToCell ) S ( Cell ) , NF <nl> <nl> Returns true iff the property with key S1 in base S0 is set . <nl> <nl> - | ElemX , D ( LvalToMembGen ) , S ( LvalToGen ) S ( Cell ) S ( PtrToMISGen ) , NF <nl> + | ElemX , D ( LvalToMembCell ) , S ( LvalToCell ) S ( Cell ) S ( PtrToMISCell ) , NF <nl> <nl> Get intermediate element with key S1 from base S0 . The base will not be <nl> modified . <nl> fields of that struct for holding intermediate values . <nl> that S1 exactly matches the element key of S0 at ` pos ' . If any of the checks <nl> fail , branch to B . This check is allowed to have false negatives . 
<nl> <nl> - | ElemArrayX < M > , D ( LvalToMembInitGen ) , S ( Arr ) S ( Int , Str ) , NF <nl> + | ElemArrayX < M > , D ( LvalToMembInitCell ) , S ( Arr ) S ( Int , Str ) , NF <nl> <nl> - | ElemArrayD < T > , D ( LvalToElemInitGen ) , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | ElemArrayD < T > , D ( LvalToElemInitCell ) , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> - | ElemArrayU < T > , D ( LvalToMembInitGen ) , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | ElemArrayU < T > , D ( LvalToMembInitCell ) , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> Similar to ElemX , but the base S0 is an array and the key S1 is an int / str . <nl> ElemArrayD is for Define member instrs , ElemArrayU is for Unset , and <nl> ElemArrayX is for InOut , Warn , and None instrs ( where the mode is M ) . <nl> <nl> - ElemArray { D , U } both take a LvalToGen for the base operand , but expect it to be <nl> - a LvalToArr or LvalToBoxedArr . T is the type of the base array . <nl> + ElemArray { D , U } both take a LvalToCell for the base operand , but expect it to be <nl> + a LvalToArr . T is the type of the base array . <nl> <nl> - | ElemMixedArrayK < pos > , D ( LvalToElemInitGen ) , S ( AK ( Mixed ) ) S ( Int , Str ) , NF <nl> + | ElemMixedArrayK < pos > , D ( LvalToElemInitCell ) , S ( AK ( Mixed ) ) S ( Int , Str ) , NF <nl> <nl> Like ElemArray , but the element for S1 is at a known position ` pos ' in S0 . <nl> <nl> - | ElemVecD < T > , D ( LvalToElemInitCell ) , S ( LvalToGen ) S ( Int ) , MElem <nl> + | ElemVecD < T > , D ( LvalToElemInitCell ) , S ( LvalToCell ) S ( Int ) , MElem <nl> <nl> - | ElemVecU < T > , D ( LvalToMembInitCell ) , S ( LvalToGen ) S ( Int ) , MElem <nl> + | ElemVecU < T > , D ( LvalToMembInitCell ) , S ( LvalToCell ) S ( Int ) , MElem <nl> <nl> Similar to ElemX , but the base S0 is a vec and the key S1 is an int . ElemVecD <nl> is for Define member instrs and ElemVecU is for Unset . ( Other variations can <nl> be implemented without special IR instructions ) . <nl> <nl> - ElemVec { D , U } both take a LvalToGen for the base operand , but expect it to be a <nl> - LvalToVec or LvalToBoxedVec . T is the type of the base vec . <nl> + ElemVec { D , U } both take a LvalToCell for the base operand , but expect it to be a <nl> + LvalToVec . T is the type of the base vec . <nl> <nl> | ElemDictX < M > , D ( LvalToMembInitCell ) , S ( Dict ) S ( Int , Str ) , NF <nl> <nl> - | ElemDictD < T > , D ( LvalToElemInitCell ) , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | ElemDictD < T > , D ( LvalToElemInitCell ) , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> - | ElemDictU < T > , D ( LvalToMembInitCell ) , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | ElemDictU < T > , D ( LvalToMembInitCell ) , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> Similar to ElemX , but the base S0 is a dict and the key S1 is an int / str . <nl> ElemDictD is for Define member instrs , ElemDictU is for Unset , and <nl> ElemDictX is for InOut , Warn , and None instrs ( where the mode is M ) . <nl> <nl> - ElemDict { D , U } both take a LvalToGen for the base operand , but expect it to be <nl> - a LvalToDict or LvalToBoxedDict . T is the type of the base array . <nl> + ElemDict { D , U } both take a LvalToCell for the base operand , but expect it to be <nl> + a LvalToDict . T is the type of the base array . <nl> <nl> | ElemDictK < pos > , D ( LvalToElemInitCell ) , S ( Dict ) S ( Int , Str ) , NF <nl> <nl> fields of that struct for holding intermediate values . 
<nl> <nl> | ElemKeysetX < M > , D ( LvalToMembInitCell ) , S ( Keyset ) S ( Int , Str ) , NF <nl> <nl> - | ElemKeysetU < T > , D ( LvalToMembInitCell ) , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | ElemKeysetU < T > , D ( LvalToMembInitCell ) , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> Similar to ElemX , but the base S0 is a keyset and the key S1 is an int / str . <nl> ElemKeysetU is for Unset instrs and ElemKeysetX is for InOut , Warn and None <nl> instrs ( where the mode is M ) . <nl> <nl> - ElemKeysetU both take a LvalToGen for the base operand , but expect it to be <nl> - a LvalToKeyset or LvalToBoxedKeyset . T is the type of the base array . <nl> + ElemKeysetU both take a LvalToCell for the base operand , but expect it to be <nl> + a LvalToKeyset . T is the type of the base array . <nl> <nl> | ElemKeysetK < pos > , D ( LvalToElemInitCell ) , S ( Keyset ) S ( Int , Str ) , NF <nl> <nl> Like ElemKeyset , but the element for S1 is at a known position ` pos ' in S0 . <nl> <nl> - | ElemDX , D ( LvalToMembGen ) , S ( LvalToGen ) S ( Cell ) S ( PtrToMISGen ) S ( MIPropSPtr , Nullptr ) , MElem <nl> + | ElemDX , D ( LvalToMembCell ) , S ( LvalToCell ) S ( Cell ) S ( PtrToMISCell ) S ( MIPropSPtr , Nullptr ) , MElem <nl> <nl> Like ElemX , but used for intermediate element lookups that may modify the <nl> base . <nl> <nl> - | ElemUX , D ( LvalToMembGen ) , S ( LvalToGen ) S ( Cell ) S ( PtrToMISGen ) , MElem <nl> + | ElemUX , D ( LvalToMembCell ) , S ( LvalToCell ) S ( Cell ) S ( PtrToMISCell ) , MElem <nl> <nl> Like ElemX , but used for intermediate element lookups that may modify the <nl> base as part of an unset operation . <nl> fields of that struct for holding intermediate values . <nl> <nl> Get element with key S1 from base S0 . <nl> <nl> - | CGetElem , D ( Cell ) , S ( LvalToGen ) S ( Cell ) , PRc <nl> + | CGetElem , D ( Cell ) , S ( LvalToCell ) S ( Cell ) , PRc <nl> <nl> Get element with key S1 from S0 . <nl> <nl> fields of that struct for holding intermediate values . <nl> Set element with key S1 in S0 to S2 . The dest will be a new Array that should <nl> replace S0 . <nl> <nl> - | ArraySetRef , ND , S ( Arr ) S ( Int , Str ) S ( Cell ) S ( BoxedCell ) , CRc <nl> - <nl> - Like ArraySet , but for binding operations on the array . S3 must point to a <nl> - RefData with an array type when this instruction is executed , and it must be <nl> - the same array that is in S0 . <nl> - <nl> | VecSet , D ( Vec ) , S ( Vec ) S ( Int ) S ( Cell ) , PRc | CRc <nl> <nl> Set element with key S1 in S0 to S2 . The dest will be a new Vec that should <nl> replace S0 . <nl> <nl> - | VecSetRef , ND , S ( Vec ) S ( Int ) S ( Cell ) S ( BoxedCell ) , CRc <nl> - <nl> - Like VecSet , but for binding operations on the vec . S3 must point to a <nl> - RefData with a vec type when this instruction is executed , and it must be the <nl> - same vec that is in S0 . <nl> - <nl> | DictSet , D ( Dict ) , S ( Dict ) S ( Int , Str ) S ( Cell ) , PRc | CRc <nl> <nl> Set element with key S1 in S0 to S2 . The dest will be a new Dict that should <nl> replace S0 . <nl> <nl> - | DictSetRef , ND , S ( Dict ) S ( Int , Str ) S ( Cell ) S ( BoxedCell ) , CRc <nl> - <nl> - Like DictSet , but for binding operations on the dict . S3 must point to a <nl> - RefData with a vec type when this instruction is executed , and it must be the <nl> - same dict that is in S0 . <nl> - <nl> | MapSet , ND , S ( Obj ) S ( Int , Str ) S ( Cell ) , NF <nl> <nl> Set element with key S1 in S0 to S2 . 
<nl> fields of that struct for holding intermediate values . <nl> <nl> Set element with key S1 in S0 to S2 . <nl> <nl> - | SetElem , DSetElem , S ( LvalToGen ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem <nl> + | SetElem , DSetElem , S ( LvalToCell ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem <nl> <nl> Set element with key S1 in S0 to S2 . SetElem returns a Nullptr in the common <nl> case , where the logical result of the hhbc SetM is its right hand side . In <nl> fields of that struct for holding intermediate values . <nl> Furthermore , in the case of " invalid offsets " , SetElem may throw an <nl> InvalidSetMException ( see discussion above ) . <nl> <nl> - | SetRange , ND , S ( LvalToGen ) S ( Int ) S ( Cell ) S ( Int ) S ( Int ) , MElem <nl> + | SetRange , ND , S ( LvalToCell ) S ( Int ) S ( Cell ) S ( Int ) S ( Int ) , MElem <nl> <nl> - | SetRangeRev , ND , S ( LvalToGen ) S ( Int ) S ( Cell ) S ( Int ) S ( Int ) , MElem <nl> + | SetRangeRev , ND , S ( LvalToCell ) S ( Int ) S ( Cell ) S ( Int ) S ( Int ) , MElem <nl> <nl> Perform a range set or reverse range set operation , with the same arguments <nl> and semantics as the RangeSet bytecode instruction . <nl> <nl> - | UnsetElem , ND , S ( LvalToGen ) S ( Cell ) , MElem <nl> + | UnsetElem , ND , S ( LvalToCell ) S ( Cell ) , MElem <nl> <nl> Unsets the element at key S1 in the base S0 . <nl> <nl> | SetOpElem < op > , D ( Cell ) , <nl> - | S ( LvalToGen ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> + | S ( LvalToCell ) S ( Cell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , <nl> | MElem | PRc <nl> <nl> Set op elem with key S1 in base S0 , using S2 as the right hand side . <nl> <nl> - | IncDecElem , D ( Cell ) , S ( LvalToGen ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem | PRc <nl> + | IncDecElem , D ( Cell ) , S ( LvalToCell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem | PRc <nl> <nl> Increment / decrement element with key S1 in base S0 . <nl> <nl> - | SetNewElem , ND , S ( LvalToGen ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem <nl> + | SetNewElem , ND , S ( LvalToCell ) S ( Cell ) S ( MIPropSPtr , Nullptr ) , MElem <nl> <nl> Append the value in S1 to S0 . <nl> <nl> - | SetNewElemArray , ND , S ( LvalToGen ) S ( Cell ) , MElem <nl> + | SetNewElemArray , ND , S ( LvalToCell ) S ( Cell ) , MElem <nl> <nl> Append the value in S1 to S0 , where S0 must be a pointer to a array . <nl> <nl> - | SetNewElemVec , ND , S ( LvalToGen ) S ( Cell ) , MElem <nl> + | SetNewElemVec , ND , S ( LvalToCell ) S ( Cell ) , MElem <nl> <nl> Append the value in S1 to S0 , where S0 must be a pointer to a vec . <nl> <nl> - | SetNewElemKeyset , ND , S ( LvalToGen ) S ( Int , Str ) , MElem <nl> + | SetNewElemKeyset , ND , S ( LvalToCell ) S ( Int , Str ) , MElem <nl> <nl> Append the value in S1 to S0 , where S0 must be a pointer to a keyset . <nl> <nl> fields of that struct for holding intermediate values . <nl> <nl> Returns true iff the element at key S1 in the base S0 is set . <nl> <nl> - | IssetElem , D ( Bool ) , S ( LvalToGen ) S ( Cell ) , NF <nl> + | IssetElem , D ( Bool ) , S ( LvalToCell ) S ( Cell ) , NF <nl> <nl> Returns true iff the element at key S1 in S0 is set . <nl> <nl> - | EmptyElem , D ( Bool ) , S ( LvalToGen ) S ( Cell ) , NF <nl> + | EmptyElem , D ( Bool ) , S ( LvalToCell ) S ( Cell ) , NF <nl> <nl> Returns true iff the element at key S1 in S0 is set and not equal ( as defined <nl> by the hhbc Eq instruction ) to false . <nl> fields of that struct for holding intermediate values . 
<nl> Throws an out of bounds exception if S0 is an undefined key for an array . <nl> The isInOut option indicates that the access was for an inout parameter . <nl> <nl> - | ThrowOutOfBounds , ND , S ( ArrLike | Obj ) S ( Gen ) , T <nl> + | ThrowOutOfBounds , ND , S ( ArrLike | Obj ) S ( Cell ) , T <nl> <nl> Throws an OutOfBoundsException corresponding to an access of S0 with the key <nl> S1 . <nl> <nl> - | ThrowInvalidArrayKey , ND , S ( ArrLike ) S ( Gen ) , T <nl> + | ThrowInvalidArrayKey , ND , S ( ArrLike ) S ( Cell ) , T <nl> <nl> Throws an InvalidArgumentException corresponding to an access of S0 with the <nl> key S1 , which has a type invalid for that array . <nl> fields of that struct for holding intermediate values . <nl> property . S0 is the class the property was declared on . S1 is the property <nl> name . S2 is true if its a static property , false otherwise . <nl> <nl> - | ThrowParameterWrongType < expectedType , func , argNum > , ND , S ( Gen ) , T <nl> + | ThrowParameterWrongType < expectedType , func , argNum > , ND , S ( Cell ) , T <nl> <nl> Throws a RuntimeException if calling a function with an argument that has the <nl> wrong type <nl> fields of that struct for holding intermediate values . <nl> <nl> Profile the array kind of S0 . <nl> <nl> - | ProfileType , ND , S ( Gen ) , NF <nl> + | ProfileType , ND , S ( Cell ) , NF <nl> <nl> Profile the type of S0 . <nl> <nl> fields of that struct for holding intermediate values . <nl> Checks that the index in S1 is within the bounds of the packed array or <nl> vector array in S0 . Branches to B if the index is out of bounds . <nl> <nl> - | LdPackedArrayDataElemAddr < T > , DParam ( LvalToElemGen ) , S ( AK ( Packed ) , Vec ) S ( Int ) , NF <nl> + | LdPackedArrayDataElemAddr < T > , DParam ( LvalToElemCell ) , S ( AK ( Packed ) , Vec ) S ( Int ) , NF <nl> <nl> Loads the address of the element at index S1 of the packed array or vec array <nl> in S0 . This instruction assumes the array actually contains an element at <nl> fields of that struct for holding intermediate values . <nl> returning the tagged array . <nl> <nl> <nl> - 19 . Exception / unwinding support <nl> + 18 . Exception / unwinding support <nl> <nl> | BeginCatch , ND , NA , NF <nl> <nl> fields of that struct for holding intermediate values . <nl> Loads the value contained by the current unwinder exception . <nl> <nl> <nl> - 20 . Function prologues <nl> + 19 . Function prologues <nl> <nl> | CheckStackOverflow , ND , S ( FramePtr ) , NF <nl> <nl> fields of that struct for holding intermediate values . <nl> Load the flags stored on the ActRec pointed to by the frame <nl> pointer S0 . Bits not defined as flags may contain arbitrary garbage . <nl> <nl> - | LdTVAux < ValidBits > , D ( Int ) , S ( Gen ) , NF <nl> + | LdTVAux < ValidBits > , D ( Int ) , S ( Cell ) , NF <nl> <nl> Load the value of m_aux from the TypedValue S0 . ValidBits is a mask <nl> specifying which bits are allowed to be set . The runtime may ignore it . <nl> mmm a / hphp / runtime / test / alias - class . cpp <nl> ppp b / hphp / runtime / test / alias - class . cpp <nl> TEST ( AliasClass , IterUnion ) { <nl> TEST ( AliasClass , Pointees ) { <nl> IRUnit unit { test_context } ; <nl> auto const bcctx = BCContext { BCMarker : : Dummy ( ) , 0 } ; <nl> - auto ptr = unit . gen ( LdMBase , bcctx , TLvalToGen ) - > dst ( ) ; <nl> + auto ptr = unit . 
gen ( LdMBase , bcctx , TLvalToCell ) - > dst ( ) ; <nl> auto const acls = pointee ( ptr ) ; <nl> EXPECT_EQ ( AHeapAny | AFrameAny | AStackAny | AMIStateTV | ARdsAny , acls ) ; <nl> } <nl> mmm a / hphp / runtime / test / prediction - opts . cpp <nl> ppp b / hphp / runtime / test / prediction - opts . cpp <nl> TEST ( PredictionOpts , basic ) { <nl> Block * taken = unit . defBlock ( ) ; <nl> Block * end = unit . defBlock ( ) ; <nl> <nl> - auto ptr = unit . gen ( Conjure , bcctx , TPtrToGen ) ; <nl> - auto ldm = unit . gen ( LdMem , bcctx , TGen , ptr - > dst ( ) ) ; <nl> + auto ptr = unit . gen ( Conjure , bcctx , TPtrToCell ) ; <nl> + auto ldm = unit . gen ( LdMem , bcctx , TCell , ptr - > dst ( ) ) ; <nl> auto inc = unit . gen ( IncRef , bcctx , ldm - > dst ( ) ) ; <nl> auto ckt = unit . gen ( CheckType , bcctx , TInt , taken , ldm - > dst ( ) ) ; <nl> ckt - > setNext ( end ) ; <nl> TEST ( PredictionOpts , basic ) { <nl> auto takenIt = taken - > begin ( ) ; <nl> auto & ldmem = * takenIt ; <nl> auto & incref = * ( + + takenIt ) ; <nl> - EXPECT_MATCH ( ldmem , LdMem , TGen , ptr - > dst ( ) ) ; <nl> + EXPECT_MATCH ( ldmem , LdMem , TCell , ptr - > dst ( ) ) ; <nl> EXPECT_MATCH ( incref , IncRef , ldmem . dst ( ) ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / test / type . cpp <nl> ppp b / hphp / runtime / test / type . cpp <nl> std : : unordered_set < Type > allTypes ( ) { <nl> } <nl> <nl> TEST ( Type , Equality ) { <nl> - EXPECT_NE ( TCls , TPtrToBoxedObj ) ; <nl> - EXPECT_NE ( TCls , TLvalToBoxedObj ) ; <nl> - EXPECT_NE ( TCls , TMemToBoxedObj ) ; <nl> + EXPECT_NE ( TCls , TPtrToObj ) ; <nl> + EXPECT_NE ( TCls , TLvalToObj ) ; <nl> + EXPECT_NE ( TCls , TMemToObj ) ; <nl> } <nl> <nl> TEST ( Type , Null ) { <nl> TEST ( Type , Null ) { <nl> TEST ( Type , KnownDataType ) { <nl> auto trueTypes = { <nl> TInt , <nl> - TBoxedCell , <nl> TStaticStr , <nl> TCountedStr , <nl> TStr , <nl> TEST ( Type , KnownDataType ) { <nl> auto falseTypes = { <nl> TNull , <nl> TCell , <nl> - TGen , <nl> TInt | TDbl , <nl> TArrLike , <nl> TPersistentArrLike <nl> TEST ( Type , KnownDataType ) { <nl> TEST ( Type , ToString ) { <nl> EXPECT_EQ ( " Int " , TInt . toString ( ) ) ; <nl> EXPECT_EQ ( " Cell " , TCell . toString ( ) ) ; <nl> - EXPECT_EQ ( " BoxedDbl " , TBoxedDbl . toString ( ) ) ; <nl> - EXPECT_EQ ( " Boxed { Dbl | Int } " , ( TBoxedInt | TBoxedDbl ) . toString ( ) ) ; <nl> <nl> EXPECT_EQ ( " Vec " , TVec . toString ( ) ) ; <nl> EXPECT_EQ ( " Dict " , TDict . toString ( ) ) ; <nl> TEST ( Type , ToString ) { <nl> EXPECT_EQ ( " PtrToStr " , TPtrToStr . toString ( ) ) ; <nl> EXPECT_EQ ( " LvalToStr " , TLvalToStr . toString ( ) ) ; <nl> <nl> - EXPECT_EQ ( " PtrTo { Prop | MIS | MMisc | Other } Gen " , <nl> - ( TPtrToMembGen - TPtrToElemGen ) . toString ( ) ) ; <nl> - EXPECT_EQ ( " LvalTo { Prop | MIS | MMisc | Other } Gen " , <nl> - ( TLvalToMembGen - TLvalToElemGen ) . toString ( ) ) ; <nl> - EXPECT_EQ ( " PtrToMembGen " , TPtrToMembGen . toString ( ) ) ; <nl> - EXPECT_EQ ( " LvalToMembGen " , TLvalToMembGen . toString ( ) ) ; <nl> - EXPECT_EQ ( <nl> - " PtrTo { ClsInit | ClsCns | Frame | Stk | Gbl | Prop | Elem | SProp | MIS | MMisc | Other } Gen " , <nl> - ( TPtrToGen - TPtrToRefGen ) . toString ( ) <nl> - ) ; <nl> - EXPECT_EQ ( <nl> - " LvalTo { ClsInit | ClsCns | Frame | Stk | Gbl | Prop | Elem | SProp | MIS | MMisc | Other } Gen " , <nl> - ( TLvalToGen - TLvalToRefGen ) . toString ( ) <nl> - ) ; <nl> + EXPECT_EQ ( " PtrTo { Prop | MIS | MMisc | Other } Cell " , <nl> + ( TPtrToMembCell - TPtrToElemCell ) . 
toString ( ) ) ; <nl> + EXPECT_EQ ( " LvalTo { Prop | MIS | MMisc | Other } Cell " , <nl> + ( TLvalToMembCell - TLvalToElemCell ) . toString ( ) ) ; <nl> + EXPECT_EQ ( " PtrToMembCell " , TPtrToMembCell . toString ( ) ) ; <nl> + EXPECT_EQ ( " LvalToMembCell " , TLvalToMembCell . toString ( ) ) ; <nl> EXPECT_EQ ( " MemToInt " , TMemToInt . toString ( ) ) ; <nl> EXPECT_EQ ( " PtrTo { Str | Int } | LvalTo { Str | Int } " , <nl> ( TMemToInt | TMemToStr ) . toString ( ) ) ; <nl> TEST ( Type , ToString ) { <nl> <nl> EXPECT_EQ ( " InitNull " , TInitNull . constValString ( ) ) ; <nl> <nl> - EXPECT_EQ ( " InitGen " , TInitGen . toString ( ) ) ; <nl> - EXPECT_EQ ( " PtrToInitGen " , TInitGen . ptr ( Ptr : : Ptr ) . toString ( ) ) ; <nl> - EXPECT_EQ ( " PtrToFrameInitGen " , TPtrToFrameInitGen . toString ( ) ) ; <nl> - EXPECT_EQ ( " LvalToFrameInitGen " , TLvalToFrameInitGen . toString ( ) ) ; <nl> + EXPECT_EQ ( " InitCell " , TInitCell . toString ( ) ) ; <nl> + EXPECT_EQ ( " PtrToInitCell " , TInitCell . ptr ( Ptr : : Ptr ) . toString ( ) ) ; <nl> + EXPECT_EQ ( " PtrToFrameInitCell " , TPtrToFrameInitCell . toString ( ) ) ; <nl> + EXPECT_EQ ( " LvalToFrameInitCell " , TLvalToFrameInitCell . toString ( ) ) ; <nl> <nl> auto const ptrCns = Type : : cns ( ( TypedValue * ) 0xba5eba11 , TPtrToMembInitNull ) ; <nl> EXPECT_EQ ( " PtrToMembInitNull < TV : 0xba5eba11 > " , ptrCns . toString ( ) ) ; <nl> EXPECT_EQ ( " TV : 0xba5eba11 " , ptrCns . constValString ( ) ) ; <nl> } <nl> <nl> - TEST ( Type , Boxes ) { <nl> - EXPECT_EQ ( TBoxedDbl , TDbl . box ( ) ) ; <nl> - EXPECT_TRUE ( TBoxedDbl < = TBoxedCell ) ; <nl> - EXPECT_EQ ( TDbl , TBoxedDbl . unbox ( ) ) ; <nl> - EXPECT_FALSE ( TDbl < = TBoxedCell ) ; <nl> - EXPECT_EQ ( TCell , TGen . unbox ( ) ) ; <nl> - EXPECT_EQ ( TBoxedVec , TVec . box ( ) ) ; <nl> - EXPECT_EQ ( TBoxedDict , TDict . box ( ) ) ; <nl> - EXPECT_EQ ( TBoxedKeyset , TKeyset . box ( ) ) ; <nl> - <nl> - EXPECT_EQ ( ( TBoxedCell - TBoxedUninit ) , <nl> - ( TCell - TUninit ) . box ( ) ) ; <nl> - <nl> - EXPECT_EQ ( TBottom , TBoxedCell & TPtrToGen ) ; <nl> - EXPECT_EQ ( TBottom , TBoxedCell & TLvalToGen ) ; <nl> - EXPECT_EQ ( TBottom , TBoxedCell & TMemToGen ) ; <nl> - <nl> - EXPECT_EQ ( TInt | TDbl , ( TInt | TBoxedDbl ) . unbox ( ) ) ; <nl> - <nl> - auto const packedSpec = ArraySpec ( ArrayData : : kPackedKind ) ; <nl> - auto const arrData = ArrayData : : GetScalarArray ( make_packed_array ( 1 , 2 , 3 , 4 ) ) ; <nl> - auto boxedConstPackedArray = Type : : cns ( arrData ) . box ( ) ; <nl> - EXPECT_FALSE ( boxedConstPackedArray . hasConstVal ( ) ) ; <nl> - EXPECT_TRUE ( boxedConstPackedArray . isSpecialized ( ) ) ; <nl> - EXPECT_EQ ( TBoxedStaticArr , boxedConstPackedArray . unspecialize ( ) ) ; <nl> - EXPECT_EQ ( packedSpec , boxedConstPackedArray . arrSpec ( ) ) ; <nl> - <nl> - auto boxedStaticPackedArray = Type : : StaticArray ( ArrayData : : kPackedKind ) . box ( ) ; <nl> - EXPECT_FALSE ( boxedStaticPackedArray . hasConstVal ( ) ) ; <nl> - EXPECT_TRUE ( boxedStaticPackedArray . isSpecialized ( ) ) ; <nl> - EXPECT_EQ ( TBoxedStaticArr , boxedStaticPackedArray . unspecialize ( ) ) ; <nl> - EXPECT_EQ ( packedSpec , boxedStaticPackedArray . arrSpec ( ) ) ; <nl> - <nl> - auto boxedPackedArray = Type : : Array ( ArrayData : : kPackedKind ) . box ( ) ; <nl> - EXPECT_FALSE ( boxedPackedArray . hasConstVal ( ) ) ; <nl> - EXPECT_TRUE ( boxedPackedArray . isSpecialized ( ) ) ; <nl> - EXPECT_EQ ( TBoxedArr , boxedPackedArray . unspecialize ( ) ) ; <nl> - EXPECT_EQ ( packedSpec , boxedPackedArray . 
arrSpec ( ) ) ; <nl> - <nl> - auto boxedExactObj = Type : : ExactObj ( SystemLib : : s_IteratorClass ) . box ( ) ; <nl> - auto exactClassSpec = <nl> - ClassSpec ( SystemLib : : s_IteratorClass , ClassSpec : : ExactTag { } ) ; <nl> - EXPECT_FALSE ( boxedExactObj . hasConstVal ( ) ) ; <nl> - EXPECT_TRUE ( boxedExactObj . isSpecialized ( ) ) ; <nl> - EXPECT_EQ ( TBoxedObj , boxedExactObj . unspecialize ( ) ) ; <nl> - EXPECT_EQ ( exactClassSpec , boxedExactObj . clsSpec ( ) ) ; <nl> - <nl> - auto boxedSubObj = Type : : SubObj ( SystemLib : : s_IteratorClass ) . box ( ) ; <nl> - auto subClassSpec = <nl> - ClassSpec ( SystemLib : : s_IteratorClass , ClassSpec : : SubTag { } ) ; <nl> - EXPECT_FALSE ( boxedSubObj . hasConstVal ( ) ) ; <nl> - EXPECT_TRUE ( boxedSubObj . isSpecialized ( ) ) ; <nl> - EXPECT_EQ ( TBoxedObj , boxedSubObj . unspecialize ( ) ) ; <nl> - EXPECT_EQ ( subClassSpec , boxedSubObj . clsSpec ( ) ) ; <nl> - } <nl> - <nl> TEST ( Type , Ptr ) { <nl> - EXPECT_TRUE ( TPtrToInt < = TPtrToGen ) ; <nl> - EXPECT_TRUE ( TPtrToBoxedInt < = TPtrToGen ) ; <nl> - EXPECT_TRUE ( TPtrToBoxedCell < = TPtrToGen ) ; <nl> EXPECT_TRUE ( TPtrToInt < = TPtrToCell ) ; <nl> <nl> EXPECT_EQ ( TPtrToInt , TInt . ptr ( Ptr : : Ptr ) ) ; <nl> EXPECT_EQ ( TPtrToCell , TCell . ptr ( Ptr : : Ptr ) ) ; <nl> EXPECT_EQ ( TInt , TPtrToInt . deref ( ) ) ; <nl> - EXPECT_EQ ( TBoxedCell , TPtrToBoxedCell . deref ( ) ) ; <nl> <nl> EXPECT_EQ ( TPtrToInt , TPtrToInt - TInt ) ; <nl> EXPECT_EQ ( TInt , ( TPtrToInt | TInt ) - TPtrToInt ) ; <nl> TEST ( Type , Ptr ) { <nl> EXPECT_EQ ( t , t - TInt ) ; <nl> EXPECT_EQ ( TPtrToInt | TPtrToStr , t - ( TInt | TStr ) ) ; <nl> EXPECT_EQ ( TInt | TStr , t - ( TPtrToInt | TPtrToStr ) ) ; <nl> - EXPECT_EQ ( TPtrToFrameGen , TPtrToRFrameGen - TPtrToRefGen ) ; <nl> <nl> EXPECT_EQ ( TBottom , TPtrToInt & TInt ) ; <nl> auto const a1 = Type : : Array ( ArrayData : : kPackedKind ) . ptr ( Ptr : : Frame ) ; <nl> TEST ( Type , Ptr ) { <nl> } <nl> <nl> TEST ( Type , Lval ) { <nl> - EXPECT_TRUE ( TLvalToInt < = TLvalToGen ) ; <nl> - EXPECT_TRUE ( TLvalToBoxedInt < = TLvalToGen ) ; <nl> - EXPECT_TRUE ( TLvalToBoxedCell < = TLvalToGen ) ; <nl> EXPECT_TRUE ( TLvalToInt < = TLvalToCell ) ; <nl> <nl> EXPECT_EQ ( TInt , TLvalToInt . deref ( ) ) ; <nl> - EXPECT_EQ ( TBoxedCell , TLvalToBoxedCell . deref ( ) ) ; <nl> <nl> EXPECT_EQ ( TLvalToInt , TLvalToInt - TInt ) ; <nl> EXPECT_EQ ( TInt , ( TLvalToInt | TInt ) - TLvalToInt ) ; <nl> TEST ( Type , Lval ) { <nl> EXPECT_EQ ( t , t - TInt ) ; <nl> EXPECT_EQ ( TLvalToInt | TLvalToStr , t - ( TInt | TStr ) ) ; <nl> EXPECT_EQ ( TInt | TStr , t - ( TLvalToInt | TLvalToStr ) ) ; <nl> - EXPECT_EQ ( TLvalToFrameGen , TLvalToRFrameGen - TLvalToRefGen ) ; <nl> <nl> EXPECT_EQ ( TBottom , TLvalToInt & TInt ) ; <nl> } <nl> <nl> TEST ( Type , Mem ) { <nl> - EXPECT_TRUE ( TMemToInt < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TMemToBoxedInt < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TMemToBoxedCell < = TMemToGen ) ; <nl> EXPECT_TRUE ( TMemToInt < = TMemToCell ) ; <nl> <nl> EXPECT_EQ ( TInt , TMemToInt . deref ( ) ) ; <nl> - EXPECT_EQ ( TBoxedCell , TMemToBoxedCell . 
deref ( ) ) ; <nl> <nl> EXPECT_EQ ( TMemToInt , TMemToInt - TInt ) ; <nl> EXPECT_EQ ( TInt , ( TMemToInt | TInt ) - TMemToInt ) ; <nl> TEST ( Type , Mem ) { <nl> EXPECT_EQ ( t , t - TInt ) ; <nl> EXPECT_EQ ( TMemToInt | TMemToStr , t - ( TInt | TStr ) ) ; <nl> EXPECT_EQ ( TInt | TStr , t - ( TMemToInt | TMemToStr ) ) ; <nl> - EXPECT_EQ ( TMemToFrameGen , TMemToRFrameGen - TMemToRefGen ) ; <nl> <nl> EXPECT_EQ ( TBottom , TMemToInt & TInt ) ; <nl> } <nl> <nl> TEST ( Type , MemPtrLval ) { <nl> - EXPECT_TRUE ( TPtrToInt < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TLvalToInt < = TMemToGen ) ; <nl> - EXPECT_FALSE ( TInt < = TMemToGen ) ; <nl> - <nl> - EXPECT_TRUE ( TPtrToBoxedInt < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TLvalToBoxedInt < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TPtrToBoxedCell < = TMemToGen ) ; <nl> - EXPECT_TRUE ( TLvalToBoxedCell < = TMemToGen ) ; <nl> EXPECT_TRUE ( TPtrToInt < = TMemToCell ) ; <nl> EXPECT_TRUE ( TLvalToInt < = TMemToCell ) ; <nl> + EXPECT_FALSE ( TInt < = TMemToCell ) ; <nl> <nl> EXPECT_EQ ( TBottom , TPtrToInt & TLvalToInt ) ; <nl> - EXPECT_EQ ( TBottom , TPtrToGen & TLvalToGen ) ; <nl> - EXPECT_EQ ( TPtrToInt , TPtrToInt & TMemToGen ) ; <nl> + EXPECT_EQ ( TBottom , TPtrToCell & TLvalToCell ) ; <nl> + EXPECT_EQ ( TPtrToInt , TPtrToInt & TMemToCell ) ; <nl> <nl> EXPECT_EQ ( TPtrToInt , TMemToInt - TLvalToInt ) ; <nl> EXPECT_EQ ( TLvalToInt , TMemToInt - TPtrToInt ) ; <nl> TEST ( Type , Subtypes ) { <nl> EXPECT_FALSE ( TBool < = numbers ) ; <nl> <nl> EXPECT_TRUE ( TFunc < = TCell ) ; <nl> - EXPECT_FALSE ( TTCA < = TGen ) ; <nl> - <nl> - EXPECT_TRUE ( TPtrToCell < TPtrToGen ) ; <nl> + EXPECT_FALSE ( TTCA < = TCell ) ; <nl> <nl> EXPECT_TRUE ( TVec < = TArrLike ) ; <nl> EXPECT_TRUE ( TDict < = TArrLike ) ; <nl> inline bool fits ( Type t , GuardConstraint gc ) { <nl> } <nl> <nl> TEST ( Type , GuardConstraints ) { <nl> - EXPECT_TRUE ( fits ( TGen , DataTypeGeneric ) ) ; <nl> - EXPECT_FALSE ( fits ( TGen , DataTypeBoxAndCountness ) ) ; <nl> - EXPECT_FALSE ( fits ( TGen , DataTypeBoxAndCountnessInit ) ) ; <nl> - EXPECT_FALSE ( fits ( TGen , DataTypeSpecific ) ) ; <nl> - EXPECT_FALSE ( fits ( TGen , <nl> + EXPECT_TRUE ( fits ( TCell , DataTypeGeneric ) ) ; <nl> + EXPECT_FALSE ( fits ( TCell , DataTypeBoxAndCountness ) ) ; <nl> + EXPECT_FALSE ( fits ( TCell , DataTypeBoxAndCountnessInit ) ) ; <nl> + EXPECT_FALSE ( fits ( TCell , DataTypeSpecific ) ) ; <nl> + EXPECT_FALSE ( fits ( TCell , <nl> GuardConstraint ( DataTypeSpecialized ) . setWantArrayKind ( ) ) ) ; <nl> <nl> EXPECT_TRUE ( fits ( TCell , <nl> { DataTypeGeneric } ) ) ; <nl> - EXPECT_TRUE ( fits ( TGen , <nl> - { DataTypeGeneric } ) ) ; <nl> <nl> EXPECT_FALSE ( fits ( TArr , <nl> GuardConstraint ( DataTypeSpecialized ) . setWantArrayKind ( ) ) ) ; <nl> TEST ( Type , GuardConstraints ) { <nl> } <nl> <nl> TEST ( Type , RelaxType ) { <nl> - EXPECT_EQ ( TGen , relaxType ( TBoxedStr , DataTypeGeneric ) ) ; <nl> - EXPECT_EQ ( TBoxedInitCell | TUncounted , <nl> - relaxType ( TBoxedObj | TInitNull , <nl> - DataTypeBoxAndCountness ) ) ; <nl> - <nl> - <nl> auto gc = GuardConstraint { DataTypeSpecialized } ; <nl> gc . setDesiredClass ( SystemLib : : s_IteratorClass ) ; <nl> gc . category = DataTypeSpecialized ; <nl> auto subIter = Type : : SubObj ( SystemLib : : s_IteratorClass ) ; <nl> EXPECT_EQ ( " Obj < = Iterator " , subIter . toString ( ) ) ; <nl> EXPECT_EQ ( subIter , relaxType ( subIter , gc . 
category ) ) ; <nl> - <nl> - EXPECT_EQ ( TBoxedInitCell , <nl> - relaxType ( TBoxedInitCell , DataTypeBoxAndCountnessInit ) ) ; <nl> - EXPECT_EQ ( TBoxedInitCell , <nl> - relaxType ( TBoxedInitCell , DataTypeBoxAndCountness ) ) ; <nl> } <nl> <nl> TEST ( Type , RelaxConstraint ) { <nl> TEST ( Type , Specialized ) { <nl> EXPECT_EQ ( TBottom , packed & Type : : Array ( ArrayData : : kMixedKind ) ) ; <nl> EXPECT_EQ ( TBottom , packed - TArr ) ; <nl> <nl> - EXPECT_EQ ( TPtrToSPropCell , TPtrToSPropGen - TPtrToBoxedCell ) ; <nl> - <nl> - <nl> - <nl> auto const arrData = ArrayData : : GetScalarArray ( make_packed_array ( 1 , 2 , 3 , 4 ) ) ; <nl> auto const arrDataMixed = ArrayData : : GetScalarArray ( make_map_array ( 1 , 1 , <nl> 2 , 2 ) ) ; <nl> TEST ( Type , Specialized ) { <nl> EXPECT_EQ ( constArrayMixed , constArrayMixed - spacked ) ; <nl> <nl> / / Checking specialization dropping . <nl> - EXPECT_EQ ( TArr | TBoxedInitCell , packed | TBoxedInitCell ) ; <nl> auto subIter = Type : : SubObj ( SystemLib : : s_IteratorClass ) ; <nl> EXPECT_EQ ( TArr | TObj , packed | subIter ) ; <nl> <nl> TEST ( Type , Const ) { <nl> EXPECT_TRUE ( five . hasConstVal ( TInt ) ) ; <nl> EXPECT_TRUE ( five . hasConstVal ( 5 ) ) ; <nl> EXPECT_FALSE ( five . hasConstVal ( 5 . 0 ) ) ; <nl> - EXPECT_TRUE ( TGen . maybe ( five ) ) ; <nl> + EXPECT_TRUE ( TCell . maybe ( five ) ) ; <nl> EXPECT_EQ ( TInt , five | TInt ) ; <nl> EXPECT_EQ ( TInt , five | Type : : cns ( 10 ) ) ; <nl> EXPECT_EQ ( five , five | Type : : cns ( 5 ) ) ; <nl> EXPECT_EQ ( five , Type : : cns ( 5 ) & five ) ; <nl> EXPECT_EQ ( five , five & TInt ) ; <nl> - EXPECT_EQ ( five , TGen & five ) ; <nl> + EXPECT_EQ ( five , TCell & five ) ; <nl> EXPECT_EQ ( " Int < 5 > " , five . toString ( ) ) ; <nl> EXPECT_EQ ( five , five - TArr ) ; <nl> EXPECT_EQ ( five , five - Type : : cns ( 1 ) ) ; <nl> TEST ( Type , Const ) { <nl> EXPECT_EQ ( TArr , fiveArr - TInt ) ; <nl> EXPECT_EQ ( TBottom , five - TInt ) ; <nl> EXPECT_EQ ( TBottom , five - five ) ; <nl> - EXPECT_EQ ( TPtrToGen , <nl> - ( TPtrToGen | TNullptr ) - TNullptr ) ; <nl> + EXPECT_EQ ( TPtrToCell , <nl> + ( TPtrToCell | TNullptr ) - TNullptr ) ; <nl> EXPECT_EQ ( TInt , five . dropConstVal ( ) ) ; <nl> EXPECT_TRUE ( ! five . maybe ( Type : : cns ( 2 ) ) ) ; <nl> <nl> TEST ( Type , Const ) { <nl> } <nl> <nl> TEST ( Type , PtrKinds ) { <nl> - auto const frameGen = TGen . ptr ( Ptr : : Frame ) ; <nl> + auto const frameCell = TCell . ptr ( Ptr : : Frame ) ; <nl> auto const frameUninit = TUninit . ptr ( Ptr : : Frame ) ; <nl> auto const frameBool = TBool . ptr ( Ptr : : Frame ) ; <nl> auto const unknownBool = TBool . ptr ( Ptr : : Ptr ) ; <nl> - auto const unknownGen = TGen . ptr ( Ptr : : Ptr ) ; <nl> + auto const unknownCell = TCell . ptr ( Ptr : : Ptr ) ; <nl> auto const stackObj = TObj . ptr ( Ptr : : Stk ) ; <nl> auto const stackBool = TBool . ptr ( Ptr : : Stk ) ; <nl> <nl> - EXPECT_EQ ( " PtrToFrameGen " , frameGen . toString ( ) ) ; <nl> + EXPECT_EQ ( " PtrToFrameCell " , frameCell . toString ( ) ) ; <nl> EXPECT_EQ ( " PtrToFrameBool " , frameBool . toString ( ) ) ; <nl> EXPECT_EQ ( " PtrToBool " , unknownBool . toString ( ) ) ; <nl> EXPECT_EQ ( " PtrToStkObj " , stackObj . toString ( ) ) ; <nl> TEST ( Type , PtrKinds ) { <nl> EXPECT_EQ ( Ptr : : Frame , ( frameUninit | frameBool ) . 
ptrKind ( ) ) ; <nl> <nl> EXPECT_TRUE ( frameBool < = unknownBool ) ; <nl> - EXPECT_TRUE ( frameBool < = frameGen ) ; <nl> + EXPECT_TRUE ( frameBool < = frameCell ) ; <nl> EXPECT_FALSE ( frameBool < = frameUninit ) ; <nl> - EXPECT_TRUE ( frameBool . maybe ( frameGen ) ) ; <nl> + EXPECT_TRUE ( frameBool . maybe ( frameCell ) ) ; <nl> EXPECT_TRUE ( frameBool . maybe ( unknownBool ) ) ; <nl> EXPECT_TRUE ( ! frameUninit . maybe ( frameBool ) ) ; <nl> - EXPECT_TRUE ( frameUninit . maybe ( frameGen ) ) ; <nl> + EXPECT_TRUE ( frameUninit . maybe ( frameCell ) ) ; <nl> EXPECT_TRUE ( ! frameUninit . maybe ( unknownBool ) ) ; <nl> EXPECT_TRUE ( ! TPtrToUninit . maybe ( TPtrToBool ) ) ; <nl> EXPECT_FALSE ( unknownBool < = frameBool ) ; <nl> EXPECT_EQ ( unknownBool , frameBool | unknownBool ) ; <nl> <nl> - EXPECT_EQ ( unknownGen , frameGen | unknownBool ) ; <nl> + EXPECT_EQ ( unknownCell , frameCell | unknownBool ) ; <nl> <nl> EXPECT_EQ ( TBottom , frameBool & stackBool ) ; <nl> EXPECT_EQ ( frameBool , frameBool & unknownBool ) ; <nl> <nl> EXPECT_EQ ( Ptr : : Prop , <nl> ( TPtrToPropCell | TNullptr ) . ptrKind ( ) ) ; <nl> - EXPECT_EQ ( Ptr : : RProp , <nl> - ( TPtrToRPropCell | TNullptr ) . ptrKind ( ) ) ; <nl> EXPECT_EQ ( TPtrToPropCell , <nl> ( TPtrToPropCell | TNullptr ) - TNullptr ) ; <nl> <nl> - auto const frameGenOrCell = frameGen | TCell ; <nl> - auto const frameOrRefGenOrCell = frameGenOrCell | TGen . ptr ( Ptr : : Ref ) ; <nl> - auto const stackOrRefArrOrInt = TArr . ptr ( Ptr : : RStk ) | TInt ; <nl> - EXPECT_EQ ( TInt | TArr , frameGenOrCell & stackOrRefArrOrInt ) ; <nl> - EXPECT_EQ ( TArr . ptr ( Ptr : : Ref ) | TInt , <nl> - frameOrRefGenOrCell & stackOrRefArrOrInt ) ; <nl> - } <nl> - <nl> - TEST ( Type , PtrRefs ) { <nl> - EXPECT_EQ ( TBottom , TPtrToStkCell & TPtrToRefCell ) ; <nl> - EXPECT_EQ ( TPtrToRefCell , TPtrToRStkCell & TPtrToRFrameCell ) ; <nl> - EXPECT_EQ ( TPtrToPropCell , TPtrToRPropCell & TPtrToPropCell ) ; <nl> - EXPECT_EQ ( TBottom , TPtrToRPropCell & TPtrToFrameBool ) ; <nl> - EXPECT_FALSE ( TPtrToRPropCell . maybe ( TPtrToFrameBool ) ) ; <nl> - <nl> - EXPECT_EQ ( TPtrToPropCell , TPtrToPropGen - TPtrToBoxedCell ) ; <nl> - EXPECT_EQ ( TPtrToPropInt , TPtrToRPropInt - TPtrToRefCell ) ; <nl> - EXPECT_EQ ( TPtrToPropInt , TPtrToRPropInt - TPtrToRStkCell ) ; <nl> - <nl> - EXPECT_EQ ( <nl> - Ptr : : Ref , <nl> - ( ( TPtrToRPropCell | TNullptr ) & TPtrToRFrameCell ) . ptrKind ( ) <nl> - ) ; <nl> - <nl> - EXPECT_TRUE ( TPtrToPropCell < TPtrToRPropCell ) ; <nl> - EXPECT_TRUE ( TPtrToPropCell < = TPtrToRPropCell ) ; <nl> - EXPECT_TRUE ( TPtrToPropCell < TPtrToMembCell ) ; <nl> - EXPECT_TRUE ( TPtrToPropCell < TPtrToRMembCell ) ; <nl> - EXPECT_FALSE ( TPtrToPropCell < TPtrToRMembInt ) ; <nl> - EXPECT_TRUE ( TPtrToPropCell . maybe ( TPtrToRMembInt ) ) ; <nl> - EXPECT_TRUE ( ! TPtrToPropInt . maybe ( TPtrToRefInt ) ) ; <nl> - EXPECT_TRUE ( ! TPtrToPropInt . maybe ( TPtrToRefBool ) ) ; <nl> - EXPECT_TRUE ( ! TPtrToPropInt . maybe ( TPtrToPropBool ) ) ; <nl> - <nl> - EXPECT_EQ ( TBottom , TPtrToRefInt & TPtrToClsInitCell ) ; <nl> - EXPECT_EQ ( TBottom , TPtrToMembCell & TPtrToClsInitCell ) ; <nl> + auto const frameCellOrCell = frameCell | TCell ; <nl> + auto const stackOrArrOrInt = TArr . ptr ( Ptr : : Stk ) | TInt ; <nl> + EXPECT_EQ ( TInt | TArr , frameCellOrCell & stackOrArrOrInt ) ; <nl> } <nl> <nl> } } <nl> mmm a / hphp / runtime / vm / jit / alias - analysis . cpp <nl> ppp b / hphp / runtime / vm / jit / alias - analysis . 
cpp <nl> ALocBits AliasAnalysis : : may_alias ( AliasClass acls ) const { <nl> ret | = may_alias_part ( * this , acls , acls . prop ( ) , APropAny , all_props ) ; <nl> ret | = may_alias_part ( * this , acls , acls . elemI ( ) , AElemIAny , all_elemIs ) ; <nl> ret | = may_alias_part ( * this , acls , acls . elemS ( ) , AElemSAny , all_elemSs ) ; <nl> - ret | = may_alias_part ( * this , acls , acls . ref ( ) , ARefAny , all_ref ) ; <nl> ret | = may_alias_part ( * this , acls , acls . iterBase ( ) , <nl> AIterBaseAny , all_iterBase ) ; <nl> ret | = may_alias_part ( * this , acls , acls . iterType ( ) , <nl> ALocBits AliasAnalysis : : expand ( AliasClass acls ) const { <nl> ret | = expand_part ( * this , acls , acls . prop ( ) , APropAny , all_props ) ; <nl> ret | = expand_part ( * this , acls , acls . elemI ( ) , AElemIAny , all_elemIs ) ; <nl> ret | = expand_part ( * this , acls , acls . elemS ( ) , AElemSAny , all_elemSs ) ; <nl> - ret | = expand_part ( * this , acls , acls . ref ( ) , ARefAny , all_ref ) ; <nl> ret | = expand_part ( * this , acls , acls . iterBase ( ) , AIterBaseAny , all_iterBase ) ; <nl> ret | = expand_part ( * this , acls , acls . iterType ( ) , AIterTypeAny , all_iterType ) ; <nl> ret | = expand_part ( * this , acls , acls . iterPos ( ) , AIterPosAny , all_iterPos ) ; <nl> AliasAnalysis collect_aliases ( const IRUnit & unit , const BlockList & blocks ) { <nl> return ; <nl> } <nl> <nl> - if ( acls . is_ref ( ) ) { <nl> - if ( auto const index = add_class ( ret , acls ) ) { <nl> - ret . all_ref . set ( * index ) ; <nl> - } <nl> - return ; <nl> - } <nl> - <nl> if ( acls . is_mis ( ) & & acls . isSingleLocation ( ) ) { <nl> add_class ( ret , acls ) ; <nl> return ; <nl> AliasAnalysis collect_aliases ( const IRUnit & unit , const BlockList & blocks ) { <nl> return ; <nl> } <nl> <nl> - if ( acls . is_ref ( ) ) { <nl> - meta . conflicts = ret . all_ref ; <nl> - meta . conflicts . reset ( meta . index ) ; <nl> - return ; <nl> - } <nl> - <nl> if ( acls . is_rds ( ) ) { <nl> ret . all_rds . set ( meta . index ) ; <nl> return ; <nl> std : : string show ( const AliasAnalysis & ainfo ) { <nl> " all props " , show ( ainfo . all_props ) , <nl> " all elemIs " , show ( ainfo . all_elemIs ) , <nl> " all elemSs " , show ( ainfo . all_elemSs ) , <nl> - " all refs " , show ( ainfo . all_ref ) , <nl> " all iterBase " , show ( ainfo . all_iterBase ) , <nl> " all iterType " , show ( ainfo . all_iterType ) , <nl> " all iterPos " , show ( ainfo . all_iterPos ) , <nl> mmm a / hphp / runtime / vm / jit / alias - analysis . h <nl> ppp b / hphp / runtime / vm / jit / alias - analysis . h <nl> struct AliasAnalysis { <nl> ALocBits all_frame ; <nl> ALocBits all_stack ; <nl> ALocBits all_rds ; <nl> - ALocBits all_ref ; <nl> ALocBits all_iterBase ; <nl> ALocBits all_iterType ; <nl> ALocBits all_iterPos ; <nl> mmm a / hphp / runtime / vm / jit / alias - class . cpp <nl> ppp b / hphp / runtime / vm / jit / alias - class . 
cpp <nl> std : : string bit_str ( AliasClass : : rep bits , AliasClass : : rep skip ) { <nl> case A : : BMITvRef2 : break ; <nl> case A : : BMIBase : break ; <nl> case A : : BMIPropS : break ; <nl> - case A : : BRef : break ; <nl> case A : : BRds : break ; <nl> } <nl> <nl> std : : string bit_str ( AliasClass : : rep bits , AliasClass : : rep skip ) { <nl> case A : : BMITvRef2 : ret + = " MiT2 " ; break ; <nl> case A : : BMIBase : ret + = " MiB " ; break ; <nl> case A : : BMIPropS : ret + = " MiP " ; break ; <nl> - case A : : BRef : ret + = " Ref " ; break ; <nl> case A : : BRds : ret + = " Rds " ; break ; <nl> } <nl> } <nl> size_t AliasClass : : Hash : : operator ( ) ( AliasClass acls ) const { <nl> return folly : : hash : : hash_combine ( hash , <nl> acls . m_stack . offset . offset , <nl> acls . m_stack . size ) ; <nl> - case STag : : Ref : <nl> - return folly : : hash : : hash_combine ( hash , acls . m_ref . boxed ) ; <nl> <nl> case STag : : Rds : <nl> return folly : : hash : : hash_combine ( hash , acls . m_rds . handle ) ; <nl> X ( Prop , prop ) <nl> X ( ElemI , elemI ) <nl> X ( ElemS , elemS ) <nl> X ( Stack , stack ) <nl> - X ( Ref , ref ) <nl> X ( Rds , rds ) <nl> <nl> # undef X <nl> X ( Prop , prop ) <nl> X ( ElemI , elemI ) <nl> X ( ElemS , elemS ) <nl> X ( Stack , stack ) <nl> - X ( Ref , ref ) <nl> X ( Rds , rds ) <nl> <nl> # undef X <nl> AliasClass : : rep AliasClass : : stagBits ( STag tag ) { <nl> case STag : : ElemI : return BElemI ; <nl> case STag : : ElemS : return BElemS ; <nl> case STag : : Stack : return BStack ; <nl> - case STag : : Ref : return BRef ; <nl> case STag : : Rds : return BRds ; <nl> } <nl> always_assert ( 0 ) ; <nl> bool AliasClass : : checkInvariants ( ) const { <nl> case STag : : ElemS : <nl> assertx ( m_elemS . key - > isStatic ( ) ) ; <nl> break ; <nl> - case STag : : Ref : <nl> - assertx ( m_ref . boxed - > isA ( TBoxedCell ) ) ; <nl> - break ; <nl> case STag : : Rds : <nl> assertx ( rds : : isValidHandle ( m_rds . handle ) ) ; <nl> break ; <nl> bool AliasClass : : equivData ( AliasClass o ) const { <nl> m_elemS . key = = o . m_elemS . key ; <nl> case STag : : Stack : return m_stack . offset = = o . m_stack . offset & & <nl> m_stack . size = = o . m_stack . size ; <nl> - case STag : : Ref : return m_ref . boxed = = o . m_ref . boxed ; <nl> case STag : : Rds : return m_rds . handle = = o . m_rds . handle ; <nl> } <nl> not_reached ( ) ; <nl> AliasClass AliasClass : : unionData ( rep newBits , AliasClass a , AliasClass b ) { <nl> case STag : : Prop : <nl> case STag : : ElemI : <nl> case STag : : ElemS : <nl> - case STag : : Ref : <nl> case STag : : Rds : <nl> case STag : : IterAll : <nl> assertx ( ! a . equivData ( b ) ) ; <nl> AliasClass AliasClass : : operator | ( AliasClass o ) const { <nl> case STag : : ElemI : new ( & ret . m_elemI ) AElemI ( best - > m_elemI ) ; break ; <nl> case STag : : ElemS : new ( & ret . m_elemS ) AElemS ( best - > m_elemS ) ; break ; <nl> case STag : : Stack : new ( & ret . m_stack ) AStack ( best - > m_stack ) ; break ; <nl> - case STag : : Ref : new ( & ret . m_ref ) ARef ( best - > m_ref ) ; break ; <nl> case STag : : Rds : new ( & ret . m_rds ) ARds ( best - > m_rds ) ; break ; <nl> } <nl> ret . 
m_stag = stag ; <nl> bool AliasClass : : subclassData ( AliasClass o ) const { <nl> case STag : : Prop : <nl> case STag : : ElemI : <nl> case STag : : ElemS : <nl> - case STag : : Ref : <nl> case STag : : Rds : <nl> return equivData ( o ) ; <nl> case STag : : Frame : <nl> folly : : Optional < AliasClass : : UIterAll > AliasClass : : asUIter ( ) const { <nl> case STag : : Prop : <nl> case STag : : ElemI : <nl> case STag : : ElemS : <nl> - case STag : : Ref : <nl> case STag : : Rds : <nl> case STag : : Stack : <nl> return folly : : none ; <nl> bool AliasClass : : maybeData ( AliasClass o ) const { <nl> return lowest_upper . offset > highest_lower ; <nl> } <nl> <nl> - / * <nl> - * Two boxed cells can generally refer to the same RefData . <nl> - * / <nl> - case STag : : Ref : <nl> - return true ; <nl> - <nl> case STag : : Rds : <nl> return m_rds . handle = = o . m_rds . handle ; <nl> } <nl> AliasClass canonicalize ( AliasClass a ) { <nl> case T : : IterEnd : return a ; <nl> case T : : IterAll : return a ; <nl> case T : : Stack : return a ; <nl> - case T : : Ref : return a ; <nl> case T : : Rds : return a ; <nl> case T : : Prop : a . m_prop . obj = canonical ( a . m_prop . obj ) ; return a ; <nl> case T : : ElemI : a . m_elemI . arr = canonical ( a . m_elemI . arr ) ; return a ; <nl> std : : string show ( AliasClass acls ) { <nl> : folly : : sformat ( " ; { } " , acls . m_stack . size ) <nl> ) ; <nl> break ; <nl> - case A : : STag : : Ref : <nl> - folly : : format ( & ret , " Ref { } " , acls . m_ref . boxed - > id ( ) ) ; <nl> - break ; <nl> case A : : STag : : Rds : <nl> folly : : format ( & ret , " Rds { } " , acls . m_rds . handle ) ; <nl> break ; <nl> mmm a / hphp / runtime / vm / jit / alias - class . h <nl> ppp b / hphp / runtime / vm / jit / alias - class . h <nl> struct SSATmp ; <nl> * | | | | | <nl> * | | HeapAny * | . . . <nl> * | | | | <nl> - * | | + mmmmmm + mmmmmm + mmmmmmmmm + | <nl> - * | | | | | | <nl> - * FrameAny StackAny ElemAny PropAny RefAny MIStateAny <nl> - * | | / \ | | | <nl> - * . . . . . . ElemIAny ElemSAny . . . . . . | <nl> + * | | + mmmmmm + mmmmmm + | <nl> + * | | | | | <nl> + * FrameAny StackAny ElemAny PropAny MIStateAny <nl> + * | | / \ | | <nl> + * . . . . . . ElemIAny ElemSAny . . . | <nl> * | | | <nl> * . . . . . . + mmmmmmmmm + mmmmmm - - + - - + mmmmmm + <nl> * | | | | <nl> struct SSATmp ; <nl> * MIBase * * MIPropS * * <nl> * <nl> * <nl> - * ( * ) AHeapAny contains some things other than ElemAny , PropAny and RefAny <nl> + * ( * ) AHeapAny contains some things other than ElemAny , and PropAny <nl> * that don ' t have explicit nodes in the lattice yet . ( Like the <nl> * lvalBlackhole , etc . ) It ' s hard for this to matter to client code for <nl> * now because we don ' t expose an intersection or difference operation . <nl> struct AStack { <nl> int32_t size ; <nl> } ; <nl> <nl> - / * <nl> - * A RefData referenced by a BoxedCell . <nl> - * / <nl> - struct ARef { SSATmp * boxed ; } ; <nl> - <nl> / * <nl> * A TypedValue stored in rds . <nl> * <nl> struct AliasClass { <nl> BElemI = 1U < < 6 , <nl> BElemS = 1U < < 7 , <nl> BStack = 1U < < 8 , <nl> - BRef = 1U < < 9 , <nl> - BRds = 1U < < 10 , <nl> + BRds = 1U < < 9 , <nl> <nl> / / Have no specialization , put them last . 
<nl> BMITempBase = 1U < < 11 , <nl> struct AliasClass { <nl> BMIPropS = 1U < < 15 , <nl> <nl> BElem = BElemI | BElemS , <nl> - BHeap = BElem | BProp | BRef , <nl> + BHeap = BElem | BProp , <nl> BMIStateTV = BMITempBase | BMITvRef | BMITvRef2 , <nl> BMIState = BMIStateTV | BMIBase | BMIPropS , <nl> <nl> struct AliasClass { <nl> / * implicit * / AliasClass ( AElemI ) ; <nl> / * implicit * / AliasClass ( AElemS ) ; <nl> / * implicit * / AliasClass ( AStack ) ; <nl> - / * implicit * / AliasClass ( ARef ) ; <nl> / * implicit * / AliasClass ( ARds ) ; <nl> <nl> / * <nl> struct AliasClass { <nl> folly : : Optional < AElemI > elemI ( ) const ; <nl> folly : : Optional < AElemS > elemS ( ) const ; <nl> folly : : Optional < AStack > stack ( ) const ; <nl> - folly : : Optional < ARef > ref ( ) const ; <nl> folly : : Optional < ARds > rds ( ) const ; <nl> <nl> / * <nl> struct AliasClass { <nl> folly : : Optional < AElemI > is_elemI ( ) const ; <nl> folly : : Optional < AElemS > is_elemS ( ) const ; <nl> folly : : Optional < AStack > is_stack ( ) const ; <nl> - folly : : Optional < ARef > is_ref ( ) const ; <nl> folly : : Optional < ARds > is_rds ( ) const ; <nl> <nl> / * <nl> struct AliasClass { <nl> ElemI , <nl> ElemS , <nl> Stack , <nl> - Ref , <nl> Rds , <nl> <nl> IterAll , / / The union of all fields for a given iterator . <nl> struct AliasClass { <nl> AElemI m_elemI ; <nl> AElemS m_elemS ; <nl> AStack m_stack ; <nl> - ARef m_ref ; <nl> ARds m_rds ; <nl> <nl> UIterAll m_iterAll ; <nl> auto const AIterEndAny = AliasClass { AliasClass : : BIterEnd } ; <nl> auto const AIterAny = AliasClass { AliasClass : : BIter } ; <nl> auto const APropAny = AliasClass { AliasClass : : BProp } ; <nl> auto const AHeapAny = AliasClass { AliasClass : : BHeap } ; <nl> - auto const ARefAny = AliasClass { AliasClass : : BRef } ; <nl> auto const AStackAny = AliasClass { AliasClass : : BStack } ; <nl> auto const ARdsAny = AliasClass { AliasClass : : BRds } ; <nl> auto const AElemIAny = AliasClass { AliasClass : : BElemI } ; <nl> mmm a / hphp / runtime / vm / jit / arg - group . cpp <nl> ppp b / hphp / runtime / vm / jit / arg - group . cpp <nl> ArgGroup & ArgGroup : : ssa ( int i , bool allowFP ) { <nl> push_arg ( ArgDesc ( ArgDesc : : Kind : : Imm , 0 ) ) ; / / Push a dummy parameter <nl> } <nl> } else { <nl> - if ( wide_tv_val & & ( s - > isA ( TLvalToGen ) & & ! s - > isA ( TBottom ) ) ) { <nl> + if ( wide_tv_val & & ( s - > isA ( TLvalToCell ) & & ! s - > isA ( TBottom ) ) ) { <nl> / / If there ' s exactly one register argument slot left , the whole tv_lval <nl> / / goes on the stack instead of being split between a register and the <nl> / / stack . <nl> mmm a / hphp / runtime / vm / jit / call - spec . cpp <nl> ppp b / hphp / runtime / vm / jit / call - spec . cpp <nl> std : : string show_types ( const std : : vector < Type > & ts ) { <nl> template < typename F > <nl> void verify_return_type ( Type ret , const CallDest & dest , F fail ) { <nl> if ( dest . type = = DestType : : TV ) { <nl> - / / We really want equality here : TGen corresponds to a full TypedValue <nl> + / / We really want equality here : TCell corresponds to a full TypedValue <nl> / / being returned . <nl> - if ( ret = = TGen ) return ; <nl> + if ( ret = = TCell ) return ; <nl> } else { <nl> if ( ret < = dest . 
valueType ) return ; <nl> <nl> void verify_return_type ( Type ret , const CallDest & dest , F fail ) { <nl> <nl> / / Some JIT types are much more specific than what we can express in C + + , <nl> / / so treat certain classes of types as equivalent . <nl> - static std : : array < Type , 6 > constexpr special_types = { <nl> - TPtrToGen , <nl> - TLvalToGen , <nl> - TBoxedInitCell , <nl> + static std : : array < Type , 5 > constexpr special_types = { <nl> + TPtrToCell , <nl> + TLvalToCell , <nl> TObj , <nl> TStr , <nl> TArrLike , <nl> bool CallSpec : : verifySignature ( const CallDest & dest , <nl> for ( ; parami < type - > params . size ( ) & & argi < args . size ( ) ; <nl> + + parami , + + argi ) { <nl> auto const param = type - > params [ parami ] ; <nl> - / / TGen ( for a TypedValue parameter ) and wide TLvalToGen are special : one <nl> + / / TCell ( for a TypedValue parameter ) and wide TLvalToCell are special : one <nl> / / SSATmp represents two argument registers , and the latter is passed as a <nl> / / dummy TBottom argument . Make sure both are present . <nl> - if ( param = = TGen | | ( wide_tv_val & & param = = TLvalToGen ) ) { <nl> + if ( param = = TCell | | ( wide_tv_val & & param = = TLvalToCell ) ) { <nl> if ( ! ( args [ argi ] < = param ) ) { <nl> fail ( " Incompatible type { } for first half of { } parameter { } " , <nl> args [ argi ] , param , parami ) ; <nl> mmm a / hphp / runtime / vm / jit / call - spec . h <nl> ppp b / hphp / runtime / vm / jit / call - spec . h <nl> namespace detail { <nl> T ( Func * , TFunc ) \ <nl> T ( ClsMethDataRef , TClsMeth ) \ <nl> T ( NamedEntity * , TNamedEntity ) \ <nl> - T ( RefData * , TBoxedInitCell ) \ <nl> T ( ResourceHdr * , TRes ) \ <nl> T ( StringData * , TStr ) \ <nl> T ( TCA , TTCA ) \ <nl> - T ( TypedValue & , TPtrToGen ) \ <nl> - T ( TypedValue * , TPtrToGen ) \ <nl> - T ( TypedValue , TGen ) \ <nl> + T ( TypedValue & , TPtrToCell ) \ <nl> + T ( TypedValue * , TPtrToCell ) \ <nl> + T ( TypedValue , TCell ) \ <nl> T ( bool , TBool ) \ <nl> T ( double , TDbl ) \ <nl> T ( int32_t , TInt ) \ <nl> namespace detail { <nl> T ( unsigned long , TInt ) \ <nl> T ( unsigned long long , TInt ) \ <nl> T ( unsigned int , U ( Int , RDSHandle ) ) \ <nl> - T ( tv_lval , TLvalToGen ) \ <nl> - T ( tv_rval , TLvalToGen ) \ <nl> + T ( tv_lval , TLvalToCell ) \ <nl> + T ( tv_rval , TLvalToCell ) \ <nl> T ( MInstrPropState * , U ( MIPropSPtr , Nullptr ) ) <nl> <nl> / * <nl> mmm a / hphp / runtime / vm / jit / cfg - clean . cpp <nl> ppp b / hphp / runtime / vm / jit / cfg - clean . cpp <nl> bool convertCondBranchToJmp ( IRUnit & unit , Block * block ) { <nl> CheckMBase , <nl> CheckInit , <nl> CheckInitMem , <nl> - CheckRDSInitialized , <nl> - CheckRefInner ) ) { <nl> + CheckRDSInitialized ) ) { <nl> return false ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / check . cpp <nl> ppp b / hphp / runtime / vm / jit / check . cpp <nl> bool checkOperandTypes ( const IRInstruction * inst , const IRUnit * / * unit * / ) { <nl> IdxSeq < __VA_ARGS__ > { } \ <nl> ) ; <nl> # define DLdObjCls <nl> - # define DUnboxPtr <nl> - # define DBoxPtr <nl> # define DAllocObj <nl> # define DArrElem <nl> # define DVecElem <nl> bool checkOperandTypes ( const IRInstruction * inst , const IRUnit * / * unit * / ) { <nl> # undef DParamMayRelax <nl> # undef DParam <nl> # undef DLdObjCls <nl> - # undef DUnboxPtr <nl> - # undef DBoxPtr <nl> # undef DAllocObj <nl> # undef DArrElem <nl> # undef DVecElem <nl> mmm a / hphp / runtime / vm / jit / code - gen - helpers - inl . 
h <nl> ppp b / hphp / runtime / vm / jit / code - gen - helpers - inl . h <nl> namespace HPHP { namespace jit { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> inline Vptr memTVTypePtr ( SSATmp * ptr , Vloc loc ) { <nl> - assertx ( ptr - > isA ( TPtrToGen ) | | ptr - > isA ( TLvalToGen ) ) ; <nl> - if ( wide_tv_val & & ptr - > isA ( TLvalToGen ) ) return * loc . reg ( tv_lval : : type_idx ) ; <nl> + assertx ( ptr - > isA ( TPtrToCell ) | | ptr - > isA ( TLvalToCell ) ) ; <nl> + if ( wide_tv_val & & ptr - > isA ( TLvalToCell ) ) return * loc . reg ( tv_lval : : type_idx ) ; <nl> <nl> return loc . reg ( ) [ TVOFF ( m_type ) ] ; <nl> } <nl> <nl> inline Vptr memTVValPtr ( SSATmp * ptr , Vloc loc ) { <nl> - assertx ( ptr - > isA ( TPtrToGen ) | | ptr - > isA ( TLvalToGen ) ) ; <nl> - if ( wide_tv_val & & ptr - > isA ( TLvalToGen ) ) return * loc . reg ( tv_lval : : val_idx ) ; <nl> + assertx ( ptr - > isA ( TPtrToCell ) | | ptr - > isA ( TLvalToCell ) ) ; <nl> + if ( wide_tv_val & & ptr - > isA ( TLvalToCell ) ) return * loc . reg ( tv_lval : : val_idx ) ; <nl> <nl> return loc . reg ( ) [ TVOFF ( m_data ) ] ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / code - gen - helpers . h <nl> ppp b / hphp / runtime / vm / jit / code - gen - helpers . h <nl> Vreg zeroExtendIfBool ( Vout & v , Type type , Vreg reg ) ; <nl> <nl> / * <nl> * Return a pointer to the type or value field of the pointee of ` ptr ' , whether <nl> - * it is a TPtrToGen or a TLvalToGen . <nl> + * it is a TPtrToCell or a TLvalToCell . <nl> * / <nl> Vptr memTVTypePtr ( SSATmp * ptr , Vloc loc ) ; <nl> Vptr memTVValPtr ( SSATmp * ptr , Vloc loc ) ; <nl> mmm a / hphp / runtime / vm / jit / dce . cpp <nl> ppp b / hphp / runtime / vm / jit / dce . 
cpp <nl> bool canDCE ( IRInstruction * inst ) { <nl> case IsWaitHandle : <nl> case IsCol : <nl> case IsDVArray : <nl> - case UnboxPtr : <nl> case LdStk : <nl> case LdLoc : <nl> case LdStkAddr : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case LdIterBase : <nl> case LdIterPos : <nl> case LdIterEnd : <nl> - case LdRef : <nl> case LdFrameThis : <nl> case LdFrameCls : <nl> case LdSmashable : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case GetMemoKeyScalar : <nl> case LookupSPropSlot : <nl> case ConstructClosure : <nl> - case Box : <nl> case AllocPackedArray : <nl> case AllocStructArray : <nl> case AllocStructDArray : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case CheckDArray : <nl> case CheckMixedArrayKeys : <nl> case CheckSmashableClass : <nl> - case HintLocInner : <nl> - case HintStkInner : <nl> - case HintMBaseInner : <nl> case CheckLoc : <nl> case CheckStk : <nl> case CheckMBase : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case LdLocPseudoMain : <nl> case LdVectorBase : <nl> case LdPairBase : <nl> - case CheckRefInner : <nl> case DefCls : <nl> case LdClsCtor : <nl> case LdCls : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case StLoc : <nl> case StLocPseudoMain : <nl> case StLocRange : <nl> - case StRef : <nl> case EagerSyncVMRegs : <nl> case ReqBindJmp : <nl> case ReqRetranslate : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case MapGet : <nl> case CGetElem : <nl> case ArraySet : <nl> - case ArraySetRef : <nl> case VecSet : <nl> - case VecSetRef : <nl> case DictSet : <nl> - case DictSetRef : <nl> case MapSet : <nl> case VectorSet : <nl> case SetElem : <nl> bool canDCE ( IRInstruction * inst ) { <nl> case MemoSetLSBCache : <nl> case MemoSetInstanceValue : <nl> case MemoSetInstanceCache : <nl> - case BoxPtr : <nl> case ThrowAsTypeStructException : <nl> case RecordReifiedGenericsAndGetTSList : <nl> case ResolveTypeStruct : <nl> bool findWeakActRecUses ( const BlockList & blocks , <nl> case CheckLoc : <nl> case AssertLoc : <nl> case LdLocAddr : <nl> - case HintLocInner : <nl> incWeak ( inst , inst - > src ( 0 ) ) ; <nl> break ; <nl> <nl> void performActRecFixups ( const BlockList & blocks , <nl> case LdLocAddr : <nl> case AssertLoc : <nl> case CheckLoc : <nl> - case HintLocInner : <nl> if ( state [ inst . src ( 0 ) - > inst ( ) ] . isDead ( ) ) { <nl> convertToStackInst ( unit , inst ) ; <nl> needsReflow = true ; <nl> IRInstruction * resolveFpDefLabel ( const SSATmp * fp ) { <nl> } <nl> <nl> void convertToStackInst ( IRUnit & unit , IRInstruction & inst ) { <nl> - assertx ( inst . is ( CheckLoc , AssertLoc , LdLoc , StLoc , LdLocAddr , HintLocInner , <nl> + assertx ( inst . is ( CheckLoc , AssertLoc , LdLoc , StLoc , LdLocAddr , <nl> MemoGetStaticCache , MemoSetStaticCache , <nl> MemoGetLSBCache , MemoSetLSBCache , <nl> MemoGetInstanceCache , MemoSetInstanceCache ) ) ; <nl> void convertToStackInst ( IRUnit & unit , IRInstruction & inst ) { <nl> inst . setNext ( next ) ; <nl> return ; <nl> } <nl> - case HintLocInner : <nl> - unit . replace ( <nl> - & inst , <nl> - HintStkInner , <nl> - IRSPRelOffsetData { locToStkOff ( * inst . extra < LocalId > ( ) , inst . src ( 0 ) ) } , <nl> - inst . typeParam ( ) , <nl> - mainSP <nl> - ) ; <nl> - return ; <nl> case MemoGetStaticCache : <nl> case MemoSetStaticCache : <nl> case MemoGetLSBCache : <nl> mmm a / hphp / runtime / vm / jit / dce . h <nl> ppp b / hphp / runtime / vm / jit / dce . 
h <nl> void fullDCE ( IRUnit & ) ; <nl> * eliding DefInlineFP <nl> * <nl> * Precondition : inst is LdLoc , StLoc , LdLocAddr , CheckLoc , AssertLoc , or <nl> - * HintLocInner <nl> * Precondition : inst - > src ( 0 ) - > inst ( ) is DefInlineFP <nl> * / <nl> void convertToStackInst ( IRUnit & unit , IRInstruction & inst ) ; <nl> mmm a / hphp / runtime / vm / jit / extra - data . h <nl> ppp b / hphp / runtime / vm / jit / extra - data . h <nl> X ( ProfileSwitchDest , ProfileSwitchData ) ; <nl> X ( JmpSwitchDest , JmpSwitchData ) ; <nl> X ( LdSSwitchDestFast , LdSSwitchData ) ; <nl> X ( LdSSwitchDestSlow , LdSSwitchData ) ; <nl> - X ( HintLocInner , LocalId ) ; <nl> X ( CheckLoc , LocalId ) ; <nl> X ( AssertLoc , LocalId ) ; <nl> X ( LdLocAddr , LocalId ) ; <nl> X ( InstanceOfIfaceVtable , InstanceOfIfaceVtableData ) ; <nl> X ( ResolveTypeStruct , ResolveTypeStructData ) ; <nl> X ( ExtendsClass , ExtendsClassData ) ; <nl> X ( CheckStk , IRSPRelOffsetData ) ; <nl> - X ( HintStkInner , IRSPRelOffsetData ) ; <nl> X ( StStk , IRSPRelOffsetData ) ; <nl> X ( StOutValue , IndexData ) ; <nl> X ( LdOutAddr , IndexData ) ; <nl> mmm a / hphp / runtime / vm / jit / frame - state . cpp <nl> ppp b / hphp / runtime / vm / jit / frame - state . cpp <nl> bool check_invariants ( const FrameState & state ) { <nl> local . value - > toString ( ) <nl> ) ; <nl> always_assert_flog ( <nl> - local . type = = TGen , <nl> + local . type = = TCell , <nl> " We should never be tracking non - predicted types for locals in " <nl> " a pseudomain right now . Local { } had type { } " , <nl> id , <nl> void FrameStateMgr : : update ( const IRInstruction * inst ) { <nl> auto const extra = inst - > extra < ContEnter > ( ) ; <nl> setValue ( stk ( extra - > spOffset ) , nullptr ) ; <nl> trackCall ( ) ; <nl> - setType ( stk ( extra - > spOffset ) , TGen ) ; <nl> + setType ( stk ( extra - > spOffset ) , TCell ) ; <nl> / / ContEnter pops a cell . <nl> assertx ( cur ( ) . bcSPOff = = inst - > marker ( ) . spOff ( ) ) ; <nl> cur ( ) . bcSPOff - - ; <nl> void FrameStateMgr : : update ( const IRInstruction * inst ) { <nl> case StMem : <nl> / / If we ever start using StMem to store to pointers that might be <nl> / / stack / locals , we have to update tracked state here . <nl> - always_assert ( ! inst - > src ( 0 ) - > type ( ) . maybe ( TPtrToFrameGen ) ) ; <nl> - always_assert ( ! inst - > src ( 0 ) - > type ( ) . maybe ( TPtrToStkGen ) ) ; <nl> + always_assert ( ! inst - > src ( 0 ) - > type ( ) . maybe ( TPtrToFrameCell ) ) ; <nl> + always_assert ( ! inst - > src ( 0 ) - > type ( ) . 
maybe ( TPtrToStkCell ) ) ; <nl> break ; <nl> <nl> case LdStk : <nl> void FrameStateMgr : : update ( const IRInstruction * inst ) { <nl> TypeSource : : makeGuard ( inst ) ) ; <nl> break ; <nl> <nl> - case HintStkInner : <nl> - setBoxedPrediction ( stk ( inst - > extra < HintStkInner > ( ) - > offset ) , <nl> - inst - > typeParam ( ) ) ; <nl> - break ; <nl> - <nl> - case HintLocInner : <nl> - setBoxedPrediction ( loc ( inst - > extra < HintLocInner > ( ) - > locId ) , <nl> - inst - > typeParam ( ) ) ; <nl> - break ; <nl> - <nl> - case HintMBaseInner : <nl> - setBoxedPrediction ( Location : : MBase { } , inst - > typeParam ( ) ) ; <nl> - break ; <nl> - <nl> case StLoc : <nl> setValue ( loc ( inst - > extra < LocalId > ( ) - > locId ) , inst - > src ( 1 ) ) ; <nl> break ; <nl> void FrameStateMgr : : update ( const IRInstruction * inst ) { <nl> inst - > src ( 1 ) - > type ( ) ) ; <nl> break ; <nl> <nl> - case StRef : <nl> - updateLocalRefPredictions ( inst - > src ( 0 ) , inst - > src ( 1 ) ) ; <nl> - break ; <nl> - <nl> case EndCatch : <nl> / * <nl> * Hitting this means we ' ve messed up with syncing the stack in a catch <nl> void FrameStateMgr : : update ( const IRInstruction * inst ) { <nl> auto const end = it + extra . nChangedLocals ; <nl> for ( ; it ! = end ; + + it ) { <nl> auto & local = * it ; <nl> - / / If changing the inner type of a boxed local , also drop the <nl> - / / information about inner types for any other boxed locals . <nl> - if ( local . type < = TBoxedCell ) dropLocalRefsInnerTypes ( ) ; <nl> setType ( loc ( local . id ) , local . type ) ; <nl> } <nl> } <nl> void FrameStateMgr : : updateMInstr ( const IRInstruction * inst ) { <nl> auto const isPM = cur ( ) . curFunc - > isPseudoMain ( ) ; <nl> <nl> auto const baseTmp = inst - > src ( minstrBaseIdx ( inst - > op ( ) ) ) ; <nl> - if ( ! baseTmp - > type ( ) . maybe ( TLvalToGen ) ) return ; <nl> + if ( ! baseTmp - > type ( ) . maybe ( TLvalToCell ) ) return ; <nl> <nl> auto const base = pointee ( baseTmp ) ; <nl> auto const mbase = cur ( ) . mbr . pointee ; <nl> void FrameStateMgr : : updateMInstr ( const IRInstruction * inst ) { <nl> / / Drop the value and don ' t bother with precise effects . <nl> return setType ( l , oldType ) ; <nl> } <nl> - if ( oldType < = TBoxedCell ) return ; <nl> <nl> if ( auto const baseType = effect_on ( oldType ) ) { <nl> widenType ( l , oldType | * baseType ) ; <nl> void FrameStateMgr : : updateMInstr ( const IRInstruction * inst ) { <nl> / / ` l ' ( with corresponding Ptr type ` kind ' ) , we apply the effect of ` inst ' <nl> / / only to ` l ' . Returns the base value type if ` inst ' had an effect . <nl> auto const apply_one = [ & ] ( Location l , Ptr kind ) - > folly : : Optional < Type > { <nl> - auto const oldTy = typeOf ( l ) & TGen ; / / exclude TCls from ptr ( ) <nl> + auto const oldTy = typeOf ( l ) & TCell ; / / exclude TCls from ptr ( ) <nl> if ( auto const ptrTy = effect_on ( oldTy . lval ( kind ) ) ) { <nl> auto const baseTy = ptrTy - > derefIfPtr ( ) ; <nl> - setType ( l , baseTy < = TBoxedCell ? TBoxedInitCell : baseTy ) ; <nl> + setType ( l , baseTy ) ; <nl> return baseTy ; <nl> } <nl> return folly : : none ; <nl> } ; <nl> <nl> - auto const apply_one_with_inner = [ & ] ( Location l , Ptr kind ) { <nl> - if ( auto const ty = apply_one ( l , kind ) ) { <nl> - if ( * ty < = TBoxedCell ) setBoxedPrediction ( l , * ty ) ; <nl> - } <nl> - } ; <nl> - <nl> if ( auto const bframe = base . frame ( ) ) { <nl> if ( ! isPM ) { <nl> auto const l = loc ( bframe - > ids . 
singleValue ( ) ) ; <nl> - apply_one_with_inner ( l , Ptr : : Frame ) ; <nl> + apply_one ( l , Ptr : : Frame ) ; <nl> } <nl> } <nl> if ( auto const bstack = base . stack ( ) ) { <nl> void FrameStateMgr : : updateMInstr ( const IRInstruction * inst ) { <nl> if ( base . maybe ( mbase ) ) { <nl> if ( mbase . isSingleLocation ( ) ) { <nl> auto const ptr = cur ( ) . mbr . ptrType . ptrKind ( ) ; <nl> - apply_one_with_inner ( Location : : MBase { } , ptr ) ; <nl> + apply_one ( Location : : MBase { } , ptr ) ; <nl> } else { <nl> apply ( Location : : MBase { } ) ; <nl> } <nl> void FrameStateMgr : : updateMBase ( const IRInstruction * inst ) { <nl> X ( Prop ) \ <nl> X ( ElemI ) \ <nl> X ( ElemS ) \ <nl> - X ( Ref ) \ <nl> X ( MIState ) <nl> # define X ( Mem ) \ <nl> ( base . maybe ( A # # Mem # # Any ) & & stores . maybe ( A # # Mem # # Any ) ) | | <nl> void FrameStateMgr : : collectPostConds ( Block * block ) { <nl> auto const type = stack ( irSPRel ) . type ; <nl> auto const changed = stack ( irSPRel ) . maybeChanged ; <nl> <nl> - if ( changed | | type < TGen ) { <nl> + if ( changed | | type < TCell ) { <nl> auto const fpRel = bcSPRel . to < FPInvOffset > ( bcSPOff ) ; <nl> <nl> FTRACE ( 1 , " Stack ( { } , { } ) : { } ( { } ) \ n " , bcSPRel . offset , fpRel . offset , <nl> void FrameStateMgr : : collectPostConds ( Block * block ) { <nl> for ( unsigned i = 0 ; i < func ( ) - > numLocals ( ) ; i + + ) { <nl> auto const t = local ( i ) . type ; <nl> auto const changed = local ( i ) . maybeChanged ; <nl> - if ( changed | | t < TGen ) { <nl> + if ( changed | | t < TCell ) { <nl> FTRACE ( 1 , " Local { } : { } ( { } ) \ n " , i , t . toString ( ) , <nl> changed ? " changed " : " refined " ) ; <nl> auto & vec = changed ? pConds . changed : pConds . refined ; <nl> void FrameStateMgr : : collectPostConds ( Block * block ) { <nl> <nl> auto const ty = mbase ( ) . type ; <nl> auto const changed = mbase ( ) . maybeChanged ; <nl> - if ( changed | | ty < TGen ) { <nl> + if ( changed | | ty < TCell ) { <nl> FTRACE ( 1 , " MBase { { } } : { } ( { } ) \ n " , ty , changed ? " changed " : " refined " ) ; <nl> auto & vec = changed ? pConds . changed : pConds . refined ; <nl> vec . push_back ( { Location : : MBase { } , ty } ) ; <nl> void FrameStateMgr : : setValueImpl ( Location l , <nl> * Update the value ( and type ) for ` l ' . <nl> * / <nl> void FrameStateMgr : : setValue ( Location l , SSATmp * value ) { <nl> - / * <nl> - * We update the predicted type for boxed local values in some special cases <nl> - * to something smart . <nl> - * / <nl> - auto const predicted_local = [ & ] ( ) - > folly : : Optional < Type > { <nl> - if ( ! value ) return folly : : none ; <nl> - auto const inst = value - > inst ( ) ; <nl> - <nl> - switch ( inst - > op ( ) ) { <nl> - case LdLoc : <nl> - if ( value - > type ( ) < = TBoxedCell ) { <nl> - auto const fp = inst - > src ( 0 ) ; <nl> - auto const locID = inst - > extra < LdLoc > ( ) - > locId ; <nl> - <nl> - / / Keep the same prediction as the src local . It might have been <nl> - / / loaded in a parent frame , though , so we have to find the <nl> - / / appropriate FrameState . <nl> - for ( auto const & frame : boost : : adaptors : : reverse ( m_stack ) ) { <nl> - if ( fp ! = frame . fpValue ) continue ; <nl> - <nl> - assertx ( locID < frame . locals . size ( ) ) ; <nl> - return frame . locals [ locID ] . predictedType ; <nl> - } <nl> - / / It ' s also possible it was loaded in the frame of a previously <nl> - / / inlined callee that we ' ve already popped . 
If that ' s the case , <nl> - / / just skip this optimization . <nl> - } <nl> - break ; <nl> - <nl> - case Box : <nl> - return boxType ( inst - > src ( 0 ) - > type ( ) ) ; <nl> - <nl> - default : <nl> - break ; <nl> - } <nl> - return folly : : none ; <nl> - } ; <nl> - <nl> switch ( l . tag ( ) ) { <nl> case LTag : : Local : <nl> - return setValueImpl ( l , localState ( l ) , value , predicted_local ( ) ) ; <nl> + return setValueImpl ( l , localState ( l ) , value ) ; <nl> case LTag : : Stack : <nl> cur ( ) . stackModified = true ; <nl> return setValueImpl ( l , stackState ( l ) , value ) ; <nl> void FrameStateMgr : : refinePredictedType ( Location l , Type type ) { <nl> not_reached ( ) ; <nl> } <nl> <nl> - template < LTag tag > <nl> - static void setBoxedPredictionImpl ( LocationState < tag > & state , Type type ) { <nl> - state . predictedType = refinePrediction ( state . type , type , state . type ) ; <nl> - } <nl> - <nl> - / * <nl> - * Set the predicted type for ` l ' , discarding any previous prediction . <nl> - * / <nl> - void FrameStateMgr : : setBoxedPrediction ( Location l , Type type ) { <nl> - switch ( l . tag ( ) ) { <nl> - case LTag : : Local : return setBoxedPredictionImpl ( localState ( l ) , type ) ; <nl> - case LTag : : Stack : return setBoxedPredictionImpl ( stackState ( l ) , type ) ; <nl> - case LTag : : MBase : return setBoxedPredictionImpl ( cur ( ) . mbase , type ) ; <nl> - } <nl> - not_reached ( ) ; <nl> - } <nl> - <nl> / * <nl> * Refine the value for ` state ' to ` newVal ' if it was set to ` oldVal ' . <nl> * <nl> void FrameStateMgr : : setLocalPredictedType ( uint32_t id , Type type ) { <nl> local . predictedType = updatePrediction ( type , local . type ) ; <nl> } <nl> <nl> - / * <nl> - * This is called when we store into a BoxedCell . Any locals that we know <nl> - * point to that cell can have their inner type predictions updated . <nl> - * / <nl> - void FrameStateMgr : : updateLocalRefPredictions ( SSATmp * boxedCell , SSATmp * val ) { <nl> - assertx ( boxedCell - > type ( ) < = TBoxedCell ) ; <nl> - for ( auto id = uint32_t { 0 } ; id < cur ( ) . locals . size ( ) ; + + id ) { <nl> - if ( canonical ( cur ( ) . locals [ id ] . value ) = = canonical ( boxedCell ) ) { <nl> - setBoxedPrediction ( loc ( id ) , boxType ( val - > type ( ) ) ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / * <nl> - * This function changes any boxed local into a BoxedInitCell type . It ' s safe <nl> - * to assume they ' re init because you can never have a reference to uninit . <nl> - * / <nl> - void FrameStateMgr : : dropLocalRefsInnerTypes ( ) { <nl> - for ( auto & frame : m_stack ) { <nl> - for ( auto & local : frame . locals ) { <nl> - if ( local . type < = TBoxedCell ) { <nl> - local . type = TBoxedInitCell ; <nl> - local . predictedType = TBoxedInitCell ; <nl> - local . maybeChanged = true ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> void FrameStateMgr : : clearLocals ( ) { <nl> ITRACE ( 2 , " clearLocals \ n " ) ; <nl> for ( auto i = uint32_t { 0 } ; i < cur ( ) . locals . size ( ) ; + + i ) { <nl> mmm a / hphp / runtime / vm / jit / frame - state . h <nl> ppp b / hphp / runtime / vm / jit / frame - state . 
h <nl> struct LocationState { <nl> false , <nl> " invalid LTag for LocationState " ) ; <nl> <nl> - static constexpr Type default_type ( ) { return TGen ; } <nl> + static constexpr Type default_type ( ) { return TCell ; } <nl> <nl> template < LTag other > <nl> LocationState < tag > & operator = ( const LocationState < other > & o ) { <nl> using MBaseState = LocationState < LTag : : MBase > ; <nl> struct MBRState { <nl> SSATmp * ptr { nullptr } ; <nl> AliasClass pointee { AEmpty } ; / / defaults to " invalid " , not " Top " <nl> - Type ptrType { TLvalToGen } ; <nl> + Type ptrType { TLvalToCell } ; <nl> } ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> struct FrameStateMgr final { <nl> void setType ( Location l , Type type ) ; <nl> void widenType ( Location l , Type type ) ; <nl> void refineType ( Location l , Type type , TypeSource typeSrc ) ; <nl> - void setBoxedPrediction ( Location l , Type type ) ; <nl> void refinePredictedTmpType ( SSATmp * , Type ) ; <nl> <nl> template < LTag tag > <nl> struct FrameStateMgr final { <nl> * Local state update helpers . <nl> * / <nl> void setLocalPredictedType ( uint32_t id , Type type ) ; <nl> - void updateLocalRefPredictions ( SSATmp * , SSATmp * ) ; <nl> void killLocalsForCall ( bool ) ; <nl> - void dropLocalRefsInnerTypes ( ) ; <nl> void clearLocals ( ) ; <nl> <nl> private : <nl> mmm a / hphp / runtime / vm / jit / guard - constraint . cpp <nl> ppp b / hphp / runtime / vm / jit / guard - constraint . cpp <nl> bool typeFitsConstraint ( Type t , GuardConstraint gc ) { <nl> / / enough . <nl> return ! t . maybe ( TCounted ) | | <nl> t . subtypeOfAny ( TStr , TArr , TVec , TDict , TKeyset , TObj , <nl> - TRes , TBoxedCell , TClsMeth , TRecord ) ; <nl> + TRes , TClsMeth , TRecord ) ; <nl> <nl> case DataTypeBoxAndCountnessInit : <nl> return typeFitsConstraint ( t , DataTypeBoxAndCountness ) & & <nl> mmm a / hphp / runtime / vm / jit / inlining - decider . cpp <nl> ppp b / hphp / runtime / vm / jit / inlining - decider . cpp <nl> RegionDescPtr selectCalleeTracelet ( const Func * callee , <nl> <nl> for ( uint32_t i = 0 ; i < argTypes . size ( ) ; + + i ) { <nl> auto type = argTypes [ i ] ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> ctx . liveTypes . push_back ( { Location : : Local { i } , type } ) ; <nl> } <nl> <nl> RegionDescPtr selectCalleeRegion ( const irgen : : IRGS & irgs , <nl> / / DataTypeGeneric is used because we ' re just passing the locals into the <nl> / / callee . It ' s up to the callee to constrain further if needed . <nl> auto type = irgen : : publicTopType ( irgs , BCSPRelOffset { firstArgPos - i } ) ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> <nl> / / If we don ' t have sufficient type information to inline the region return <nl> / / early <nl> if ( type = = TBottom ) return nullptr ; <nl> - if ( ! ( type < = TCell ) & & ! ( type < = TBoxedCell ) ) { <nl> - traceRefusal ( sk , callee , folly : : sformat ( " maybe boxed arg num : { } " , i + 1 ) , <nl> - annotationsPtr ) ; <nl> - return nullptr ; <nl> - } <nl> FTRACE ( 2 , " arg { } : { } \ n " , i + 1 , type ) ; <nl> argTypes . push_back ( type ) ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / insert - asserts . cpp <nl> ppp b / hphp / runtime / vm / jit / insert - asserts . 
cpp <nl> void insertAfter ( IRInstruction * definer , IRInstruction * inst ) { <nl> void insertRefCountAsserts ( IRUnit & unit , IRInstruction & inst ) { <nl> for ( auto dst : inst . dsts ( ) ) { <nl> auto const t = dst - > type ( ) ; <nl> - if ( t < = TGen & & t . maybe ( TCounted ) ) { <nl> + if ( t < = TCell & & t . maybe ( TCounted ) ) { <nl> insertAfter ( & inst , <nl> unit . gen ( DbgAssertRefCount , inst . bcctx ( ) , ASSERT_REASON , dst ) <nl> ) ; <nl> mmm a / hphp / runtime / vm / jit / ir - builder . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - builder . cpp <nl> void IRBuilder : : appendInstruction ( IRInstruction * inst ) { <nl> m_constraints . prevTypes [ inst ] = m_state . typeOf ( * l ) ; <nl> } <nl> } <nl> - <nl> - / / And a LdRef or CheckRefInner automatically constrains the value to be a <nl> - / / boxed cell , specifically . <nl> - if ( inst - > is ( LdRef , CheckRefInner ) ) { <nl> - constrainValue ( inst - > src ( 0 ) , DataTypeSpecific ) ; <nl> - } <nl> } <nl> <nl> auto where = m_curBlock - > end ( ) ; <nl> SSATmp * IRBuilder : : preOptimizeCheckMBase ( IRInstruction * inst ) { <nl> return preOptimizeCheckLocation ( inst , Location : : MBase { } ) ; <nl> } <nl> <nl> - SSATmp * IRBuilder : : preOptimizeHintInner ( IRInstruction * inst , Location l ) { <nl> - if ( ! ( typeOf ( l , DataTypeGeneric ) < = TBoxedCell ) | | <nl> - predictedInnerType ( l ) . box ( ) < = inst - > typeParam ( ) ) { <nl> - inst - > convertToNop ( ) ; <nl> - } <nl> - return nullptr ; <nl> - } <nl> - <nl> - SSATmp * IRBuilder : : preOptimizeHintLocInner ( IRInstruction * inst ) { <nl> - return preOptimizeHintInner ( inst , loc ( inst - > extra < HintLocInner > ( ) - > locId ) ) ; <nl> - } <nl> - <nl> - SSATmp * IRBuilder : : preOptimizeHintMBaseInner ( IRInstruction * inst ) { <nl> - return preOptimizeHintInner ( inst , Location : : MBase { } ) ; <nl> - } <nl> - <nl> SSATmp * IRBuilder : : preOptimizeAssertTypeOp ( IRInstruction * inst , <nl> const Type oldType , <nl> SSATmp * oldVal , <nl> SSATmp * IRBuilder : : preOptimizeAssertTypeOp ( IRInstruction * inst , <nl> <nl> / / Eliminate this AssertTypeOp if : <nl> / / 1 ) oldType is at least as good as newType and : <nl> - / / a ) typeParam = = Gen <nl> + / / a ) typeParam = = Cell <nl> / / b ) oldVal is from a DefConst <nl> / / c ) oldType . hasConstVal ( ) <nl> / / The AssertType will never be useful for guard constraining in these <nl> SSATmp * IRBuilder : : preOptimizeAssertTypeOp ( IRInstruction * inst , <nl> / / 2 ) The source instruction is known to be another assert that ' s at least <nl> / / as good as this one . <nl> if ( ( oldType < = newType & & <nl> - ( inst - > typeParam ( ) = = TGen | | <nl> + ( inst - > typeParam ( ) = = TCell | | <nl> ( oldVal & & oldVal - > inst ( ) - > is ( DefConst ) ) | | <nl> oldType . hasConstVal ( ) ) ) | | <nl> ( srcInst & & <nl> SSATmp * IRBuilder : : preOptimizeLdFrameCls ( IRInstruction * inst ) { <nl> SSATmp * IRBuilder : : preOptimize ( IRInstruction * inst ) { <nl> # define X ( op ) case op : return preOptimize # # op ( inst ) ; <nl> switch ( inst - > op ( ) ) { <nl> - X ( HintLocInner ) <nl> - X ( HintMBaseInner ) <nl> X ( AssertType ) <nl> X ( AssertLoc ) <nl> X ( AssertStk ) <nl> Type IRBuilder : : typeOf ( Location l , GuardConstraint gc ) { <nl> return m_state . typeOf ( l ) ; <nl> } <nl> <nl> - Type IRBuilder : : predictedInnerType ( Location l ) const { <nl> - auto const ty = m_state . predictedTypeOf ( l ) ; <nl> - assertx ( ty < = TBoxedCell ) ; <nl> - return ldRefReturn ( ty . 
unbox ( ) ) ; <nl> - } <nl> - <nl> - Type IRBuilder : : predictedLocalInnerType ( uint32_t id ) const { <nl> - return predictedInnerType ( loc ( id ) ) ; <nl> - } <nl> - <nl> - Type IRBuilder : : predictedStackInnerType ( IRSPRelOffset offset ) const { <nl> - return predictedInnerType ( stk ( offset ) ) ; <nl> - } <nl> - <nl> - Type IRBuilder : : predictedMBaseInnerType ( ) const { <nl> - auto const ty = m_state . mbase ( ) . predictedType ; <nl> - assertx ( ty < = TBoxedCell ) ; <nl> - return ldRefReturn ( ty . unbox ( ) ) ; <nl> - } <nl> - <nl> / * <nl> * Wrap a local or stack ID into a Location . <nl> * / <nl> mmm a / hphp / runtime / vm / jit / ir - builder . h <nl> ppp b / hphp / runtime / vm / jit / ir - builder . h <nl> struct IRBuilder { <nl> SSATmp * valueOf ( Location l , GuardConstraint gc ) ; <nl> Type typeOf ( Location l , GuardConstraint gc ) ; <nl> <nl> - / * <nl> - * Helper for unboxing predicted types . <nl> - * <nl> - * @ returns : ldRefReturn ( fs ( ) . predictedTypeOf ( location ) . unbox ( ) ) <nl> - * / <nl> - Type predictedInnerType ( Location l ) const ; <nl> - Type predictedLocalInnerType ( uint32_t id ) const ; <nl> - Type predictedStackInnerType ( IRSPRelOffset ) const ; <nl> - Type predictedMBaseInnerType ( ) const ; <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / * <nl> * Guard relaxation . <nl> struct IRBuilder { <nl> SSATmp * preOptimizeCheckLoc ( IRInstruction * ) ; <nl> SSATmp * preOptimizeCheckStk ( IRInstruction * ) ; <nl> SSATmp * preOptimizeCheckMBase ( IRInstruction * ) ; <nl> - SSATmp * preOptimizeHintInner ( IRInstruction * , Location ) ; <nl> - SSATmp * preOptimizeHintLocInner ( IRInstruction * ) ; <nl> - SSATmp * preOptimizeHintMBaseInner ( IRInstruction * ) ; <nl> SSATmp * preOptimizeAssertTypeOp ( IRInstruction * inst , <nl> Type oldType , <nl> SSATmp * oldVal , <nl> mmm a / hphp / runtime / vm / jit / ir - instruction . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - instruction . cpp <nl> bool consumesRefImpl ( const IRInstruction * inst , int srcNo ) { <nl> return srcNo = = 1 ; <nl> <nl> case ArraySet : <nl> - case ArraySetRef : <nl> case VecSet : <nl> - case VecSetRef : <nl> case DictSet : <nl> - case DictSetRef : <nl> case AddNewElem : <nl> case AddNewElemKeyset : <nl> case AddNewElemVec : <nl> Type thisTypeFromFunc ( const Func * func ) { <nl> <nl> namespace { <nl> <nl> - Type unboxPtr ( Type t ) { <nl> - assertx ( t < = TPtrToGen | | t < = TLvalToGen ) ; <nl> - auto const mcell = t & TMemToCell ; <nl> - auto const mref = t & TMemToBoxedInitCell ; <nl> - return mref . deref ( ) . inner ( ) . mem ( t . memKind ( ) , Ptr : : Ref ) | mcell ; <nl> - } <nl> - <nl> - Type boxPtr ( Type t ) { <nl> - assertx ( t < = TPtrToGen | | t < = TLvalToGen ) ; <nl> - auto const rawBoxed = t . deref ( ) . unbox ( ) . box ( ) ; <nl> - auto const noNull = rawBoxed - TBoxedUninit ; <nl> - return noNull . mem ( t . memKind ( ) , t . ptrKind ( ) - Ptr : : Ref ) ; <nl> - } <nl> - <nl> Type allocObjReturn ( const IRInstruction * inst ) { <nl> switch ( inst - > op ( ) ) { <nl> case ConstructClosure : <nl> Type keysetElemReturn ( const IRInstruction * inst ) { <nl> <nl> Type setElemReturn ( const IRInstruction * inst ) { <nl> assertx ( inst - > op ( ) = = SetElem ) ; <nl> - auto baseType = inst - > src ( minstrBaseIdx ( inst - > op ( ) ) ) - > type ( ) . 
strip ( ) ; <nl> + auto baseType = inst - > src ( minstrBaseIdx ( inst - > op ( ) ) ) - > type ( ) . derefIfPtr ( ) ; <nl> <nl> / / If the base is a Str , the result will always be a StaticStr ( or <nl> / / an exception ) . If the base might be a str , the result wil be <nl> Type memoKeyReturn ( const IRInstruction * inst ) { <nl> <nl> Type ptrToLvalReturn ( const IRInstruction * inst ) { <nl> auto const ptr = inst - > src ( 0 ) - > type ( ) ; <nl> - assertx ( ptr < = TPtrToGen ) ; <nl> + assertx ( ptr < = TPtrToCell ) ; <nl> return ptr . deref ( ) . mem ( Mem : : Lval , ptr . ptrKind ( ) ) ; <nl> } <nl> <nl> Type ptrToLvalReturn ( const IRInstruction * inst ) { <nl> Type ptrIterReturn ( const IRInstruction * inst ) { <nl> if ( inst - > is ( AdvanceMixedPtrIter , AdvancePackedPtrIter ) ) { <nl> auto const ptr = inst - > src ( 0 ) - > type ( ) ; <nl> - assertx ( ptr < = TPtrToElemGen ) ; <nl> + assertx ( ptr < = TPtrToElemCell ) ; <nl> return ptr ; <nl> } <nl> assertx ( inst - > is ( GetMixedPtrIter , GetPackedPtrIter ) ) ; <nl> Type ptrIterReturn ( const IRInstruction * inst ) { <nl> if ( arr < = TArr ) return arrElemType ( arr , TInt | TStr , inst - > ctx ( ) ) . first ; <nl> if ( arr < = TVec ) return vecElemType ( arr , TInt , inst - > ctx ( ) ) . first ; <nl> if ( arr < = TDict ) return dictElemType ( arr , TInt | TStr ) . first ; <nl> - return TGen ; <nl> + return TCell ; <nl> } ( ) ; <nl> return value . ptr ( Ptr : : Elem ) ; <nl> } <nl> <nl> Type ptrIterValReturn ( const IRInstruction * inst ) { <nl> auto const ptr = inst - > src ( 0 ) - > type ( ) ; <nl> - assertx ( ptr < = TPtrToElemGen ) ; <nl> + assertx ( ptr < = TPtrToElemCell ) ; <nl> return ptr . deref ( ) ; <nl> } <nl> <nl> Type outputType ( const IRInstruction * inst , int / * dstId * / ) { <nl> } \ <nl> return TCls ; \ <nl> } <nl> - # define DUnboxPtr return unboxPtr ( inst - > src ( 0 ) - > type ( ) ) ; <nl> - # define DBoxPtr return boxPtr ( inst - > src ( 0 ) - > type ( ) ) ; <nl> # define DAllocObj return allocObjReturn ( inst ) ; <nl> # define DArrElem return arrElemReturn ( inst ) ; <nl> # define DVecElem return vecElemReturn ( inst ) ; <nl> Type outputType ( const IRInstruction * inst , int / * dstId * / ) { <nl> # undef DParamMayRelax <nl> # undef DParam <nl> # undef DLdObjCls <nl> - # undef DUnboxPtr <nl> - # undef DBoxPtr <nl> # undef DAllocObj <nl> # undef DArrElem <nl> # undef DVecElem <nl> mmm a / hphp / runtime / vm / jit / ir - opcode . cpp <nl> ppp b / hphp / runtime / vm / jit / ir - opcode . 
cpp <nl> TRACE_SET_MOD ( hhir ) ; <nl> # define DParamMayRelax ( t ) HasDest <nl> # define DParam ( t ) HasDest <nl> # define DLdObjCls HasDest <nl> - # define DUnboxPtr HasDest <nl> - # define DBoxPtr HasDest <nl> # define DAllocObj HasDest <nl> # define DArrElem HasDest <nl> # define DVecElem HasDest <nl> OpInfo g_opInfo [ ] = { <nl> # undef DParamMayRelax <nl> # undef DParam <nl> # undef DLdObjCls <nl> - # undef DUnboxPtr <nl> - # undef DBoxPtr <nl> # undef DArrElem <nl> # undef DVecElem <nl> # undef DDictElem <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case ArrayAdd : <nl> case ArrayGet : <nl> case ArraySet : <nl> - case ArraySetRef : <nl> case BaseG : <nl> case Call : <nl> case CallBuiltin : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case DictAddElemStrKey : <nl> case DictGet : <nl> case DictSet : <nl> - case DictSetRef : <nl> case ElemArrayD : <nl> case ElemArrayU : <nl> case ElemArrayX : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case UnsetElem : <nl> case UnsetProp : <nl> case VecSet : <nl> - case VecSetRef : <nl> case VectorSet : <nl> case VerifyParamCallable : <nl> case VerifyParamCls : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case AsyncSwitchFast : <nl> case BeginCatch : <nl> case BeginInlining : <nl> - case Box : <nl> - case BoxPtr : <nl> case Ceil : <nl> case CheckArrayCOW : <nl> case CheckCold : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case CheckPackedArrayDataBounds : <nl> case CheckRange : <nl> case CheckRDSInitialized : <nl> - case CheckRefInner : <nl> case CheckInOuts : <nl> case CheckSmashableClass : <nl> case CheckStk : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case GtStr : <nl> case GtStrInt : <nl> case HasToString : <nl> - case HintLocInner : <nl> - case HintMBaseInner : <nl> - case HintStkInner : <nl> case IncProfCounter : <nl> case IncRef : <nl> case IncStat : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case LdPairBase : <nl> case LdPropAddr : <nl> case LdRDSAddr : <nl> - case LdRef : <nl> case LdRetVal : <nl> case LdSSwitchDestFast : <nl> case LdSmashable : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case StMem : <nl> case StMIPropState : <nl> case StOutValue : <nl> - case StRef : <nl> case StrictlyIntegerConv : <nl> case StringIsset : <nl> case StStk : <nl> bool opcodeMayRaise ( Opcode opc ) { <nl> case SubInt : <nl> case SubIntO : <nl> case SyncReturnBC : <nl> - case UnboxPtr : <nl> case Unreachable : <nl> case UnwindCheckSideExit : <nl> case VecFirst : <nl> mmm a / hphp / runtime / vm / jit / ir - opcode . h <nl> ppp b / hphp / runtime / vm / jit / ir - opcode . h <nl> struct SSATmp ; <nl> * DParam ( t ) single dst has type of the instruction ' s type parameter , <nl> * which must be a subtype of t <nl> * DParamMayRelax ( t ) like DParam , except type may relax <nl> - * DUnboxPtr Unboxed PtrTo * T ; adds possibility of pointing into a ref <nl> - * DBoxPtr Boxed PtrTo * T <nl> * DAllocObj single dst has a type of a newly allocated object ; may be a <nl> * specialized object type if the class is known <nl> * DArrPacked single dst has a packed array type <nl> mmm a / hphp / runtime / vm / jit / irgen - arith . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - arith . 
cpp <nl> void emitSetOpL ( IRGS & env , int32_t id , SetOpOp subop ) { <nl> return ; <nl> } <nl> <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> - auto loc = ldLocInner ( env , id , ldrefExit , ldPMExit , DataTypeGeneric ) ; <nl> + auto loc = ldLoc ( env , id , ldPMExit , DataTypeGeneric ) ; <nl> <nl> if ( * subOpc = = Op : : Concat ) { <nl> / * <nl> void emitSetOpL ( IRGS & env , int32_t id , SetOpOp subop ) { <nl> auto const val = popC ( env ) ; <nl> env . irb - > constrainValue ( loc , DataTypeSpecific ) ; <nl> implConcat ( env , val , loc , [ & ] ( SSATmp * result ) { <nl> - / * <nl> - * Null exit block for ' ldrefExit ' because we won ' t actually need to <nl> - * reload the inner cell since we are doing a stLocNRC . ( Note that the <nl> - * inner cell may have changed type if we re - entered during Concat . ) <nl> - * <nl> - * We can ' t put a non - null block here either , because it may need to <nl> - * side - exit and we ' ve already made observable progress executing this <nl> - * instruction . If we ever change ConcatStrFoo not to decref its sources <nl> - * we ' ll need to address this ( or punt on a boxed source ) . <nl> - * / <nl> - pushIncRef ( env , stLocNRC ( env , id , nullptr , ldPMExit , result ) ) ; <nl> + pushIncRef ( env , stLocNRC ( env , id , ldPMExit , result ) ) ; <nl> } ) ; <nl> return ; <nl> } <nl> void emitSetOpL ( IRGS & env , int32_t id , SetOpOp subop ) { <nl> auto const result = opc = = AddIntO | | opc = = SubIntO | | opc = = MulIntO <nl> ? gen ( env , opc , exitSlow , loc , val ) <nl> : gen ( env , opc , loc , val ) ; <nl> - pushStLoc ( env , id , ldrefExit , ldPMExit , result ) ; <nl> + pushStLoc ( env , id , ldPMExit , result ) ; <nl> } <nl> <nl> void emitIncDecL ( IRGS & env , int32_t id , IncDecOp subop ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const src = ldLocInnerWarn ( <nl> + auto const src = ldLocWarn ( <nl> env , <nl> id , <nl> - ldrefExit , <nl> ldPMExit , <nl> DataTypeSpecific <nl> ) ; <nl> void emitIncDecL ( IRGS & env , int32_t id , IncDecOp subop ) { <nl> pushIncRef ( env , isPre ( subop ) ? result : src ) ; <nl> / / Update marker to ensure newly - pushed value isn ' t clobbered by DecRef . <nl> updateMarker ( env ) ; <nl> - stLoc ( env , id , ldrefExit , ldPMExit , result ) ; <nl> + stLoc ( env , id , ldPMExit , result ) ; <nl> return ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / irgen - basic . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - basic . 
cpp <nl> void emitClassGetTS ( IRGS & env ) { <nl> } <nl> <nl> void emitCGetL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const loc = ldLocInnerWarn ( <nl> + auto const loc = ldLocWarn ( <nl> env , <nl> id , <nl> - ldrefExit , <nl> ldPMExit , <nl> DataTypeBoxAndCountnessInit <nl> ) ; <nl> void emitCGetL ( IRGS & env , int32_t id ) { <nl> } <nl> <nl> void emitCGetQuietL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> pushIncRef ( <nl> env , <nl> [ & ] { <nl> - auto const loc = ldLocInner ( <nl> + auto const loc = ldLoc ( <nl> env , <nl> id , <nl> - ldrefExit , <nl> ldPMExit , <nl> DataTypeBoxAndCountnessInit <nl> ) ; <nl> void emitCGetQuietL ( IRGS & env , int32_t id ) { <nl> } <nl> <nl> void emitCUGetL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - pushIncRef ( env , ldLocInner ( env , id , ldrefExit , ldPMExit , DataTypeGeneric ) ) ; <nl> + pushIncRef ( env , ldLoc ( env , id , ldPMExit , DataTypeGeneric ) ) ; <nl> } <nl> <nl> void emitPushL ( IRGS & env , int32_t id ) { <nl> void emitPushL ( IRGS & env , int32_t id ) { <nl> } <nl> <nl> void emitCGetL2 ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> auto const oldTop = pop ( env , DataTypeGeneric ) ; <nl> - auto const val = ldLocInnerWarn ( <nl> + auto const val = ldLocWarn ( <nl> env , <nl> id , <nl> - ldrefExit , <nl> ldPMExit , <nl> DataTypeBoxAndCountnessInit <nl> ) ; <nl> void emitUnsetL ( IRGS & env , int32_t id ) { <nl> } <nl> <nl> void emitSetL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> <nl> / / since we ' re just storing the value in a local , this function doesn ' t care <nl> / / about the type of the value . stLoc needs to IncRef the value so it may <nl> / / constrain it further . <nl> auto const src = popC ( env , DataTypeGeneric ) ; <nl> - pushStLoc ( env , id , ldrefExit , ldPMExit , src ) ; <nl> + pushStLoc ( env , id , ldPMExit , src ) ; <nl> } <nl> <nl> void emitInitThisLoc ( IRGS & env , int32_t id ) { <nl> void emitPopU2 ( IRGS & env ) { <nl> } <nl> <nl> void emitPopL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> auto const src = popC ( env , DataTypeGeneric ) ; <nl> - stLocMove ( env , id , ldrefExit , ldPMExit , src ) ; <nl> + stLocMove ( env , id , ldPMExit , src ) ; <nl> } <nl> <nl> void emitPopFrame ( IRGS & env , uint32_t nout ) { <nl> mmm a / hphp / runtime / vm / jit / irgen - builtin . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - builtin . cpp <nl> struct ParamPrep { <nl> SSATmp * value { nullptr } ; <nl> bool passByAddr { false } ; <nl> bool needsConversion { false } ; <nl> - bool isOutputArg { false } ; <nl> bool isInOut { false } ; <nl> } ; <nl> <nl> SSATmp * opt_shapes_idx ( IRGS & env , const ParamPrep & params ) { <nl> ) ; <nl> <nl> auto const finish = [ & ] ( SSATmp * val ) { <nl> - auto const cell = is_dict ? 
val : unbox ( env , val , nullptr ) ; <nl> - gen ( env , IncRef , cell ) ; <nl> - return cell ; <nl> + gen ( env , IncRef , val ) ; <nl> + return val ; <nl> } ; <nl> return finish ( profiledType ( env , elm , [ & ] { <nl> auto const cell = finish ( elm ) ; <nl> prepare_params ( IRGS & / * env * / , const Func * callee , SSATmp * ctx , <nl> auto & pi = callee - > params ( ) [ offset ] ; <nl> <nl> cur . value = loadParam ( offset , ty ) ; <nl> - cur . isOutputArg = pi . nativeArg & & ty = = TBoxedCell ; <nl> / / If ty > TBottom , it had some kind of type hint . <nl> - / / A by - reference parameter thats defaulted will get a plain <nl> - / / value ( typically null ) , rather than a BoxedCell ; so we still <nl> - / / need to apply a conversion there . <nl> - cur . needsConversion = cur . isOutputArg | | <nl> - ( offset < numNonDefault & & ty > TBottom ) ; <nl> + cur . needsConversion = ( offset < numNonDefault & & ty > TBottom ) ; <nl> cur . isInOut = callee - > isInOut ( offset ) ; <nl> / / We do actually mean exact type equality here . We ' re only capable of <nl> / / passing the following primitives through registers ; everything else goes <nl> jit : : vector < SSATmp * > realize_params ( IRGS & env , <nl> <nl> seenBottom | = ( param . value - > type ( ) = = TBottom ) ; <nl> <nl> - if ( param . value - > type ( ) < = TPtrToGen ) { <nl> + if ( param . value - > type ( ) < = TPtrToCell ) { <nl> ret [ argIdx + + ] = realize_param ( <nl> env , param , callee , targetTy , <nl> [ & ] ( const Type & ty , Block * fail ) - > SSATmp * { <nl> jit : : vector < SSATmp * > realize_params ( IRGS & env , <nl> if ( needDVCheck ( paramIdx , ty ) ) { <nl> dvCheck ( paramIdx , gen ( env , LdMem , ty , param . value ) ) ; <nl> } <nl> - return param . isOutputArg ? <nl> - gen ( env , LdMem , TBoxedCell , param . value ) : nullptr ; <nl> + return nullptr ; <nl> } , <nl> [ & ] ( const Type & ty ) - > SSATmp * { <nl> hint ( env , Block : : Hint : : Unlikely ) ; <nl> - if ( param . isOutputArg ) { <nl> - return cns ( env , TNullptr ) ; <nl> - } <nl> auto val = gen ( env , LdMem , TCell , param . value ) ; <nl> assertx ( ty . isKnownDataType ( ) ) ; <nl> maybeCoerceValue ( <nl> jit : : vector < SSATmp * > realize_params ( IRGS & env , <nl> return nullptr ; <nl> } , <nl> [ & ] { <nl> - if ( ! param . passByAddr & & ! param . isOutputArg ) { <nl> + if ( ! param . passByAddr ) { <nl> assertx ( targetTy = = TBool | | <nl> targetTy = = TInt | | <nl> targetTy = = TDbl | | <nl> jit : : vector < SSATmp * > realize_params ( IRGS & env , <nl> } , <nl> [ & ] ( const Type & ty ) - > SSATmp * { <nl> hint ( env , Block : : Hint : : Unlikely ) ; <nl> - if ( param . isOutputArg ) return cns ( env , TNullptr ) ; <nl> assert ( ty . isKnownDataType ( ) ) ; <nl> return maybeCoerceValue ( <nl> env , <nl> jit : : vector < SSATmp * > realize_params ( IRGS & env , <nl> * with the new value ) . <nl> * / <nl> auto v = param . value ; <nl> - if ( param . isOutputArg ) { <nl> - param . value = oldVal ; <nl> - } <nl> return v ; <nl> } ) ; <nl> continue ; <nl> Type builtinReturnType ( const Func * builtin ) { <nl> / / type . <nl> assertx ( builtin - > isCPPBuiltin ( ) ) ; <nl> <nl> - / / NB : It is * not * safe to be pessimistic here and return TGen ( or any other <nl> + / / NB : It is * not * safe to be pessimistic here and return TCell ( or any other <nl> / / approximation ) . 
The builtin ' s return type inferred here is used to control <nl> / / code - gen when lowering the builtin call to vasm and must be no more general <nl> / / than the HNI declaration ( if present ) . <nl> void implArrayIdx ( IRGS & env ) { <nl> ) ; <nl> <nl> auto finish = [ & ] ( SSATmp * tmp ) { <nl> - auto const value = unbox ( env , tmp , nullptr ) ; <nl> - pushIncRef ( env , value ) ; <nl> + pushIncRef ( env , tmp ) ; <nl> decRef ( env , base ) ; <nl> decRef ( env , key ) ; <nl> decRef ( env , def ) ; <nl> void emitGetMemoKeyL ( IRGS & env , int32_t locId ) { <nl> DEBUG_ONLY auto const func = curFunc ( env ) ; <nl> assertx ( func - > isMemoizeWrapper ( ) ) ; <nl> <nl> - auto const value = ldLocInnerWarn ( <nl> + auto const value = ldLocWarn ( <nl> env , <nl> locId , <nl> - makeExit ( env ) , <nl> nullptr , <nl> DataTypeSpecific <nl> ) ; <nl> mmm a / hphp / runtime / vm / jit / irgen - create . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - create . cpp <nl> void initObjProps ( IRGS & env , const Class * cls , SSATmp * obj ) { <nl> env , <nl> LdPropAddr , <nl> ByteOffsetData { ( ptrdiff_t ) ( cls - > declPropOffset ( slot ) ) } , <nl> - TLvalToPropGen , <nl> + TLvalToPropCell , <nl> obj <nl> ) ; <nl> gen ( env , StMem , addr , val ) ; <nl> void emitNewKeysetArray ( IRGS & env , uint32_t numArgs ) { <nl> } <nl> <nl> void emitNewLikeArrayL ( IRGS & env , int32_t id , uint32_t capacity ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const ld = ldLocInner ( env , id , ldrefExit , ldPMExit , DataTypeSpecific ) ; <nl> + auto const ld = ldLoc ( env , id , ldPMExit , DataTypeSpecific ) ; <nl> <nl> SSATmp * arr ; <nl> if ( ld - > isA ( TArr ) ) { <nl> mmm a / hphp / runtime / vm / jit / irgen - guards . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - guards . cpp <nl> namespace { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - / / If its known that the location doesn ' t contain a boxed value , then everything <nl> - / / after the check should be unreachable . Bail out now to avoid asserting on <nl> - / / incompatible types . This can happen if we ' re inlining and one of the <nl> - / / arguments has a type which doesn ' t match what we previously profiled ( the <nl> - / / guard will always fail ) . <nl> - bool haltIfNotBoxed ( IRGS & env , const Location & loc ) { <nl> - auto const knownType = env . irb - > fs ( ) . typeOf ( loc ) ; <nl> - if ( ! knownType . maybe ( TBoxedInitCell ) ) { <nl> - gen ( env , Unreachable , ASSERT_REASON ) ; <nl> - return true ; <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> void checkTypeLocal ( IRGS & env , uint32_t locId , Type type , <nl> Offset dest , bool outerOnly ) { <nl> auto exit = env . irb - > guardFailBlock ( ) ; <nl> if ( exit = = nullptr ) exit = makeExit ( env , dest ) ; <nl> <nl> - if ( type < = TCell ) { <nl> - gen ( env , CheckLoc , type , LocalId ( locId ) , exit , fp ( env ) ) ; <nl> - return ; <nl> - } <nl> - assertx ( type < = TBoxedInitCell ) ; <nl> - <nl> - gen ( env , CheckLoc , TBoxedInitCell , LocalId ( locId ) , exit , fp ( env ) ) ; <nl> - <nl> - if ( haltIfNotBoxed ( env , Location : : Local { locId } ) ) return ; <nl> - <nl> - gen ( env , HintLocInner , type , LocalId { locId } , fp ( env ) ) ; <nl> - <nl> - auto const innerType = env . irb - > predictedLocalInnerType ( locId ) ; <nl> - if ( ! 
outerOnly & & innerType < TInitCell ) { <nl> - env . irb - > constrainLocal ( locId , DataTypeSpecific , " HintLocInner " ) ; <nl> - auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const val = ldLoc ( env , locId , ldPMExit , DataTypeSpecific ) ; <nl> - gen ( env , CheckRefInner , innerType , exit , val ) ; <nl> - } <nl> + gen ( env , CheckLoc , type , LocalId ( locId ) , exit , fp ( env ) ) ; <nl> } <nl> <nl> void checkTypeStack ( IRGS & env , BCSPRelOffset idx , Type type , <nl> void checkTypeStack ( IRGS & env , BCSPRelOffset idx , Type type , <nl> if ( exit = = nullptr ) exit = makeExit ( env , dest ) ; <nl> <nl> auto const soff = IRSPRelOffsetData { offsetFromIRSP ( env , idx ) } ; <nl> - <nl> - if ( type < = TCell ) { <nl> - gen ( env , CheckStk , type , soff , exit , sp ( env ) ) ; <nl> - return ; <nl> - } <nl> - assertx ( type < = TBoxedInitCell ) ; <nl> - <nl> - gen ( env , CheckStk , TBoxedInitCell , soff , exit , sp ( env ) ) ; <nl> - <nl> - if ( haltIfNotBoxed ( env , Location : : Stack { offsetFromFP ( env , soff . offset ) } ) ) { <nl> - return ; <nl> - } <nl> - <nl> - gen ( env , HintStkInner , type , soff , sp ( env ) ) ; <nl> - <nl> - auto const innerType = env . irb - > predictedStackInnerType ( soff . offset ) ; <nl> - if ( ! outerOnly & & innerType < TInitCell ) { <nl> - env . irb - > constrainStack ( soff . offset , DataTypeSpecific ) ; <nl> - auto const stk = gen ( env , LdStk , TBoxedInitCell , soff , sp ( env ) ) ; <nl> - gen ( env , CheckRefInner , innerType , exit , stk ) ; <nl> - } <nl> + gen ( env , CheckStk , type , soff , exit , sp ( env ) ) ; <nl> } <nl> <nl> void checkTypeMBase ( IRGS & env , Type type , Offset dest , bool outerOnly ) { <nl> auto exit = env . irb - > guardFailBlock ( ) ; <nl> if ( exit = = nullptr ) exit = makeExit ( env , dest ) ; <nl> <nl> - auto const mbr = gen ( env , LdMBase , TLvalToGen ) ; <nl> - <nl> - if ( type < = TCell ) { <nl> - gen ( env , CheckMBase , type , exit , mbr ) ; <nl> - return ; <nl> - } <nl> - assertx ( type < = TBoxedInitCell ) ; <nl> - <nl> - gen ( env , CheckMBase , TBoxedInitCell , exit , mbr ) ; <nl> - <nl> - if ( haltIfNotBoxed ( env , Location : : MBase { } ) ) return ; <nl> - <nl> - gen ( env , HintMBaseInner , type ) ; <nl> - <nl> - auto const innerType = env . irb - > predictedMBaseInnerType ( ) ; <nl> - if ( ! outerOnly & & innerType < TInitCell ) { <nl> - env . irb - > constrainLocation ( Location : : MBase { } , DataTypeSpecific ) ; <nl> - auto const basePtr = gen ( env , LdMBase , TLvalToGen ) ; <nl> - auto const base = gen ( env , LdMem , TBoxedInitCell , basePtr ) ; <nl> - gen ( env , CheckRefInner , innerType , exit , base ) ; <nl> - } <nl> + auto const mbr = gen ( env , LdMBase , TLvalToCell ) ; <nl> + gen ( env , CheckMBase , type , exit , mbr ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> static void assertTypeMBase ( IRGS & env , Type type ) { <nl> } <nl> <nl> void assertTypeLocation ( IRGS & env , const Location & loc , Type type ) { <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> <nl> switch ( loc . tag ( ) ) { <nl> case LTag : : Stack : <nl> void assertTypeLocation ( IRGS & env , const Location & loc , Type type ) { <nl> <nl> void checkType ( IRGS & env , const Location & loc , <nl> Type type , Offset dest , bool outerOnly ) { <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> <nl> switch ( loc . 
tag ( ) ) { <nl> case LTag : : Stack : <nl> void checkType ( IRGS & env , const Location & loc , <nl> <nl> void predictType ( IRGS & env , const Location & loc , Type type ) { <nl> FTRACE ( 1 , " predictType { } : { } \ n " , show ( loc ) , type ) ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> env . irb - > fs ( ) . refinePredictedType ( loc , type ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / irgen - inlining . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - inlining . cpp <nl> void beginInlining ( IRGS & env , <nl> jit : : vector < SSATmp * > params { numArgs + 1 } ; <nl> <nl> for ( unsigned i = 0 ; i < numArgs ; + + i ) { <nl> - params [ numArgs - i - 1 ] = popF ( env ) ; <nl> + params [ numArgs - i - 1 ] = popC ( env ) ; <nl> } <nl> <nl> / / NB : Now that we ' ve popped the callee ' s arguments off the stack <nl> mmm a / hphp / runtime / vm / jit / irgen - internal . h <nl> ppp b / hphp / runtime / vm / jit / irgen - internal . h <nl> void ifNonNull ( IRGS & env , SSATmp * tmp , Then then ) { <nl> * <nl> * <nl> * B0 : <nl> - * x1 : Int : = CheckType < Int > ( x0 : Gen ) - > B1 <nl> + * x1 : Int : = CheckType < Int > ( x0 : Cell ) - > B1 <nl> * [ here refined = = = x1 ] <nl> * phijmp < something > - > B3 <nl> * <nl> * B1 : <nl> - * x2 : Bool : = CheckType < Bool > ( x0 : Gen ) - > B2 <nl> + * x2 : Bool : = CheckType < Bool > ( x0 : Cell ) - > B2 <nl> * . . . <nl> * phijmp < something > - > B3 <nl> * <nl> inline SSATmp * popC ( IRGS & env , GuardConstraint gc = DataTypeSpecific ) { <nl> } <nl> <nl> inline SSATmp * popCU ( IRGS & env ) { return assertType ( pop ( env ) , TCell ) ; } <nl> - inline SSATmp * popF ( IRGS & env ) { return assertType ( pop ( env ) , TGen ) ; } <nl> inline SSATmp * popU ( IRGS & env ) { return assertType ( pop ( env ) , TUninit ) ; } <nl> <nl> inline void discard ( IRGS & env , uint32_t n = 1 ) { <nl> inline SSATmp * topC ( IRGS & env , BCSPRelOffset i = BCSPRelOffset { 0 } , <nl> return assertType ( top ( env , i , gc ) , TCell ) ; <nl> } <nl> <nl> - inline SSATmp * topF ( IRGS & env , BCSPRelOffset i = BCSPRelOffset { 0 } , <nl> - GuardConstraint gc = DataTypeSpecific ) { <nl> - return assertType ( top ( env , i , gc ) , TGen ) ; <nl> - } <nl> - <nl> - inline SSATmp * topR ( IRGS & env , BCSPRelOffset i = BCSPRelOffset { 0 } ) { <nl> - return assertType ( top ( env , i ) , TGen ) ; <nl> - } <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> inline BCMarker makeMarker ( IRGS & env , Offset bcOff ) { <nl> inline SSATmp * ldCtxCls ( IRGS & env ) { <nl> return gen ( env , LdObjClass , ctx ) ; <nl> } <nl> <nl> - inline SSATmp * unbox ( IRGS & env , SSATmp * val , Block * exit ) { <nl> - auto const type = val - > type ( ) ; <nl> - / / If we don ' t have an exit the LdRef can ' t be a guard . <nl> - auto const inner = exit ? ( type & TBoxedCell ) . inner ( ) : TInitCell ; <nl> - <nl> - if ( type < = TCell ) { <nl> - env . irb - > constrainValue ( val , DataTypeBoxAndCountness ) ; <nl> - return val ; <nl> - } <nl> - if ( type < = TBoxedCell ) { <nl> - gen ( env , CheckRefInner , inner , exit , val ) ; <nl> - return gen ( env , LdRef , inner , val ) ; <nl> - } <nl> - <nl> - return cond ( <nl> - env , <nl> - [ & ] ( Block * taken ) { <nl> - return gen ( env , CheckType , TBoxedCell , taken , val ) ; <nl> - } , <nl> - [ & ] ( SSATmp * box ) { / / Next : val is a ref <nl> - env . 
irb - > constrainValue ( box , DataTypeBoxAndCountness ) ; <nl> - gen ( env , CheckRefInner , inner , exit , box ) ; <nl> - return gen ( env , LdRef , inner , box ) ; <nl> - } , <nl> - [ & ] { / / Taken : val is unboxed <nl> - return gen ( env , AssertType , TCell , val ) ; <nl> - } <nl> - ) ; <nl> - } <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Other common helpers <nl> <nl> inline SSATmp * ldLoc ( IRGS & env , <nl> assertx ( ! type . isSpecialized ( ) ) ; <nl> assertx ( type = = type . dropConstVal ( ) ) ; <nl> <nl> - / / We don ' t support locals being type Gen , so if we ever get into such a <nl> - / / case , we need to punt . <nl> - if ( type = = TGen ) PUNT ( LdGbl - Gen ) ; <nl> return gen ( env , LdLocPseudoMain , type , exit , LocalId ( locId ) , fp ( env ) ) ; <nl> } <nl> <nl> - return gen ( env , LdLoc , TGen , LocalId ( locId ) , fp ( env ) ) ; <nl> - } <nl> - <nl> - / * <nl> - * Load a local , and if it ' s boxed dereference to get the inner cell . <nl> - * <nl> - * Note : For boxed values , this will generate a LdRef instruction which takes <nl> - * the given exit trace in case the inner type doesn ' t match the tracked <nl> - * type for this local . This check may be optimized away if we can <nl> - * determine that the inner type must match the tracked type . <nl> - * / <nl> - inline SSATmp * ldLocInner ( IRGS & env , <nl> - uint32_t locId , <nl> - Block * ldrefExit , <nl> - Block * ldPMExit , <nl> - GuardConstraint gc ) { <nl> - / / We only care if the local is KindOfRef or not . DataTypeBoxAndCountness <nl> - / / gets us that . <nl> - auto const loc = ldLoc ( env , locId , ldPMExit , DataTypeBoxAndCountness ) ; <nl> - <nl> - if ( loc - > type ( ) < = TCell ) { <nl> - env . irb - > constrainValue ( loc , gc ) ; <nl> - return loc ; <nl> - } <nl> - <nl> - / / Handle the Boxed case manually outside of unbox ( ) so we can use the <nl> - / / local ' s predicted type . <nl> - if ( loc - > type ( ) < = TBoxedCell ) { <nl> - auto const predTy = env . irb - > predictedLocalInnerType ( locId ) ; <nl> - gen ( env , CheckRefInner , predTy , ldrefExit , loc ) ; <nl> - return gen ( env , LdRef , predTy , loc ) ; <nl> - } ; <nl> - <nl> - return unbox ( env , loc , ldrefExit ) ; <nl> + return gen ( env , LdLoc , TCell , LocalId ( locId ) , fp ( env ) ) ; <nl> } <nl> <nl> / * <nl> inline SSATmp * ldLocInner ( IRGS & env , <nl> * caller requires the catch trace to be generated at a point earlier than when <nl> * it calls this function . <nl> * / <nl> - inline SSATmp * ldLocInnerWarn ( IRGS & env , <nl> - uint32_t id , <nl> - Block * ldrefExit , <nl> - Block * ldPMExit , <nl> - GuardConstraint gc ) { <nl> - auto const locVal = ldLocInner ( env , id , ldrefExit , ldPMExit , gc ) ; <nl> + inline SSATmp * ldLocWarn ( IRGS & env , <nl> + uint32_t id , <nl> + Block * ldPMExit , <nl> + GuardConstraint gc ) { <nl> + auto const locVal = ldLoc ( env , id , ldPMExit , gc ) ; <nl> auto const varName = curFunc ( env ) - > localVarName ( id ) ; <nl> <nl> auto warnUninit = [ & ] { <nl> inline SSATmp * stLocRaw ( IRGS & env , uint32_t id , SSATmp * fp , SSATmp * newVal ) { <nl> * increment . If the caller of this function needs to push the stored value on <nl> * stack , it should set ' incRefNew ' so that ' newVal ' will have its ref - count <nl> * incremented . <nl> - * <nl> - * Pre : ! newVal - > type ( ) . maybe ( TBoxedCell ) <nl> - * Pre : exit ! 
= nullptr if the local may be boxed <nl> * / <nl> inline SSATmp * stLocImpl ( IRGS & env , <nl> uint32_t id , <nl> - Block * ldrefExit , <nl> Block * ldPMExit , <nl> SSATmp * newVal , <nl> bool decRefOld , <nl> bool incRefNew ) { <nl> - assertx ( ! newVal - > type ( ) . maybe ( TBoxedCell ) ) ; <nl> - <nl> auto const cat = decRefOld ? DataTypeBoxAndCountness : DataTypeGeneric ; <nl> auto const oldLoc = ldLoc ( env , id , ldPMExit , cat ) ; <nl> <nl> - auto unboxed_case = [ & ] { <nl> - stLocRaw ( env , id , fp ( env ) , newVal ) ; <nl> - if ( incRefNew ) gen ( env , IncRef , newVal ) ; <nl> - if ( decRefOld ) decRef ( env , oldLoc ) ; <nl> - return newVal ; <nl> - } ; <nl> - <nl> - auto boxed_case = [ & ] ( SSATmp * box ) { <nl> - / / It ' s important that the IncRef happens after the guard on the inner type <nl> - / / of the ref , since it may side - exit . <nl> - auto const predTy = env . irb - > predictedLocalInnerType ( id ) ; <nl> - <nl> - / / We may not have a ldrefExit , but if so we better not be loading the inner <nl> - / / ref . <nl> - if ( ldrefExit = = nullptr ) always_assert ( ! decRefOld ) ; <nl> - if ( ldrefExit ! = nullptr ) gen ( env , CheckRefInner , predTy , ldrefExit , box ) ; <nl> - <nl> - auto const innerCell = decRefOld ? gen ( env , LdRef , predTy , box ) : nullptr ; <nl> - gen ( env , StRef , box , newVal ) ; <nl> - if ( incRefNew ) gen ( env , IncRef , newVal ) ; <nl> - if ( decRefOld ) { <nl> - decRef ( env , innerCell ) ; <nl> - env . irb - > constrainValue ( box , DataTypeBoxAndCountness ) ; <nl> - } <nl> - return newVal ; <nl> - } ; <nl> - <nl> - if ( oldLoc - > type ( ) < = TCell ) return unboxed_case ( ) ; <nl> - if ( oldLoc - > type ( ) < = TBoxedCell ) return boxed_case ( oldLoc ) ; <nl> - <nl> - return cond ( <nl> - env , <nl> - [ & ] ( Block * taken ) { <nl> - return gen ( env , CheckType , TBoxedCell , taken , oldLoc ) ; <nl> - } , <nl> - boxed_case , <nl> - unboxed_case <nl> - ) ; <nl> + stLocRaw ( env , id , fp ( env ) , newVal ) ; <nl> + if ( incRefNew ) gen ( env , IncRef , newVal ) ; <nl> + if ( decRefOld ) decRef ( env , oldLoc ) ; <nl> + return newVal ; <nl> } <nl> <nl> inline SSATmp * stLoc ( IRGS & env , <nl> uint32_t id , <nl> - Block * ldrefExit , <nl> Block * ldPMExit , <nl> SSATmp * newVal ) { <nl> constexpr bool decRefOld = true ; <nl> constexpr bool incRefNew = true ; <nl> - return stLocImpl ( env , id , ldrefExit , ldPMExit , newVal , decRefOld , incRefNew ) ; <nl> + return stLocImpl ( env , id , ldPMExit , newVal , decRefOld , incRefNew ) ; <nl> } <nl> <nl> inline SSATmp * stLocNRC ( IRGS & env , <nl> uint32_t id , <nl> - Block * ldrefExit , <nl> Block * ldPMExit , <nl> SSATmp * newVal ) { <nl> constexpr bool decRefOld = false ; <nl> constexpr bool incRefNew = false ; <nl> - return stLocImpl ( env , id , ldrefExit , ldPMExit , newVal , decRefOld , incRefNew ) ; <nl> + return stLocImpl ( env , id , ldPMExit , newVal , decRefOld , incRefNew ) ; <nl> } <nl> <nl> inline void stLocMove ( IRGS & env , <nl> uint32_t id , <nl> - Block * ldrefExit , <nl> Block * ldPMExit , <nl> SSATmp * newVal ) { <nl> - assertx ( ! newVal - > type ( ) . maybe ( TBoxedCell ) ) ; <nl> - <nl> auto const oldLoc = ldLoc ( env , id , ldPMExit , DataTypeBoxAndCountness ) ; <nl> <nl> - / / If the local isn ' t a ref and we ' re not in a pseudo - main , we can just move <nl> - / / newValue into the local without manipulating its ref - count . 
<nl> - auto unboxed_case = [ & ] { <nl> - if ( curFunc ( env ) - > isPseudoMain ( ) ) gen ( env , IncRef , newVal ) ; <nl> - stLocRaw ( env , id , fp ( env ) , newVal ) ; <nl> - decRef ( env , oldLoc ) ; <nl> - if ( curFunc ( env ) - > isPseudoMain ( ) ) decRef ( env , newVal ) ; <nl> - return nullptr ; <nl> - } ; <nl> - <nl> - / / However , if the local is a ref , we ' ll manipulate the ref - counts as if this <nl> - / / was a SetL , PopC pair . Otherwise , overwriting the ref ' s inner value can <nl> - / / trigger a destructor , which can then overwrite the ref ' s inner value <nl> - / / again . If we don ' t increment newVal ' s ref - count , this second overwrite <nl> - / / might trigger newVal ' s destructor , where it wouldn ' t otherwise . So , to keep <nl> - / / destructor ordering consistent with SetL , PopC , we ' ll emulate its ref - count <nl> - / / behavior . <nl> - auto boxed_case = [ & ] ( SSATmp * box ) { <nl> - / / It ' s important that the IncRef happens after the guard on the inner type <nl> - / / of the ref , since it may side - exit . <nl> - auto const predTy = env . irb - > predictedLocalInnerType ( id ) ; <nl> - <nl> - assertx ( ldrefExit ) ; <nl> - gen ( env , CheckRefInner , predTy , ldrefExit , box ) ; <nl> - <nl> - auto const innerCell = gen ( env , LdRef , predTy , box ) ; <nl> - gen ( env , StRef , box , newVal ) ; <nl> - gen ( env , IncRef , newVal ) ; <nl> - decRef ( env , innerCell ) ; <nl> - decRef ( env , newVal ) ; <nl> - env . irb - > constrainValue ( box , DataTypeBoxAndCountness ) ; <nl> - return nullptr ; <nl> - } ; <nl> - <nl> - if ( oldLoc - > type ( ) < = TCell ) { <nl> - unboxed_case ( ) ; <nl> - return ; <nl> - } <nl> - if ( oldLoc - > type ( ) < = TBoxedCell ) { <nl> - boxed_case ( oldLoc ) ; <nl> - return ; <nl> - } <nl> - <nl> - cond ( <nl> - env , <nl> - [ & ] ( Block * taken ) { <nl> - return gen ( env , CheckType , TBoxedCell , taken , oldLoc ) ; <nl> - } , <nl> - boxed_case , <nl> - unboxed_case <nl> - ) ; <nl> + if ( curFunc ( env ) - > isPseudoMain ( ) ) gen ( env , IncRef , newVal ) ; <nl> + stLocRaw ( env , id , fp ( env ) , newVal ) ; <nl> + decRef ( env , oldLoc ) ; <nl> + if ( curFunc ( env ) - > isPseudoMain ( ) ) decRef ( env , newVal ) ; <nl> } <nl> <nl> inline SSATmp * pushStLoc ( IRGS & env , <nl> uint32_t id , <nl> - Block * ldrefExit , <nl> Block * ldPMExit , <nl> SSATmp * newVal ) { <nl> constexpr bool decRefOld = true ; <nl> inline SSATmp * pushStLoc ( IRGS & env , <nl> auto const ret = stLocImpl ( <nl> env , <nl> id , <nl> - ldrefExit , <nl> ldPMExit , <nl> newVal , <nl> decRefOld , <nl> inline void decRefThis ( IRGS & env ) { <nl> decRef ( env , ctx ) ; <nl> } <nl> <nl> - template < class F > <nl> - SSATmp * boxHelper ( IRGS & env , SSATmp * value , F rewrite ) { <nl> - auto const t = value - > type ( ) ; <nl> - if ( t < = TCell ) { <nl> - if ( t < = TUninit ) { <nl> - value = cns ( env , TInitNull ) ; <nl> - } <nl> - value = gen ( env , Box , value ) ; <nl> - rewrite ( value ) ; <nl> - } else if ( t . maybe ( TCell ) ) { <nl> - value = cond ( env , <nl> - [ & ] ( Block * taken ) { <nl> - auto const ret = gen ( env , CheckType , TBoxedInitCell , <nl> - taken , value ) ; <nl> - env . 
irb - > constrainValue ( ret , DataTypeSpecific ) ; <nl> - return ret ; <nl> - } , <nl> - [ & ] ( SSATmp * box ) { / / Next : value is Boxed <nl> - return box ; <nl> - } , <nl> - [ & ] { / / Taken : value is not Boxed <nl> - auto const tmpType = t - TBoxedInitCell ; <nl> - assertx ( tmpType < = TCell ) ; <nl> - auto const tmp = gen ( env , AssertType , tmpType , value ) ; <nl> - auto const ret = gen ( env , Box , tmp ) ; <nl> - rewrite ( ret ) ; <nl> - return ret ; <nl> - } ) ; <nl> - } <nl> - return value ; <nl> - } <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> / * <nl> mmm a / hphp / runtime / vm / jit / irgen - interpone . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - interpone . cpp <nl> Type setOpResult ( Type locType , Type valType , SetOpOp op ) { <nl> switch ( op ) { <nl> case SetOpOp : : PlusEqual : <nl> case SetOpOp : : MinusEqual : <nl> - case SetOpOp : : MulEqual : return arithOpResult ( locType . unbox ( ) , valType ) ; <nl> + case SetOpOp : : MulEqual : return arithOpResult ( locType , valType ) ; <nl> case SetOpOp : : PlusEqualO : <nl> case SetOpOp : : MinusEqualO : <nl> - case SetOpOp : : MulEqualO : return arithOpOverResult ( locType . unbox ( ) , valType ) ; <nl> + case SetOpOp : : MulEqualO : return arithOpOverResult ( locType , valType ) ; <nl> case SetOpOp : : ConcatEqual : return TStr ; <nl> case SetOpOp : : PowEqual : <nl> case SetOpOp : : DivEqual : <nl> case SetOpOp : : ModEqual : return TUncountedInit ; <nl> case SetOpOp : : AndEqual : <nl> case SetOpOp : : OrEqual : <nl> - case SetOpOp : : XorEqual : return bitOpResult ( locType . unbox ( ) , valType ) ; <nl> + case SetOpOp : : XorEqual : return bitOpResult ( locType , valType ) ; <nl> case SetOpOp : : SlEqual : <nl> case SetOpOp : : SrEqual : return TInt ; <nl> } <nl> folly : : Optional < Type > interpOutputType ( IRGS & env , <nl> case OutArithO : <nl> return arithOpOverResult ( topType ( env , BCSPRelOffset { 0 } ) , <nl> topType ( env , BCSPRelOffset { 1 } ) ) ; <nl> - case OutUnknown : return TGen ; <nl> + case OutUnknown : return TCell ; <nl> <nl> case OutBitOp : <nl> return bitOpResult ( topType ( env , BCSPRelOffset { 0 } ) , <nl> folly : : Optional < Type > interpOutputType ( IRGS & env , <nl> topType ( env , BCSPRelOffset { 0 } ) , <nl> SetOpOp ( getImm ( sk . pc ( ) , 1 ) . u_OA ) ) ; <nl> case OutIncDec : { <nl> - auto ty = localType ( ) . unbox ( ) ; <nl> + auto ty = localType ( ) ; <nl> return ty < = TDbl ? ty : TCell ; <nl> } <nl> case OutNone : return folly : : none ; <nl> <nl> case OutCInput : { <nl> - auto ttype = topType ( env , BCSPRelOffset { 0 } ) ; <nl> - if ( ttype < = TCell ) return ttype ; <nl> - / / All instructions that are OutCInput or OutCInputL cannot push uninit or <nl> - / / a ref , so only specific inner types need to be checked . <nl> - if ( ttype . unbox ( ) < TInitCell ) { <nl> - checkTypeType = ttype . unbox ( ) ; <nl> - } <nl> - return TCell ; <nl> + return topType ( env , BCSPRelOffset { 0 } ) ; <nl> } <nl> <nl> case OutCInputL : { <nl> auto ltype = localType ( ) ; <nl> if ( ltype < = TCell ) return ltype ; <nl> - if ( ltype . unbox ( ) < TInitCell ) { <nl> - checkTypeType = ltype . unbox ( ) ; <nl> + if ( ltype < TInitCell ) { <nl> + checkTypeType = ltype ; <nl> } <nl> return TCell ; <nl> } <nl> interpOutputLocals ( IRGS & env , <nl> assertx ( id < kMaxHhbcImms ) ; <nl> setLocType ( getImm ( sk . pc ( ) , id ) . 
u_LA , t ) ; <nl> } ; <nl> - auto handleBoxiness = [ & ] ( Type testTy , Type useTy ) { <nl> - return testTy < = TBoxedCell ? TBoxedInitCell : <nl> - testTy . maybe ( TBoxedCell ) ? TGen : <nl> - useTy ; <nl> - } ; <nl> <nl> auto const mDefine = static_cast < unsigned char > ( MOpMode : : Define ) ; <nl> <nl> interpOutputLocals ( IRGS & env , <nl> case OpSetOpL : <nl> case OpIncDecL : { <nl> assertx ( pushedType . hasValue ( ) ) ; <nl> - auto locType = env . irb - > local ( localInputId ( sk ) , DataTypeSpecific ) . type ; <nl> - assertx ( locType < TGen | | curFunc ( env ) - > isPseudoMain ( ) ) ; <nl> <nl> auto stackType = pushedType . value ( ) ; <nl> - setImmLocType ( 0 , handleBoxiness ( locType , stackType ) ) ; <nl> + setImmLocType ( 0 , stackType ) ; <nl> break ; <nl> } <nl> <nl> interpOutputLocals ( IRGS & env , <nl> <nl> case OpSetL : <nl> case OpPopL : { <nl> - auto locType = env . irb - > local ( localInputId ( sk ) , DataTypeSpecific ) . type ; <nl> auto stackType = topType ( env , BCSPRelOffset { 0 } ) ; <nl> / / [ Set , Pop ] L preserves reffiness of a local . <nl> - setImmLocType ( 0 , handleBoxiness ( locType , stackType ) ) ; <nl> + setImmLocType ( 0 , stackType ) ; <nl> break ; <nl> } <nl> <nl> interpOutputLocals ( IRGS & env , <nl> / * fallthrough * / <nl> case OpIterInit : <nl> case OpIterNext : <nl> - setImmLocType ( 2 , TGen ) ; <nl> + setImmLocType ( 2 , TCell ) ; <nl> break ; <nl> <nl> case OpLIterInitK : <nl> interpOutputLocals ( IRGS & env , <nl> / * fallthrough * / <nl> case OpLIterInit : <nl> case OpLIterNext : <nl> - setImmLocType ( 3 , TGen ) ; <nl> + setImmLocType ( 3 , TCell ) ; <nl> break ; <nl> <nl> case OpVerifyParamTypeTS : <nl> case OpVerifyParamType : { <nl> - auto locType = env . irb - > local ( localInputId ( sk ) , DataTypeSpecific ) . type ; <nl> - setImmLocType ( 0 , handleBoxiness ( locType , TCell ) ) ; <nl> + setImmLocType ( 0 , TCell ) ; <nl> break ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / irgen - minstr . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - minstr . cpp <nl> struct PropInfo { <nl> bool isConst { false } ; <nl> bool lateInit { false } ; <nl> bool lateInitCheck { false } ; <nl> - Type knownType { TGen } ; <nl> + Type knownType { TCell } ; <nl> const HPHP : : TypeConstraint * typeConstraint { nullptr } ; <nl> const Class * objClass { nullptr } ; <nl> const Class * propClass { nullptr } ; <nl> Type knownTypeForProp ( const Class : : Prop & prop , <nl> const Class * propCls , <nl> const Class * ctx , <nl> bool ignoreLateInit ) { <nl> - auto knownType = TGen ; <nl> + auto knownType = TCell ; <nl> if ( RuntimeOption : : EvalCheckPropTypeHints > = 3 ) { <nl> knownType = typeFromPropTC ( prop . typeConstraint , propCls , ctx , false ) ; <nl> if ( ! ( prop . attrs & AttrNoImplicitNullable ) ) knownType | = TInitNull ; <nl> bool prop_ignores_tvref ( IRGS & env , SSATmp * base , const SSATmp * key ) { <nl> if ( ! base - > isA ( TObj ) | | ! base - > type ( ) . clsSpec ( ) . cls ( ) ) return false ; <nl> <nl> auto cls = base - > type ( ) . clsSpec ( ) . cls ( ) ; <nl> - auto propType = TGen ; <nl> + auto propType = TCell ; <nl> auto isDeclared = false ; <nl> auto propClass = cls ; <nl> <nl> folly : : Optional < GuardConstraint > simpleOpConstraint ( SimpleOp op ) { <nl> * refined , based on earlier tracked updates to the member base . 
<nl> * / <nl> SSATmp * ldMBase ( IRGS & env ) { <nl> - return gen ( env , LdMBase , TLvalToGen ) ; <nl> + return gen ( env , LdMBase , TLvalToCell ) ; <nl> } <nl> void stMBase ( IRGS & env , SSATmp * base ) { <nl> - if ( base - > isA ( TPtrToGen ) ) base = gen ( env , ConvPtrToLval , base ) ; <nl> - assert_flog ( base - > isA ( TLvalToGen ) , " Unexpected mbase : { } " , * base - > inst ( ) ) ; <nl> + if ( base - > isA ( TPtrToCell ) ) base = gen ( env , ConvPtrToLval , base ) ; <nl> + assert_flog ( base - > isA ( TLvalToCell ) , " Unexpected mbase : { } " , * base - > inst ( ) ) ; <nl> <nl> gen ( env , StMBase , base ) ; <nl> } <nl> SSATmp * tvRefPtr ( IRGS & env ) { <nl> <nl> SSATmp * propTvRefPtr ( IRGS & env , SSATmp * base , const SSATmp * key ) { <nl> return prop_ignores_tvref ( env , base , key ) <nl> - ? cns ( env , Type : : cns ( nullptr , TPtrToMISGen ) ) <nl> + ? cns ( env , Type : : cns ( nullptr , TPtrToMISCell ) ) <nl> : tvRefPtr ( env ) ; <nl> } <nl> <nl> SSATmp * ptrToUninit ( IRGS & env ) { <nl> } <nl> <nl> bool baseMightPromote ( const SSATmp * base ) { <nl> - auto const ty = base - > type ( ) . strip ( ) ; <nl> + auto const ty = base - > type ( ) . derefIfPtr ( ) ; <nl> return <nl> ty . maybe ( TNull ) | | <nl> ty . maybe ( Type : : cns ( false ) ) | | <nl> SSATmp * checkInitProp ( IRGS & env , <nl> bool doDefine ) { <nl> assertx ( key - > isA ( TStaticStr ) ) ; <nl> assertx ( baseAsObj - > isA ( TObj ) ) ; <nl> - assertx ( propAddr - > type ( ) < = TLvalToGen ) ; <nl> + assertx ( propAddr - > type ( ) < = TLvalToCell ) ; <nl> assertx ( ! doWarn | | ! doDefine ) ; <nl> <nl> auto const needsCheck = doWarn & & propAddr - > type ( ) . deref ( ) . maybe ( TUninit ) ; <nl> SSATmp * emitPackedArrayGet ( IRGS & env , SSATmp * base , SSATmp * key , MOpMode mode , <nl> key - > isA ( TInt ) ) ; <nl> <nl> auto finishMe = [ & ] ( SSATmp * elem ) { <nl> - auto unboxed = unbox ( env , elem , nullptr ) ; <nl> - gen ( env , IncRef , unboxed ) ; <nl> - return unboxed ; <nl> + gen ( env , IncRef , elem ) ; <nl> + return elem ; <nl> } ; <nl> <nl> auto check = [ & ] ( Block * taken ) { <nl> SSATmp * emitArrayGet ( IRGS & env , SSATmp * base , SSATmp * key , MOpMode mode , <nl> } <nl> ) ; <nl> auto finishMe = [ & ] ( SSATmp * element ) { <nl> - auto const cell = unbox ( env , element , nullptr ) ; <nl> - gen ( env , IncRef , cell ) ; <nl> - return cell ; <nl> + gen ( env , IncRef , element ) ; <nl> + return element ; <nl> } ; <nl> auto const pelem = profiledType ( env , elem , [ & ] { finish ( finishMe ( elem ) ) ; } ) ; <nl> return finishMe ( pelem ) ; <nl> void initTvRefs ( IRGS & env ) { <nl> * / <nl> void cleanTvRefs ( IRGS & env ) { <nl> for ( auto ptr : { tvRefPtr ( env ) , tvRef2Ptr ( env ) } ) { <nl> - decRef ( env , gen ( env , LdMem , TGen , ptr ) ) ; <nl> + decRef ( env , gen ( env , LdMem , TCell , ptr ) ) ; <nl> } <nl> } <nl> <nl> SSATmp * ratchetRefs ( IRGS & env , SSATmp * base ) { <nl> [ & ] { / / Taken : tvRef isn ' t Uninit . Ratchet the refs <nl> auto tvRef2 = tvRef2Ptr ( env ) ; <nl> / / Clean up tvRef2 before overwriting it . <nl> - auto const oldRef2 = gen ( env , LdMem , TGen , tvRef2 ) ; <nl> + auto const oldRef2 = gen ( env , LdMem , TCell , tvRef2 ) ; <nl> decRef ( env , oldRef2 ) ; <nl> <nl> / / Copy tvRef to tvRef2 . <nl> - auto const tvRefVal = gen ( env , LdMem , TGen , tvRef ) ; <nl> + auto const tvRefVal = gen ( env , LdMem , TCell , tvRef ) ; <nl> gen ( env , StMem , tvRef2 , tvRefVal ) ; <nl> / / Reset tvRef . 
<nl> gen ( env , StMem , tvRef , cns ( env , TUninit ) ) ; <nl> void baseGImpl ( IRGS & env , SSATmp * name , MOpMode mode ) { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - / * <nl> - * Punt if the given base type isn ' t known to be boxed or unboxed . <nl> - * / <nl> - void puntGenBase ( Type baseType ) { <nl> - if ( baseType . maybe ( TCell ) & & baseType . maybe ( TBoxedCell ) ) { <nl> - PUNT ( MInstr - GenBase ) ; <nl> - } <nl> - } <nl> - <nl> / * <nl> * Update FrameState for a base at a known location . <nl> * / <nl> void simpleBaseImpl ( IRGS & env , SSATmp * base , MOpMode mode , Location l ) { <nl> - puntGenBase ( base - > type ( ) ) ; <nl> - <nl> - auto const predicted = base - > isA ( TBoxedCell ) <nl> - ? folly : : make_optional ( env . irb - > fs ( ) . predictedTypeOf ( l ) ) <nl> - : folly : : none ; <nl> - env . irb - > fs ( ) . setMemberBase ( base , predicted ) ; <nl> + env . irb - > fs ( ) . setMemberBase ( base ) ; <nl> <nl> setEmptyMIPropState ( env , base , mode ) ; <nl> } <nl> void simpleBaseImpl ( IRGS & env , SSATmp * base , MOpMode mode , Location l ) { <nl> * / <nl> SSATmp * extractBase ( IRGS & env ) { <nl> auto const & mbase = env . irb - > fs ( ) . mbase ( ) ; <nl> - puntGenBase ( mbase . type ) ; <nl> <nl> env . irb - > constrainLocation ( Location : : MBase { } , DataTypeSpecific ) ; <nl> <nl> SSATmp * extractBase ( IRGS & env ) { <nl> <nl> env . irb - > constrainValue ( base , DataTypeSpecific ) ; <nl> <nl> - if ( base - > isA ( TBoxedCell ) ) { <nl> - auto const innerTy = env . irb - > predictedMBaseInnerType ( ) ; <nl> - gen ( env , CheckRefInner , innerTy , makeExit ( env ) , base ) ; <nl> - <nl> - auto const inner = gen ( env , LdRef , innerTy , base ) ; <nl> - env . irb - > constrainValue ( inner , DataTypeSpecific ) ; <nl> - return inner ; <nl> - } <nl> - <nl> return base ; <nl> } <nl> <nl> SSATmp * extractBase ( IRGS & env ) { <nl> * / <nl> void constrainBase ( IRGS & env ) { extractBase ( env ) ; } <nl> <nl> - / * <nl> - * Type of extractBase ( ) . <nl> - * <nl> - * Used to determine whether to actually unpack the member base ( and thus <nl> - * constrain types ) for a given minstr implementation . <nl> - * / <nl> - Type predictedBaseType ( const IRGS & env ) { <nl> - auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> - <nl> - return baseType < = TBoxedCell <nl> - ? env . irb - > predictedMBaseInnerType ( ) <nl> - : baseType ; <nl> - } <nl> - <nl> / * <nl> * Return the extracted object base if the predicted type is TObj , else just <nl> * return the base pointer . <nl> * / <nl> SSATmp * extractBaseIfObj ( IRGS & env ) { <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> return baseType < = TObj ? extractBase ( env ) : ldMBase ( env ) ; <nl> } <nl> <nl> SSATmp * propGenericImpl ( IRGS & env , MOpMode mode , SSATmp * base , SSATmp * key , <nl> } <nl> <nl> SSATmp * propImpl ( IRGS & env , MOpMode mode , SSATmp * key , bool nullsafe ) { <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> <nl> if ( mode = = MOpMode : : Unset & & ! baseType . 
maybe ( TObj ) ) { <nl> constrainBase ( env ) ; <nl> SSATmp * elemImpl ( IRGS & env , MOpMode mode , SSATmp * key ) { <nl> auto const unset = mode = = MOpMode : : Unset ; <nl> auto const define = mode = = MOpMode : : Define ; <nl> <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> <nl> assertx ( ! define | | ! unset ) ; <nl> assertx ( ! define | | ! warn ) ; <nl> SSATmp * cGetPropImpl ( IRGS & env , SSATmp * base , SSATmp * key , <nl> auto propAddr = <nl> emitPropSpecialized ( env , base , key , nullsafe , mode , propInfo ) . first ; <nl> auto const ty = propAddr - > type ( ) . deref ( ) ; <nl> - auto const cellPtr = <nl> - ty . maybe ( TBoxedCell ) ? gen ( env , UnboxPtr , propAddr ) : propAddr ; <nl> - auto const result = gen ( env , LdMem , ty . unbox ( ) , cellPtr ) ; <nl> + auto const result = gen ( env , LdMem , ty , propAddr ) ; <nl> auto const profres = profiledType ( env , result , [ & ] { <nl> gen ( env , IncRef , result ) ; <nl> finish ( result ) ; <nl> SSATmp * setPropImpl ( IRGS & env , uint32_t nDiscard , SSATmp * key ) { <nl> ) ; <nl> <nl> auto propTy = propPtr - > type ( ) . deref ( ) ; <nl> - if ( propTy . maybe ( TBoxedCell ) ) { <nl> - propTy = propTy . unbox ( ) ; <nl> - propPtr = gen ( env , UnboxPtr , propPtr ) ; <nl> - } <nl> <nl> env . irb - > constrainValue ( value , DataTypeBoxAndCountness ) ; <nl> auto const oldVal = gen ( env , LdMem , propTy , propPtr ) ; <nl> SSATmp * emitArrayLikeSet ( IRGS & env , SSATmp * key , SSATmp * value ) { <nl> / / don ' t go down this path for pseudomains . <nl> if ( curFunc ( env ) - > isPseudoMain ( ) ) return nullptr ; <nl> <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> auto const base = extractBase ( env ) ; <nl> assertx ( baseType < = TArrLike ) ; <nl> <nl> SSATmp * emitArrayLikeSet ( IRGS & env , SSATmp * key , SSATmp * value ) { <nl> } ( ) ; <nl> if ( ! baseLoc ) return nullptr ; <nl> <nl> - / / base may be from inside a RefData inside a stack / local , so to determine <nl> - / / setRef we must check the actual value of the stack / local . <nl> - auto const rawBaseType = provenType ( env , * baseLoc ) ; <nl> - auto const setRef = rawBaseType < = TBoxedCell ; <nl> - <nl> - if ( setRef ) { <nl> - auto const box = [ & ] { <nl> - switch ( baseLoc - > tag ( ) ) { <nl> - case LTag : : Local : <nl> - return ldLoc ( env , baseLoc - > localId ( ) , nullptr , DataTypeSpecific ) ; <nl> - case LTag : : Stack : <nl> - return top ( env , offsetFromBCSP ( env , baseLoc - > stackIdx ( ) ) ) ; <nl> - case LTag : : MBase : <nl> - always_assert ( false ) ; <nl> - } <nl> - not_reached ( ) ; <nl> - } ( ) ; <nl> - gen ( env , <nl> - isVec ? VecSetRef : isDict ? DictSetRef : ArraySetRef , <nl> - base , key , value , box ) ; <nl> - <nl> - / / Unlike the non - ref case , we don ' t need to do anything to the stack / local <nl> - / / because any load of the box will be guarded . <nl> - return value ; <nl> - } <nl> - <nl> auto const newArr = gen ( env , <nl> isVec ? VecSet : isDict ? DictSet : ArraySet , <nl> base , key , value ) ; <nl> SSATmp * emitArrayLikeSet ( IRGS & env , SSATmp * key , SSATmp * value ) { <nl> / / Update the base ' s location with the new array . <nl> switch ( baseLoc - > tag ( ) ) { <nl> case LTag : : Local : <nl> - / / We know it ' s not boxed ( setRef above handles that ) , and the helper has <nl> - / / already decref ' d the old array and incref ' d newArr . 
<nl> gen ( env , StLoc , LocalId { baseLoc - > localId ( ) } , fp ( env ) , newArr ) ; <nl> break ; <nl> case LTag : : Stack : <nl> void setNewElemPackedArrayDataImpl ( IRGS & env , uint32_t nDiscard , <nl> SSATmp * setNewElemImpl ( IRGS & env , uint32_t nDiscard ) { <nl> auto const value = topC ( env ) ; <nl> <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> <nl> / / We load the member base pointer before calling makeCatchSet ( ) to avoid <nl> / / mismatched in - states for any catch block edges we emit later on . <nl> SSATmp * setNewElemImpl ( IRGS & env , uint32_t nDiscard ) { <nl> SSATmp * setElemImpl ( IRGS & env , uint32_t nDiscard , SSATmp * key ) { <nl> auto value = topC ( env , BCSPRelOffset { 0 } , DataTypeGeneric ) ; <nl> <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> auto const simpleOp = simpleCollectionOp ( baseType , key - > type ( ) , false , false ) ; <nl> <nl> if ( auto gc = simpleOpConstraint ( simpleOp ) ) { <nl> SSATmp * memberKey ( IRGS & env , MemberKey mk ) { <nl> case MW : <nl> return nullptr ; <nl> case MEL : case MPL : <nl> - return ldLocInnerWarn ( env , mk . iva , makeExit ( env ) , <nl> - makePseudoMainExit ( env ) , DataTypeSpecific ) ; <nl> + return ldLocWarn ( env , mk . iva , makePseudoMainExit ( env ) , DataTypeSpecific ) ; <nl> case MEC : case MPC : <nl> return topC ( env , BCSPRelOffset { int32_t ( mk . iva ) } ) ; <nl> case MEI : <nl> void emitBaseGC ( IRGS & env , uint32_t idx , MOpMode mode ) { <nl> <nl> void emitBaseGL ( IRGS & env , int32_t locId , MOpMode mode ) { <nl> initTvRefs ( env ) ; <nl> - auto name = ldLocInner ( env , locId , makeExit ( env ) , makePseudoMainExit ( env ) , <nl> - DataTypeSpecific ) ; <nl> + auto name = ldLoc ( env , locId , makePseudoMainExit ( env ) , DataTypeSpecific ) ; <nl> baseGImpl ( env , name , mode ) ; <nl> } <nl> <nl> void emitBaseL ( IRGS & env , int32_t locId , MOpMode mode ) { <nl> <nl> auto base = ldLoc ( env , locId , makePseudoMainExit ( env ) , DataTypeGeneric ) ; <nl> <nl> + if ( ! base - > type ( ) . isKnownDataType ( ) ) PUNT ( unknown - BaseL ) ; <nl> + <nl> if ( base - > isA ( TUninit ) & & mode = = MOpMode : : Warn ) { <nl> env . irb - > constrainLocal ( locId , DataTypeSpecific , <nl> " emitBaseL : Uninit base local " ) ; <nl> void emitDim ( IRGS & env , MOpMode mode , MemberKey mk ) { <nl> void emitQueryM ( IRGS & env , uint32_t nDiscard , QueryMOp query , MemberKey mk ) { <nl> if ( mk . mcode = = MW ) PUNT ( QueryNewElem ) ; <nl> <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> if ( baseType < = TClsMeth ) { <nl> PUNT ( QueryM_is_ClsMeth ) ; <nl> } <nl> void emitQueryM ( IRGS & env , uint32_t nDiscard , QueryMOp query , MemberKey mk ) { <nl> } <nl> <nl> void emitSetM ( IRGS & env , uint32_t nDiscard , MemberKey mk ) { <nl> - auto const baseType = predictedBaseType ( env ) ; <nl> + auto const baseType = env . irb - > fs ( ) . mbase ( ) . type ; <nl> if ( baseType < = TClsMeth ) { <nl> PUNT ( SetM_is_ClsMeth ) ; <nl> } <nl> SSATmp * setOpPropImpl ( IRGS & env , SetOpOp op , SSATmp * base , <nl> ) ; <nl> assertx ( obj ! = nullptr ) ; <nl> <nl> - propPtr = gen ( env , UnboxPtr , propPtr ) ; <nl> - <nl> auto const lhs = gen ( env , LdMem , propPtr - > type ( ) . 
deref ( ) , propPtr ) ; <nl> if ( auto const result = inlineSetOp ( env , op , lhs , rhs ) ) { <nl> verifyPropType ( <nl> mmm a / hphp / runtime / vm / jit / irgen - minstr . h <nl> ppp b / hphp / runtime / vm / jit / irgen - minstr . h <nl> SSATmp * profiledArrayAccess ( IRGS & env , SSATmp * arr , SSATmp * key , <nl> * / <nl> template < class Finish > <nl> SSATmp * profiledType ( IRGS & env , SSATmp * tmp , Finish finish ) { <nl> - if ( tmp - > type ( ) < = TGen & & tmp - > type ( ) . isKnownDataType ( ) ) { <nl> + if ( tmp - > type ( ) < = TCell & & tmp - > type ( ) . isKnownDataType ( ) ) { <nl> return tmp ; <nl> } <nl> <nl> SSATmp * profiledType ( IRGS & env , SSATmp * tmp , Finish finish ) { <nl> <nl> Type typeToCheck = relaxToGuardable ( reducedType ) ; <nl> <nl> - if ( typeToCheck = = TGen ) return tmp ; <nl> + if ( typeToCheck = = TCell ) return tmp ; <nl> <nl> SSATmp * ptmp { nullptr } ; <nl> <nl> mmm a / hphp / runtime / vm / jit / irgen - sprop - global . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - sprop - global . cpp <nl> ClsPropLookup ldClsPropAddrKnown ( IRGS & env , <nl> auto const ctx = curClass ( env ) ; <nl> auto const & prop = cls - > staticProperties ( ) [ slot ] ; <nl> <nl> - auto knownType = TGen ; <nl> + auto knownType = TCell ; <nl> if ( RuntimeOption : : EvalCheckPropTypeHints > = 3 ) { <nl> knownType = typeFromPropTC ( prop . typeConstraint , cls , ctx , true ) ; <nl> if ( ! ( prop . attrs & AttrNoImplicitNullable ) ) knownType | = TInitNull ; <nl> void emitCGetS ( IRGS & env ) { <nl> <nl> auto const propAddr = <nl> ldClsPropAddr ( env , ssaCls , ssaPropName , true , false , false ) . propPtr ; <nl> - auto const unboxed = gen ( env , UnboxPtr , propAddr ) ; <nl> - auto const ldMem = gen ( env , LdMem , unboxed - > type ( ) . deref ( ) , unboxed ) ; <nl> + auto const ldMem = gen ( env , LdMem , propAddr - > type ( ) . deref ( ) , propAddr ) ; <nl> <nl> discard ( env ) ; <nl> destroyName ( env , ssaPropName ) ; <nl> void emitSetS ( IRGS & env ) { <nl> gen ( env , VerifyProp , ssaCls , slot , value , cns ( env , true ) ) ; <nl> } <nl> <nl> - auto const ptr = gen ( env , UnboxPtr , lookup . propPtr ) ; <nl> - <nl> discard ( env ) ; <nl> destroyName ( env , ssaPropName ) ; <nl> - bindMem ( env , ptr , value ) ; <nl> + bindMem ( env , lookup . propPtr , value ) ; <nl> } <nl> <nl> void emitIssetS ( IRGS & env ) { <nl> void emitIssetS ( IRGS & env ) { <nl> return gen ( env , CheckNonNull , taken , propAddr ) ; <nl> } , <nl> [ & ] ( SSATmp * ptr ) { / / Next : property or global exists <nl> - return gen ( env , IsNTypeMem , TNull , gen ( env , UnboxPtr , ptr ) ) ; <nl> + return gen ( env , IsNTypeMem , TNull , ptr ) ; <nl> } , <nl> [ & ] { / / Taken : LdClsPropAddr * returned Nullptr because it isn ' t defined <nl> return cns ( env , false ) ; <nl> void emitEmptyS ( IRGS & env ) { <nl> return gen ( env , CheckNonNull , taken , propAddr ) ; <nl> } , <nl> [ & ] ( SSATmp * ptr ) { <nl> - auto const unbox = gen ( env , UnboxPtr , ptr ) ; <nl> - auto const val = gen ( env , LdMem , unbox - > type ( ) . deref ( ) , unbox ) ; <nl> + auto const val = gen ( env , LdMem , ptr - > type ( ) . 
deref ( ) , ptr ) ; <nl> return gen ( env , XorBool , gen ( env , ConvCellToBool , val ) , cns ( env , true ) ) ; <nl> } , <nl> [ & ] { / / Taken : LdClsPropAddr * returned Nullptr because it isn ' t defined <nl> void emitIncDecS ( IRGS & env , IncDecOp subop ) { <nl> <nl> auto const lookup = <nl> ldClsPropAddr ( env , ssaCls , ssaPropName , true , false , true ) ; <nl> - auto const unboxed = gen ( env , UnboxPtr , lookup . propPtr ) ; <nl> - auto const oldVal = gen ( env , LdMem , unboxed - > type ( ) . deref ( ) , unboxed ) ; <nl> + auto const oldVal = <nl> + gen ( env , LdMem , lookup . propPtr - > type ( ) . deref ( ) , lookup . propPtr ) ; <nl> <nl> auto const result = incDec ( env , subop , oldVal ) ; <nl> if ( ! result ) PUNT ( IncDecS ) ; <nl> void emitIncDecS ( IRGS & env , IncDecOp subop ) { <nl> / / Update marker to ensure newly - pushed value isn ' t clobbered by DecRef . <nl> updateMarker ( env ) ; <nl> <nl> - gen ( env , StMem , unboxed , result ) ; <nl> + gen ( env , StMem , lookup . propPtr , result ) ; <nl> gen ( env , IncRef , result ) ; <nl> decRef ( env , oldVal ) ; <nl> } <nl> void emitCGetG ( IRGS & env ) { <nl> env , <nl> [ & ] ( Block * taken ) { return gen ( env , LdGblAddr , taken , name ) ; } , <nl> [ & ] ( SSATmp * ptr ) { <nl> - auto tmp = gen ( env , LdMem , TCell , gen ( env , UnboxPtr , ptr ) ) ; <nl> + auto tmp = gen ( env , LdMem , TCell , ptr ) ; <nl> gen ( env , IncRef , tmp ) ; <nl> return tmp ; <nl> } , <nl> void emitSetG ( IRGS & env ) { <nl> auto const name = topC ( env , BCSPRelOffset { 1 } ) ; <nl> if ( ! name - > isA ( TStr ) ) PUNT ( SetG - NameNotStr ) ; <nl> auto const value = popC ( env , DataTypeCountness ) ; <nl> - auto const unboxed = gen ( env , UnboxPtr , gen ( env , LdGblAddrDef , name ) ) ; <nl> + auto const ptr = gen ( env , LdGblAddrDef , name ) ; <nl> destroyName ( env , name ) ; <nl> - bindMem ( env , unboxed , value ) ; <nl> + bindMem ( env , ptr , value ) ; <nl> } <nl> <nl> void emitIssetG ( IRGS & env ) { <nl> void emitIssetG ( IRGS & env ) { <nl> return gen ( env , LdGblAddr , taken , name ) ; <nl> } , <nl> [ & ] ( SSATmp * ptr ) { / / Next : global exists <nl> - return gen ( env , IsNTypeMem , TNull , gen ( env , UnboxPtr , ptr ) ) ; <nl> + return gen ( env , IsNTypeMem , TNull , ptr ) ; <nl> } , <nl> [ & ] { / / Taken : global doesn ' t exist <nl> return cns ( env , false ) ; <nl> void emitEmptyG ( IRGS & env ) { <nl> return gen ( env , LdGblAddr , taken , name ) ; <nl> } , <nl> [ & ] ( SSATmp * ptr ) { / / Next : global exists <nl> - auto const unboxed = gen ( env , UnboxPtr , ptr ) ; <nl> - auto const val = gen ( env , LdMem , TCell , unboxed ) ; <nl> + auto const val = gen ( env , LdMem , TCell , ptr ) ; <nl> return gen ( env , XorBool , gen ( env , ConvCellToBool , val ) , cns ( env , true ) ) ; <nl> } , <nl> [ & ] { / / Taken : global doesn ' t exist <nl> mmm a / hphp / runtime / vm / jit / irgen - state . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - state . cpp <nl> std : : string show ( const IRGS & irgs ) { <nl> auto const stkVal = irgs . irb - > stack ( spRel , DataTypeGeneric ) . value ; <nl> <nl> std : : string elemStr ; <nl> - if ( stkTy = = TGen ) { <nl> + if ( stkTy = = TCell ) { <nl> elemStr = " unknown " ; <nl> } else if ( stkVal ) { <nl> elemStr = stkVal - > inst ( ) - > toString ( ) ; <nl> std : : string show ( const IRGS & irgs ) { <nl> auto const predicted = irgs . irb - > fs ( ) . local ( i ) . 
predictedType ; <nl> if ( predicted < localTy ) str + = folly : : sformat ( " ( predict : { } ) " , predicted ) ; <nl> <nl> - if ( localTy < = TBoxedCell ) { <nl> - auto const pred = irgs . irb - > predictedLocalInnerType ( i ) ; <nl> - if ( pred ! = TBottom ) { <nl> - str + = folly : : sformat ( " ( predict inner : { } ) " , pred . toString ( ) ) ; <nl> - } <nl> - } <nl> - <nl> out < < folly : : format ( " | { : < 100 } | \ n " , <nl> folly : : format ( " { : > 2 } : { } " , i , str ) ) ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / irgen - types . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - types . cpp <nl> SSATmp * implInstanceCheck ( IRGS & env , SSATmp * src , const StringData * className , <nl> * The lambda parameters are as follows : <nl> * <nl> * - GetVal : Return the SSATmp of the value to test <nl> - * - PredInner : When the value is a BoxedInitCell , return the predicted inner <nl> - * type of the value . <nl> * - FuncToStr : Emit code to deal with any func to string conversions . <nl> * - ClsMethToVec : Emit code to deal with any ClsMeth to array conversions <nl> * - Fail : Emit code to deal with the type check failing . <nl> SSATmp * implInstanceCheck ( IRGS & env , SSATmp * src , const StringData * className , <nl> * runtime class of the object the property belongs to . <nl> * / <nl> template < typename GetVal , <nl> - typename PredInner , <nl> typename FuncToStr , <nl> typename ClassToStr , <nl> typename ClsMethToVec , <nl> void verifyTypeImpl ( IRGS & env , <nl> bool onlyCheckNullability , <nl> SSATmp * propCls , <nl> GetVal getVal , <nl> - PredInner predInner , <nl> FuncToStr funcToStr , <nl> ClassToStr classToStr , <nl> ClsMethToVec clsMethToVec , <nl> void verifyTypeImpl ( IRGS & env , <nl> } <nl> <nl> auto val = getVal ( ) ; <nl> - assertx ( val - > type ( ) < = TCell | | val - > type ( ) < = TBoxedCell ) ; <nl> - <nl> - auto const valType = [ & ] ( ) - > Type { <nl> - if ( val - > type ( ) < = TCell ) return val - > type ( ) ; <nl> - auto const pred = predInner ( val ) ; <nl> - gen ( env , CheckRefInner , pred , makeExit ( env ) , val ) ; <nl> - val = gen ( env , LdRef , pred , val ) ; <nl> - return pred ; <nl> - } ( ) ; <nl> + assertx ( val - > type ( ) < = TCell ) ; <nl> + <nl> + auto const valType = val - > type ( ) ; <nl> <nl> if ( ! valType . isKnownDataType ( ) ) return giveup ( ) ; <nl> <nl> void verifyRetTypeImpl ( IRGS & env , int32_t id , int32_t ind , <nl> [ & ] { / / Get value to test <nl> return topC ( env , BCSPRelOffset { ind } ) ; <nl> } , <nl> - [ ] ( SSATmp * ) - > Type { / / Get boxed inner value <nl> - PUNT ( VerifyReturnTypeBoxed ) ; <nl> - } , <nl> [ & ] ( SSATmp * val ) { / / func to string conversions <nl> auto const str = gen ( env , LdFuncName , val ) ; <nl> auto const offset = offsetFromIRSP ( env , BCSPRelOffset { ind } ) ; <nl> void verifyParamTypeImpl ( IRGS & env , int32_t id ) { <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> return ldLoc ( env , id , ldPMExit , DataTypeSpecific ) ; <nl> } , <nl> - [ & ] ( SSATmp * val ) { / / Get boxed inner type <nl> - return env . irb - > predictedLocalInnerType ( id ) ; <nl> - } , <nl> [ & ] ( SSATmp * val ) { / / func to string conversions <nl> auto const str = gen ( env , LdFuncName , val ) ; <nl> stLocRaw ( env , id , fp ( env ) , str ) ; <nl> void verifyPropType ( IRGS & env , <nl> env . 
irb - > constrainValue ( val , DataTypeSpecific ) ; <nl> return val ; <nl> } , <nl> - [ & ] ( SSATmp * ) - > Type { / / Get boxed inner type <nl> - / / We ' ve already asserted that the value is a Cell . <nl> - always_assert ( false ) ; <nl> - } , <nl> [ & ] ( SSATmp * ) { return false ; } , / / No func to string automatic conversions <nl> [ & ] ( SSATmp * ) { return false ; } , / / No class to string automatic conversions <nl> [ & ] ( SSATmp * ) { return false ; } , / / No clsmeth to vec automatic conversions <nl> void emitOODeclExists ( IRGS & env , OODeclExistsOp subop ) { <nl> } <nl> <nl> void emitIssetL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const ld = ldLocInner ( env , id , ldrefExit , ldPMExit , DataTypeSpecific ) ; <nl> + auto const ld = ldLoc ( env , id , ldPMExit , DataTypeSpecific ) ; <nl> if ( ld - > isA ( TClsMeth ) ) { <nl> PUNT ( IssetL_is_ClsMeth ) ; <nl> } <nl> void emitIssetL ( IRGS & env , int32_t id ) { <nl> } <nl> <nl> void emitEmptyL ( IRGS & env , int32_t id ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const ld = ldLocInner ( env , id , ldrefExit , ldPMExit , DataTypeSpecific ) ; <nl> + auto const ld = ldLoc ( env , id , ldPMExit , DataTypeSpecific ) ; <nl> if ( ld - > isA ( TClsMeth ) ) { <nl> PUNT ( EmptyL_is_ClsMeth ) ; <nl> } <nl> void emitIsTypeC ( IRGS & env , IsTypeOp subop ) { <nl> } <nl> <nl> void emitIsTypeL ( IRGS & env , int32_t id , IsTypeOp subop ) { <nl> - auto const ldrefExit = makeExit ( env ) ; <nl> auto const ldPMExit = makePseudoMainExit ( env ) ; <nl> - auto const val = <nl> - ldLocInnerWarn ( env , id , ldrefExit , ldPMExit , DataTypeSpecific ) ; <nl> + auto const val = ldLocWarn ( env , id , ldPMExit , DataTypeSpecific ) ; <nl> <nl> if ( subop = = IsTypeOp : : VArray | | subop = = IsTypeOp : : DArray ) { <nl> push ( env , isDVArrayImpl ( env , val , subop ) ) ; <nl> deleted file mode 100644 <nl> index b9b670c8bdb . . 00000000000 <nl> mmm a / hphp / runtime / vm / jit / irlower - box . cpp <nl> ppp / dev / null <nl> <nl> - / * <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | HipHop for PHP | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | Copyright ( c ) 2010 - present Facebook , Inc . ( http : / / www . facebook . com ) | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | This source file is subject to version 3 . 01 of the PHP license , | <nl> - | that is bundled with this package in the file LICENSE , and is | <nl> - | available through the world - wide - web at the following url : | <nl> - | http : / / www . php . net / license / 3_01 . txt | <nl> - | If you did not receive a copy of the PHP license and are unable to | <nl> - | obtain it through the world - wide - web , please send a note to | <nl> - | license @ php . net so we can mail you a copy immediately . | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - * / <nl> - <nl> - # include " hphp / runtime / vm / jit / irlower - internal . h " <nl> - <nl> - # include " hphp / runtime / base / ref - data . h " <nl> - # include " hphp / runtime / base / tv - mutate . h " <nl> - # include " hphp / runtime / base / tv - variant . h " <nl> - <nl> - # include " hphp / runtime / vm / jit / arg - group . 
h " <nl> - # include " hphp / runtime / vm / jit / call - spec . h " <nl> - # include " hphp / runtime / vm / jit / code - gen - cf . h " <nl> - # include " hphp / runtime / vm / jit / extra - data . h " <nl> - # include " hphp / runtime / vm / jit / ir - instruction . h " <nl> - # include " hphp / runtime / vm / jit / ir - opcode . h " <nl> - # include " hphp / runtime / vm / jit / ssa - tmp . h " <nl> - # include " hphp / runtime / vm / jit / translator - inline . h " <nl> - # include " hphp / runtime / vm / jit / type . h " <nl> - # include " hphp / runtime / vm / jit / types . h " <nl> - # include " hphp / runtime / vm / jit / vasm - gen . h " <nl> - # include " hphp / runtime / vm / jit / vasm - instr . h " <nl> - # include " hphp / runtime / vm / jit / vasm - reg . h " <nl> - <nl> - # include " hphp / util / trace . h " <nl> - <nl> - namespace HPHP { namespace jit { namespace irlower { <nl> - <nl> - TRACE_SET_MOD ( irlower ) ; <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - TypedValue * tvBoxHelper ( TypedValue * tv ) { tvBox ( tv ) ; return tv ; } <nl> - tv_lval tvBoxHelper ( tv_lval tv ) { tvBox ( tv ) ; return tv ; } <nl> - <nl> - void cgBoxPtr ( IRLS & env , const IRInstruction * inst ) { <nl> - auto const src = inst - > src ( 0 ) ; <nl> - auto const srcLoc = irlower : : srcLoc ( env , inst , 0 ) ; <nl> - auto const dst = dstLoc ( env , inst , 0 ) . reg ( ) ; <nl> - auto & v = vmain ( env ) ; <nl> - <nl> - emitTypeTest ( <nl> - v , env , TBoxedCell , <nl> - memTVTypePtr ( src , srcLoc ) , memTVValPtr ( src , srcLoc ) , v . makeReg ( ) , <nl> - [ & ] ( ConditionCode cc , Vreg sf ) { <nl> - cond ( v , cc , sf , dst , [ & ] ( Vout & / * v * / ) { return srcLoc . reg ( ) ; } , <nl> - [ & ] ( Vout & v ) { <nl> - auto const args = argGroup ( env , inst ) . ssa ( 0 / * addr * / ) ; <nl> - auto const ret = v . makeReg ( ) ; <nl> - auto const target = [ & ] { <nl> - if ( src - > isA ( TPtrToGen ) ) { <nl> - TypedValue * ( * f ) ( TypedValue * ) = tvBoxHelper ; <nl> - return CallSpec : : direct ( f ) ; <nl> - } <nl> - tv_lval ( * f ) ( tv_lval ) = tvBoxHelper ; <nl> - return CallSpec : : direct ( f ) ; <nl> - } ( ) ; <nl> - cgCallHelper ( v , env , target , <nl> - callDest ( ret ) , SyncOptions : : None , args ) ; <nl> - return ret ; <nl> - } ) ; <nl> - } ) ; <nl> - } <nl> - <nl> - void cgUnboxPtr ( IRLS & env , const IRInstruction * inst ) { <nl> - auto const src = inst - > src ( 0 ) ; <nl> - auto const dst = inst - > dst ( ) ; <nl> - auto const wide = wide_tv_val & & dst - > isA ( TLvalToGen ) ; <nl> - assertx ( ( src - > isA ( TPtrToGen ) & & dst - > isA ( TPtrToGen ) ) | | <nl> - ( src - > isA ( TLvalToGen ) & & dst - > isA ( TLvalToGen ) ) ) ; <nl> - <nl> - auto const srcLoc = irlower : : srcLoc ( env , inst , 0 ) ; <nl> - auto const dstLoc = irlower : : dstLoc ( env , inst , 0 ) ; <nl> - auto const valIdx = wide ? tv_lval : : val_idx : 0 ; <nl> - auto & v = vmain ( env ) ; <nl> - <nl> - auto const sf = v . makeReg ( ) ; <nl> - auto const type_ptr = memTVTypePtr ( src , srcLoc ) ; <nl> - emitCmpTVType ( v , sf , KindOfRef , type_ptr ) ; <nl> - <nl> - auto const val_ptr = memTVValPtr ( src , srcLoc ) ; <nl> - if ( RefData : : cellOffset ( ) = = 0 ) { <nl> - v < < cloadq { CC_E , sf , srcLoc . reg ( valIdx ) , val_ptr , dstLoc . 
reg ( valIdx ) } ; <nl> - if ( wide ) { <nl> - static_assert ( TVOFF ( m_data ) = = 0 , " " ) ; <nl> - auto const ref_type = v . makeReg ( ) ; <nl> - v < < lea { dstLoc . reg ( valIdx ) [ TVOFF ( m_type ) ] , ref_type } ; <nl> - v < < cmovq { CC_E , sf , srcLoc . reg ( tv_lval : : type_idx ) , <nl> - ref_type , dstLoc . reg ( tv_lval : : type_idx ) } ; <nl> - } <nl> - return ; <nl> - } <nl> - <nl> - auto const ref_ptr = v . makeReg ( ) ; <nl> - auto const cell_ptr = v . makeReg ( ) ; <nl> - v < < load { val_ptr , ref_ptr } ; <nl> - v < < lea { ref_ptr [ RefData : : cellOffset ( ) ] , cell_ptr } ; <nl> - v < < cmovq { CC_E , sf , srcLoc . reg ( valIdx ) , cell_ptr , dstLoc . reg ( valIdx ) } ; <nl> - if ( wide ) { <nl> - static_assert ( TVOFF ( m_data ) = = 0 , " " ) ; <nl> - auto const ref_type = v . makeReg ( ) ; <nl> - v < < lea { cell_ptr [ TVOFF ( m_type ) ] , ref_type } ; <nl> - v < < cmovq { CC_E , sf , srcLoc . reg ( tv_lval : : type_idx ) , <nl> - ref_type , dstLoc . reg ( tv_lval : : type_idx ) } ; <nl> - } <nl> - } <nl> - <nl> - IMPL_OPCODE_CALL ( Box ) <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> - } } } <nl> mmm a / hphp / runtime / vm / jit / irlower - branch . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - branch . cpp <nl> void cgSelect ( IRLS & env , const IRInstruction * inst ) { <nl> } <nl> <nl> / / First copy the type if the destination needs one . This should only apply to <nl> - / / types < = TGen . <nl> + / / types < = TCell . <nl> if ( dloc . hasReg ( 1 ) ) { <nl> assertx ( trueTy . isKnownDataType ( ) | | tloc . hasReg ( 1 ) ) ; <nl> assertx ( falseTy . isKnownDataType ( ) | | floc . hasReg ( 1 ) ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - call . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - call . cpp <nl> void cgCallBuiltin ( IRLS & env , const IRInstruction * inst ) { <nl> / / req : : ptr types ( String , Array , Object ) need adjusting to point to <nl> / / & ptr - > m_data . <nl> if ( TVOFF ( m_data ) & & ! pi . nativeArg & & isReqPtrRef ( pi . builtinType ) ) { <nl> - assertx ( inst - > src ( srcNum ) - > type ( ) < = TPtrToGen ) ; <nl> + assertx ( inst - > src ( srcNum ) - > type ( ) < = TPtrToCell ) ; <nl> args . addr ( srcLoc ( env , inst , srcNum ) . reg ( ) , TVOFF ( m_data ) ) ; <nl> } else if ( pi . nativeArg & & ! pi . builtinType ) { <nl> / / This condition indicates a MixedTV ( i . e . , TypedValue - by - value ) arg . <nl> void cgCallBuiltin ( IRLS & env , const IRInstruction * inst ) { <nl> return end ( v ) ; <nl> } <nl> <nl> - if ( returnType < = TCell | | returnType < = TBoxedCell ) { <nl> + if ( returnType < = TCell ) { <nl> / / The return type is Variant ; fold KindOfUninit to KindOfNull . <nl> assertx ( isBuiltinByRef ( funcReturnType ) & & ! isReqPtrRef ( funcReturnType ) ) ; <nl> static_assert ( KindOfUninit = = static_cast < DataType > ( 0 ) , <nl> mmm a / hphp / runtime / vm / jit / irlower - internal - inl . h <nl> ppp b / hphp / runtime / vm / jit / irlower - internal - inl . h <nl> inline CallDest callDest ( IRLS & env , const IRInstruction * inst ) { <nl> <nl> auto const loc = dstLoc ( env , inst , 0 ) ; <nl> assertx ( loc . numAllocated ( ) = = 1 | | <nl> - ( inst - > dst ( ) - > isA ( TLvalToGen ) & & loc . numAllocated ( ) = = 2 ) ) ; <nl> + ( inst - > dst ( ) - > isA ( TLvalToCell ) & & loc . 
numAllocated ( ) = = 2 ) ) ; <nl> <nl> auto const dst = inst - > dst ( ) ; <nl> auto const kind = dst - > isA ( TBool ) ? DestType : : Byte : <nl> inline CallDest callDestTV ( IRLS & env , const IRInstruction * inst ) { <nl> <nl> if ( loc . isFullSIMD ( ) ) { <nl> assertx ( loc . numAllocated ( ) = = 1 ) ; <nl> - return { DestType : : SIMD , TGen , loc . reg ( 0 ) } ; <nl> + return { DestType : : SIMD , TCell , loc . reg ( 0 ) } ; <nl> } <nl> <nl> / / loc . reg ( 1 ) may be InvalidReg , if the type is statically known . This is <nl> / / expected and handled by users of CallDest . <nl> - return { DestType : : TV , TGen , loc . reg ( 0 ) , loc . reg ( 1 ) } ; <nl> + return { DestType : : TV , TCell , loc . reg ( 0 ) , loc . reg ( 1 ) } ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void emitTypeTest ( Vout & v , IRLS & env , Type type , <nl> ) ; <nl> <nl> / / Nothing to check . <nl> - if ( type = = TGen ) return ; <nl> + if ( type = = TCell ) return ; <nl> <nl> / / Profile the type being guarded . We skip TUncounted here because that ' s <nl> / / handled in emitIsTVTypeRefCounted , which has a number of other callers . <nl> void emitTypeTest ( Vout & v , IRLS & env , Type type , <nl> if ( type = = TCell ) return cmp ( KindOfRef , CC_NE ) ; <nl> <nl> always_assert ( type . isKnownDataType ( ) ) ; <nl> - always_assert ( ! ( type < TBoxedInitCell ) ) ; <nl> <nl> auto const dt = type . toDataType ( ) ; <nl> return cmp ( dt , CC_E ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - load - store . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - load - store . cpp <nl> void cgDbgTrashMem ( IRLS & env , const IRInstruction * inst ) { <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - void cgLdRef ( IRLS & env , const IRInstruction * inst ) { <nl> - auto const ptr = srcLoc ( env , inst , 0 ) . reg ( ) ; <nl> - loadTV ( vmain ( env ) , inst - > dst ( ) , dstLoc ( env , inst , 0 ) , <nl> - ptr [ RefData : : cellOffset ( ) ] ) ; <nl> - } <nl> - <nl> - void cgStRef ( IRLS & env , const IRInstruction * inst ) { <nl> - auto const ptr = srcLoc ( env , inst , 0 ) . reg ( ) ; <nl> - auto const valLoc = srcLoc ( env , inst , 1 ) ; <nl> - always_assert ( ! srcLoc ( env , inst , 1 ) . isFullSIMD ( ) ) ; <nl> - <nl> - storeTV ( vmain ( env ) , ptr [ RefData : : cellOffset ( ) ] , valLoc , inst - > src ( 1 ) ) ; <nl> - } <nl> - <nl> - / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - <nl> void cgLdElem ( IRLS & env , const IRInstruction * inst ) { <nl> auto const rbase = srcLoc ( env , inst , 0 ) . reg ( ) ; <nl> auto const ridx = srcLoc ( env , inst , 1 ) . reg ( ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - minstr . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - minstr . 
cpp <nl> void cgCheckMixedArrayKeys ( IRLS & env , const IRInstruction * inst ) { <nl> namespace { <nl> <nl> void implArraySet ( IRLS & env , const IRInstruction * inst ) { <nl> - auto const setRef = inst - > op ( ) = = ArraySetRef ; <nl> - BUILD_OPTAB2 ( setRef , <nl> - ARRAYSET_REF_HELPER_TABLE , <nl> - ARRAYSET_HELPER_TABLE , <nl> - getKeyType ( inst - > src ( 1 ) ) ) ; <nl> + BUILD_OPTAB ( ARRAYSET_HELPER_TABLE , getKeyType ( inst - > src ( 1 ) ) ) ; <nl> <nl> auto args = argGroup ( env , inst ) . ssa ( 0 ) . ssa ( 1 ) ; <nl> args . typedValue ( 2 ) ; <nl> - if ( setRef ) args . ssa ( 3 ) ; <nl> <nl> auto & v = vmain ( env ) ; <nl> cgCallHelper ( v , env , target , callDest ( env , inst ) , SyncOptions : : Sync , args ) ; <nl> void cgMixedArrayGetK ( IRLS & env , const IRInstruction * inst ) { <nl> } <nl> <nl> void cgArraySet ( IRLS & env , const IRInstruction * i ) { implArraySet ( env , i ) ; } <nl> - void cgArraySetRef ( IRLS & env , const IRInstruction * i ) { implArraySet ( env , i ) ; } <nl> <nl> IMPL_OPCODE_CALL ( SetNewElemArray ) ; <nl> <nl> LvalPtrs implPackedLayoutElemAddr ( IRLS & env , Vloc arrLoc , <nl> } <nl> <nl> void implVecSet ( IRLS & env , const IRInstruction * inst ) { <nl> - bool const setRef = inst - > op ( ) = = VecSetRef ; <nl> - <nl> - BUILD_OPTAB2 ( setRef , <nl> - VECSET_REF_HELPER_TABLE , <nl> - VECSET_HELPER_TABLE , <nl> - RuntimeOption : : EvalArrayProvenance ) ; <nl> + BUILD_OPTAB ( VECSET_HELPER_TABLE , RuntimeOption : : EvalArrayProvenance ) ; <nl> <nl> auto args = argGroup ( env , inst ) . <nl> ssa ( 0 ) . <nl> ssa ( 1 ) . <nl> typedValue ( 2 ) ; <nl> - if ( setRef ) args . ssa ( 3 ) ; <nl> <nl> auto & v = vmain ( env ) ; <nl> cgCallHelper ( v , env , target , callDest ( env , inst ) , SyncOptions : : Sync , args ) ; <nl> void cgElemVecD ( IRLS & env , const IRInstruction * inst ) { <nl> IMPL_OPCODE_CALL ( ElemVecU ) <nl> <nl> void cgVecSet ( IRLS & env , const IRInstruction * i ) { implVecSet ( env , i ) ; } <nl> - void cgVecSetRef ( IRLS & env , const IRInstruction * i ) { implVecSet ( env , i ) ; } <nl> <nl> void cgSetNewElemVec ( IRLS & env , const IRInstruction * inst ) { <nl> auto const target = RuntimeOption : : EvalArrayProvenance <nl> void implDictGet ( IRLS & env , const IRInstruction * inst ) { <nl> } <nl> <nl> void implDictSet ( IRLS & env , const IRInstruction * inst ) { <nl> - bool const setRef = inst - > op ( ) = = DictSetRef ; <nl> - BUILD_OPTAB2 ( setRef , <nl> - DICTSET_REF_HELPER_TABLE , <nl> - DICTSET_HELPER_TABLE , <nl> - getKeyType ( inst - > src ( 1 ) ) , <nl> - RuntimeOption : : EvalArrayProvenance ) ; <nl> + BUILD_OPTAB ( DICTSET_HELPER_TABLE , <nl> + getKeyType ( inst - > src ( 1 ) ) , <nl> + RuntimeOption : : EvalArrayProvenance ) ; <nl> <nl> auto args = argGroup ( env , inst ) . <nl> ssa ( 0 ) . <nl> ssa ( 1 ) . <nl> typedValue ( 2 ) ; <nl> - if ( setRef ) args . ssa ( 3 ) ; <nl> <nl> auto & v = vmain ( env ) ; <nl> cgCallHelper ( v , env , target , callDest ( env , inst ) , SyncOptions : : Sync , args ) ; <nl> void cgDictGetK ( IRLS & env , const IRInstruction * inst ) { <nl> } <nl> <nl> void cgDictSet ( IRLS & env , const IRInstruction * i ) { implDictSet ( env , i ) ; } <nl> - void cgDictSetRef ( IRLS & env , const IRInstruction * i ) { implDictSet ( env , i ) ; } <nl> <nl> IMPL_OPCODE_CALL ( DictAddElemIntKey ) ; <nl> IMPL_OPCODE_CALL ( DictAddElemStrKey ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - refcount . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - refcount . 
cpp <nl> void ifNonPersistent ( Vout & v , Vout & vtaken , Type ty , Vloc loc , Then then ) { <nl> template < class Then > <nl> void ifRefCountedType ( Vout & v , Vout & vtaken , Type ty , Vloc loc , Then then ) { <nl> if ( ! ty . maybe ( TCounted ) ) return ; <nl> - if ( ty < = TGen & & ty . isKnownDataType ( ) ) { <nl> + if ( ty < = TCell & & ty . isKnownDataType ( ) ) { <nl> if ( isRefcountedType ( ty . toDataType ( ) ) ) then ( v ) ; <nl> return ; <nl> } <nl> auto const sf = v . makeReg ( ) ; <nl> - assertx ( ty < = TGen ) ; <nl> + assertx ( ty < = TCell ) ; <nl> auto const cond = emitIsTVTypeRefCounted ( v , sf , loc . reg ( 1 ) ) ; <nl> unlikelyIfThen ( v , vtaken , cond , sf , then ) ; <nl> } <nl> void cgIncRef ( IRLS & env , const IRInstruction * inst ) { <nl> if ( data . total > 0 ) { <nl> if ( data . percent ( data . refcounted ) < <nl> RuntimeOption : : EvalJitPGOUnlikelyIncRefCountedPercent <nl> - & & ! ( ty < = TGen & & ty . isKnownDataType ( ) ) ) { <nl> + & & ! ( ty < = TCell & & ty . isKnownDataType ( ) ) ) { <nl> unlikelyCounted = true ; <nl> FTRACE ( 3 , " irlower - inc - dec : Emitting cold counted check for { } , { } \ n " , <nl> data , * inst ) ; <nl> void cgDecRefNZ ( IRLS & env , const IRInstruction * inst ) { <nl> if ( data . total > 0 ) { <nl> if ( data . percent ( data . refcounted ) < <nl> RuntimeOption : : EvalJitPGOUnlikelyDecRefCountedPercent <nl> - & & ! ( ty < = TGen & & ty . isKnownDataType ( ) ) ) { <nl> + & & ! ( ty < = TCell & & ty . isKnownDataType ( ) ) ) { <nl> unlikelyCounted = true ; <nl> FTRACE ( 3 , " irlower - inc - dec : Emitting cold counted check for { } , { } \ n " , <nl> data , * inst ) ; <nl> mmm a / hphp / runtime / vm / jit / irlower - type . cpp <nl> ppp b / hphp / runtime / vm / jit / irlower - type . cpp <nl> void cgCheckType ( IRLS & env , const IRInstruction * inst ) { <nl> return ; <nl> } <nl> <nl> - if ( src - > type ( ) < = TBoxedCell & & typeParam < = TBoxedCell ) { <nl> - / / We should never have specific known Boxed types ; those should only be <nl> - / / used for hints and predictions . <nl> - always_assert ( ! ( typeParam < TBoxedInitCell ) ) ; <nl> - doMov ( ) ; <nl> - return ; <nl> - } <nl> - <nl> / * <nl> * See if we ' re just checking the array kind or object class of a value with <nl> * a mostly - known type . <nl> void cgCheckStk ( IRLS & env , const IRInstruction * inst ) { <nl> base + TVOFF ( m_type ) , base + TVOFF ( m_data ) , inst - > taken ( ) ) ; <nl> } <nl> <nl> - void cgCheckRefInner ( IRLS & env , const IRInstruction * inst ) { <nl> - if ( inst - > typeParam ( ) > = TInitCell ) return ; <nl> - auto const base = srcLoc ( env , inst , 0 ) . reg ( ) [ RefData : : cellOffset ( ) ] ; <nl> - <nl> - emitTypeCheck ( vmain ( env ) , env , inst - > typeParam ( ) , <nl> - base + TVOFF ( m_type ) , base + TVOFF ( m_data ) , inst - > taken ( ) ) ; <nl> - } <nl> - <nl> void cgCheckMBase ( IRLS & env , const IRInstruction * inst ) { <nl> cgCheckTypeMem ( env , inst ) ; <nl> } <nl> void implIsType ( IRLS & env , const IRInstruction * inst , bool negate ) { <nl> v < < setcc { negate ? ccNegate ( cc ) : cc , sf , dst } ; <nl> } ; <nl> <nl> - if ( src - > isA ( TPtrToGen ) ) { <nl> + if ( src - > isA ( TPtrToCell ) ) { <nl> auto const base = loc . reg ( ) ; <nl> emitTypeTest ( v , env , inst - > typeParam ( ) , base [ TVOFF ( m_type ) ] , <nl> base [ TVOFF ( m_data ) ] , v . 
makeReg ( ) , doJcc ) ; <nl> return ; <nl> } <nl> - assertx ( src - > isA ( TGen ) ) ; <nl> + assertx ( src - > isA ( TCell ) ) ; <nl> <nl> auto const data = loc . reg ( 0 ) ; <nl> auto const type = loc . reg ( 1 ) ! = InvalidReg <nl> void cgAssertType ( IRLS & env , const IRInstruction * inst ) { <nl> void cgAssertLoc ( IRLS & , const IRInstruction * ) { } <nl> void cgAssertStk ( IRLS & , const IRInstruction * ) { } <nl> void cgAssertMBase ( IRLS & , const IRInstruction * ) { } <nl> - void cgHintLocInner ( IRLS & , const IRInstruction * ) { } <nl> - void cgHintStkInner ( IRLS & , const IRInstruction * ) { } <nl> - void cgHintMBaseInner ( IRLS & , const IRInstruction * ) { } <nl> <nl> void cgProfileType ( IRLS & env , const IRInstruction * inst ) { <nl> auto const extra = inst - > extra < RDSHandleData > ( ) ; <nl> mmm a / hphp / runtime / vm / jit / load - elim . cpp <nl> ppp b / hphp / runtime / vm / jit / load - elim . cpp <nl> Flags handle_general_effects ( Local & env , <nl> case CheckLoc : <nl> case CheckStk : <nl> case CheckMBase : <nl> - case CheckRefInner : <nl> if ( auto flags = handleCheck ( inst . typeParam ( ) ) ) return * flags ; <nl> break ; <nl> <nl> case CheckInitMem : <nl> - if ( auto flags = handleCheck ( TInitGen ) ) return * flags ; <nl> + if ( auto flags = handleCheck ( TInitCell ) ) return * flags ; <nl> break ; <nl> <nl> case CheckIter : { <nl> Flags handle_general_effects ( Local & env , <nl> break ; <nl> } <nl> <nl> - case UnboxPtr : <nl> - if ( auto const meta = env . global . ainfo . find ( canonicalize ( m . loads ) ) ) { <nl> - if ( ! env . state . avail [ meta - > index ] ) break ; <nl> - auto const tloc = & env . state . tracked [ meta - > index ] ; <nl> - if ( ! tloc - > knownType . maybe ( TBoxedCell ) ) { <nl> - return FReducible { inst . src ( 0 ) , inst . dst ( ) - > type ( ) , meta - > index } ; <nl> - } <nl> - } <nl> - break ; <nl> - <nl> case CheckPackedArrayDataBounds : { <nl> if ( ! ( inst . src ( 0 ) - > type ( ) < = ( TVec | Type : : Array ( ArrayData : : kPackedKind ) ) ) ) { <nl> break ; <nl> void reduce_inst ( Global & env , IRInstruction & inst , const FReducible & flags ) { <nl> case CheckLoc : <nl> case CheckStk : <nl> case CheckMBase : <nl> - case CheckRefInner : <nl> reduce_to ( CheckType , inst . typeParam ( ) ) ; <nl> break ; <nl> <nl> void reduce_inst ( Global & env , IRInstruction & inst , const FReducible & flags ) { <nl> reduce_to ( AssertType , flags . knownType ) ; <nl> break ; <nl> <nl> - case UnboxPtr : <nl> - env . unit . replace ( & inst , AssertType , flags . knownType , resolved ) ; <nl> - break ; <nl> - <nl> default : always_assert ( false ) ; <nl> } <nl> <nl> void optimize_inst ( Global & env , IRInstruction & inst , Flags flags ) { <nl> redundantFlags . knownType . toString ( ) , <nl> resolved - > toString ( ) ) ; <nl> <nl> - if ( resolved - > type ( ) . subtypeOfAny ( TGen , TCls ) ) { <nl> + if ( resolved - > type ( ) < = TCell ) { <nl> env . unit . replace ( & inst , AssertType , redundantFlags . knownType , resolved ) ; <nl> } else { <nl> env . unit . replace ( & inst , Mov , resolved ) ; <nl> mmm a / hphp / runtime / vm / jit / memory - effects . cpp <nl> ppp b / hphp / runtime / vm / jit / memory - effects . cpp <nl> AliasClass pointee ( <nl> jit : : flat_set < const IRInstruction * > * visited_labels <nl> ) { <nl> auto const type = ptr - > type ( ) ; <nl> - always_assert ( type < = TMemToGen ) ; <nl> - auto const maybeRef = type . 
maybe ( TMemToRefGen ) ; <nl> - auto const typeNR = type - TMemToRefGen ; <nl> + always_assert ( type < = TMemToCell ) ; <nl> auto const canonPtr = canonical ( ptr ) ; <nl> - if ( ! canonPtr - > isA ( TMemToGen ) ) { <nl> + if ( ! canonPtr - > isA ( TMemToCell ) ) { <nl> / / This can happen when ptr is TBottom from a passthrough instruction with <nl> / / a src that isn ' t TBottom . The most common cause of this is something <nl> / / like " t5 : Bottom = CheckType < Str > t2 : Int " . It means ptr isn ' t really a <nl> AliasClass pointee ( <nl> <nl> auto const sinst = canonPtr - > inst ( ) ; <nl> <nl> - if ( sinst - > is ( UnboxPtr ) ) { <nl> - return ARefAny | pointee ( sinst - > src ( 0 ) , visited_labels ) ; <nl> - } <nl> - <nl> if ( sinst - > is ( LdRDSAddr , LdInitRDSAddr ) ) { <nl> return ARds { sinst - > extra < RDSHandleData > ( ) - > handle } ; <nl> } <nl> AliasClass pointee ( <nl> } <nl> <nl> auto specific = [ & ] ( ) - > folly : : Optional < AliasClass > { <nl> - if ( typeNR < = TBottom ) return AEmpty ; <nl> + if ( type < = TBottom ) return AEmpty ; <nl> <nl> - if ( typeNR < = TMemToFrameGen ) { <nl> + if ( type < = TMemToFrameCell ) { <nl> if ( sinst - > is ( LdLocAddr ) ) { <nl> return AliasClass { <nl> AFrame { sinst - > src ( 0 ) , sinst - > extra < LdLocAddr > ( ) - > locId } <nl> AliasClass pointee ( <nl> return AFrameAny ; <nl> } <nl> <nl> - if ( typeNR < = TMemToStkGen ) { <nl> + if ( type < = TMemToStkCell ) { <nl> if ( sinst - > is ( LdStkAddr ) ) { <nl> return AliasClass { <nl> AStack { sinst - > src ( 0 ) , sinst - > extra < LdStkAddr > ( ) - > offset , 1 } <nl> AliasClass pointee ( <nl> return AStackAny ; <nl> } <nl> <nl> - if ( typeNR < = TMemToPropGen ) { <nl> + if ( type < = TMemToPropCell ) { <nl> if ( sinst - > is ( LdPropAddr , LdInitPropAddr ) ) { <nl> return AliasClass { <nl> AProp { <nl> AliasClass pointee ( <nl> return APropAny ; <nl> } <nl> <nl> - if ( typeNR < = TMemToMISGen ) { <nl> + if ( type < = TMemToMISCell ) { <nl> if ( sinst - > is ( LdMIStateAddr ) ) { <nl> return mis_from_offset ( sinst - > src ( 0 ) - > intVal ( ) ) ; <nl> } <nl> AliasClass pointee ( <nl> return AElemAny ; <nl> } ; <nl> <nl> - if ( typeNR < = TMemToElemGen ) { <nl> + if ( type < = TMemToElemCell ) { <nl> if ( sinst - > is ( LdPackedArrayDataElemAddr ) ) return elem ( ) ; <nl> return AElemAny ; <nl> } <nl> <nl> / / The result of ElemArray { , W , U } is either the address of an array element , <nl> / / or & immutable_null_base . <nl> - if ( typeNR < = TMemToMembGen ) { <nl> + if ( type < = TMemToMembCell ) { <nl> if ( sinst - > is ( ElemArrayX , ElemDictX , ElemKeysetX ) ) return elem ( ) ; <nl> <nl> - / / Takes a PtrToGen as its first operand , so we can ' t easily grab an array <nl> - / / base . <nl> + / / Takes a PtrToCell as its first operand , so we can ' t easily grab an <nl> + / / array base . <nl> if ( sinst - > is ( ElemArrayU , ElemVecU , ElemDictU , ElemKeysetU ) ) { <nl> return AElemAny ; <nl> } <nl> AliasClass pointee ( <nl> if ( sinst - > is ( PropX , PropDX , PropQ ) ) { <nl> auto const src = [ & ] { <nl> if ( sinst - > is ( PropDX ) ) { <nl> - assertx ( sinst - > src ( sinst - > numSrcs ( ) - 2 ) - > isA ( TMemToMISGen ) ) ; <nl> + assertx ( sinst - > src ( sinst - > numSrcs ( ) - 2 ) - > isA ( TMemToMISCell ) ) ; <nl> assertx ( <nl> sinst - > src ( sinst - > numSrcs ( ) - 1 ) - > isA ( TMIPropSPtr | TNullptr ) <nl> ) ; <nl> return sinst - > src ( sinst - > numSrcs ( ) - 2 ) ; <nl> } else { <nl> - assertx ( sinst - > srcs ( ) . 
back ( ) - > isA ( TPtrToMISGen ) ) ; <nl> + assertx ( sinst - > srcs ( ) . back ( ) - > isA ( TPtrToMISCell ) ) ; <nl> return sinst - > srcs ( ) . back ( ) ; <nl> } <nl> } ( ) ; <nl> AliasClass pointee ( <nl> if ( sinst - > is ( ElemX , ElemDX , ElemUX ) ) { <nl> auto const src = [ & ] { <nl> if ( sinst - > is ( ElemDX ) ) { <nl> - assertx ( sinst - > src ( sinst - > numSrcs ( ) - 2 ) - > isA ( TMemToMISGen ) ) ; <nl> + assertx ( sinst - > src ( sinst - > numSrcs ( ) - 2 ) - > isA ( TMemToMISCell ) ) ; <nl> assertx ( <nl> sinst - > src ( sinst - > numSrcs ( ) - 1 ) - > isA ( TMIPropSPtr | TNullptr ) <nl> ) ; <nl> return sinst - > src ( sinst - > numSrcs ( ) - 2 ) ; <nl> } else { <nl> - assertx ( sinst - > srcs ( ) . back ( ) - > isA ( TPtrToMISGen ) ) ; <nl> + assertx ( sinst - > srcs ( ) . back ( ) - > isA ( TPtrToMISCell ) ) ; <nl> return sinst - > srcs ( ) . back ( ) ; <nl> } <nl> } ( ) ; <nl> AliasClass pointee ( <nl> return folly : : none ; <nl> } ( ) ; <nl> <nl> - auto ret = maybeRef ? ARefAny : AEmpty ; <nl> - if ( specific ) return * specific | ret ; <nl> + if ( specific ) return * specific ; <nl> <nl> / * <nl> * None of the above worked , so try to make the smallest union we can based <nl> * on the pointer type . <nl> * / <nl> - if ( typeNR . maybe ( TMemToStkGen ) ) ret = ret | AStackAny ; <nl> - if ( typeNR . maybe ( TMemToFrameGen ) ) ret = ret | AFrameAny ; <nl> - if ( typeNR . maybe ( TMemToPropGen ) ) ret = ret | APropAny ; <nl> - if ( typeNR . maybe ( TMemToElemGen ) ) ret = ret | AElemAny ; <nl> - if ( typeNR . maybe ( TMemToMISGen ) ) ret = ret | AMIStateTV ; <nl> - if ( typeNR . maybe ( TMemToClsInitGen ) ) ret = ret | AHeapAny ; <nl> - if ( typeNR . maybe ( TMemToClsCnsGen ) ) ret = ret | AHeapAny ; <nl> - if ( typeNR . maybe ( TMemToSPropGen ) ) ret = ret | ARdsAny ; <nl> + auto ret = AEmpty ; <nl> + if ( type . maybe ( TMemToStkCell ) ) ret = ret | AStackAny ; <nl> + if ( type . maybe ( TMemToFrameCell ) ) ret = ret | AFrameAny ; <nl> + if ( type . maybe ( TMemToPropCell ) ) ret = ret | APropAny ; <nl> + if ( type . maybe ( TMemToElemCell ) ) ret = ret | AElemAny ; <nl> + if ( type . maybe ( TMemToMISCell ) ) ret = ret | AMIStateTV ; <nl> + if ( type . maybe ( TMemToClsInitCell ) ) ret = ret | AHeapAny ; <nl> + if ( type . maybe ( TMemToClsCnsCell ) ) ret = ret | AHeapAny ; <nl> + if ( type . maybe ( TMemToSPropCell ) ) ret = ret | ARdsAny ; <nl> return ret ; <nl> } <nl> <nl> AliasClass pointee ( <nl> AliasClass all_pointees ( folly : : Range < SSATmp * * > srcs ) { <nl> auto ret = AliasClass { AEmpty } ; <nl> for ( auto const & src : srcs ) { <nl> - if ( src - > isA ( TMemToGen ) ) { <nl> + if ( src - > isA ( TMemToCell ) ) { <nl> ret = ret | pointee ( src ) ; <nl> } <nl> } <nl> return ret ; <nl> } <nl> <nl> - / / Return an AliasClass containing all locations pointed to by any MemToGen <nl> + / / Return an AliasClass containing all locations pointed to by any MemToCell <nl> / / sources to an instruction . <nl> AliasClass all_pointees ( const IRInstruction & inst ) { <nl> return all_pointees ( inst . srcs ( ) ) ; <nl> MemEffects minstr_with_tvref ( const IRInstruction & inst ) { <nl> <nl> auto const srcs = inst . srcs ( ) ; <nl> if ( inst . is ( ElemDX , PropDX ) ) { <nl> - assertx ( inst . src ( inst . numSrcs ( ) - 2 ) - > isA ( TMemToMISGen ) ) ; <nl> + assertx ( inst . src ( inst . numSrcs ( ) - 2 ) - > isA ( TMemToMISCell ) ) ; <nl> assertx ( inst . src ( inst . numSrcs ( ) - 1 ) - > isA ( TMIPropSPtr | TNullptr ) ) ; <nl> loads | = all_pointees ( srcs . 
subpiece ( 0 , srcs . size ( ) - 2 ) ) ; <nl> <nl> MemEffects minstr_with_tvref ( const IRInstruction & inst ) { <nl> stores | = AMIStatePropS ; <nl> } <nl> } else { <nl> - assertx ( srcs . back ( ) - > isA ( TMemToMISGen ) ) ; <nl> + assertx ( srcs . back ( ) - > isA ( TMemToMISCell ) ) ; <nl> loads | = all_pointees ( srcs . subpiece ( 0 , srcs . size ( ) - 1 ) ) ; <nl> kills = AMIStatePropS ; <nl> } <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> auto const stk = [ & ] ( ) - > AliasClass { <nl> AliasClass ret = AEmpty ; <nl> for ( auto i = uint32_t { 2 } ; i < inst . numSrcs ( ) ; + + i ) { <nl> - if ( inst . src ( i ) - > type ( ) < = TPtrToGen ) { <nl> + if ( inst . src ( i ) - > type ( ) < = TPtrToCell ) { <nl> auto const cls = pointee ( inst . src ( i ) ) ; <nl> if ( cls . maybe ( AStackAny ) ) { <nl> ret = ret | cls ; <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case FinishMemberOp : <nl> return may_load_store_kill ( AEmpty , AEmpty , AMIStateAny ) ; <nl> <nl> - case BoxPtr : <nl> - { <nl> - auto const mem = pointee ( inst . src ( 0 ) ) ; <nl> - return may_load_store ( mem , mem ) ; <nl> - } <nl> - case UnboxPtr : <nl> - return may_load_store ( pointee ( inst . src ( 0 ) ) , AEmpty ) ; <nl> - <nl> case IsNTypeMem : <nl> case IsTypeMem : <nl> case CheckTypeMem : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Object / Ref loads / stores <nl> <nl> - case CheckRefInner : <nl> - return may_load_store ( ARef { inst . src ( 0 ) } , AEmpty ) ; <nl> - case LdRef : <nl> - return PureLoad { ARef { inst . src ( 0 ) } } ; <nl> - case StRef : <nl> - return PureStore { ARef { inst . src ( 0 ) } , inst . src ( 1 ) , inst . src ( 0 ) } ; <nl> - <nl> case InitObjProps : <nl> return may_load_store ( AEmpty , APropAny ) ; <nl> <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> return may_load_store ( AElemAny , AEmpty ) ; <nl> <nl> case ArrayIdx : <nl> - return may_load_store ( AElemAny | ARefAny , AEmpty ) ; <nl> + return may_load_store ( AElemAny , AEmpty ) ; <nl> <nl> case SameArr : <nl> case NSameArr : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case ConvFuncToArr : <nl> case ConvIntToArr : <nl> case ConvIntToStr : <nl> - case Box : / / conditional allocation <nl> return IrrelevantEffects { } ; <nl> <nl> case AllocObj : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case GteInt : <nl> case GtBool : <nl> case GtInt : <nl> - case HintLocInner : <nl> - case HintStkInner : <nl> - case HintMBaseInner : <nl> case Jmp : <nl> case JmpNZero : <nl> case JmpZero : <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> { <nl> auto const src = inst . src ( 0 ) ; <nl> / / It could decref the inner ref . <nl> - auto affected = src - > isA ( TBoxedCell ) ? ARef { src } : <nl> - src - > type ( ) . maybe ( TBoxedCell ) ? ARefAny : AEmpty ; <nl> - if ( src - > type ( ) . maybe ( TKeyset | TBoxedKeyset ) ) { <nl> + auto affected = AEmpty ; <nl> + if ( src - > type ( ) . maybe ( TKeyset ) ) { <nl> / / TKeyset can ' t re - enter , but it will decref any contained <nl> / / strings . Without this , an incref of a string contained in <nl> / / a Keyset could be sunk past the decref of the Keyset . 
<nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> / / Need to add affected to the ` store ' set . See comments about <nl> / / ` GeneralEffects ' in memory - effects . h . <nl> auto const effect = may_load_store ( affected , affected ) ; <nl> - if ( src - > type ( ) . maybe ( TArr | TVec | TDict | TObj | TRes | <nl> - TBoxedArr | TBoxedVec | TBoxedDict | <nl> - TBoxedObj | TBoxedRes ) ) { <nl> + if ( src - > type ( ) . maybe ( TArr | TVec | TDict | TObj | TRes ) ) { <nl> / / Could re - enter to run a destructor . Keysets are exempt because they <nl> / / can only contain strings or integers . <nl> return may_reenter ( inst , effect ) ; <nl> MemEffects memory_effects_impl ( const IRInstruction & inst ) { <nl> case DictAddElemStrKey : / / decrefs value <nl> case ArrayGet : / / kVPackedKind warnings <nl> case ArraySet : / / kVPackedKind warnings <nl> - case ArraySetRef : / / kVPackedKind warnings <nl> case DictGet : <nl> case KeysetGet : <nl> case VecSet : <nl> - case VecSetRef : <nl> case DictSet : <nl> - case DictSetRef : <nl> case ElemArrayX : <nl> case ElemDictX : <nl> case ElemKeysetX : <nl> mmm a / hphp / runtime / vm / jit / memory - effects . h <nl> ppp b / hphp / runtime / vm / jit / memory - effects . h <nl> MemEffects canonicalize ( MemEffects ) ; <nl> <nl> / * <nl> * Return an alias class representing the pointee of the given value , which <nl> - * must be < = TMemToGen . <nl> + * must be < = TMemToCell . <nl> * / <nl> AliasClass pointee ( const SSATmp * ) ; <nl> <nl> mmm a / hphp / runtime / vm / jit / minstr - effects . cpp <nl> ppp b / hphp / runtime / vm / jit / minstr - effects . cpp <nl> bool MInstrEffects : : supported ( const IRInstruction * inst ) { <nl> MInstrEffects : : MInstrEffects ( const Opcode rawOp , const Type origBase ) { <nl> / / Note : MInstrEffects wants to manipulate pointer types in some situations <nl> / / for historical reasons . We ' ll eventually change that . <nl> - bool const is_ptr = origBase < = TLvalToGen ; <nl> + bool const is_ptr = origBase < = TLvalToCell ; <nl> auto const basePtr = is_ptr ? origBase . ptrKind ( ) : Ptr : : Bottom ; <nl> baseType = origBase . derefIfPtr ( ) ; <nl> <nl> baseTypeChanged = baseValChanged = false ; <nl> <nl> - / / Process the inner and outer types separately and then recombine them , <nl> - / / since the minstr operations all operate on the inner cell of boxed bases . <nl> - / / We treat the new inner type as a prediction because it will be verified <nl> - / / the next time we load from the box . <nl> - auto inner = ( baseType & TBoxedCell ) . inner ( ) ; <nl> - auto outer = baseType & TCell ; <nl> - getBaseType ( rawOp , false , outer , baseValChanged ) ; <nl> - getBaseType ( rawOp , true , inner , baseValChanged ) ; <nl> + getBaseType ( rawOp , false , baseType , baseValChanged ) ; <nl> <nl> - baseType = inner . box ( ) | outer ; <nl> baseType = is_ptr ? baseType . lval ( basePtr ) : baseType ; <nl> - <nl> baseTypeChanged = baseType ! = origBase ; <nl> - <nl> - / * Boxed bases may have their inner value changed but the value of the box <nl> - * will never change . * / <nl> - baseValChanged = ! ( origBase < = TBoxedCell ) & & <nl> - ( baseValChanged | | baseTypeChanged ) ; <nl> + baseValChanged = baseValChanged | | baseTypeChanged ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / hphp / runtime / vm / jit / minstr - helpers . 
h <nl> ppp b / hphp / runtime / vm / jit / minstr - helpers . h <nl> CGETELEM_HELPER_TABLE ( X ) <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - template < KeyType keyType , bool setRef > <nl> + template < KeyType keyType > <nl> auto arraySetImpl ( ArrayData * a , key_type < keyType > key , <nl> Cell value , TypedValue * ref ) { <nl> static_assert ( keyType ! = KeyType : : Any , <nl> auto arraySetImpl ( ArrayData * a , key_type < keyType > key , <nl> assertx ( cellIsPlausible ( value ) ) ; <nl> assertx ( a - > isPHPArray ( ) ) ; <nl> auto const ret = a - > set ( key , value ) ; <nl> - return arrayRefShuffle < setRef , KindOfArray > ( a , ret , ref ) ; <nl> + return arrayRefShuffle < false , KindOfArray > ( a , ret , ref ) ; <nl> } <nl> <nl> # define ARRAYSET_HELPER_TABLE ( m ) \ <nl> auto arraySetImpl ( ArrayData * a , key_type < keyType > key , <nl> <nl> # define X ( nm , keyType ) \ <nl> inline ArrayData * nm ( ArrayData * a , key_type < keyType > key , Cell value ) { \ <nl> - return arraySetImpl < keyType , false > ( a , key , value , nullptr ) ; \ <nl> + return arraySetImpl < keyType > ( a , key , value , nullptr ) ; \ <nl> } <nl> ARRAYSET_HELPER_TABLE ( X ) <nl> # undef X <nl> <nl> - # define ARRAYSET_REF_HELPER_TABLE ( m ) \ <nl> - / * name keyType * / \ <nl> - m ( arraySetSR , KeyType : : Str ) \ <nl> - m ( arraySetIR , KeyType : : Int ) \ <nl> - <nl> - # define X ( nm , keyType ) \ <nl> - inline void nm ( ArrayData * a , key_type < keyType > key , \ <nl> - Cell value , RefData * ref ) { \ <nl> - arraySetImpl < keyType , true > ( a , key , value , ref - > cell ( ) ) ; \ <nl> - } <nl> - ARRAYSET_REF_HELPER_TABLE ( X ) <nl> - # undef X <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - template < bool setRef , bool copyProv > <nl> + template < bool copyProv > <nl> auto vecSetImpl ( ArrayData * a , int64_t key , Cell value , TypedValue * ref ) { <nl> assertx ( cellIsPlausible ( value ) ) ; <nl> assertx ( a - > isVecArray ( ) ) ; <nl> ArrayData * ret = PackedArray : : SetIntVec ( a , key , value ) ; <nl> - return arrayRefShuffle < setRef , KindOfVec > ( a , ret , ref ) ; <nl> + return arrayRefShuffle < false , KindOfVec > ( a , ret , ref ) ; <nl> } <nl> <nl> # define VECSET_HELPER_TABLE ( m ) \ <nl> auto vecSetImpl ( ArrayData * a , int64_t key , Cell value , TypedValue * ref ) { <nl> <nl> # define X ( nm , copyProv ) \ <nl> inline ArrayData * nm ( ArrayData * a , int64_t key , Cell val ) { \ <nl> - return vecSetImpl < false , copyProv > ( a , key , val , nullptr ) ; \ <nl> + return vecSetImpl < copyProv > ( a , key , val , nullptr ) ; \ <nl> } <nl> VECSET_HELPER_TABLE ( X ) <nl> # undef X <nl> <nl> - # define VECSET_REF_HELPER_TABLE ( m ) \ <nl> - / * name copyProv * / \ <nl> - m ( vecSetIRN , false ) \ <nl> - m ( vecSetIRP , true ) <nl> - <nl> - # define X ( nm , copyProv ) \ <nl> - inline void nm ( ArrayData * a , int64_t key , Cell val , RefData * ref ) { \ <nl> - vecSetImpl < true , copyProv > ( a , key , val , ref - > cell ( ) ) ; \ <nl> - } <nl> - VECSET_REF_HELPER_TABLE ( X ) <nl> - # undef X <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> inline ArrayData * dictSetImplPre ( ArrayData * a , int64_t i , Cell val ) { <nl> inline ArrayData * 
dictSetImplPre ( ArrayData * a , StringData * s , Cell val ) { <nl> return MixedArray : : SetStrDict ( a , s , val ) ; <nl> } <nl> <nl> - template < KeyType keyType , bool setRef , bool copyProv > <nl> + template < KeyType keyType , bool copyProv > <nl> auto <nl> dictSetImpl ( ArrayData * a , key_type < keyType > key , Cell value , TypedValue * ref ) { <nl> assertx ( cellIsPlausible ( value ) ) ; <nl> assertx ( a - > isDict ( ) ) ; <nl> auto ret = dictSetImplPre ( a , key , value ) ; <nl> - return arrayRefShuffle < setRef , KindOfDict > ( a , ret , ref ) ; <nl> + return arrayRefShuffle < false , KindOfDict > ( a , ret , ref ) ; <nl> } <nl> <nl> # define DICTSET_HELPER_TABLE ( m ) \ <nl> dictSetImpl ( ArrayData * a , key_type < keyType > key , Cell value , TypedValue * ref ) { <nl> <nl> # define X ( nm , keyType , copyProv ) \ <nl> inline ArrayData * nm ( ArrayData * a , key_type < keyType > key , Cell val ) { \ <nl> - return dictSetImpl < keyType , false , copyProv > ( a , key , val , nullptr ) ; \ <nl> + return dictSetImpl < keyType , copyProv > ( a , key , val , nullptr ) ; \ <nl> } <nl> DICTSET_HELPER_TABLE ( X ) <nl> # undef X <nl> <nl> - # define DICTSET_REF_HELPER_TABLE ( m ) \ <nl> - / * name keyType copyProv * / \ <nl> - m ( dictSetIRN , KeyType : : Int , false ) \ <nl> - m ( dictSetSRN , KeyType : : Str , false ) \ <nl> - m ( dictSetIRP , KeyType : : Int , true ) \ <nl> - m ( dictSetSRP , KeyType : : Str , true ) <nl> - <nl> - # define X ( nm , keyType , copyProv ) \ <nl> - inline \ <nl> - void nm ( ArrayData * a , key_type < keyType > key , Cell val , RefData * ref ) { \ <nl> - dictSetImpl < keyType , true , copyProv > ( a , key , val , ref - > cell ( ) ) ; \ <nl> - } <nl> - DICTSET_REF_HELPER_TABLE ( X ) <nl> - # undef X <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> template < bool copyProv > <nl> mmm a / hphp / runtime / vm / jit / native - calls . cpp <nl> ppp b / hphp / runtime / vm / jit / native - calls . cpp <nl> static CallMap s_callMap { <nl> { { SSA , 0 } , { SSA , 1 } , { TV , 2 } } } , <nl> <nl> { ArrayAdd , arrayAdd , DSSA , SSync , { { SSA , 0 } , { SSA , 1 } } } , <nl> - { Box , boxValue , DSSA , SNone , { { TV , 0 } } } , <nl> { Clone , & ObjectData : : clone , DSSA , SSync , { { SSA , 0 } } } , <nl> { NewPair , collections : : allocPair , DSSA , SNone , <nl> { { TV , 0 } , { TV , 1 } } } , <nl> mmm a / hphp / runtime / vm / jit / pdce - inlining . cpp <nl> ppp b / hphp / runtime / vm / jit / pdce - inlining . cpp <nl> to use the parent frame pointer ( generally with some additional fixup in any <nl> associated catch traces ) are also accepted . <nl> <nl> All pure memory access and pointer logic can be transformed , in <nl> - particular : LdLoc , StLoc , LdLocAddr , CheckLoc , HintLocInner , and <nl> - AssertLoc . <nl> + particular : LdLoc , StLoc , LdLocAddr , CheckLoc , and AssertLoc . <nl> <nl> Currently only EagerSyncVMRegs , CallBuiltin , and Call can be adjusted <nl> to use the parent frame . <nl> bool canConvertToStack ( IRInstruction & inst ) { <nl> auto const id = inst . marker ( ) . func ( ) - > lookupVarId ( s_86metadata . get ( ) ) ; <nl> return inst . extra < StLoc > ( ) - > locId ! = id ; <nl> } <nl> - return inst . is ( LdLoc , CheckLoc , AssertLoc , HintLocInner , LdLocAddr ) ; <nl> + return inst . is ( LdLoc , CheckLoc , AssertLoc , LdLocAddr ) ; <nl> } <nl> <nl> / * <nl> mmm a / hphp / runtime / vm / jit / refcount - opts . 
cpp <nl> ppp b / hphp / runtime / vm / jit / refcount - opts . cpp <nl> void observe_all ( Env & env , RCState & state , PreAdder add_node ) { <nl> } <nl> } <nl> <nl> - / * <nl> - * When we call builtin functions , we need to make sure that we don ' t change <nl> - * the return value of VRefParam : : isReferenced on any possibly - KindOfRef <nl> - * arguments . We accomplish this with req nodes at level 2 for all asets that <nl> - * could be boxed before we see builtin calls ( we could do it only to the ones <nl> - * that could be args , but we don ' t bother ) . <nl> - * <nl> - * The reason we have this unusual case only when dealing with builtin calls is <nl> - * that in that situation , we ' re actually tracking references and memory <nl> - * locations associated with the call . This means it doesn ' t fall into the <nl> - * usual category of not changing whether an " unknown pointer " could be the <nl> - * last reference ( as described in the " More about memory " section at the top <nl> - * of this file ) mmmwe need to avoid changing whether a known pointer ( the one <nl> - * in memory for the CallBuiltin arg ) is the last reference . Basically , <nl> - * CallBuiltin observes the reference count ( at level 2 ) for their <nl> - * possibly - boxed args , even though they can ' t decref the pointer through the <nl> - * memory locations for those args . <nl> - * / <nl> - void observe_for_is_referenced ( Env & env , RCState & state , PreAdder add_node ) { <nl> - FTRACE ( 3 , " observe_for_is_referenced \ n " ) ; <nl> - for ( auto asetID = uint32_t { 0 } ; asetID < state . asets . size ( ) ; + + asetID ) { <nl> - if ( env . asets [ asetID ] . widestType . maybe ( TBoxedCell ) ) { <nl> - add_node ( asetID , NReq { 2 } ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> void may_decref ( Env & env , RCState & state , ASetID asetID , PreAdder add_node ) { <nl> auto & aset = state . asets [ asetID ] ; <nl> <nl> void analyze_mem_effects ( Env & env , <nl> if ( inst . is ( CallBuiltin ) ) { <nl> observe_unbalanced_decrefs ( env , state , add_node ) ; <nl> kill_unsupported_refs ( state , add_node ) ; <nl> - observe_for_is_referenced ( env , state , add_node ) ; <nl> } <nl> <nl> / / Locations that are killed don ' t need to be tracked as memory support <nl> mmm a / hphp / runtime / vm / jit / reg - alloc . cpp <nl> ppp b / hphp / runtime / vm / jit / reg - alloc . cpp <nl> bool loadsCell ( const IRInstruction & inst ) { <nl> <nl> switch ( inst . op ( ) ) { <nl> case LdMem : <nl> - return arch_allows & & ( ! wide_tv_val | | inst . src ( 0 ) - > isA ( TPtrToGen ) ) ; <nl> + return arch_allows & & ( ! wide_tv_val | | inst . src ( 0 ) - > isA ( TPtrToCell ) ) ; <nl> <nl> case LdVecElem : <nl> case LdPackedElem : <nl> bool loadsCell ( const IRInstruction & inst ) { <nl> case LdLoc : <nl> case LdContField : <nl> case LdElem : <nl> - case LdRef : <nl> case InitClsCns : <nl> case CGetProp : <nl> case ArrayGet : <nl> mmm a / hphp / runtime / vm / jit / region - guards . cpp <nl> ppp b / hphp / runtime / vm / jit / region - guards . cpp <nl> LocationTypeWeights findLocationTypes ( const BlockDataVec & blockData ) { <nl> * We consider relaxation profitable if there ' s not a single dominating type <nl> * that accounts for RuntimeOption : : EvalJitPGORelaxPercent or more of the time <nl> * during profiling . 
Besides that , if ` guardCategory ' is DataTypeCountness , we <nl> - * also consider relaxing all the way to Gen , in which case ` guardCategory ' is <nl> + * also consider relaxing all the way to Cell , in which case ` guardCategory ' is <nl> * updated to DataTypeGeneric . <nl> * / <nl> bool relaxIsProfitable ( const jit : : hash_map < Type , int64_t > & typeWeights , <nl> Type guardType , <nl> DataTypeCategory & guardCategory ) { <nl> - assertx ( guardType < = TGen ) ; <nl> + assertx ( guardType < = TCell ) ; <nl> auto relaxedType = relaxType ( guardType , guardCategory ) ; <nl> <nl> int64_t totalWgt = 0 ; / / sum of all the block weights <nl> bool relaxIsProfitable ( const jit : : hash_map < Type , int64_t > & typeWeights , <nl> for ( auto & typeWgt : typeWeights ) { <nl> auto type = typeWgt . first ; <nl> auto weight = typeWgt . second ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> const bool fitsConstraint = guardCategory = = DataTypeSpecialized <nl> ? type . isSpecialized ( ) <nl> : typeFitsConstraint ( type , guardCategory ) ; <nl> bool relaxIsProfitable ( const jit : : hash_map < Type , int64_t > & typeWeights , <nl> if ( relaxType ( type , guardCategory ) = = relaxedType ) relaxWgt + = weight ; <nl> } <nl> <nl> - / / Consider relaxing Countness to Gen , which we do if the sum of the weights <nl> + / / Consider relaxing Countness to Cell , which we do if the sum of the weights <nl> / / of all the blocks that would pass the relaxed guard would be less than a <nl> / / certain threshold . We use different thresholds for counted versus <nl> - / / uncounted types , because incref / decref are much more expensive for Gen than <nl> - / / for uncounted ( where it ' s a no - op ) . For counted types , the difference <nl> + / / uncounted types , because incref / decref are much more expensive for Cell <nl> + / / than for uncounted ( where it ' s a no - op ) . For counted types , the difference <nl> / / between generic and specialized incref / decrefs is much smaller , so we ' re <nl> - / / willing to relax to Gen more often for counted types . <nl> + / / willing to relax to Cell more often for counted types . <nl> bool profitable = false ; <nl> auto newCategory = guardCategory ; <nl> if ( guardCategory = = DataTypeCountness ) { <nl> bool relaxIsProfitable ( const jit : : hash_map < Type , int64_t > & typeWeights , <nl> } <nl> } <nl> <nl> - / / If we didn ' t relax to Gen , consider relaxing to the input guardCategory . <nl> + / / If we didn ' t relax to Cell , consider relaxing to the input guardCategory . <nl> if ( ! profitable ) { <nl> if ( noRelaxWgt * 100 < relaxWgt * RuntimeOption : : EvalJitPGORelaxPercent ) { <nl> profitable = true ; <nl> void relaxGuards ( BlockDataVec & blockData ) { <nl> } <nl> <nl> auto oldType = guard . type ; <nl> - always_assert_flog ( oldType < = TGen , " oldType = { } " , oldType ) ; <nl> + always_assert_flog ( oldType < = TCell , " oldType = { } " , oldType ) ; <nl> guard . type = relaxType ( guard . type , guard . category ) ; <nl> if ( oldType ! = guard . type ) { <nl> bd . relaxed = true ; <nl> void updateRegion ( RegionDesc & region , <nl> / / Actually update the type guards . <nl> newBlock - > clearPreConditions ( ) ; <nl> for ( auto & guard : bd . guards ) { <nl> - if ( guard . type < TGen ) { <nl> + if ( guard . 
type < TCell ) { <nl> newBlock - > addPreCondition ( guard ) ; <nl> } <nl> } <nl> void optimizeGuards ( RegionDesc & region , bool simple ) { <nl> auto & oldPreConds = block - > typePreConditions ( ) ; <nl> <nl> for ( auto & preCond : oldPreConds ) { <nl> - assertx ( preCond . type < = TGen ) ; <nl> + assertx ( preCond . type < = TCell ) ; <nl> auto category = preCond . category ; <nl> if ( simple & & category > DataTypeGeneric & & category < DataTypeSpecific ) { <nl> category = DataTypeSpecific ; <nl> } <nl> auto newType = relaxType ( preCond . type , category ) ; <nl> <nl> - if ( newType ! = TGen ) { <nl> + if ( newType ! = TCell ) { <nl> newPreConds . push_back ( { preCond . location , newType , preCond . category } ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / region - prune - arcs . cpp <nl> ppp b / hphp / runtime / vm / jit / region - prune - arcs . cpp <nl> namespace { <nl> struct State { <nl> bool initialized { false } ; <nl> std : : vector < Type > locals ; <nl> - Type mbase { TGen } ; <nl> + Type mbase { TCell } ; <nl> } ; <nl> <nl> struct BlockInfo { <nl> std : : string DEBUG_ONLY show ( const State & state ) { <nl> <nl> for ( auto locID = uint32_t { 0 } ; locID < state . locals . size ( ) ; + + locID ) { <nl> auto const ty = state . locals [ locID ] ; <nl> - if ( ty < TGen ) folly : : format ( & ret , " L { } : { } \ n " , locID , ty . toString ( ) ) ; <nl> + if ( ty < TCell ) folly : : format ( & ret , " L { } : { } \ n " , locID , ty . toString ( ) ) ; <nl> } <nl> auto const ty = state . mbase ; <nl> - if ( ty < TGen ) folly : : format ( & ret , " M { { } } : { } \ n " , ty . toString ( ) ) ; <nl> + if ( ty < TCell ) folly : : format ( & ret , " M { { } } : { } \ n " , ty . toString ( ) ) ; <nl> return ret ; <nl> } <nl> <nl> State entry_state ( const RegionDesc & region , std : : vector < Type > * input ) { <nl> <nl> if ( input ) ret . locals = * input ; <nl> auto const func = region . start ( ) . func ( ) ; <nl> - ret . locals . resize ( func - > numLocals ( ) , TGen ) ; <nl> + ret . locals . resize ( func - > numLocals ( ) , TCell ) ; <nl> <nl> return ret ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / region - selection . cpp <nl> ppp b / hphp / runtime / vm / jit / region - selection . cpp <nl> void RegionDesc : : Block : : truncateAfter ( SrcKey final ) { <nl> void RegionDesc : : Block : : addPredicted ( TypedLocation locType ) { <nl> FTRACE ( 2 , " Block : : addPredicted ( { } ) \ n " , show ( locType ) ) ; <nl> assertx ( locType . type ! = TBottom ) ; <nl> - assertx ( locType . type < = TGen ) ; <nl> + assertx ( locType . type < = TCell ) ; <nl> / / type predictions should be added in order of location <nl> assertx ( m_typePredictions . size ( ) = = 0 | | <nl> ( m_typePredictions . back ( ) . location < locType . location ) ) ; <nl> void RegionDesc : : Block : : addPredicted ( TypedLocation locType ) { <nl> void RegionDesc : : Block : : addPreCondition ( const GuardedLocation & locGuard ) { <nl> FTRACE ( 2 , " Block : : addPreCondition ( { } ) \ n " , show ( locGuard ) ) ; <nl> assertx ( locGuard . type ! = TBottom ) ; <nl> - assertx ( locGuard . type < = TGen ) ; <nl> + assertx ( locGuard . type < = TCell ) ; <nl> assertx ( locGuard . type . isSpecialized ( ) | | <nl> typeFitsConstraint ( locGuard . type , locGuard . category ) ) ; <nl> m_typePreConditions . push_back ( locGuard ) ; <nl> mmm a / hphp / runtime / vm / jit / region - selection . h <nl> ppp b / hphp / runtime / vm / jit / region - selection . 
h <nl> void optimizeProfiledGuards ( RegionDesc & region , const ProfData & profData ) ; <nl> <nl> / * <nl> * Optimize the guards of ` region ' , optionally in ` simple ' mode ( where <nl> - * guards are only relaxed if they can be relaxed all the way to TGen ) . <nl> + * guards are only relaxed if they can be relaxed all the way to TCell ) . <nl> * / <nl> void optimizeGuards ( RegionDesc & region , bool simple ) ; <nl> <nl> mmm a / hphp / runtime / vm / jit / region - tracelet . cpp <nl> ppp b / hphp / runtime / vm / jit / region - tracelet . cpp <nl> bool consumeInput ( Env & env , const InputInfo & input ) { <nl> if ( input . dontGuard ) return true ; <nl> auto const type = irgen : : predictedType ( env . irgs , input . loc ) ; <nl> <nl> - if ( / * env . profiling & & <nl> - * FIXME : T21872803 : <nl> - * This check is only intended for profiling translations . We enabled it <nl> - * for live translations to avoid a bug tracking type dependences for <nl> - * boxed values . * / <nl> - type < = TBoxedCell & & <nl> - ( env . region - > blocks ( ) . size ( ) > 1 | | ! env . region - > entry ( ) - > empty ( ) ) ) { <nl> - / / We don ' t want side exits when profiling , so only allow instructions that <nl> - / / consume refs at the beginning of the region . <nl> - return false ; <nl> - } <nl> - <nl> if ( ! input . dontBreak & & ! type . isKnownDataType ( ) ) { <nl> / / Trying to consume a value without a precise enough type . <nl> FTRACE ( 1 , " selectTracelet : { } tried to consume { } , type { } \ n " , <nl> bool consumeInput ( Env & env , const InputInfo & input ) { <nl> return false ; <nl> } <nl> <nl> - if ( ! ( type < = TBoxedCell ) | | <nl> - input . dontGuardInner | | <nl> - opcodeIgnoresInnerType ( env . inst . op ( ) ) ) { <nl> - return true ; <nl> - } <nl> - <nl> - if ( ! type . inner ( ) . isKnownDataType ( ) ) { <nl> - / / Trying to consume a boxed value without a guess for the inner type . <nl> - FTRACE ( 1 , " selectTracelet : { } tried to consume ref { } \ n " , <nl> - env . inst . toString ( ) , show ( input . loc ) ) ; <nl> - return false ; <nl> - } <nl> - <nl> return true ; <nl> } <nl> <nl> bool isThisSelfOrParent ( Op op ) { <nl> <nl> / * <nl> * For every instruction in trace representing a tracelet guard , call func with <nl> - * its location and type , and whether or not it ' s an inner hint . <nl> + * its location and type . <nl> * / <nl> template < typename F > <nl> void visitGuards ( IRUnit & unit , F func ) { <nl> void visitGuards ( IRUnit & unit , F func ) { <nl> switch ( inst . op ( ) ) { <nl> case EndGuards : <nl> return ; <nl> - case HintLocInner : <nl> case CheckLoc : <nl> func ( & inst , <nl> Location : : Local { inst . extra < LocalId > ( ) - > locId } , <nl> - inst . typeParam ( ) , <nl> - inst . is ( HintLocInner ) ) ; <nl> + inst . typeParam ( ) ) ; <nl> break ; <nl> - case HintStkInner : <nl> case CheckStk : { <nl> auto const irSPRel = inst . extra < IRSPRelOffsetData > ( ) - > offset ; <nl> <nl> void visitGuards ( IRUnit & unit , F func ) { <nl> <nl> func ( & inst , <nl> Location : : Stack { irSPRel . to < FPInvOffset > ( irSPOff ) } , <nl> - inst . typeParam ( ) , <nl> - inst . is ( HintStkInner ) ) ; <nl> + inst . typeParam ( ) ) ; <nl> break ; <nl> } <nl> - case HintMBaseInner : <nl> case CheckMBase : <nl> - func ( & inst , Location : : MBase { } , inst . typeParam ( ) , <nl> - inst . is ( HintMBaseInner ) ) ; <nl> + func ( & inst , Location : : MBase { } , inst . 
typeParam ( ) ) ; <nl> break ; <nl> default : break ; <nl> } <nl> void recordDependencies ( Env & env ) { <nl> auto & unit = env . irgs . unit ; <nl> auto guardMap = std : : map < Location , Type > { } ; <nl> ITRACE ( 2 , " Visiting guards \ n " ) ; <nl> - auto hintMap = std : : map < Location , Type > { } ; <nl> auto catMap = std : : map < Location , DataTypeCategory > { } ; <nl> const auto & guards = env . irgs . irb - > guards ( ) - > guards ; <nl> - auto predictionMap = std : : map < Location , Type > { } ; <nl> visitGuards ( unit , [ & ] ( const IRInstruction * guard , <nl> const Location & loc , <nl> - Type type , bool hint ) { <nl> + Type type ) { <nl> Trace : : Indent indent ; <nl> ITRACE ( 3 , " { } : { } \ n " , show ( loc ) , type ) ; <nl> - assertx ( type < = TGen ) ; <nl> - auto & whichMap = hint ? hintMap : guardMap ; <nl> + assertx ( type < = TCell ) ; <nl> + auto & whichMap = guardMap ; <nl> auto inret = whichMap . insert ( std : : make_pair ( loc , type ) ) ; <nl> - / / Unconstrained pseudo - main guards will be relaxed to Gen by the guard <nl> - / / relaxation pass . Since we don ' t allow loading TGen locals <nl> - / / in pseudo - main , save the predicted type here . <nl> - if ( guard - > marker ( ) . func ( ) - > isPseudoMain ( ) ) { <nl> - auto ret = predictionMap . insert ( std : : make_pair ( loc , type ) ) ; <nl> - if ( ret . second ) { <nl> - FTRACE ( 1 , " selectTracelet saving prediction for PseudoMain { } \ n " , <nl> - show ( RegionDesc : : TypedLocation { loc , type } ) ) ; <nl> - } else { <nl> - auto & oldTy = ret . first - > second ; <nl> - oldTy & = type ; <nl> - } <nl> - } <nl> if ( inret . second ) { <nl> - if ( ! hint ) { <nl> - catMap [ loc ] = folly : : get_default ( guards , guard ) . category ; <nl> - } <nl> + catMap [ loc ] = folly : : get_default ( guards , guard ) . category ; <nl> return ; <nl> } <nl> auto & oldTy = inret . first - > second ; <nl> oldTy & = type ; <nl> - if ( ! hint ) { <nl> - auto & oldCat = catMap [ loc ] ; <nl> - auto newCat = folly : : get_default ( guards , guard ) . category ; <nl> - oldCat = std : : max ( oldCat , newCat ) ; <nl> - } <nl> + <nl> + auto & oldCat = catMap [ loc ] ; <nl> + auto newCat = folly : : get_default ( guards , guard ) . category ; <nl> + oldCat = std : : max ( oldCat , newCat ) ; <nl> } ) ; <nl> <nl> for ( auto & kv : guardMap ) { <nl> - auto const hint_it = hintMap . find ( kv . first ) ; <nl> - / / If we have a hinted type that ' s better than the guarded type , we want to <nl> - / / keep it around . This can really only when a guard is relaxed away to <nl> - / / Gen because we knew something was a BoxedCell statically , but we may <nl> - / / need to keep information about what inner type we were predicting . <nl> - if ( hint_it ! = end ( hintMap ) & & hint_it - > second < kv . second ) { <nl> - FTRACE ( 1 , " selectTracelet adding prediction { } \ n " , <nl> - show ( RegionDesc : : TypedLocation { hint_it - > first , hint_it - > second } ) ) ; <nl> - predictionMap . insert ( * hint_it ) ; <nl> - } <nl> - if ( kv . second = = TGen ) { <nl> - / / Guard was relaxed to Genmmmdon ' t record it . But if there ' s a hint , we <nl> - / / may have needed that ( recorded already above ) . <nl> + if ( kv . second = = TCell ) { <nl> + / / Guard was relaxed to Cellmmmdon ' t record it . <nl> continue ; <nl> } <nl> auto const preCond = RegionDesc : : GuardedLocation { <nl> void recordDependencies ( Env & env ) { <nl> ITRACE ( 1 , " selectTracelet adding guard { } \ n " , show ( preCond ) ) ; <nl> firstBlock . 
addPreCondition ( preCond ) ; <nl> } <nl> - <nl> - / / Predictions are already sorted by location , so we can simply compare <nl> - / / the type - prediction vectors for different blocks later . <nl> - for ( auto & pred : predictionMap ) { <nl> - firstBlock . addPredicted ( RegionDesc : : TypedLocation { pred . first , pred . second } ) ; <nl> - } <nl> } <nl> <nl> void truncateLiterals ( Env & env ) { <nl> RegionDescPtr form_region ( Env & env ) { <nl> <nl> for ( auto const & lt : env . ctx . liveTypes ) { <nl> auto t = lt . type ; <nl> - assertx ( t < = TGen ) ; <nl> + assertx ( t < = TCell ) ; <nl> irgen : : checkType ( env . irgs , lt . location , t , env . ctx . sk . offset ( ) , <nl> true / * outerOnly * / ) ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / simplify . cpp <nl> ppp b / hphp / runtime / vm / jit / simplify . cpp <nl> SSATmp * mergeBranchDests ( State & env , const IRInstruction * inst ) { <nl> CheckMixedArrayKeys , <nl> CheckMixedArrayOffset , <nl> CheckDictOffset , <nl> - CheckKeysetOffset , <nl> - CheckRefInner ) ) ; <nl> + CheckKeysetOffset ) ) ; <nl> if ( inst - > next ( ) ! = nullptr & & inst - > next ( ) = = inst - > taken ( ) ) { <nl> return gen ( env , Jmp , inst - > next ( ) ) ; <nl> } <nl> SSATmp * simplifyCeil ( State & env , const IRInstruction * inst ) { <nl> return roundImpl ( env , inst , ceil ) ; <nl> } <nl> <nl> - SSATmp * simplifyUnboxPtr ( State & / * env * / , const IRInstruction * inst ) { <nl> - if ( inst - > src ( 0 ) - > isA ( TMemToCell ) ) { <nl> - return inst - > src ( 0 ) ; <nl> - } <nl> - return nullptr ; <nl> - } <nl> - <nl> - SSATmp * simplifyBoxPtr ( State & / * env * / , const IRInstruction * inst ) { <nl> - if ( inst - > src ( 0 ) - > isA ( TMemToBoxedCell ) ) { <nl> - return inst - > src ( 0 ) ; <nl> - } <nl> - return nullptr ; <nl> - } <nl> - <nl> SSATmp * simplifyCheckInit ( State & env , const IRInstruction * inst ) { <nl> auto const srcType = inst - > src ( 0 ) - > type ( ) ; <nl> - assertx ( ! srcType . maybe ( TMemToGen ) ) ; <nl> + assertx ( ! srcType . maybe ( TMemToCell ) ) ; <nl> assertx ( inst - > taken ( ) ) ; <nl> if ( ! srcType . maybe ( TUninit ) ) return gen ( env , Nop ) ; <nl> return mergeBranchDests ( env , inst ) ; <nl> SSATmp * simplifyCheckInOuts ( State & env , const IRInstruction * inst ) { <nl> return gen ( env , Nop ) ; <nl> } <nl> <nl> - SSATmp * simplifyCheckRefInner ( State & env , const IRInstruction * inst ) { <nl> - / / Ref inner cells are at worst InitCell , so don ' t bother checking for that . <nl> - if ( TInitCell < = inst - > typeParam ( ) ) { <nl> - return gen ( env , Nop ) ; <nl> - } <nl> - return mergeBranchDests ( env , inst ) ; <nl> - } <nl> - <nl> SSATmp * simplifyDefLabel ( State & env , const IRInstruction * inst ) { <nl> if ( inst - > numDsts ( ) = = 0 ) { <nl> return gen ( env , Nop ) ; <nl> SSATmp * simplifyWork ( State & env , const IRInstruction * inst ) { <nl> X ( Lshr ) <nl> X ( AbsDbl ) <nl> X ( AssertNonNull ) <nl> - X ( BoxPtr ) <nl> X ( CallBuiltin ) <nl> X ( Ceil ) <nl> X ( CheckInit ) <nl> SSATmp * simplifyWork ( State & env , const IRInstruction * inst ) { <nl> X ( CheckLoc ) <nl> X ( CheckMBase ) <nl> X ( CheckInOuts ) <nl> - X ( CheckRefInner ) <nl> X ( CheckStk ) <nl> X ( CheckType ) <nl> X ( CheckTypeMem ) <nl> SSATmp * simplifyWork ( State & env , const IRInstruction * inst ) { <nl> X ( LdObjClass ) <nl> X ( LdObjInvoke ) <nl> X ( Mov ) <nl> - X ( UnboxPtr ) <nl> X ( JmpZero ) <nl> X ( JmpNZero ) <nl> X ( Select ) <nl> mmm a / hphp / runtime / vm / jit / ssa - tmp . 
cpp <nl> ppp b / hphp / runtime / vm / jit / ssa - tmp . cpp <nl> int typeNeededWords ( Type t ) { <nl> if ( t . maybe ( TNullptr ) ) { <nl> return typeNeededWords ( t - TNullptr ) ; <nl> } <nl> - if ( t < = TPtrToGen ) { <nl> + if ( t < = TPtrToCell ) { <nl> / / PtrTo * may be statically unknown but always need just one <nl> / / register . <nl> return 1 ; <nl> } <nl> - if ( t < = TLvalToGen ) { <nl> + if ( t < = TLvalToCell ) { <nl> / / If tv_val < > is ever anything other than 1 or more normal pointers , this <nl> / / will need to change . <nl> static_assert ( sizeof ( tv_lval ) % 8 = = 0 , " " ) ; <nl> int typeNeededWords ( Type t ) { <nl> } <nl> if ( ! t . isUnion ( ) ) { <nl> / / Not a union type and not a special case : 1 register . <nl> - assertx ( IMPLIES ( t < = TGen , t . isKnownDataType ( ) ) ) ; <nl> + assertx ( IMPLIES ( t < = TCell , t . isKnownDataType ( ) ) ) ; <nl> return 1 ; <nl> } <nl> <nl> - assertx ( t < = TGen ) ; <nl> + assertx ( t < = TCell ) ; <nl> <nl> / / XXX ( t4592459 ) : This will return 2 for TNull , even though it only <nl> / / needs 1 register ( one for the type , none for the value ) . This is to work <nl> mmm a / hphp / runtime / vm / jit / translate - region . cpp <nl> ppp b / hphp / runtime / vm / jit / translate - region . cpp <nl> void emitPredictionsAndPreConditions ( irgen : : IRGS & irgs , <nl> for ( auto const & pred : typePredictions ) { <nl> auto type = pred . type ; <nl> auto loc = pred . location ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> irgen : : predictType ( irgs , loc , type ) ; <nl> } <nl> <nl> void emitPredictionsAndPreConditions ( irgen : : IRGS & irgs , <nl> for ( auto const & preCond : typePreConditions ) { <nl> auto type = preCond . type ; <nl> auto loc = preCond . location ; <nl> - assertx ( type < = TGen ) ; <nl> + assertx ( type < = TCell ) ; <nl> irgen : : checkType ( irgs , loc , type , bcOff , checkOuterTypeOnly ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / type - array - elem . cpp <nl> ppp b / hphp / runtime / vm / jit / type - array - elem . cpp <nl> std : : pair < Type , bool > arrElemType ( Type arr , Type idx , const Class * ctx ) { <nl> } <nl> } <nl> <nl> - auto type = ( arr < = TPersistentArr ) ? TUncountedInit : TInitGen ; <nl> + auto type = ( arr < = TPersistentArr ) ? TUncountedInit : TInitCell ; <nl> <nl> auto const arrTy = arr . arrSpec ( ) . type ( ) ; <nl> if ( ! arrTy ) return { type , false } ; <nl> std : : pair < Type , bool > vecFirstLastType ( Type arr , <nl> auto type = [ & ] { <nl> if ( arr < = TUncounted ) return TUncountedInit ; <nl> if ( arr < = TVec ) return TInitCell ; <nl> - return TInitGen ; <nl> + return TInitCell ; <nl> } ( ) ; <nl> <nl> auto const arrTy = arr . arrSpec ( ) . type ( ) ; <nl> std : : pair < Type , bool > dictFirstLastType ( Type arr , bool isFirst , bool isKey ) { <nl> auto const type = [ & ] { <nl> if ( arr < = TUncounted ) return TUncountedInit ; <nl> if ( arr < = TDict ) return TInitCell ; <nl> - return TInitGen ; <nl> + return TInitCell ; <nl> } ( ) ; <nl> return { type , false } ; <nl> } <nl> mmm a / hphp / runtime / vm / jit / type - inl . h <nl> ppp b / hphp / runtime / vm / jit / type - inl . 
h <nl> constexpr inline Type : : Type ( bits_t bits , Ptr ptr , Mem mem ) <nl> constexpr Type T # # name { Type : : bits , Ptr : : ptr , Mem : : Mem } ; <nl> # define IRTX ( name , x , bits ) \ <nl> constexpr Type T # # name { Type : : bits , Ptr : : x , Mem : : x } ; <nl> - IRT_PHP ( IRT_BOXES_PTRS_LVALS ) <nl> - IRT_PHP_UNIONS ( IRT_BOXES_PTRS_LVALS ) <nl> + IRT_PHP ( IRT_PTRS_LVALS ) <nl> + IRT_PHP_UNIONS ( IRT_PTRS_LVALS ) <nl> IRT_SPECIAL <nl> # undef IRT <nl> # undef IRTP <nl> inline Type : : Type ( ) <nl> , m_extra ( 0 ) <nl> { } <nl> <nl> - inline Type : : Type ( DataType outer , DataType inner ) <nl> - : m_bits ( bitsFromDataType ( outer , inner ) ) <nl> + inline Type : : Type ( DataType outer ) <nl> + : m_bits ( bitsFromDataType ( outer ) ) <nl> , m_ptr ( Ptr : : NotPtr ) <nl> , m_mem ( Mem : : NotMem ) <nl> , m_hasConstVal ( false ) <nl> inline bool Type : : isUnion ( ) const { <nl> } <nl> <nl> inline bool Type : : isKnownDataType ( ) const { <nl> - assertx ( * this < = TGen ) ; <nl> + assertx ( * this < = TCell ) ; <nl> <nl> / / Some unions correspond to single KindOfs . <nl> - return subtypeOfAny ( TStr , TArr , TVec , TDict , <nl> - TKeyset , TBoxedCell ) | | ! isUnion ( ) ; <nl> + return subtypeOfAny ( TStr , TArr , TVec , TDict , TKeyset ) | | ! isUnion ( ) ; <nl> } <nl> <nl> inline bool Type : : needsReg ( ) const { <nl> - return * this < = TGen & & ! isKnownDataType ( ) ; <nl> + return * this < = TCell & & ! isKnownDataType ( ) ; <nl> } <nl> <nl> inline bool Type : : isSimpleType ( ) const { <nl> inline Type Type : : dropConstVal ( ) const { <nl> <nl> / / A constant pointer iterator type will still have a target that ' s a union <nl> / / of possible values for the array it points into . <nl> - assertx ( * this < = TPtrToElemGen | | ! isUnion ( ) ) ; <nl> + assertx ( * this < = TPtrToElemCell | | ! isUnion ( ) ) ; <nl> <nl> if ( * this < = TStaticArr ) return Type : : StaticArray ( arrVal ( ) - > kind ( ) ) ; <nl> if ( * this < = TStaticVec ) return TStaticVec ; <nl> IMPLEMENT_CNS_VAL ( TRecDesc , rec , const RecordDesc * ) <nl> IMPLEMENT_CNS_VAL ( TClsMeth , clsmeth , ClsMethDataRef ) <nl> IMPLEMENT_CNS_VAL ( TTCA , tca , jit : : TCA ) <nl> IMPLEMENT_CNS_VAL ( TRDSHandle , rdsHandle , rds : : Handle ) <nl> - IMPLEMENT_CNS_VAL ( TMemToGen , ptr , const TypedValue * ) <nl> + IMPLEMENT_CNS_VAL ( TMemToCell , ptr , const TypedValue * ) <nl> <nl> # undef IMPLEMENT_CNS_VAL <nl> <nl> inline TypeSpec Type : : spec ( ) const { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Inner types . <nl> <nl> - inline Type Type : : box ( ) const { <nl> - assertx ( * this < = TCell ) ; <nl> - / / Boxing Uninit returns InitNull but that logic doesn ' t belong here . <nl> - assertx ( ! maybe ( TUninit ) | | * this = = TCell ) ; <nl> - return Type ( m_bits < < kBoxShift , ptrKind ( ) , memKind ( ) ) . specialize ( spec ( ) ) ; <nl> - } <nl> - <nl> - inline Type Type : : inner ( ) const { <nl> - assertx ( * this < = TBoxedCell ) ; <nl> - return Type ( m_bits > > kBoxShift , ptrKind ( ) , memKind ( ) , false , m_extra ) ; <nl> - } <nl> - <nl> - inline Type Type : : unbox ( ) const { <nl> - assertx ( * this < = TGen ) ; <nl> - return ( * this & TCell ) | ( * this & TBoxedCell ) . 
inner ( ) ; <nl> - } <nl> - <nl> inline Type Type : : ptr ( Ptr kind ) const { <nl> return mem ( Mem : : Ptr , kind ) ; <nl> } <nl> inline Type Type : : lval ( Ptr kind ) const { <nl> } <nl> <nl> inline Type Type : : mem ( Mem mem , Ptr ptr ) const { <nl> - assertx ( * this < = TGen ) ; <nl> + assertx ( * this < = TCell ) ; <nl> assertx ( ptr < = Ptr : : Ptr ) ; <nl> assertx ( mem < = Mem : : Mem ) ; <nl> / / Enforce a canonical representation for Bottom . <nl> inline Type Type : : mem ( Mem mem , Ptr ptr ) const { <nl> } <nl> <nl> inline Type Type : : deref ( ) const { <nl> - assertx ( * this < = TMemToGen ) ; <nl> + assertx ( * this < = TMemToCell ) ; <nl> if ( m_bits = = kBottom ) return TBottom ; <nl> auto const extra = isSpecialized ( ) ? m_extra : 0 ; <nl> return Type ( m_bits , Ptr : : NotPtr , Mem : : NotMem , false , extra ) ; <nl> } <nl> <nl> inline Type Type : : derefIfPtr ( ) const { <nl> - assertx ( * this < = ( TGen | TMemToGen ) ) ; <nl> - return * this < = TMemToGen ? deref ( ) : * this ; <nl> - } <nl> - <nl> - inline Type Type : : strip ( ) const { <nl> - return derefIfPtr ( ) . unbox ( ) ; <nl> + assertx ( * this < = ( TCell | TMemToCell ) ) ; <nl> + return * this < = TMemToCell ? deref ( ) : * this ; <nl> } <nl> <nl> inline Ptr Type : : ptrKind ( ) const { <nl> mmm a / hphp / runtime / vm / jit / type . cpp <nl> ppp b / hphp / runtime / vm / jit / type . cpp <nl> constexpr Type : : bits_t Type : : kTop ; <nl> # undef IRTM <nl> # undef IRTX <nl> <nl> - constexpr Type : : bits_t Type : : kAnyArr ; <nl> - constexpr Type : : bits_t Type : : kAnyVec ; <nl> - constexpr Type : : bits_t Type : : kAnyDict ; <nl> - constexpr Type : : bits_t Type : : kAnyKeyset ; <nl> - constexpr Type : : bits_t Type : : kAnyArrLike ; <nl> constexpr Type : : bits_t Type : : kArrSpecBits ; <nl> - constexpr Type : : bits_t Type : : kAnyObj ; <nl> constexpr Type : : bits_t Type : : kClsSpecBits ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> std : : string Type : : constValString ( ) const { <nl> if ( * this < = TRDSHandle ) { <nl> return folly : : format ( " rds : : Handle ( { : # x } ) " , m_rdsHandleVal ) . str ( ) ; <nl> } <nl> - if ( * this < = TPtrToGen ) { <nl> + if ( * this < = TPtrToCell ) { <nl> return folly : : sformat ( " TV : { } " , m_ptrVal ) ; <nl> } <nl> - if ( * this < = TLvalToGen ) { <nl> + if ( * this < = TLvalToCell ) { <nl> return folly : : sformat ( " Lval : { } " , m_ptrVal ) ; <nl> } <nl> - if ( * this < = TMemToGen ) { <nl> + if ( * this < = TMemToCell ) { <nl> return folly : : sformat ( " Mem : { } " , m_ptrVal ) ; <nl> } <nl> <nl> std : : string Type : : toString ( ) const { <nl> ) ; <nl> } <nl> <nl> - if ( * this < = TBoxedCell ) { <nl> - return folly : : to < std : : string > ( " Boxed " , inner ( ) . toString ( ) ) ; <nl> - } <nl> - <nl> if ( m_hasConstVal ) { <nl> if ( * this < = TCls ) { <nl> return folly : : sformat ( " Cls = { } " , m_clsVal - > name ( ) - > data ( ) ) ; <nl> std : : string Type : : toString ( ) const { <nl> <nl> auto t = * this ; <nl> <nl> - if ( t . maybe ( TPtrToGen ) ) { <nl> + if ( t . maybe ( TPtrToCell ) ) { <nl> assertx ( ! t . m_hasConstVal ) ; <nl> auto ret = " PtrTo " + <nl> show ( t . ptrKind ( ) & Ptr : : Ptr ) + <nl> - ( t & TPtrToGen ) . deref ( ) . toString ( ) ; <nl> + ( t & TPtrToCell ) . deref ( ) . toString ( ) ; <nl> <nl> - t - = TPtrToGen ; <nl> + t - = TPtrToCell ; <nl> if ( t ! 
= TBottom ) ret + = " | " + t . toString ( ) ; <nl> return ret ; <nl> } <nl> <nl> - if ( t . maybe ( TLvalToGen ) ) { <nl> + if ( t . maybe ( TLvalToCell ) ) { <nl> assertx ( ! t . m_hasConstVal ) ; <nl> auto ret = " LvalTo " + <nl> show ( t . ptrKind ( ) & Ptr : : Ptr ) + <nl> - ( t & TLvalToGen ) . deref ( ) . toString ( ) ; <nl> + ( t & TLvalToCell ) . deref ( ) . toString ( ) ; <nl> <nl> - t - = TLvalToGen ; <nl> + t - = TLvalToCell ; <nl> if ( t ! = TBottom ) ret + = " | " + t . toString ( ) ; <nl> return ret ; <nl> } <nl> std : : string Type : : toString ( ) const { <nl> auto const partStr = folly : : to < std : : string > ( base . toString ( ) , exact , name ) ; <nl> <nl> parts . push_back ( partStr ) ; <nl> - t - = TAnyObj ; <nl> + t - = TObj ; <nl> } else if ( auto arrSpec = t . arrSpec ( ) ) { <nl> auto str = Type { <nl> m_bits & kArrSpecBits , <nl> std : : string Type : : toString ( ) const { <nl> str + = folly : : to < std : : string > ( ' : ' , show ( * ty ) ) ; <nl> } <nl> parts . push_back ( str ) ; <nl> - t - = TAnyArr ; <nl> + t - = TArr ; <nl> } else { <nl> not_reached ( ) ; <nl> } <nl> void Type : : serialize ( ProfDataSerializer & ser ) const { <nl> <nl> Type t = * this ; <nl> if ( t . maybe ( TNullptr ) ) t = t - TNullptr ; <nl> - if ( t < = TBoxedCell ) t = inner ( ) ; <nl> <nl> auto const key = m_hasConstVal ? TypeKey : : Const : <nl> t . clsSpec ( ) ? ( t . clsSpec ( ) . exact ( ) ? TypeKey : : ClsExact : TypeKey : : ClsSub ) : <nl> bool Type : : checkValid ( ) const { <nl> m_bits . hexStr ( ) , m_ptrVal , m_hasConstVal , m_extra ) ; <nl> } <nl> <nl> - / / m_ptr and m_mem should be Bottom iff we have no kGen bits . <nl> - assertx ( ( ( m_bits & kGen ) = = kBottom ) = = ( m_ptr = = Ptr : : Bottom ) ) ; <nl> - assertx ( ( ( m_bits & kGen ) = = kBottom ) = = ( m_mem = = Mem : : Bottom ) ) ; <nl> + / / m_ptr and m_mem should be Bottom iff we have no kCell bits . <nl> + assertx ( ( ( m_bits & kCell ) = = kBottom ) = = ( m_ptr = = Ptr : : Bottom ) ) ; <nl> + assertx ( ( ( m_bits & kCell ) = = kBottom ) = = ( m_mem = = Mem : : Bottom ) ) ; <nl> <nl> / / Ptr : : NotPtr and Mem : : NotMem should imply one another . <nl> assertx ( ( m_ptr = = Ptr : : NotPtr ) = = ( m_mem = = Mem : : NotMem ) ) ; <nl> bool Type : : checkValid ( ) const { <nl> return true ; <nl> } <nl> <nl> - Type : : bits_t Type : : bitsFromDataType ( DataType outer , DataType inner ) { <nl> - assertx ( ! isRefType ( inner ) ) ; <nl> - assertx ( inner = = KindOfUninit | | isRefType ( outer ) ) ; <nl> - <nl> + Type : : bits_t Type : : bitsFromDataType ( DataType outer ) { <nl> switch ( outer ) { <nl> case KindOfUninit : return kUninit ; <nl> case KindOfNull : return kInitNull ; <nl> Type : : bits_t Type : : bitsFromDataType ( DataType outer , DataType inner ) { <nl> case KindOfClsMeth : return kClsMeth ; <nl> case KindOfRecord : return kRecord ; <nl> case KindOfRef : <nl> - assertx ( inner ! = KindOfUninit ) ; <nl> - return bitsFromDataType ( inner , KindOfUninit ) < < kBoxShift ; <nl> + always_assert ( false ) ; <nl> } <nl> not_reached ( ) ; <nl> } <nl> <nl> DataType Type : : toDataType ( ) const { <nl> - assertx ( ! maybe ( TMemToGen ) | | m_bits = = kBottom ) ; <nl> + assertx ( ! 
maybe ( TMemToCell ) | | m_bits = = kBottom ) ; <nl> assertx ( isKnownDataType ( ) ) ; <nl> <nl> / / Order is important here : types must progress from more specific <nl> DataType Type : : toDataType ( ) const { <nl> if ( * this < = TCls ) return KindOfClass ; <nl> if ( * this < = TClsMeth ) return KindOfClsMeth ; <nl> if ( * this < = TRecord ) return KindOfRecord ; <nl> - if ( * this < = TBoxedCell ) return KindOfRef ; <nl> always_assert_flog ( false , <nl> " Bad Type { } in Type : : toDataType ( ) " , * this ) ; <nl> } <nl> Type Type : : operator & ( Type rhs ) const { <nl> / / each set of such " interfering " components , if any component goes to <nl> / / Bottom , we have to Bottom out the other components in the set as well . <nl> <nl> - / / Gen bits depend on both Ptr and Mem . <nl> - if ( ptr = = Ptr : : Bottom | | mem = = Mem : : Bottom ) bits & = ~ kGen ; <nl> + / / Cell bits depend on both Ptr and Mem . <nl> + if ( ptr = = Ptr : : Bottom | | mem = = Mem : : Bottom ) bits & = ~ kCell ; <nl> <nl> / / Arr / Cls bits and specs . <nl> if ( arrSpec = = ArraySpec : : Bottom ) bits & = ~ kArrSpecBits ; <nl> Type Type : : operator & ( Type rhs ) const { <nl> if ( ! supports ( bits , SpecKind : : Array ) ) arrSpec = ArraySpec : : Bottom ; <nl> if ( ! supports ( bits , SpecKind : : Class ) ) clsSpec = ClassSpec : : Bottom ; <nl> <nl> - / / Ptr and Mem also depend on Gen bits . This must come after all possible <nl> + / / Ptr and Mem also depend on Cell bits . This must come after all possible <nl> / / fixups of bits . <nl> - if ( ( bits & kGen ) = = kBottom ) { <nl> + if ( ( bits & kCell ) = = kBottom ) { <nl> ptr = Ptr : : Bottom ; <nl> mem = Mem : : Bottom ; <nl> } else { <nl> Type Type : : operator - ( Type rhs ) const { <nl> auto arrSpec = lhs . arrSpec ( ) - rhs . arrSpec ( ) ; <nl> auto clsSpec = lhs . clsSpec ( ) - rhs . clsSpec ( ) ; <nl> <nl> - auto const have_gen_bits = ( bits & kGen ) ! = kBottom ; <nl> + auto const have_gen_bits = ( bits & kCell ) ! = kBottom ; <nl> <nl> auto const have_ptr = ( ptr & Ptr : : Ptr ) ! = Ptr : : Bottom ; <nl> auto const have_not_ptr = ( ptr & Ptr : : NotPtr ) ! = Ptr : : Bottom ; <nl> Type Type : : operator - ( Type rhs ) const { <nl> auto const have_cls_spec = clsSpec ! = ClassSpec : : Bottom ; <nl> <nl> / / ptr and mem can only interact with clsSpec if lhs . m_bits has at least one <nl> - / / kGen member of kClsSpecBits . <nl> - auto const have_ptr_cls = supports ( lhs . m_bits & kGen , SpecKind : : Class ) ; <nl> + / / kCell member of kClsSpecBits . <nl> + auto const have_ptr_cls = supports ( lhs . m_bits & kCell , SpecKind : : Class ) ; <nl> <nl> / / bits , ptr , and mem <nl> if ( have_any_ptr ) { <nl> - bits | = lhs . m_bits & kGen ; <nl> + bits | = lhs . m_bits & kCell ; <nl> / / The Not { Ptr , Mem } and { Ptr , Mem } components of Ptr and Mem don ' t interfere <nl> / / with one another , so keep them separate . <nl> if ( have_ptr ) mem | = ( lhs . memKind ( ) & Mem : : Mem ) ; <nl> if ( have_not_ptr ) mem | = ( lhs . memKind ( ) & Mem : : NotMem ) ; <nl> } <nl> if ( have_any_mem ) { <nl> - bits | = lhs . m_bits & kGen ; <nl> + bits | = lhs . m_bits & kCell ; <nl> if ( have_mem ) ptr | = ( lhs . ptrKind ( ) & Ptr : : Ptr ) ; <nl> if ( have_not_mem ) ptr | = ( lhs . ptrKind ( ) & Ptr : : NotPtr ) ; <nl> } <nl> Type typeFromTV ( tv_rval tv , const Class * ctx ) { <nl> if ( tvIsArray ( tv ) ) return Type : : Array ( val ( tv ) . 
parr - > kind ( ) ) ; <nl> <nl> auto outer = type ( tv ) ; <nl> - auto inner = KindOfUninit ; <nl> <nl> if ( outer = = KindOfPersistentString ) outer = KindOfString ; <nl> else if ( outer = = KindOfPersistentVec ) outer = KindOfVec ; <nl> else if ( outer = = KindOfPersistentDict ) outer = KindOfDict ; <nl> else if ( outer = = KindOfPersistentKeyset ) outer = KindOfKeyset ; <nl> <nl> - if ( isRefType ( outer ) ) { <nl> - inner = val ( tv ) . pref - > cell ( ) - > m_type ; <nl> - if ( inner = = KindOfPersistentString ) inner = KindOfString ; <nl> - else if ( inner = = KindOfPersistentArray ) inner = KindOfArray ; <nl> - else if ( inner = = KindOfPersistentVec ) inner = KindOfVec ; <nl> - else if ( inner = = KindOfPersistentDict ) inner = KindOfDict ; <nl> - else if ( inner = = KindOfPersistentKeyset ) inner = KindOfKeyset ; <nl> - } <nl> - return Type ( outer , inner ) ; <nl> + return Type ( outer ) ; <nl> } <nl> <nl> Type typeFromRAT ( RepoAuthType ty , const Class * ctx ) { <nl> Type typeFromPropTC ( const HPHP : : TypeConstraint & tc , <nl> bool isSProp ) { <nl> assertx ( tc . validForProp ( ) ) ; <nl> <nl> - if ( ! tc . isCheckable ( ) | | tc . isSoft ( ) ) return TGen ; <nl> + if ( ! tc . isCheckable ( ) | | tc . isSoft ( ) ) return TCell ; <nl> <nl> using A = AnnotType ; <nl> auto const atToType = [ & ] ( AnnotType at ) { <nl> Type typeFromPropTC ( const HPHP : : TypeConstraint & tc , <nl> / / So if we ' re here and we have AnnotType : : Object , we don ' t know what the <nl> / / type - hint is , so be conservative . <nl> case A : : Object : <nl> - case A : : Mixed : return TGen ; <nl> + case A : : Mixed : return TCell ; <nl> case A : : Resource : return TRes ; <nl> case A : : Dict : return TDict ; <nl> case A : : Vec : return TVec ; <nl> Type typeFromPropTC ( const HPHP : : TypeConstraint & tc , <nl> } <nl> <nl> / / It could be an alias to mixed so we might have refs <nl> - return TGen ; <nl> + return TCell ; <nl> } ( ) ; <nl> if ( tc . isNullable ( ) ) base | = TInitNull ; <nl> return base ; <nl> Type typeFromPropTC ( const HPHP : : TypeConstraint & tc , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - Type ldRefReturn ( Type typeParam ) { <nl> - / / Guarding on specialized types and uncommon unions like { Int | Bool } is <nl> - / / expensive enough that we only want to do it in situations where we ' ve <nl> - / / manually confirmed the benefit . <nl> - typeParam = relaxToGuardable ( typeParam ) ; <nl> - always_assert ( typeParam < = TCell ) ; <nl> - <nl> - / / Refs can never contain Uninit , so this lets us return UncountedInit rather <nl> - / / than Uncounted , and InitCell rather than Cell . <nl> - return typeParam - TUninit ; <nl> - } <nl> - <nl> Type negativeCheckType ( Type srcType , Type typeParam ) { <nl> if ( srcType < = typeParam ) return TBottom ; <nl> if ( ! srcType . maybe ( typeParam ) ) return srcType ; <nl> Type negativeCheckType ( Type srcType , Type typeParam ) { <nl> return tmp ; <nl> } <nl> <nl> - Type boxType ( Type t ) { <nl> - / / If t contains Uninit , replace it with InitNull . <nl> - t = t . maybe ( TUninit ) ? ( t - TUninit ) | TInitNull : t ; <nl> - / / We don ' t try to track when a BoxedStaticStr might be converted to <nl> - / / a BoxedStr , and we never guard on staticness for strings , so <nl> - / / boxing a string needs to forget this detail . Same thing for <nl> - / / arrays . 
<nl> - if ( t < = TStr ) { <nl> - t = TStr ; <nl> - } else if ( t < = TArr ) { <nl> - t = TArr ; <nl> - } else if ( t < = TVec ) { <nl> - t = TVec ; <nl> - } else if ( t < = TDict ) { <nl> - t = TDict ; <nl> - } else if ( t < = TKeyset ) { <nl> - t = TKeyset ; <nl> - } else if ( t < = TArrLike ) { <nl> - t = TArrLike ; <nl> - } <nl> - / / When boxing an Object , if the inner class does not have AttrNoOverride , <nl> - / / drop the class specialization . <nl> - if ( t < TObj & & t . clsSpec ( ) & & <nl> - ! ( t . clsSpec ( ) . cls ( ) - > attrs ( ) & AttrNoOverride ) ) { <nl> - t = t . unspecialize ( ) ; <nl> - } <nl> - / / Everything else is just a pure type - system boxing operation . <nl> - return t . box ( ) ; <nl> - } <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - static Type relaxCell ( Type t , DataTypeCategory cat ) { <nl> + Type relaxType ( Type t , DataTypeCategory cat ) { <nl> assertx ( t < = TCell ) ; <nl> <nl> switch ( cat ) { <nl> case DataTypeGeneric : <nl> - return TGen ; <nl> + return TCell ; <nl> <nl> case DataTypeCountness : <nl> case DataTypeBoxAndCountness : <nl> static Type relaxCell ( Type t , DataTypeCategory cat ) { <nl> not_reached ( ) ; <nl> } <nl> <nl> - Type relaxType ( Type t , DataTypeCategory cat ) { <nl> - always_assert_flog ( t < = TGen , " t = { } " , t ) ; <nl> - if ( cat = = DataTypeGeneric ) return TGen ; <nl> - auto const relaxed = <nl> - ( t & TCell ) < = TBottom ? TBottom : relaxCell ( t & TCell , cat ) ; <nl> - return t < = TCell ? relaxed : relaxed | TBoxedInitCell ; <nl> - } <nl> - <nl> Type relaxToGuardable ( Type ty ) { <nl> - assertx ( ty < = TGen ) ; <nl> + assertx ( ty < = TCell ) ; <nl> ty = ty . unspecialize ( ) ; <nl> <nl> / / ty is unspecialized and we don ' t support guarding on CountedArr or <nl> Type relaxToGuardable ( Type ty ) { <nl> / / We can guard on StaticStr but not CountedStr . <nl> if ( ty < = TCountedStr ) return TStr ; <nl> <nl> - if ( ty < = TBoxedCell ) return TBoxedCell ; <nl> if ( ty . isKnownDataType ( ) ) return ty ; <nl> if ( ty < = TUncountedInit ) return TUncountedInit ; <nl> if ( ty < = TUncounted ) return TUncounted ; <nl> if ( ty < = TCell ) return TCell ; <nl> - if ( ty < = TGen ) return TGen ; <nl> not_reached ( ) ; <nl> } <nl> <nl> mmm a / hphp / runtime / vm / jit / type . h <nl> ppp b / hphp / runtime / vm / jit / type . h <nl> struct ProfDataDeserializer ; <nl> * We have a pointer kind for each of the major segregated locations in which <nl> * php values can live ( eval stack , frame slots , properties , etc . . . ) . For most <nl> * of the primitive kinds , we have a predefined union of the kind and " inside a <nl> - * Ref " , so PtrToRStkGen is exactly the same as PtrTo { Ref | Stk } Gen . These <nl> + * Ref " , so PtrToRStkCell is exactly the same as PtrTo { Ref | Stk } Cell . These <nl> * classify PtrTo * types into some categories that cannot possibly alias , <nl> * without any smarter analysis needed to prove it . There is also a union for <nl> * the various locations things can point after a fully generic member <nl> struct ProfDataDeserializer ; <nl> * <nl> * At this point , t2 is a pointer into either an object property or a inner <nl> * RefData , which will be a PtrToRPropCell , which means it still can ' t alias , <nl> - * for example , a PtrToStkGen or a PtrToGblGen ( although it could generally <nl> - * alias a PtrToRGblGen because both could be inside the same RefData . ) . 
Note <nl> - * that PtrToRFooGen is just shorthand for PtrTo { Ref | Foo } Gen . <nl> + * for example , a PtrToStkCell or a PtrToGblCell ( although it could generally <nl> + * alias a PtrToRGblCell because both could be inside the same RefData . ) . Note <nl> + * that PtrToRFooCell is just shorthand for PtrTo { Ref | Foo } Cell . <nl> * <nl> * Memb is a number of different locations that result from the more generic <nl> * types of member operations : Prop , Elem , MIS , MMisc , and Other . MMisc <nl> struct ProfDataDeserializer ; <nl> <nl> enum class Ptr : uint16_t { <nl> / * <nl> - * The Ptr kinds here are kept out of PTR_TYPES to avoid generating names <nl> - * like TPtrToNotPtrGen or TPtrToPtrGen . Note that those types do exist , just <nl> - * with less ridiculous names : TGen and TPtrToGen , respectively . <nl> + * The Ptr kinds here are kept out of PTR_TYPES to avoid generating names like <nl> + * TPtrToNotPtrCell or TPtrToPtrCell . Note that those types do exist , just <nl> + * with less ridiculous names : TCell and TPtrToCell , respectively . <nl> * / <nl> Bottom = 0 , <nl> Top = 0x1fffU , / / Keep this in sync with the number of bits used in <nl> enum class Mem : uint8_t { <nl> / * Bottom : No other components of the type are compatible with Mem : TCls , <nl> * TRDSHandle , etc . . . * / <nl> Bottom = 0 , <nl> - / * NotMem : Normal values like TInt or TGen . * / <nl> + / * NotMem : Normal values like TInt or TCell . * / <nl> NotMem = 1U < < 0 , <nl> - / * Ptr : TypedValue * : TPtrToInt , TPtrToGen , etc . . . * / <nl> + / * Ptr : TypedValue * : TPtrToInt , TPtrToCell , etc . . . * / <nl> Ptr = 1U < < 1 , <nl> - / * Lval : tv_lval : TLvalToInt , TLvalToGen , etc . . . * / <nl> + / * Lval : tv_lval : TLvalToInt , TLvalToCell , etc . . . * / <nl> Lval = 1U < < 2 , <nl> / * Mem : Either Ptr or Lval . No concrete values can have this type because <nl> * there is no way to distinguish between TPtrToFoo and TLvalToFoo at <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> # define IRTM_FROM_PTR ( ptr , ptr_bits , name ) \ <nl> IRTM ( MemTo # # ptr # # name , ptr , k # # name ) <nl> <nl> - # define IRT_BOXES_PTRS_LVALS ( name , bits ) \ <nl> + # define IRT_PTRS_LVALS ( name , bits ) \ <nl> IRT ( name , ( bits ) ) \ <nl> - IRT ( Boxed # # name , ( bits ) < < kBoxShift ) \ <nl> IRTP ( PtrTo # # name , Ptr , k # # name ) \ <nl> - IRTP ( PtrToBoxed # # name , Ptr , kBoxed # # name ) \ <nl> PTR_TYPES ( IRTP_FROM_PTR , PTR_R , name ) \ <nl> - PTR_TYPES ( IRTP_FROM_PTR , PTR_NO_R , Boxed # # name ) \ <nl> IRTL ( LvalTo # # name , Ptr , k # # name ) \ <nl> - IRTL ( LvalToBoxed # # name , Ptr , kBoxed # # name ) \ <nl> PTR_TYPES ( IRTL_FROM_PTR , PTR_R , name ) \ <nl> - PTR_TYPES ( IRTL_FROM_PTR , PTR_NO_R , Boxed # # name ) \ <nl> IRTM ( MemTo # # name , Ptr , k # # name ) \ <nl> - IRTM ( MemToBoxed # # name , Ptr , kBoxed # # name ) \ <nl> PTR_TYPES ( IRTM_FROM_PTR , PTR_R , name ) \ <nl> - PTR_TYPES ( IRTM_FROM_PTR , PTR_NO_R , Boxed # # name ) <nl> + / * * / <nl> <nl> # define IRT_PHP ( c ) \ <nl> c ( Uninit , bits_t : : bit < 0 > ( ) ) \ <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> c ( ClsMeth , bits_t : : bit < 24 > ( ) ) \ <nl> c ( Record , bits_t : : bit < 25 > ( ) ) \ <nl> c ( RecDesc , bits_t : : bit < 26 > ( ) ) \ <nl> - / / Boxed * : 27 - 55 <nl> + / * * / <nl> <nl> / * <nl> * This list should be in non - decreasing order of specificity . 
<nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> / * bits above this are unused * / <nl> <nl> / * <nl> - * Gen , Counted , Init , PtrToGen , etc . . . are here instead of IRT_PHP_UNIONS <nl> - * because boxing them ( e . g . , BoxedGen , PtrToBoxedGen ) would yield nonsense <nl> - * types . <nl> + * Cell , Counted , Init , PtrToCell , etc . . . <nl> * / <nl> # ifdef USE_LOWPTR <nl> # define COUNTED_INIT_UNION \ <nl> - kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord <nl> + kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kRecord <nl> # else <nl> # define COUNTED_INIT_UNION \ <nl> - kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kBoxedCell | kRecord | kClsMeth <nl> + kCountedStr | kCountedArr | kCountedVec | kCountedDict | kCountedKeyset | kObj | kRes | kRecord | kClsMeth <nl> # endif <nl> <nl> # define IRT_SPECIAL \ <nl> / * Bottom and Top use IRTX to specify a custom Ptr kind * / \ <nl> - IRTX ( Bottom , Bottom , kBottom ) \ <nl> - IRTX ( Top , Top , kTop ) \ <nl> - IRTX ( AnyObj , Top , kAnyObj ) \ <nl> - IRTX ( AnyArr , Top , kAnyArr ) \ <nl> - IRTX ( AnyVec , Top , kAnyVec ) \ <nl> - IRTX ( AnyDict , Top , kAnyDict ) \ <nl> - IRTX ( AnyKeyset , Top , kAnyKeyset ) \ <nl> - IRTX ( AnyArrLike , Top , kAnyArrLike ) \ <nl> - IRT ( Counted , COUNTED_INIT_UNION ) \ <nl> - IRTP ( PtrToCounted , Ptr , kCounted ) \ <nl> - IRTL ( LvalToCounted , Ptr , kCounted ) \ <nl> - IRTM ( MemToCounted , Ptr , kCounted ) \ <nl> - IRT ( Gen , kCell | kBoxedCell ) \ <nl> - IRT ( InitGen , kGen & ~ kUninit ) \ <nl> - IRTP ( PtrToGen , Ptr , kGen ) \ <nl> - IRTP ( PtrToInitGen , Ptr , kInitGen ) \ <nl> - IRTL ( LvalToGen , Ptr , kGen ) \ <nl> - IRTL ( LvalToInitGen , Ptr , kInitGen ) \ <nl> - IRTM ( MemToGen , Ptr , kGen ) \ <nl> - IRTM ( MemToInitGen , Ptr , kInitGen ) \ <nl> - PTR_TYPES ( IRTP_FROM_PTR , PTR_R , Gen ) \ <nl> - PTR_TYPES ( IRTP_FROM_PTR , PTR_R , InitGen ) \ <nl> - PTR_TYPES ( IRTL_FROM_PTR , PTR_R , Gen ) \ <nl> - PTR_TYPES ( IRTL_FROM_PTR , PTR_R , InitGen ) \ <nl> - PTR_TYPES ( IRTM_FROM_PTR , PTR_R , Gen ) \ <nl> - PTR_TYPES ( IRTM_FROM_PTR , PTR_R , InitGen ) <nl> + IRTX ( Bottom , Bottom , kBottom ) \ <nl> + IRTX ( Top , Top , kTop ) \ <nl> + IRT ( Counted , COUNTED_INIT_UNION ) \ <nl> + IRTP ( PtrToCounted , Ptr , kCounted ) \ <nl> + IRTL ( LvalToCounted , Ptr , kCounted ) \ <nl> + IRTM ( MemToCounted , Ptr , kCounted ) \ <nl> + / * * / <nl> <nl> / * <nl> * All types that represent a non - union type . <nl> * / <nl> - # define IRT_PRIMITIVE IRT_PHP ( IRT_BOXES_PTRS_LVALS ) IRT_RUNTIME <nl> + # define IRT_PRIMITIVE IRT_PHP ( IRT_PTRS_LVALS ) IRT_RUNTIME <nl> <nl> / * <nl> * All types . 
<nl> * / <nl> - # define IR_TYPES \ <nl> - IRT_PHP ( IRT_BOXES_PTRS_LVALS ) \ <nl> - IRT_PHP_UNIONS ( IRT_BOXES_PTRS_LVALS ) \ <nl> - IRT_RUNTIME \ <nl> + # define IR_TYPES \ <nl> + IRT_PHP ( IRT_PTRS_LVALS ) \ <nl> + IRT_PHP_UNIONS ( IRT_PTRS_LVALS ) \ <nl> + IRT_RUNTIME \ <nl> IRT_SPECIAL <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> constexpr bool operator > ( Mem a , Mem b ) { <nl> * / <nl> struct Type { <nl> private : <nl> - static constexpr size_t kBoxShift = 27 ; <nl> - static constexpr size_t kRuntime = kBoxShift * 2 ; <nl> + static constexpr size_t kRuntime = 27 ; <nl> static constexpr size_t numRuntime = 11 ; <nl> using bits_t = BitSet < kRuntime + numRuntime > ; <nl> <nl> struct Type { <nl> # undef IRTM <nl> # undef IRTX <nl> <nl> - static constexpr bits_t kAnyArr = kArr | kBoxedArr ; <nl> - static constexpr bits_t kAnyVec = kVec | kBoxedVec ; <nl> - static constexpr bits_t kAnyDict = kDict | kBoxedDict ; <nl> - static constexpr bits_t kAnyKeyset = kKeyset | kBoxedKeyset ; <nl> - static constexpr bits_t kAnyArrLike = kAnyArr | kAnyVec | kAnyDict | <nl> - kAnyKeyset ; <nl> - static constexpr bits_t kArrSpecBits = kAnyArrLike ; <nl> - static constexpr bits_t kAnyObj = kObj | kBoxedObj ; <nl> - static constexpr bits_t kAnyRecord = kRecord | kBoxedRecord ; <nl> - static constexpr bits_t kClsSpecBits = kAnyObj | kCls ; <nl> + static constexpr bits_t kArrSpecBits = kArrLike ; <nl> + static constexpr bits_t kClsSpecBits = kObj | kCls ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Basic methods . <nl> struct Type { <nl> / * <nl> * Construct from a DataType . <nl> * / <nl> - explicit Type ( DataType outer , DataType inner = KindOfUninit ) ; <nl> + explicit Type ( DataType outer ) ; <nl> <nl> / * <nl> * Return true iff there exists a DataType in the range [ KindOfUninit , <nl> * KindOfRef ] that represents a non - strict supertype of this type . <nl> * <nl> - * @ requires : * this < = Gen <nl> + * @ requires : * this < = Cell <nl> * / <nl> bool isKnownDataType ( ) const ; <nl> <nl> struct Type { <nl> * Whether this type can meaningfully specialize along ` kind ' . <nl> * <nl> * For example , a Type only supports SpecKind : : Class if its bits intersect <nl> - * nontrivially with kAnyObj . <nl> + * nontrivially with kObj . <nl> * / <nl> bool supports ( SpecKind kind ) const ; <nl> <nl> struct Type { <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Inner types . [ const ] <nl> <nl> - / * <nl> - * Box or unbox a Type . <nl> - * <nl> - * The box ( ) and inner ( ) methods are inversesmmmthey ( respectively ) take the <nl> - * the { Cell , BoxedCell } bits of the Type and coerce them into the <nl> - * { BoxedCell , Cell } sides of the lattice , replacing whatever was there <nl> - * before ; e . g . , <nl> - * <nl> - * box ( Int | BoxedDbl ) - > BoxedInt <nl> - * inner ( BoxedInt | Dbl ) - > Int <nl> - * <nl> - * Meanwhile , unbox ( ) is like inner ( ) , but rather than replacing the Cell <nl> - * component of the Type , it unions it with the shifted BoxedCell bits , e . g . , <nl> - * <nl> - * unbox ( BoxedInt | Dbl ) - > Int | Dbl <nl> - * <nl> - * @ requires : <nl> - * box : * this < = Cell <nl> - * ! 
maybe ( Uninit ) | | * this = = Cell <nl> - * inner : * this < = BoxedCell <nl> - * unbox : * this < = Gen <nl> - * / <nl> - Type box ( ) const ; <nl> - Type inner ( ) const ; <nl> - Type unbox ( ) const ; <nl> - <nl> / * <nl> * Get a pointer to , or dereference , a Type . <nl> * <nl> * @ requires : <nl> - * ptr , lval : * this < = Gen & & kind < = Ptr : : Ptr <nl> - * mem : * this < = Gen & & kind < = Ptr : : Ptr & & mem < = Mem : : Mem <nl> - * deref : * this < = MemToGen <nl> - * derefIfPtr : * this < = ( Gen | MemToGen ) <nl> + * ptr , lval : * this < = Cell & & kind < = Ptr : : Ptr <nl> + * mem : * this < = Cell & & kind < = Ptr : : Ptr & & mem < = Mem : : Mem <nl> + * deref : * this < = MemToCell <nl> + * derefIfPtr : * this < = ( Cell | MemToCell ) <nl> * / <nl> Type ptr ( Ptr kind ) const ; <nl> Type lval ( Ptr kind ) const ; <nl> struct Type { <nl> Type deref ( ) const ; <nl> Type derefIfPtr ( ) const ; <nl> <nl> - / * <nl> - * Return a Type stripped of boxing and pointerness . <nl> - * / <nl> - Type strip ( ) const ; <nl> - <nl> / * <nl> * Return the pointer or memory category of a Type . <nl> * / <nl> struct Type { <nl> Type ( Type t , ClassSpec classSpec ) ; <nl> <nl> / * <nl> - * Bit - pack an ` outer ' and an ` inner ' DataType for a Type . <nl> + * Bit - pack a DataType <nl> * / <nl> - static bits_t bitsFromDataType ( DataType outer , DataType inner ) ; <nl> + static bits_t bitsFromDataType ( DataType outer ) ; <nl> <nl> / * <nl> * Check invariants and return false if the type is malformed . <nl> Type typeFromPropTC ( const HPHP : : TypeConstraint & tc , <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - / * <nl> - * Return the boxed version of the input type , taking into account PHP <nl> - * semantics and subtle implementation details . <nl> - * / <nl> - Type boxType ( Type ) ; <nl> - <nl> - / * <nl> - * Return the dest type for a LdRef with the given typeParam . <nl> - * <nl> - * @ requires : typeParam < = TCell <nl> - * / <nl> - Type ldRefReturn ( Type typeParam ) ; <nl> - <nl> / * <nl> * Returns the type that a value may have if it had type ` srcType ' and failed a <nl> * CheckType with ` typeParam ' . Not all typeParams for CheckTypes are precise , <nl> mmm a / hphp / runtime / vm / jit / vasm - util . cpp <nl> ppp b / hphp / runtime / vm / jit / vasm - util . cpp <nl> Vloc make_const ( Vunit & unit , Type type ) { <nl> assertx ( type . hasConstVal ( ) ) ; <nl> if ( type < = TBool ) return Vloc { unit . makeConst ( type . boolVal ( ) ) } ; <nl> if ( type < = TDbl ) return Vloc { unit . makeConst ( type . dblVal ( ) ) } ; <nl> - if ( wide_tv_val & & type < = TLvalToGen ) { <nl> + if ( wide_tv_val & & type < = TLvalToCell ) { <nl> auto const rval = tv_rval { type . ptrVal ( ) } ; <nl> auto const typeReg = unit . makeConst ( & rval . type ( ) ) ; <nl> auto const valReg = unit . makeConst ( & rval . val ( ) ) ; <nl>
Remove Gen types from JIT
facebook/hhvm
6a9a71625a497685ae0e38d747ca8ad15d0feae2
2019-10-25T22:45:05Z
mmm a / src / runtime . cc <nl> ppp b / src / runtime . cc <nl> static MaybeObject * Allocate ( Isolate * isolate , <nl> bool double_align , <nl> AllocationSpace space ) { <nl> Heap * heap = isolate - > heap ( ) ; <nl> + if ( double_align ) size + = kPointerSize ; <nl> RUNTIME_ASSERT ( IsAligned ( size , kPointerSize ) ) ; <nl> RUNTIME_ASSERT ( size > 0 ) ; <nl> RUNTIME_ASSERT ( size < = heap - > MaxRegularSpaceAllocationSize ( ) ) ; <nl> static MaybeObject * Allocate ( Isolate * isolate , <nl> MemoryChunk * chunk = MemoryChunk : : FromAddress ( allocation - > address ( ) ) ; <nl> ASSERT ( chunk - > owner ( ) - > identity ( ) = = space ) ; <nl> # endif <nl> + if ( double_align ) { <nl> + allocation = heap - > EnsureDoubleAligned ( allocation , size ) ; <nl> + } <nl> heap - > CreateFillerObjectAt ( allocation - > address ( ) , size ) ; <nl> return allocation ; <nl> } <nl>
MIPS : Ensure double aligned allocations through runtime routines .
v8/v8
4e7084ebcf17843a13020b9cd88e3e76f5a2e5c4
2013-11-17T17:23:58Z
mmm a / Marlin / Makefile <nl> ppp b / Marlin / Makefile <nl> applet / % . o : % . c Configuration . h Configuration_adv . h $ ( MAKEFILE ) <nl> $ ( Pecho ) " CC $ @ " <nl> $ P $ ( CC ) - MMD - c $ ( ALL_CFLAGS ) $ < - o $ @ <nl> <nl> - applet / Marlin . o : applet / Marlin . cpp Configuration . h Configuration_adv . h $ ( MAKEFILE ) <nl> + applet / % . o : applet / % . cpp Configuration . h Configuration_adv . h $ ( MAKEFILE ) <nl> $ ( Pecho ) " CXX $ @ " <nl> $ P $ ( CXX ) - MMD - c $ ( ALL_CXXFLAGS ) $ < - o $ @ <nl> <nl>
Generalize the applet / Marlin . o build rule to a pattern rule for all applet / % . cpp sources .
MarlinFirmware/Marlin
e4a5e333efaeede05b041311d05091a6cb9f5b5a
2012-10-16T01:34:33Z
deleted file mode 100644 <nl> index 733f258fbbb89 . . 0000000000000 <nl> mmm a / tensorflow / lite / micro / tools / make / targets / himax_we1_evb_makefile . inc ~ <nl> ppp / dev / null <nl> <nl> - # Settings for himax WE_1 evb . <nl> - ifeq ( $ ( TARGET ) , himax_we1_evb ) <nl> - <nl> - CC_TOOL = ccac <nl> - AR_TOOL = arac <nl> - CXX_TOOL = ccac <nl> - LD_TOOL : = ccac <nl> - TARGET_ARCH : = arc <nl> - # ARC_TOOLCHAIN : = mwdt <nl> - <nl> - BUILD_ARC_MLI : = false <nl> - ARC_MLI_PRE_COMPILED_TARGET : = himax_arcem9d_r16 <nl> - <nl> - include $ ( MAKEFILE_DIR ) / targets / arc / arc_common . inc <nl> - # download SDK & MLI <nl> - HIMAX_WE1_SDK_NAME : = himax_we1_sdk <nl> - # MLI_LIB_DIR = arc_mli_package <nl> - # MLI_LIB_DIR = arc_mli_package <nl> - # $ ( eval $ ( call add_third_party_download , $ ( EMBARC_MLI_PRE_COMPILED_URL ) , $ ( EMBARC_MLI_PRE_COMPILED_MD5 ) , $ ( MLI_LIB_DIR ) , ) ) <nl> - $ ( eval $ ( call add_third_party_download , $ ( HIMAX_WE1_SDK_URL ) , $ ( HIMAX_WE1_SDK_MD5 ) , $ ( HIMAX_WE1_SDK_NAME ) , ) ) <nl> - <nl> - # export path of toolchain <nl> - # export PATH : = $ ( MAKEFILE_DIR ) / downloads / $ ( HIMAX_WE1_SDK_NAME ) / image_gen_linux_v3 / : $ ( PATH ) <nl> - <nl> - TCF_FILE : = $ ( PWD ) / $ ( MAKEFILE_DIR ) / downloads / $ ( HIMAX_WE1_SDK_NAME ) / arcem9d_wei_r16 . tcf <nl> - LCF_FILE : = $ ( PWD ) / $ ( MAKEFILE_DIR ) / downloads / $ ( HIMAX_WE1_SDK_NAME ) / memory . lcf <nl> - ARCLIB_FILE : = $ ( PWD ) / $ ( MAKEFILE_DIR ) / downloads / $ ( HIMAX_WE1_SDK_NAME ) / libembarc . a <nl> - LIB_HEADER_FILE : = $ ( PWD ) / $ ( MAKEFILE_DIR ) / downloads / $ ( HIMAX_WE1_SDK_NAME ) / hx_drv_tflm . h <nl> - <nl> - <nl> - DEFAULT_HEAPSZ : = 8192 <nl> - DEFAULT_STACKSZ : = 8192 <nl> - <nl> - TCF_FILE_NAME = $ ( notdir $ ( TCF_FILE ) ) <nl> - ARC_TARGET_FILES_DIRS = $ ( dir $ ( TCF_FILE_NAME ) ) <nl> - MAKE_PROJECT_FILES + = $ ( TCF_FILE_NAME ) <nl> - <nl> - LCF_FILE_NAME = $ ( notdir $ ( LCF_FILE ) ) <nl> - ARC_TARGET_FILES_DIRS + = $ ( dir $ ( LCF_FILE ) ) <nl> - MAKE_PROJECT_FILES + = $ ( LCF_FILE_NAME ) <nl> - <nl> - ARCLIB_FILE_NAME = $ ( notdir $ ( ARCLIB_FILE ) ) <nl> - ARC_TARGET_FILES_DIRS + = $ ( dir $ ( ARCLIB_FILE ) ) <nl> - MAKE_PROJECT_FILES + = $ ( ARCLIB_FILE_NAME ) <nl> - <nl> - LIB_HEADER_FILE_NAME = $ ( notdir $ ( LIB_HEADER_FILE ) ) <nl> - ARC_TARGET_FILES_DIRS + = $ ( dir $ ( LIB_HEADER_FILE ) ) <nl> - MAKE_PROJECT_FILES + = $ ( LIB_HEADER_FILE_NAME ) <nl> - <nl> - <nl> - <nl> - # Need a pointer to the TCF and lcf file <nl> - <nl> - PLATFORM_FLAGS = \ <nl> - - DNDEBUG \ <nl> - - g \ <nl> - - DCPU_ARC \ <nl> - - Hnosdata \ <nl> - - DTF_LITE_STATIC_MEMORY \ <nl> - - tcf = $ ( TCF_FILE_NAME ) \ <nl> - - Hnocopyr \ <nl> - - Hpurge \ <nl> - - Hcl \ <nl> - - fslp - vectorize - aggressive \ <nl> - - ffunction - sections \ <nl> - - fdata - sections \ <nl> - - tcf_core_config \ <nl> - <nl> - CXXFLAGS + = - fno - rtti - DSCRATCH_MEM_Z_SIZE = 0x10000 $ ( PLATFORM_FLAGS ) <nl> - CCFLAGS + = $ ( PLATFORM_FLAGS ) <nl> - <nl> - INCLUDES + = \ <nl> - - I $ ( MAKEFILE_DIR ) / downloads / $ ( WEI_SDK_NAME ) \ <nl> - - I $ ( MAKEFILE_DIR ) / downloads / kissfft <nl> - <nl> - GENERATED_PROJECT_INCLUDES + = \ <nl> - - I . \ <nl> - - I . / third_party / kissfft <nl> - <nl> - LDFLAGS + = \ <nl> - - Hheap = 8192 \ <nl> - - tcf = $ ( TCF_FILE_NAME ) \ <nl> - - Hnocopyr \ <nl> - - m \ <nl> - - Hldopt = - Coutput = $ ( TARGET ) . 
map \ <nl> - $ ( LCF_FILE_NAME ) \ <nl> - - Hldopt = - Bgrouplib $ ( ARCLIB_FILE_NAME ) <nl> - <nl> - CXXFLAGS : = $ ( filter - out - std = c + + 11 , $ ( CXXFLAGS ) ) <nl> - CCFLAGS : = $ ( filter - out - std = c11 , $ ( CCFLAGS ) ) <nl> - MICROLITE_LIBS : = $ ( filter - out - lm , $ ( MICROLITE_LIBS ) ) <nl> - <nl> - endif <nl>
remove temp makefile in target
tensorflow/tensorflow
39e65d52e400f8c343195e2f8ac34f286648a415
2020-06-04T10:19:58Z
mmm a / tensorflow / core / grappler / clusters / virtual_cluster . cc <nl> ppp b / tensorflow / core / grappler / clusters / virtual_cluster . cc <nl> Status VirtualCluster : : Run ( const GraphDef & graph , <nl> if ( metadata ) { <nl> metadata - > clear_step_stats ( ) ; <nl> metadata - > clear_cost_graph ( ) ; <nl> + metadata - > clear_partition_graphs ( ) ; <nl> } <nl> <nl> Costs node_costs ; <nl> Status VirtualCluster : : Run ( const GraphDef & graph , <nl> } while ( scheduler . MarkCurrNodeExecuted ( node_costs ) ) ; <nl> <nl> if ( metadata ) { <nl> - scheduler . Summary ( metadata - > mutable_step_stats ( ) ) ; <nl> + scheduler . Summary ( metadata ) ; <nl> } <nl> return Status : : OK ( ) ; <nl> } <nl> mmm a / tensorflow / core / grappler / costs / virtual_scheduler . cc <nl> ppp b / tensorflow / core / grappler / costs / virtual_scheduler . cc <nl> Costs VirtualScheduler : : Summary ( ) const { <nl> return critical_path_costs ; <nl> } <nl> <nl> - Costs VirtualScheduler : : Summary ( StepStats * stepstats ) { <nl> - if ( stepstats ! = nullptr ) { <nl> + Costs VirtualScheduler : : Summary ( RunMetadata * metadata ) { <nl> + if ( metadata ! = nullptr ) { <nl> + StepStats * stepstats = metadata - > mutable_step_stats ( ) ; <nl> for ( const auto & device : device_ ) { <nl> + GraphDef * device_partition_graph = <nl> + metadata - > mutable_partition_graphs ( ) - > Add ( ) ; <nl> DeviceStepStats * device_stepstats = stepstats - > add_dev_stats ( ) ; <nl> device_stepstats - > set_device ( device . first ) ; <nl> for ( const auto & node_def : device . second . nodes_executed ) { <nl> const NodeState & nodestate = node_map_ . at ( node_def ) ; <nl> NodeExecStats * node_stats = device_stepstats - > add_node_stats ( ) ; <nl> - node_stats - > set_node_name ( node_def - > op ( ) ) ; <nl> - node_stats - > set_timeline_label ( node_def - > name ( ) ) ; <nl> + node_stats - > set_timeline_label ( node_def - > op ( ) ) ; <nl> + node_stats - > set_node_name ( node_def - > name ( ) ) ; <nl> node_stats - > set_op_start_rel_micros ( 0 ) ; <nl> node_stats - > set_all_start_micros ( <nl> nodestate . time_scheduled . asMicroSeconds ( ) . count ( ) ) ; <nl> Costs VirtualScheduler : : Summary ( StepStats * stepstats ) { <nl> node_stats - > set_all_end_rel_micros ( <nl> nodestate . time_finished . asMicroSeconds ( ) . count ( ) - <nl> nodestate . time_scheduled . asMicroSeconds ( ) . count ( ) ) ; <nl> + * device_partition_graph - > mutable_node ( ) - > Add ( ) = * node_def ; <nl> } <nl> } <nl> } <nl> mmm a / tensorflow / core / grappler / costs / virtual_scheduler . h <nl> ppp b / tensorflow / core / grappler / costs / virtual_scheduler . h <nl> class VirtualScheduler { <nl> <nl> / / Prints out summary of execution ( timing , memory usage , etc . ) <nl> Costs Summary ( ) const ; <nl> - / / Like the above , but writes detailed stats to stepstats . <nl> - / / If stepstats is nullptr , then just calls and return Summary ( ) . <nl> - Costs Summary ( StepStats * stepstats ) ; <nl> + / / Like the above , but writes detailed stats to RunMetadata . <nl> + / / If metadata is nullptr , then just calls and return Summary ( ) . <nl> + Costs Summary ( RunMetadata * metadata ) ; <nl> <nl> protected : <nl> / / GetDeviceStates and GetNodeStates are currently for testing purpuse only . <nl> mmm a / tensorflow / core / grappler / costs / virtual_scheduler_test . cc <nl> ppp b / tensorflow / core / grappler / costs / virtual_scheduler_test . 
cc <nl> TEST_F ( VirtualSchedulerTest , SummaryCostStepStatsTest ) { <nl> CreateGrapplerItemWithMatmulChain ( ) ; <nl> InitScheduler ( ) ; <nl> auto ops_executed = RunScheduler ( " " ) ; <nl> - StepStats stepstats ; <nl> - Costs c = scheduler_ - > Summary ( & stepstats ) ; <nl> + RunMetadata metadata ; <nl> + Costs c = scheduler_ - > Summary ( & metadata ) ; <nl> + StepStats stepstats = metadata . step_stats ( ) ; <nl> EXPECT_EQ ( 13000000 , c . execution_time . asMicroSeconds ( ) . count ( ) ) ; <nl> <nl> / / Should only be 1 device ! <nl> TEST_F ( VirtualSchedulerTest , SummaryCostStepStatsTest ) { <nl> std : : map < string , std : : pair < int64 , int64 > > start_end_times ; <nl> for ( const auto & device_step_stats : stepstats . dev_stats ( ) ) { <nl> for ( const auto & stats : device_step_stats . node_stats ( ) ) { <nl> - / / The node name is actually in the timeline_label . <nl> int64 start = stats . all_start_micros ( ) ; <nl> int64 end = start + stats . all_end_rel_micros ( ) ; <nl> - start_end_times [ stats . timeline_label ( ) ] = <nl> - std : : pair < int64 , int64 > ( start , end ) ; <nl> + start_end_times [ stats . node_name ( ) ] = std : : pair < int64 , int64 > ( start , end ) ; <nl> } <nl> } <nl> <nl>
Add item ' s graph to partition_graphs in virtual cluster ' s run method .
tensorflow/tensorflow
749af0fea513be3016981e6601ac7e94eb588bbb
2017-06-23T20:37:11Z
mmm a / CHANGELOG <nl> ppp b / CHANGELOG <nl> <nl> - v2 . 3 . 0 - alpha4 ( XXXX - XX - XX ) <nl> + v2 . 3 . 0 - alpha5 ( XXXX - XX - XX ) <nl> mmmmmmmmmmmmmmmmmm - <nl> <nl> * fixed issue # 1027 : Stack traces are off - by - one <nl> mmm a / VERSION <nl> ppp b / VERSION <nl> @ @ - 1 + 1 @ @ <nl> - 2 . 3 . 0 - alpha4 <nl> + 2 . 3 . 0 - alpha5 <nl> mmm a / build . h <nl> ppp b / build . h <nl> @ @ - 1 + 1 @ @ <nl> - # define TRI_VERSION " 2 . 3 . 0 - alpha4 " <nl> + # define TRI_VERSION " 2 . 3 . 0 - alpha5 " <nl> mmm a / configure <nl> ppp b / configure <nl> <nl> # ! / bin / sh <nl> # Guess values for system - dependent variables and create Makefiles . <nl> - # Generated by GNU Autoconf 2 . 69 for triAGENS ArangoDB 2 . 3 . 0 - alpha4 . <nl> + # Generated by GNU Autoconf 2 . 69 for triAGENS ArangoDB 2 . 3 . 0 - alpha5 . <nl> # <nl> # Report bugs to < info @ triagens . de > . <nl> # <nl> MAKEFLAGS = <nl> # Identity of this package . <nl> PACKAGE_NAME = ' triAGENS ArangoDB ' <nl> PACKAGE_TARNAME = ' arangodb ' <nl> - PACKAGE_VERSION = ' 2 . 3 . 0 - alpha4 ' <nl> - PACKAGE_STRING = ' triAGENS ArangoDB 2 . 3 . 0 - alpha4 ' <nl> + PACKAGE_VERSION = ' 2 . 3 . 0 - alpha5 ' <nl> + PACKAGE_STRING = ' triAGENS ArangoDB 2 . 3 . 0 - alpha5 ' <nl> PACKAGE_BUGREPORT = ' info @ triagens . de ' <nl> PACKAGE_URL = ' http : / / www . arangodb . org ' <nl> <nl> if test " $ ac_init_help " = " long " ; then <nl> # Omit some internal or obsolete options to make the list less imposing . <nl> # This message is too long to be a string in the A / UX 3 . 1 sh . <nl> cat < < _ACEOF <nl> - \ ` configure ' configures triAGENS ArangoDB 2 . 3 . 0 - alpha4 to adapt to many kinds of systems . <nl> + \ ` configure ' configures triAGENS ArangoDB 2 . 3 . 0 - alpha5 to adapt to many kinds of systems . <nl> <nl> Usage : $ 0 [ OPTION ] . . . [ VAR = VALUE ] . . . <nl> <nl> fi <nl> <nl> if test - n " $ ac_init_help " ; then <nl> case $ ac_init_help in <nl> - short | recursive ) echo " Configuration of triAGENS ArangoDB 2 . 3 . 0 - alpha4 : " ; ; <nl> + short | recursive ) echo " Configuration of triAGENS ArangoDB 2 . 3 . 0 - alpha5 : " ; ; <nl> esac <nl> cat < < \ _ACEOF <nl> <nl> fi <nl> test - n " $ ac_init_help " & & exit $ ac_status <nl> if $ ac_init_version ; then <nl> cat < < \ _ACEOF <nl> - triAGENS ArangoDB configure 2 . 3 . 0 - alpha4 <nl> + triAGENS ArangoDB configure 2 . 3 . 0 - alpha5 <nl> generated by GNU Autoconf 2 . 69 <nl> <nl> Copyright ( C ) 2012 Free Software Foundation , Inc . <nl> cat > config . log < < _ACEOF <nl> This file contains any messages produced by compilers while <nl> running configure , to aid debugging if configure makes a mistake . <nl> <nl> - It was created by triAGENS ArangoDB $ as_me 2 . 3 . 0 - alpha4 , which was <nl> + It was created by triAGENS ArangoDB $ as_me 2 . 3 . 0 - alpha5 , which was <nl> generated by GNU Autoconf 2 . 69 . Invocation command line was <nl> <nl> $ $ 0 $ @ <nl> fi <nl> <nl> # Define the identity of the package . <nl> PACKAGE = ' arangodb ' <nl> - VERSION = ' 2 . 3 . 0 - alpha4 ' <nl> + VERSION = ' 2 . 3 . 0 - alpha5 ' <nl> <nl> <nl> cat > > confdefs . h < < _ACEOF <nl> cat > > $ CONFIG_STATUS < < \ _ACEOF | | ac_write_fail = 1 <nl> # report actual input values of CONFIG_FILES etc . instead of their <nl> # values after options handling . <nl> ac_log = " <nl> - This file was extended by triAGENS ArangoDB $ as_me 2 . 3 . 0 - alpha4 , which was <nl> + This file was extended by triAGENS ArangoDB $ as_me 2 . 3 . 0 - alpha5 , which was <nl> generated by GNU Autoconf 2 . 69 . 
Invocation command line was <nl> <nl> CONFIG_FILES = $ CONFIG_FILES <nl> _ACEOF <nl> cat > > $ CONFIG_STATUS < < _ACEOF | | ac_write_fail = 1 <nl> ac_cs_config = " ` $ as_echo " $ ac_configure_args " | sed ' s / ^ / / ; s / [ \ \ " " \ ` \ $ ] / \ \ \ \ & / g ' ` " <nl> ac_cs_version = " \ \ <nl> - triAGENS ArangoDB config . status 2 . 3 . 0 - alpha4 <nl> + triAGENS ArangoDB config . status 2 . 3 . 0 - alpha5 <nl> configured by $ 0 , generated by GNU Autoconf 2 . 69 , <nl> with options \ \ " \ $ ac_cs_config \ \ " <nl> <nl> mmm a / configure . ac <nl> ppp b / configure . ac <nl> dnl = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> dnl - - SECTION - - triAGENS GmbH Build Environment <nl> dnl = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> - AC_INIT ( [ triAGENS ArangoDB ] , [ 2 . 3 . 0 - alpha4 ] , [ info @ triagens . de ] , [ arangodb ] , [ http : / / www . arangodb . org ] ) <nl> + AC_INIT ( [ triAGENS ArangoDB ] , [ 2 . 3 . 0 - alpha5 ] , [ info @ triagens . de ] , [ arangodb ] , [ http : / / www . arangodb . org ] ) <nl> <nl> dnl mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> dnl auxillary directory for install - sh and missing <nl> mmm a / js / apps / system / aardvark / api - docs . json <nl> ppp b / js / apps / system / aardvark / api - docs . json <nl> <nl> { <nl> " swaggerVersion " : " 1 . 1 " , <nl> - " apiVersion " : " 2 . 3 . 0 - alpha4 " , <nl> + " apiVersion " : " 2 . 3 . 0 - alpha5 " , <nl> " apis " : [ <nl> { <nl> " path " : " api - docs / aqlfunction . { format } " , <nl> mmm a / js / apps / system / aardvark / api - docs / batch . json <nl> ppp b / js / apps / system / aardvark / api - docs / batch . json <nl> <nl> " notes " : " Executes a batch request . A batch request can contain any number of other requests that can be sent to ArangoDB in isolation . The benefit of using batch requests is that batching requests requires less client / server roundtrips than when sending isolated requests . < br > < br > All parts of a batch request are executed serially on the server . The server will return the results of all parts in a single response when all parts are finished . < br > < br > Technically , a batch request is a multipart HTTP request , with content - type < em > multipart / form - data < / em > . A batch request consists of an envelope and the individual batch part actions . Batch part actions are \ " regular \ " HTTP requests , including full header and an optional body . Multiple batch parts are separated by a boundary identifier . The boundary identifier is declared in the batch envelope . The MIME content - type for each individual batch part must be < em > application / x - arango - batchpart < / em > . < br > < br > Please note that when constructing the individual batch parts , you must use CRLF ( < em > \ \ r \ \ n < / em > ) as the line terminator as in regular HTTP messages . < br > < br > The response sent by the server will be an < em > HTTP 200 < / em > response , with an optional error summary header < em > x - arango - errors < / em > . This header contains the number of batch part operations that failed with an HTTP error code of at least 400 . This header is only present in the response if the number of errors is greater than zero . 
< br > < br > The response sent by the server is a multipart response , too . It contains the individual HTTP responses for all batch parts , including the full HTTP result header ( with status code and other potential headers ) and an optional result body . The individual batch parts in the result are seperated using the same boundary value as specified in the request . < br > < br > The order of batch parts in the response will be the same as in the original client request . Client can additionally use the < em > Content - Id < / em > MIME header in a batch part to define an individual id for each batch part . The server will return this id is the batch part responses , too . < br > < br > " , <nl> " summary " : " executes a batch request " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Sending a batch request with five batch parts : < br > < br > < ul class = \ " swagger - list \ " > < li > GET / _api / version < li > DELETE / _api / collection / products < li > POST / _api / collection / products < li > GET / _api / collection / products / figures < li > DELETE / _api / collection / products < / ul > The boundary ( < em > SomeBoundaryValue < / em > ) is passed to the server in the HTTP < em > Content - Type < / em > HTTP header . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - header ' Content - Type : multipart / form - data ; boundary = SomeBoundaryValue ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / batch \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : myId1 \ r \ n \ r \ nGET / _api / version HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : myId2 \ r \ n \ r \ nDELETE / _api / collection / products HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : someId \ r \ n \ r \ nPOST / _api / collection / products HTTP / 1 . 1 \ r \ n \ r \ n { \ " name \ " : \ " products \ " } \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : nextId \ r \ n \ r \ nGET / _api / collection / products / figures HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : otherId \ r \ n \ r \ nDELETE / _api / collection / products HTTP / 1 . 1 \ r \ n - - SomeBoundaryValue - - \ r \ n \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : multipart / form - data ; boundary = SomeBoundaryValue \ nx - arango - errors : 1 \ n \ n \ " - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : myId1 \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 44 \ \ r \ \ n \ \ r \ \ n { \ \ \ " server \ \ \ " : \ \ \ " arango \ \ \ " , \ \ \ " version \ \ \ " : \ \ \ " 2 . 3 . 0 - alpha4 \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : myId2 \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 
1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 88 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' products ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : someId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ nlocation : / _db / _system / _api / collection / products \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 138 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278683223 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " waitForSync \ \ \ " : false , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " isSystem \ \ \ " : false , \ \ \ " status \ \ \ " : 3 , \ \ \ " type \ \ \ " : 2 , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : nextId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ nlocation : / _db / _system / _api / collection / products / figures \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 618 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278683223 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isSystem \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " journalSize \ \ \ " : 1048576 , \ \ \ " keyOptions \ \ \ " : { \ \ \ " type \ \ \ " : \ \ \ " traditional \ \ \ " , \ \ \ " allowUserKeys \ \ \ " : true } , \ \ \ " waitForSync \ \ \ " : false , \ \ \ " count \ \ \ " : 0 , \ \ \ " figures \ \ \ " : { \ \ \ " alive \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " dead \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 , \ \ \ " deletion \ \ \ " : 0 } , \ \ \ " datafiles \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " journals \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " compactors \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " shapefiles \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " shapes \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " attributes \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " indexes \ \ \ " : { \ \ \ " count \ \ \ " : 1 , \ \ \ " size \ \ \ " : 2008 } , \ \ \ " lastTick \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " uncollectedLogfileEntries \ \ \ " : 0 } , \ \ \ " status \ \ \ " : 3 , \ \ \ " type \ \ \ " : 2 , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : otherId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 44 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278683223 \ \ \ " , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue - - \ " \ n < / code > < / pre > < br > < br > < br > Sending a batch request , setting the boundary implicitly ( the server will in this case try to find the boundary at the beginning of the request body ) . 
< br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / batch \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ n \ r \ nDELETE / _api / collection / notexisting1 HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ n \ r \ nDELETE / _api / collection / notexisting2 HTTP / 1 . 1 \ r \ n - - SomeBoundaryValue - - \ r \ n \ n \ nHTTP / 1 . 1 200 OK \ nx - arango - errors : 2 \ n \ n \ " - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 92 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' notexisting1 ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 92 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' notexisting2 ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue - - \ " \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > Sending a batch request with five batch parts : < br > < br > < ul class = \ " swagger - list \ " > < li > GET / _api / version < li > DELETE / _api / collection / products < li > POST / _api / collection / products < li > GET / _api / collection / products / figures < li > DELETE / _api / collection / products < / ul > The boundary ( < em > SomeBoundaryValue < / em > ) is passed to the server in the HTTP < em > Content - Type < / em > HTTP header . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - header ' Content - Type : multipart / form - data ; boundary = SomeBoundaryValue ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / batch \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : myId1 \ r \ n \ r \ nGET / _api / version HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : myId2 \ r \ n \ r \ nDELETE / _api / collection / products HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : someId \ r \ n \ r \ nPOST / _api / collection / products HTTP / 1 . 1 \ r \ n \ r \ n { \ " name \ " : \ " products \ " } \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : nextId \ r \ n \ r \ nGET / _api / collection / products / figures HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ nContent - Id : otherId \ r \ n \ r \ nDELETE / _api / collection / products HTTP / 1 . 1 \ r \ n - - SomeBoundaryValue - - \ r \ n \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : multipart / form - data ; boundary = SomeBoundaryValue \ nx - arango - errors : 1 \ n \ n \ " - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : myId1 \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 44 \ \ r \ \ n \ \ r \ \ n { \ \ \ " server \ \ \ " : \ \ \ " arango \ \ \ " , \ \ \ " version \ \ \ " : \ \ \ " 2 . 3 . 0 - alpha5 \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : myId2 \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 88 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' products ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : someId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ nlocation : / _db / _system / _api / collection / products \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 138 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278283910 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " waitForSync \ \ \ " : false , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " isSystem \ \ \ " : false , \ \ \ " status \ \ \ " : 3 , \ \ \ " type \ \ \ " : 2 , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : nextId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 
1 200 OK \ \ r \ \ nlocation : / _db / _system / _api / collection / products / figures \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 618 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278283910 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isSystem \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " journalSize \ \ \ " : 1048576 , \ \ \ " keyOptions \ \ \ " : { \ \ \ " type \ \ \ " : \ \ \ " traditional \ \ \ " , \ \ \ " allowUserKeys \ \ \ " : true } , \ \ \ " waitForSync \ \ \ " : false , \ \ \ " count \ \ \ " : 0 , \ \ \ " figures \ \ \ " : { \ \ \ " alive \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " dead \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 , \ \ \ " deletion \ \ \ " : 0 } , \ \ \ " datafiles \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " journals \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " compactors \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " shapefiles \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " fileSize \ \ \ " : 0 } , \ \ \ " shapes \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " attributes \ \ \ " : { \ \ \ " count \ \ \ " : 0 , \ \ \ " size \ \ \ " : 0 } , \ \ \ " indexes \ \ \ " : { \ \ \ " count \ \ \ " : 1 , \ \ \ " size \ \ \ " : 2008 } , \ \ \ " lastTick \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " uncollectedLogfileEntries \ \ \ " : 0 } , \ \ \ " status \ \ \ " : 3 , \ \ \ " type \ \ \ " : 2 , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ nContent - Id : otherId \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 200 OK \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 44 \ \ r \ \ n \ \ r \ \ n { \ \ \ " id \ \ \ " : \ \ \ " 1278283910 \ \ \ " , \ \ \ " error \ \ \ " : false , \ \ \ " code \ \ \ " : 200 } \ \ r \ \ n - - SomeBoundaryValue - - \ " \ n < / code > < / pre > < br > < br > < br > Sending a batch request , setting the boundary implicitly ( the server will in this case try to find the boundary at the beginning of the request body ) . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / batch \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ n \ r \ nDELETE / _api / collection / notexisting1 HTTP / 1 . 1 \ r \ n \ r \ n - - SomeBoundaryValue \ r \ nContent - Type : application / x - arango - batchpart \ r \ n \ r \ nDELETE / _api / collection / notexisting2 HTTP / 1 . 1 \ r \ n - - SomeBoundaryValue - - \ r \ n \ n \ nHTTP / 1 . 1 200 OK \ nx - arango - errors : 2 \ n \ n \ " - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 92 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' notexisting1 ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue \ \ r \ \ nContent - Type : application / x - arango - batchpart \ \ r \ \ n \ \ r \ \ nHTTP / 1 . 
1 404 Not Found \ \ r \ \ ncontent - type : application / json ; charset = utf - 8 \ \ r \ \ ncontent - length : 92 \ \ r \ \ n \ \ r \ \ n { \ \ \ " error \ \ \ " : true , \ \ \ " code \ \ \ " : 404 , \ \ \ " errorNum \ \ \ " : 1203 , \ \ \ " errorMessage \ \ \ " : \ \ \ " unknown collection ' notexisting2 ' \ \ \ " } \ \ r \ \ n - - SomeBoundaryValue - - \ " \ n < / code > < / pre > < br > " , <nl> " nickname " : " executesABatchRequest " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / collection . json <nl> ppp b / js / apps / system / aardvark / api - docs / collection . json <nl> <nl> " notes " : " Creates an new collection with a given name . The request must contain an object with the following attributes . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > name < / em > : The name of the collection . < li > < em > waitForSync < / em > ( optional , default : false ) : If < em > true < / em > then the data is synchronised to disk before returning from a create or update of a document . < li > < em > doCompact < / em > ( optional , default is < em > true < / em > ) : whether or not the collection will be compacted . < li > < em > journalSize < / em > ( optional , default is a configuration parameter ) : The maximal size of a journal or datafile . < / ul > < em > * Note * < / em > : This also limits the maximal size of a single object . Must be at least 1MB . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > isSystem < / em > ( optional , default is < em > false < / em > ) : If < em > true < / em > , create a system collection . In this case < em > collection - name < / em > should start with an underscore . End users should normally create non - system collections only . API implementors may be required to create system collections in very special occasions , but normally a regular collection will do . < li > < em > isVolatile < / em > ( optional , default is < em > false < / em > ) : If < em > true < / em > then the collection data is kept in - memory only and not made persistent . Unloading the collection will cause the collection data to be discarded . Stopping or re - starting the server will also cause full loss of data in the collection . Setting this option will make the resulting collection be slightly faster than regular collections because ArangoDB does not enforce any synchronisation to disk and does not calculate any CRC checksums for datafiles ( as there are no datafiles ) . This option should threrefore be used for cache - type collections only , and not for data that cannot be re - created otherwise . < li > < em > keyOptions < / em > ( optional ) additional options for key generation . If specified , then < em > keyOptions < / em > should be a JSON array containing the following attributes ( note : some of them are optional ) : - < em > type < / em > : specifies the type of the key generator . The currently available generators are < em > traditional < / em > and < em > autoincrement < / em > . - < em > allowUserKeys < / em > : if set to < em > true < / em > , then it is allowed to supply own key values in the < em > _key < / em > attribute of a document . If set to < em > false < / em > , then the key generator will solely be responsible for generating keys and supplying own key values in the < em > _key < / em > attribute of documents is considered an error . - < em > increment < / em > : increment value for < em > autoincrement < / em > key generator . Not used for other key generator types . 
- < em > offset < / em > : initial offset value for < em > autoincrement < / em > key generator . Not used for other key generator types . < li > < em > type < / em > ( optional , default is < em > 2 < / em > ) : the type of the collection to create . The following values for < em > type < / em > are valid : - < em > 2 < / em > : document collection - < em > 3 < / em > : edges collection < li > < em > numberOfShards < / em > ( optional , default is < em > 1 < / em > ) : in a cluster , this value determines the number of shards to create for the collection . In a single server setup , this option is meaningless . < li > < em > shardKeys < / em > ( optional , default is < em > [ \ " _key \ " ] < / em > ) : in a cluster , this attribute determines which document attributes are used to determine the target shard for documents . Documents are sent to shards based on the values of their shard key attributes . The values of all shard key attributes in a document are hashed , and the hash value is used to determine the target shard . < / ul > < em > * Note * < / em > : Values of shard key attributes cannot be changed once set . This option is meaningless in a single server setup . " , <nl> " summary " : " Create collection " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionBasics \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionBasics \ n \ n { \ n \ " id \ " : \ " 834349143 \ " , \ n \ " name \ " : \ " testCollectionBasics \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ nshell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionEdges \ " , \ " type \ " : 3 } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionEdges \ n \ n { \ n \ " id \ " : \ " 834480215 \ " , \ n \ " name \ " : \ " testCollectionEdges \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionUsers \ " , \ " keyOptions \ " : { \ " type \ " : \ " autoincrement \ " , \ " increment \ " : 5 , \ " allowUserKeys \ " : true } } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionUsers \ n \ n { \ n \ " id \ " : \ " 834742359 \ " , \ n \ " name \ " : \ " testCollectionUsers \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionBasics \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionBasics \ n \ n { \ n \ " id \ " : \ " 834343046 \ " , \ n \ " name \ " : \ " testCollectionBasics \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ nshell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionEdges \ " , \ " type \ " : 3 } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionEdges \ n \ n { \ n \ " id \ " : \ " 834474118 \ " , \ n \ " name \ " : \ " testCollectionEdges \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n { \ " name \ " : \ " testCollectionUsers \ " , \ " keyOptions \ " : { \ " type \ " : \ " autoincrement \ " , \ " increment \ " : 5 , \ " allowUserKeys \ " : true } } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / testCollectionUsers \ n \ n { \ n \ " id \ " : \ " 834736262 \ " , \ n \ " name \ " : \ " testCollectionUsers \ " , \ n \ " waitForSync \ " : false , \ n \ " isVolatile \ " : false , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns an object with an attribute < em > collections < / em > containing a list of all collection descriptions . The same information is also available in the < em > names < / em > as hash map with the collection names as keys . < br > < br > By providing the optional URL parameter < em > excludeSystem < / em > with a value of < em > true < / em > , all system collections will be excluded from the response . 
< br > < br > " , <nl> " summary " : " reads all collections " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Return information about all collections : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " id \ " : \ " 742533207 \ " , \ n \ " name \ " : \ " Company \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827467863 \ " , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 5187671 \ " , \ n \ " name \ " : \ " _statistics \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742926423 \ " , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 5580887 \ " , \ n \ " name \ " : \ " _statistics15 \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 4663383 \ " , \ n \ " name \ " : \ " _aqlfunctions \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2435159 \ " , \ n \ " name \ " : \ " _modules \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 6432855 \ " , \ n \ " name \ " : \ " _jobs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 4204631 \ " , \ n \ " name \ " : \ " _aal \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 803350615 \ " , \ n \ " name \ " : \ " female \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 832710743 \ " , \ n \ " name \ " : \ " animals \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 831793239 \ " , \ n \ " name \ " : \ " demo \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742402135 \ " , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 827336791 \ " , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 141399 \ " , \ n \ " name \ " : \ " _configuration \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742795351 \ " , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2304087 \ " , \ n \ " name \ " : \ " _graphs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 6301783 \ " , \ n \ " name \ " : \ " _queues \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742271063 \ " , \ n \ " name \ " : \ " Customer \ " , \ n \ " isSystem \ " : false , \ n \ 
" status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827205719 \ " , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 646195287 \ " , \ n \ " name \ " : \ " better - example \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2697303 \ " , \ n \ " name \ " : \ " _cluster_kickstarter_plans \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 803612759 \ " , \ n \ " name \ " : \ " relation \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 742664279 \ " , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827598935 \ " , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 403543 \ " , \ n \ " name \ " : \ " _users \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 26159191 \ " , \ n \ " name \ " : \ " sessions \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827074647 \ " , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 4794455 \ " , \ n \ " name \ " : \ " _statisticsRaw \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2566231 \ " , \ n \ " name \ " : \ " _routing \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 803481687 \ " , \ n \ " name \ " : \ " male \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } \ n ] , \ n \ " names \ " : { \ n \ " Company \ " : { \ n \ " id \ " : \ " 742533207 \ " , \ n \ " name \ " : \ " Company \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " frenchHighway \ " : { \ n \ " id \ " : \ " 827467863 \ " , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " _statistics \ " : { \ n \ " id \ " : \ " 5187671 \ " , \ n \ " name \ " : \ " _statistics \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " has_bought \ " : { \ n \ " id \ " : \ " 742926423 \ " , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " _statistics15 \ " : { \ n \ " id \ " : \ " 5580887 \ " , \ n \ " name \ " : \ " _statistics15 \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _aqlfunctions \ " : { \ n \ " id \ " : \ " 4663383 \ " , \ n \ " name \ " : \ " _aqlfunctions \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _modules \ " : { \ n \ " id \ " : \ " 2435159 \ " , \ n \ " name \ " : \ " _modules \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _jobs \ " : { \ n \ " id \ " : \ " 6432855 \ " , \ n \ " name \ " : \ " _jobs \ " , \ n \ " isSystem \ " : true , \ n \ " 
status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _aal \ " : { \ n \ " id \ " : \ " 4204631 \ " , \ n \ " name \ " : \ " _aal \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " female \ " : { \ n \ " id \ " : \ " 803350615 \ " , \ n \ " name \ " : \ " female \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " animals \ " : { \ n \ " id \ " : \ " 832710743 \ " , \ n \ " name \ " : \ " animals \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " demo \ " : { \ n \ " id \ " : \ " 831793239 \ " , \ n \ " name \ " : \ " demo \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " friend_of \ " : { \ n \ " id \ " : \ " 742402135 \ " , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " frenchCity \ " : { \ n \ " id \ " : \ " 827336791 \ " , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _configuration \ " : { \ n \ " id \ " : \ " 141399 \ " , \ n \ " name \ " : \ " _configuration \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Electronics \ " : { \ n \ " id \ " : \ " 742795351 \ " , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _graphs \ " : { \ n \ " id \ " : \ " 2304087 \ " , \ n \ " name \ " : \ " _graphs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _queues \ " : { \ n \ " id \ " : \ " 6301783 \ " , \ n \ " name \ " : \ " _queues \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Customer \ " : { \ n \ " id \ " : \ " 742271063 \ " , \ n \ " name \ " : \ " Customer \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " germanHighway \ " : { \ n \ " id \ " : \ " 827205719 \ " , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " better - example \ " : { \ n \ " id \ " : \ " 646195287 \ " , \ n \ " name \ " : \ " better - example \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _cluster_kickstarter_plans \ " : { \ n \ " id \ " : \ " 2697303 \ " , \ n \ " name \ " : \ " _cluster_kickstarter_plans \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " relation \ " : { \ n \ " id \ " : \ " 803612759 \ " , \ n \ " name \ " : \ " relation \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " Groceries \ " : { \ n \ " id \ " : \ " 742664279 \ " , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " internationalHighway \ " : { \ n \ " id \ " : \ " 827598935 \ " , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " _users \ " : { \ n \ " id \ " : \ " 403543 \ " , \ n \ " name \ " : \ " _users \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " sessions \ " : { \ n \ " id \ " : \ " 26159191 \ " , \ n \ " name \ " : \ " sessions \ " , \ n \ " isSystem 
\ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " germanCity \ " : { \ n \ " id \ " : \ " 827074647 \ " , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _statisticsRaw \ " : { \ n \ " id \ " : \ " 4794455 \ " , \ n \ " name \ " : \ " _statisticsRaw \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _routing \ " : { \ n \ " id \ " : \ " 2566231 \ " , \ n \ " name \ " : \ " _routing \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " male \ " : { \ n \ " id \ " : \ " 803481687 \ " , \ n \ " name \ " : \ " male \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Return information about all collections : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " id \ " : \ " 5574790 \ " , \ n \ " name \ " : \ " _statistics15 \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827396230 \ " , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 4657286 \ " , \ n \ " name \ " : \ " _aqlfunctions \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2429062 \ " , \ n \ " name \ " : \ " _modules \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 6426758 \ " , \ n \ " name \ " : \ " _jobs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 4198534 \ " , \ n \ " name \ " : \ " _aal \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742396038 \ " , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 826871942 \ " , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 135302 \ " , \ n \ " name \ " : \ " _configuration \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 803278982 \ " , \ n \ " name \ " : \ " male \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742789254 \ " , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827265158 \ " , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 2297990 \ " , \ n \ " name \ " : \ " _graphs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 6295686 \ " , \ n \ " name \ " : \ " _queues \ " , \ n \ " isSystem \ " : 
true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742264966 \ " , \ n \ " name \ " : \ " Customer \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2691206 \ " , \ n \ " name \ " : \ " _cluster_kickstarter_plans \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 832508038 \ " , \ n \ " name \ " : \ " animals \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 803147910 \ " , \ n \ " name \ " : \ " female \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742658182 \ " , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 831590534 \ " , \ n \ " name \ " : \ " demo \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827134086 \ " , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 397446 \ " , \ n \ " name \ " : \ " _users \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 26153094 \ " , \ n \ " name \ " : \ " sessions \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 4788358 \ " , \ n \ " name \ " : \ " _statisticsRaw \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 2560134 \ " , \ n \ " name \ " : \ " _routing \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 742527110 \ " , \ n \ " name \ " : \ " Company \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 5181574 \ " , \ n \ " name \ " : \ " _statistics \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 646451334 \ " , \ n \ " name \ " : \ " better - example \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n { \ n \ " id \ " : \ " 827003014 \ " , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 803410054 \ " , \ n \ " name \ " : \ " relation \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n { \ n \ " id \ " : \ " 742920326 \ " , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } \ n ] , \ n \ " names \ " : { \ n \ " _statistics15 \ " : { \ n \ " id \ " : \ " 5574790 \ " , \ n \ " name \ " : \ " _statistics15 \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " internationalHighway \ " : { \ n \ " id \ " : \ " 827396230 \ " , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " _aqlfunctions \ " : { \ n \ " id \ " : \ " 4657286 \ " , \ n \ " name \ " : \ " _aqlfunctions \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _modules \ " : { \ n \ " 
id \ " : \ " 2429062 \ " , \ n \ " name \ " : \ " _modules \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _jobs \ " : { \ n \ " id \ " : \ " 6426758 \ " , \ n \ " name \ " : \ " _jobs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _aal \ " : { \ n \ " id \ " : \ " 4198534 \ " , \ n \ " name \ " : \ " _aal \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " friend_of \ " : { \ n \ " id \ " : \ " 742396038 \ " , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " germanCity \ " : { \ n \ " id \ " : \ " 826871942 \ " , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _configuration \ " : { \ n \ " id \ " : \ " 135302 \ " , \ n \ " name \ " : \ " _configuration \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " male \ " : { \ n \ " id \ " : \ " 803278982 \ " , \ n \ " name \ " : \ " male \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Electronics \ " : { \ n \ " id \ " : \ " 742789254 \ " , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " frenchHighway \ " : { \ n \ " id \ " : \ " 827265158 \ " , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " _graphs \ " : { \ n \ " id \ " : \ " 2297990 \ " , \ n \ " name \ " : \ " _graphs \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _queues \ " : { \ n \ " id \ " : \ " 6295686 \ " , \ n \ " name \ " : \ " _queues \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Customer \ " : { \ n \ " id \ " : \ " 742264966 \ " , \ n \ " name \ " : \ " Customer \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _cluster_kickstarter_plans \ " : { \ n \ " id \ " : \ " 2691206 \ " , \ n \ " name \ " : \ " _cluster_kickstarter_plans \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " animals \ " : { \ n \ " id \ " : \ " 832508038 \ " , \ n \ " name \ " : \ " animals \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " female \ " : { \ n \ " id \ " : \ " 803147910 \ " , \ n \ " name \ " : \ " female \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Groceries \ " : { \ n \ " id \ " : \ " 742658182 \ " , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " demo \ " : { \ n \ " id \ " : \ " 831590534 \ " , \ n \ " name \ " : \ " demo \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " frenchCity \ " : { \ n \ " id \ " : \ " 827134086 \ " , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _users \ " : { \ n \ " id \ " : \ " 397446 \ " , \ n \ " name \ " : \ " _users \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " sessions \ " : { \ n \ " id \ " : \ " 26153094 \ " , \ n \ 
" name \ " : \ " sessions \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _statisticsRaw \ " : { \ n \ " id \ " : \ " 4788358 \ " , \ n \ " name \ " : \ " _statisticsRaw \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _routing \ " : { \ n \ " id \ " : \ " 2560134 \ " , \ n \ " name \ " : \ " _routing \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " Company \ " : { \ n \ " id \ " : \ " 742527110 \ " , \ n \ " name \ " : \ " Company \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " _statistics \ " : { \ n \ " id \ " : \ " 5181574 \ " , \ n \ " name \ " : \ " _statistics \ " , \ n \ " isSystem \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " better - example \ " : { \ n \ " id \ " : \ " 646451334 \ " , \ n \ " name \ " : \ " better - example \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 \ n } , \ n \ " germanHighway \ " : { \ n \ " id \ " : \ " 827003014 \ " , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " relation \ " : { \ n \ " id \ " : \ " 803410054 \ " , \ n \ " name \ " : \ " relation \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } , \ n \ " has_bought \ " : { \ n \ " id \ " : \ " 742920326 \ " , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 3 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " readsAllCollections " <nl> } <nl> ] , <nl> <nl> " notes " : " In addition to the above , the result will always contain the < em > waitForSync < / em > , < em > doCompact < / em > , < em > journalSize < / em > , and < em > isVolatile < / em > attributes . This is achieved by forcing a load of the underlying collection . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > waitForSync < / em > : If < em > true < / em > then creating or changing a document will wait until the data has been synchronised to disk . < li > < em > doCompact < / em > : Whether or not the collection will be compacted . < li > < em > journalSize < / em > : The maximal size setting for journals / datafiles . < li > < em > isVolatile < / em > : If < em > true < / em > then the collection data will be kept in memory only and ArangoDB will not write or sync the data to disk . < / ul > In a cluster setup , the result will also contain the following attributes : < ul class = \ " swagger - list \ " > < li > < em > numberOfShards < / em > : the number of shards of the collection . < li > < em > shardKeys < / em > : contains the names of document attributes that are used to determine the target shard for documents . " , <nl> " summary " : " Read properties of a collection " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Using an identifier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / 834938967 / properties \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / properties \ n \ n { \ n \ " id \ " : \ " 834938967 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using a name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / properties \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / properties \ n \ n { \ n \ " id \ " : \ " 835135575 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Using an identifier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / 834932870 / properties \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / properties \ n \ n { \ n \ " id \ " : \ " 834932870 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using a name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / properties \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / properties \ n \ n { \ n \ " id \ " : \ " 835129478 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadPropertiesOfACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " In addition to the above , the result also contains the number of documents . 
< em > * Note * < / em > that this will always load the collection into memory . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > count < / em > : The number of documents inside the collection . " , <nl> " summary " : " Return number of documents in a collection " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Requesting the number of documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / count \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / count \ n \ n { \ n \ " id \ " : \ " 835332183 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " count \ " : 100 , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Requesting the number of documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / count \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / count \ n \ n { \ n \ " id \ " : \ " 835326086 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " count \ " : 100 , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnNumberOfDocumentsInACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " In addition to the above , the result also contains the number of documents and additional statistical information about the collection . < em > * Note * < / em > : This will always load the collection into memory . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > count < / em > : The number of documents currently present in the collection . < / ul > < em > * figures . alive . count < / em > : The number of curretly active documents in all datafiles and journals of the collection . Documents that are contained in the write - ahead log only are not reported in this figure . < br > < br > < em > * figures . alive . size < / em > : The total size in bytes used by all active documents of the collection . Documents that are contained in the write - ahead log only are not reported in this figure . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > figures . dead . count < / em > : The number of dead documents . This includes document versions that have been deleted or replaced by a newer version . Documents deleted or replaced that are contained the write - ahead log only are not reported in this figure . < / ul > < em > * figures . dead . 
size < / em > : The total size in bytes used by all dead documents . < br > < br > < em > * figures . dead . deletion < / em > : The total number of deletion markers . Deletion markers only contained in the write - ahead log are not reporting in this figure . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > figures . datafiles . count < / em > : The number of datafiles . < li > < em > figures . datafiles . fileSize < / em > : The total filesize of datafiles ( in bytes ) . < li > < em > figures . journals . count < / em > : The number of journal files . < li > < em > figures . journals . fileSize < / em > : The total filesize of all journal files ( in bytes ) . < li > < em > figures . compactors . count < / em > : The number of compactor files . < li > < em > figures . compactors . fileSize < / em > : The total filesize of all compactor files ( in bytes ) . < / ul > < em > * figures . shapefiles . count < / em > : The number of shape files . This value is deprecated and kept for compatibility reasons only . The value will always be 0 since ArangoDB 2 . 0 and higher . < em > * figures . shapefiles . fileSize < / em > : The total filesize of the shape files . This value is deprecated and kept for compatibility reasons only . The value will always be 0 in ArangoDB 2 . 0 and higher . < br > < br > < em > * figures . shapes . count < / em > : The total number of shapes used in the collection . This includes shapes that are not in use anymore . Shapes that are contained in the write - ahead log only are not reported in this figure . < em > * figures . shapes . size < / em > : The total size of all shapes ( in bytes ) . This includes shapes that are not in use anymore . Shapes that are contained in the write - ahead log only are not reported in this figure . < br > < br > < em > * figures . attributes . count < / em > : The total number of attributes used in the collection . Note : the value includes data of attributes that are not in use anymore . Attributes that are contained in the write - ahead log only are not reported in this figure . < em > * figures . attributes . size < / em > : The total size of the attribute data ( in bytes ) . Note : the value includes data of attributes that are not in use anymore . Attributes that are contained in the write - ahead log only are not reported in this figure . < br > < br > < em > * figures . indexes . count < / em > : The total number of indexes defined for the collection , including the pre - defined indexes ( e . g . primary index ) . < br > < br > < em > * figures . indexes . size < / em > : The total memory allocated for indexes in bytes . < br > < br > < em > * figures . maxTick < / em > : The tick of the last marker that was stored in a journal of the collection . This might be 0 if the collection does not yet have a journal . < br > < br > < em > * figures . uncollectedLogfileEntries < / em > : The number of markers in the write - ahead log for this collection that have not been transferred to journals or datafiles . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > journalSize < / em > : The maximal size of the journal in bytes . < / ul > < em > * Note * < / em > : collection data that are stored in the write - ahead log only are not reported in the results . When the write - ahead log is collected , documents might be added to journals and datafiles of the collection , which may modify the figures of the collection . 
< br > < br > Additionally , the filesizes of collection and index parameter JSON files are not reported . These files should normally have a size of a few bytes each . Please also note that the < em > fileSize < / em > values are reported in bytes and reflect the logical file sizes . Some filesystems may use optimisations ( e . g . sparse files ) so that the actual physical file size is somewhat different . Directories and sub - directories may also require space in the file system , but this space is not reported in the < em > fileSize < / em > results . < br > < br > That means that the figures reported do not reflect the actual disk usage of the collection with 100 % accuracy . The actual disk usage of a collection is normally slightly higher than the sum of the reported < em > fileSize < / em > values . Still the sum of the < em > fileSize < / em > values can still be used as a lower bound approximation of the disk usage . < br > < br > " , <nl> " summary " : " Return statistics for a collection " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Using an identifier and requesting the figures of the collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / figures \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / figures \ n \ n { \ n \ " id \ " : \ " 855779415 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : false , \ n \ " count \ " : 1 , \ n \ " figures \ " : { \ n \ " alive \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 88 \ n } , \ n \ " dead \ " : { \ n \ " count \ " : 0 , \ n \ " size \ " : 0 , \ n \ " deletion \ " : 0 \ n } , \ n \ " datafiles \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " journals \ " : { \ n \ " count \ " : 1 , \ n \ " fileSize \ " : 1048576 \ n } , \ n \ " compactors \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " shapefiles \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " shapes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 104 \ n } , \ n \ " attributes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 48 \ n } , \ n \ " indexes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 2008 \ n } , \ n \ " lastTick \ " : \ " 856172631 \ " , \ n \ " uncollectedLogfileEntries \ " : 0 \ n } , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Using an identifier and requesting the figures of the collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / figures \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ nlocation : / _db / _system / _api / collection / products / figures \ n \ n { \ n \ " id \ " : \ " 855576710 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : false , \ n \ " count \ " : 1 , \ n \ " figures \ " : { \ n \ " alive \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 88 \ n } , \ n \ " dead \ " : { \ n \ " count \ " : 0 , \ n \ " size \ " : 0 , \ n \ " deletion \ " : 0 \ n } , \ n \ " datafiles \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " journals \ " : { \ n \ " count \ " : 1 , \ n \ " fileSize \ " : 1048576 \ n } , \ n \ " compactors \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " shapefiles \ " : { \ n \ " count \ " : 0 , \ n \ " fileSize \ " : 0 \ n } , \ n \ " shapes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 104 \ n } , \ n \ " attributes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 48 \ n } , \ n \ " indexes \ " : { \ n \ " count \ " : 1 , \ n \ " size \ " : 2008 \ n } , \ n \ " lastTick \ " : \ " 855969926 \ " , \ n \ " uncollectedLogfileEntries \ " : 0 \ n } , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnStatisticsForACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " In addition to the above , the result will also contain the collection ' s revision id . The revision id is a server - generated string that clients can use to check whether data in a collection has changed since the last revision check . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > revision < / em > : The collection revision id as a string . " , <nl> " summary " : " Return collection revision id " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Retrieving the revision of a collection < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / revision \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 857221207 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " revision \ " : \ " 0 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Retrieving the revision of a collection < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / revision \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 857477254 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " revision \ " : \ " 0 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnCollectionRevisionId " <nl> } <nl> ] , <nl> <nl> " notes " : " Will calculate a checksum of the meta - data ( keys and optionally revision ids ) and optionally the document data in the collection . < br > < br > The checksum can be used to compare if two collections on different ArangoDB instances contain the same contents . The current revision of the collection is returned too so one can make sure the checksums are calculated for the same state of data . < br > < br > By default , the checksum will only be calculated on the < em > _key < / em > system attribute of the documents contained in the collection . For edge collections , the system attributes < em > _from < / em > and < em > _to < / em > will also be included in the calculation . < br > < br > By setting the optional URL parameter < em > withRevisions < / em > to < em > true < / em > , then revision ids ( < em > _rev < / em > system attributes ) are included in the checksumming . < br > < br > By providing the optional URL parameter < em > withData < / em > with a value of < em > true < / em > , the user - defined document attributes will be included in the calculation too . < em > * Note * < / em > : Including user - defined attributes will make the checksumming slower . < br > < br > The response is a JSON object with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > checksum < / em > : The calculated checksum as a number . < li > < em > revision < / em > : The collection revision id as a string . < / ul > < em > * Note * < / em > : this method is not available in a cluster . < br > < br > " , <nl> " summary " : " Return checksum for the collection " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Retrieving the checksum of a collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / checksum \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 857483351 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " checksum \ " : 1848163081 , \ n \ " revision \ " : \ " 857811031 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the checksum of a collection including the collection data , but not the revisions : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / checksum ? withRevisions = false & withData = true \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 858073175 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " checksum \ " : 3932762915 , \ n \ " revision \ " : \ " 858400855 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Retrieving the checksum of a collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / checksum \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 857739398 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " checksum \ " : 4274567137 , \ n \ " revision \ " : \ " 858067078 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the checksum of a collection including the collection data , but not the revisions : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / checksum ? withRevisions = false & withData = true \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 858329222 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " checksum \ " : 752273625 , \ n \ " revision \ " : \ " 858656902 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnChecksumForTheCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Loads a collection into memory . Returns the collection on success . < br > < br > The request might optionally contain the following attribute : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > count < / em > : If set , this controls whether the return value should include the number of documents in the collection . Setting < em > count < / em > to < em > false < / em > may speed up loading a collection . The default value for < em > count < / em > is < em > true < / em > . < / ul > On success an object with the following attributes is returned : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > id < / em > : The identifier of the collection . < li > < em > name < / em > : The name of the collection . < li > < em > count < / em > : The number of documents inside the collection . This is only returned if the < em > count < / em > input parameters is set to < em > true < / em > or has not been specified . < li > < em > status < / em > : The status of the collection as number . < li > < em > type < / em > : The collection type . Valid types are : - 2 : document collection - 3 : edges collection " , <nl> " summary " : " Load collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / load \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 858662999 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " count \ " : 0 , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / load \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 858919046 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " count \ " : 0 , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " LoadCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Removes a collection from memory . This call does not delete any documents . You can use the collection afterwards ; in which case it will be loaded into memory , again . On success an object with the following attributes is returned : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > id < / em > : The identifier of the collection . < li > < em > name < / em > : The name of the collection . < li > < em > status < / em > : The status of the collection as number . < li > < em > type < / em > : The collection type . Valid types are : - 2 : document collection - 3 : edges collection " , <nl> " summary " : " Unload collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / unload \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 858925143 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 2 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / unload \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859377798 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 2 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " UnloadCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Removes all documents from the collection , but leaves the indexes intact . < br > < br > " , <nl> " summary " : " Truncate collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / truncate \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859121751 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / collection / products / truncate \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859574406 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " TruncateCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Changes the properties of a collection . Expects an object with the attribute ( s ) < br > < br > < ul class = \ " swagger - list \ " > < li > < em > waitForSync < / em > : If < em > true < / em > then creating or changing a document will wait until the data has been synchronised to disk . < li > < em > journalSize < / em > : Size ( in bytes ) for new journal files that are created for the collection . < / ul > If returns an object with the attributes < br > < br > < ul class = \ " swagger - list \ " > < li > < em > id < / em > : The identifier of the collection . < li > < em > name < / em > : The name of the collection . < li > < em > waitForSync < / em > : The new value . < li > < em > journalSize < / em > : The new value . < li > < em > status < / em > : The status of the collection as number . < li > < em > type < / em > : The collection type . Valid types are : - 2 : document collection - 3 : edges collection < / ul > < em > * Note * < / em > : some other collection properties , such as < em > type < / em > , < em > isVolatile < / em > , < em > numberOfShards < / em > or < em > shardKeys < / em > cannot be changed once a collection is created . < br > < br > " , <nl> " summary " : " Change properties of a collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / properties \ n { \ n \ " waitForSync \ " : true \ n } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859514967 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products / properties \ n { \ n \ " waitForSync \ " : true \ n } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859967622 \ " , \ n \ " name \ " : \ " products \ " , \ n \ " isSystem \ " : false , \ n \ " doCompact \ " : true , \ n \ " isVolatile \ " : false , \ n \ " journalSize \ " : 1048576 , \ n \ " keyOptions \ " : { \ n \ " type \ " : \ " traditional \ " , \ n \ " allowUserKeys \ " : true \ n } , \ n \ " waitForSync \ " : true , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ChangePropertiesOfACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Renames a collection . Expects an object with the attribute ( s ) < br > < br > < ul class = \ " swagger - list \ " > < li > < em > name < / em > : The new name . < / ul > If returns an object with the attributes < br > < br > < ul class = \ " swagger - list \ " > < li > < em > id < / em > : The identifier of the collection . < li > < em > name < / em > : The new name of the collection . < li > < em > status < / em > : The status of the collection as number . < li > < em > type < / em > : The collection type . Valid types are : - 2 : document collection - 3 : edges collection " , <nl> " summary " : " Rename collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products1 / rename \ n { \ n \ " name \ " : \ " newname \ " \ n } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 859777111 \ " , \ n \ " name \ " : \ " newname \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products1 / rename \ n { \ n \ " name \ " : \ " newname \ " \ n } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 860229766 \ " , \ n \ " name \ " : \ " newname \ " , \ n \ " isSystem \ " : false , \ n \ " status \ " : 3 , \ n \ " type \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " RenameCollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Deletes a collection identified by < em > collection - name < / em > . < br > < br > If the collection was successfully deleted then , an object is returned with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > error < / em > : < em > false < / em > < li > < em > id < / em > : The identifier of the deleted collection . " , <nl> " summary " : " Delete collection " , <nl> " httpMethod " : " DELETE " , <nl> - " examples " : " < br > < br > Using an identifier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / 861808727 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 861808727 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using a name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 862005335 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Using an identifier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / 861802630 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 861802630 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using a name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / collection / products1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " 861999238 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " DeleteCollection " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / cursor . json <nl> ppp b / js / apps / system / aardvark / api - docs / cursor . json <nl> <nl> " notes " : " The query details include the query string plus optional query options and bind parameters . These values need to be passed in a JSON representation in the body of the POST request . < br > < br > The following attributes can be used inside the JSON object : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > query < / em > : contains the query string to be executed ( mandatory ) < li > < em > count < / em > : boolean flag that indicates whether the number of documents in the result set should be returned in the \ " count \ " attribute of the result ( optional ) . Calculating the \ " count \ " attribute might in the future have a performance impact for some queries so this option is turned off by default , and \ " count \ " is only returned when requested . < li > < em > batchSize < / em > : maximum number of result documents to be transferred from the server to the client in one roundtrip ( optional ) . If this attribute is not set , a server - controlled default value will be used . < li > < em > ttl < / em > : an optional time - to - live for the cursor ( in seconds ) . The cursor will be removed on the server automatically after the specified amount of time . This is useful to ensure garbage collection of cursors that are not fully fetched by clients . If not set , a server - defined value will be used . < li > < em > bindVars < / em > : key / value list of bind parameters ( optional ) . < li > < em > options < / em > : key / value list of extra options for the query ( optional ) . 
< / ul > The following options are supported at the moment : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > fullCount < / em > : if set to < em > true < / em > and the query contains a < em > LIMIT < / em > clause , then the result will contain an extra attribute < em > extra < / em > with a sub - attribute < em > fullCount < / em > . This sub - attribute will contain the number of documents in the result before the last LIMIT in the query was applied . It can be used to count the number of documents that match certain filter criteria , but only return a subset of them , in one go . It is thus similar to MySQL ' s < em > SQL_CALC_FOUND_ROWS < / em > hint . Note that setting the option will disable a few LIMIT optimizations and may lead to more documents being processed , and thus make queries run longer . Note that the < em > fullCount < / em > sub - attribute will only be present in the result if the query has a LIMIT clause and the LIMIT clause is actually used in the query . < / ul > If the result set can be created by the server , the server will respond with < em > HTTP 201 < / em > . The body of the response will contain a JSON object with the result set . < br > < br > The returned JSON object has the following properties : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > error < / em > : boolean flag to indicate that an error occurred ( < em > false < / em > in this case ) < li > < em > code < / em > : the HTTP status code < li > < em > result < / em > : an array of result documents ( might be empty if query has no results ) < li > < em > hasMore < / em > : a boolean indicator whether there are more results available for the cursor on the server < li > < em > count < / em > : the total number of result documents available ( only available if the query was executed with the < em > count < / em > attribute set ) < li > < em > id < / em > : id of temporary cursor created on the server ( optional , see above ) < li > < em > extra < / em > : an optional JSON object with extra information about the query result . For data - modification queries , the < em > extra < / em > attribute will contain the number of modified documents and the number of documents that could not be modified due to an error ( if < em > ignoreErrors < / em > query option is specified ) < / ul > If the JSON representation is malformed or the query specification is missing from the request , the server will respond with < em > HTTP 400 < / em > . < br > < br > The body of the response will contain a JSON object with additional error details . The object has the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > error < / em > : boolean flag to indicate that an error occurred ( < em > true < / em > in this case ) < li > < em > code < / em > : the HTTP status code < li > < em > errorNum < / em > : the server error number < li > < em > errorMessage < / em > : a descriptive error message < / ul > If the query specification is complete , the server will process the query . If an error occurs during query processing , the server will respond with < em > HTTP 400 < / em > . Again , the body of the response will contain details about the error . < br > < br > A list of query errors can be found ( . . / ArangoErrors / README . md ) here . 
< br > < br > " , <nl> " summary " : " Create cursor " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Executes a query and extract the result in a single go : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 2 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 862857303 \ " , \ n \ " _key \ " : \ " 862857303 \ " , \ n \ " _rev \ " : \ " 862857303 \ " , \ n \ " hello2 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 862529623 \ " , \ n \ " _key \ " : \ " 862529623 \ " , \ n \ " _rev \ " : \ " 862529623 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a query and extracts part of the result : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 863774807 \ " , \ n \ " _key \ " : \ " 863774807 \ " , \ n \ " _rev \ " : \ " 863774807 \ " , \ n \ " hello2 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 864102487 \ " , \ n \ " _key \ " : \ " 864102487 \ " , \ n \ " _rev \ " : \ " 864102487 \ " , \ n \ " hello3 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 864954455 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Using a query option : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR i IN 1 . . 1000 FILTER i > 500 LIMIT 10 RETURN i \ " , \ " count \ " : true , \ " options \ " : { \ " fullCount \ " : true } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n 501 , \ n 502 , \ n 503 , \ n 504 , \ n 505 , \ n 506 , \ n 507 , \ n 508 , \ n 509 , \ n 510 \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 10 , \ n \ " extra \ " : { \ n \ " fullCount \ " : 500 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query and retrieves the number of modified documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products REMOVE p IN products \ " } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ ] , \ n \ " hasMore \ " : false , \ n \ " extra \ " : { \ n \ " operations \ " : { \ n \ " executed \ " : 2 , \ n \ " ignored \ " : 0 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query with option < em > ignoreErrors < / em > : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " REMOVE ' bar ' IN products OPTIONS { ignoreErrors : true } \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ ] , \ n \ " hasMore \ " : false , \ n \ " extra \ " : { \ n \ " operations \ " : { \ n \ " executed \ " : 0 , \ n \ " ignored \ " : 1 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Bad queries : < br > < br > Missing body : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - dump - http : / / localhost : 8529 / _api / cursor \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 1502 , \ n \ " errorMessage \ " : \ " query is empty \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR u IN unknowncoll LIMIT 2 RETURN u \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 , \ n \ " errorMessage \ " : \ " cannot execute query : collection not found : ' unknowncoll ' \ " \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query that attempts to remove a non - existing document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " REMOVE ' foo ' IN products \ " } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 , \ n \ " errorMessage \ " : \ " document not found \ " \ n } \ n < / code > < / pre > < br > < br > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Executes a query and extract the result in a single go : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 2 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 862523526 \ " , \ n \ " _key \ " : \ " 862523526 \ " , \ n \ " _rev \ " : \ " 862523526 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 862851206 \ " , \ n \ " _key \ " : \ " 862851206 \ " , \ n \ " _rev \ " : \ " 862851206 \ " , \ n \ " hello2 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a query and extracts part of the result : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 864096390 \ " , \ n \ " _key \ " : \ " 864096390 \ " , \ n \ " _rev \ " : \ " 864096390 \ " , \ n \ " hello3 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 863441030 \ " , \ n \ " _key \ " : \ " 863441030 \ " , \ n \ " _rev \ " : \ " 863441030 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 864948358 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Using a query option : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR i IN 1 . . 1000 FILTER i > 500 LIMIT 10 RETURN i \ " , \ " count \ " : true , \ " options \ " : { \ " fullCount \ " : true } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n 501 , \ n 502 , \ n 503 , \ n 504 , \ n 505 , \ n 506 , \ n 507 , \ n 508 , \ n 509 , \ n 510 \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 10 , \ n \ " extra \ " : { \ n \ " fullCount \ " : 500 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query and retrieves the number of modified documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products REMOVE p IN products \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ ] , \ n \ " hasMore \ " : false , \ n \ " extra \ " : { \ n \ " operations \ " : { \ n \ " executed \ " : 2 , \ n \ " ignored \ " : 0 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query with option < em > ignoreErrors < / em > : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " REMOVE ' bar ' IN products OPTIONS { ignoreErrors : true } \ " } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ ] , \ n \ " hasMore \ " : false , \ n \ " extra \ " : { \ n \ " operations \ " : { \ n \ " executed \ " : 0 , \ n \ " ignored \ " : 1 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Bad queries : < br > < br > Missing body : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - dump - http : / / localhost : 8529 / _api / cursor \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 1502 , \ n \ " errorMessage \ " : \ " query is empty \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR u IN unknowncoll LIMIT 2 RETURN u \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 , \ n \ " errorMessage \ " : \ " cannot execute query : collection not found : ' unknowncoll ' \ " \ n } \ n < / code > < / pre > < br > < br > < br > Executes a data - modification query that attempts to remove a non - existing document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " REMOVE ' foo ' IN products \ " } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 , \ n \ " errorMessage \ " : \ " document not found \ " \ n } \ n < / code > < / pre > < br > < br > < br > @ endDocuBlock " , <nl> " nickname " : " CreateCursor " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > If the cursor is still alive , returns an object with the following attributes . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > id < / em > : the < em > cursor - identifier < / em > < li > < em > result < / em > : a list of documents for the current batch < li > < em > hasMore < / em > : < em > false < / em > if this was the last batch < li > < em > count < / em > : if present the total number of elements < / ul > Note that even if < em > hasMore < / em > returns < em > true < / em > , the next call might still return no documents . If , however , < em > hasMore < / em > is < em > false < / em > , then the cursor is exhausted . Once the < em > hasMore < / em > attribute has a value of < em > false < / em > , the client can stop . < br > < br > " , <nl> " summary " : " Read next batch from cursor " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Valid request for next batch : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nshell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 869935191 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 869083223 \ " , \ n \ " _key \ " : \ " 869083223 \ " , \ n \ " _rev \ " : \ " 869083223 \ " , \ n \ " hello3 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 868755543 \ " , \ n \ " _key \ " : \ " 868755543 \ " , \ n \ " _rev \ " : \ " 868755543 \ " , \ n \ " hello2 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 869935191 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Missing identifier < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 400 , \ n \ " errorMessage \ " : \ " bad parameter \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown identifier < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 123123 \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1600 , \ n \ " errorMessage \ " : \ " cursor not found \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Valid request for next batch : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nshell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 869732486 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 868225158 \ " , \ n \ " _key \ " : \ " 868225158 \ " , \ n \ " _rev \ " : \ " 868225158 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 869208198 \ " , \ n \ " _key \ " : \ " 869208198 \ " , \ n \ " _rev \ " : \ " 869208198 \ " , \ n \ " hello4 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 869732486 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Missing identifier < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 400 , \ n \ " errorMessage \ " : \ " bad parameter \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown identifier < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / cursor / 123123 \ n \ nHTTP / 1 . 
1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1600 , \ n \ " errorMessage \ " : \ " cursor not found \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadNextBatchFromCursor " <nl> } <nl> ] , <nl> <nl> " notes " : " Deletes the cursor and frees the resources associated with it . < br > < br > The cursor will automatically be destroyed on the server when the client has retrieved all documents from it . The client can also explicitly destroy the cursor at any earlier time using an HTTP DELETE request . The cursor id must be included as part of the URL . < br > < br > Note : the server will also destroy abandoned cursors automatically after a certain server - controlled timeout to avoid resource leakage . < br > < br > " , <nl> " summary " : " Delete cursor " , <nl> " httpMethod " : " DELETE " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 871704663 \ " , \ n \ " _key \ " : \ " 871704663 \ " , \ n \ " _rev \ " : \ " 871704663 \ " , \ n \ " hello5 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 870393943 \ " , \ n \ " _key \ " : \ " 870393943 \ " , \ n \ " _rev \ " : \ " 870393943 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 871901271 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ nshell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor / 871901271 \ n \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor \ n { \ " query \ " : \ " FOR p IN products LIMIT 5 RETURN p \ " , \ " count \ " : true , \ " batchSize \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 870191238 \ " , \ n \ " _key \ " : \ " 870191238 \ " , \ n \ " _rev \ " : \ " 870191238 \ " , \ n \ " hello1 \ " : \ " world1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 871174278 \ " , \ n \ " _key \ " : \ " 871174278 \ " , \ n \ " _rev \ " : \ " 871174278 \ " , \ n \ " hello4 \ " : \ " world1 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 871698566 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ nshell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / cursor / 871698566 \ n \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " DeleteCursor " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / database . json <nl> ppp b / js / apps / system / aardvark / api - docs / database . 
json <nl> <nl> " notes " : " Retrieves information about the current database < br > < br > The response is a JSON object with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > name < / em > : the name of the current database < li > < em > id < / em > : the id of the current database < li > < em > path < / em > : the filesystem path of the current database < li > < em > isSystem < / em > : whether or not the current database is the < em > _system < / em > database " , <nl> " summary " : " Information of the database " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / database / current \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " name \ " : \ " _system \ " , \ n \ " id \ " : \ " 75863 \ " , \ n \ " path \ " : \ " / tmp / vocdir . 60795 / databases / database - 75863 \ " , \ n \ " isSystem \ " : true \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / database / current \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " name \ " : \ " _system \ " , \ n \ " id \ " : \ " 69766 \ " , \ n \ " path \ " : \ " / tmp / vocdir . 73382 / databases / database - 69766 \ " , \ n \ " isSystem \ " : true \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " InformationOfTheDatabase " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / document . json <nl> ppp b / js / apps / system / aardvark / api - docs / document . json <nl> <nl> " notes " : " Creates a new document in the collection named < em > collection < / em > . A JSON representation of the document must be passed as the body of the POST request . < br > < br > If the document was created successfully , then the \ " Location \ " header contains the path to the newly created document . The \ " ETag \ " header field contains the revision of the document . < br > < br > The body of the response contains a JSON object with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > _id < / em > contains the document handle of the newly created document < li > < em > _key < / em > contains the document key < li > < em > _rev < / em > contains the document revision < / ul > If the collection parameter < em > waitForSync < / em > is < em > false < / em > , then the call returns as soon as the document has been accepted . It will not wait until the document has been synced to disk . < br > < br > Optionally , the URL parameter < em > waitForSync < / em > can be used to force synchronisation of the document creation operation to disk even in case that the < em > waitForSync < / em > flag had been disabled for the entire collection . Thus , the < em > waitForSync < / em > URL parameter can be used to force synchronisation of just this specific operations . To use this , set the < em > waitForSync < / em > parameter to < em > true < / em > . 
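Driven from Python instead of curl, the create call just described looks roughly as follows. This is a minimal sketch, not an official driver: it assumes the third-party requests package, and it reuses the localhost:8529 server, the products collection and the {"Hello": "World"} payload from the surrounding examples; the create_document helper name is purely illustrative.
<pre><code class="python">
# Minimal sketch of the documented POST /_api/document call.
# Assumptions: the third-party `requests` package, plus the localhost:8529
# server and `products` collection used in these examples.
import requests

BASE = "http://localhost:8529/_api"

def create_document(collection, body, wait_for_sync=False):
    # waitForSync=true forces this single operation to be synced to disk
    # even if the collection-level default is false, as described above.
    r = requests.post(
        BASE + "/document",
        params={"collection": collection,
                "waitForSync": str(wait_for_sync).lower()},
        json=body,
    )
    r.raise_for_status()          # 201 when synced, 202 when merely accepted
    doc = r.json()
    return doc["_id"], doc["_rev"]

handle, rev = create_document("products", {"Hello": "World"}, wait_for_sync=True)
</code></pre>
The createCollection=true variant shown in the examples works the same way, with one extra entry in params.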
If the < em > waitForSync < / em > parameter is not specified or set to < em > false < / em > , then the collection ' s default < em > waitForSync < / em > behavior is applied . The < em > waitForSync < / em > URL parameter cannot be used to disable synchronisation for collections that have a default < em > waitForSync < / em > value of < em > true < / em > . < br > < br > " , <nl> " summary " : " Create document " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Create a document given a collection named < em > products < / em > . Note that the revision identifier might or might not by equal to the auto - generated key . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1279338583 \ " \ nlocation : / _db / _system / _api / document / products / 1279338583 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1279338583 \ " , \ n \ " _rev \ " : \ " 1279338583 \ " , \ n \ " _key \ " : \ " 1279338583 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a collection named < em > products < / em > with a collection - level < em > waitForSync < / em > value of < em > false < / em > . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1279862871 \ " \ nlocation : / _db / _system / _api / document / products / 1279862871 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1279862871 \ " , \ n \ " _rev \ " : \ " 1279862871 \ " , \ n \ " _key \ " : \ " 1279862871 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a collection with a collection - level < em > waitForSync < / em > value of < em > false < / em > , but using the < em > waitForSync < / em > URL parameter . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products & waitForSync = true \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1280387159 \ " \ nlocation : / _db / _system / _api / document / products / 1280387159 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1280387159 \ " , \ n \ " _rev \ " : \ " 1280387159 \ " , \ n \ " _key \ " : \ " 1280387159 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a new , named collection < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products & createCollection = true \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 
1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1280911447 \ " \ nlocation : / _db / _system / _api / document / products / 1280911447 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1280911447 \ " , \ n \ " _rev \ " : \ " 1280911447 \ " , \ n \ " _key \ " : \ " 1280911447 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown collection name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " collection ' products ' not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 \ n } \ n < / code > < / pre > < br > < br > < br > Illegal document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { 1 : \ " World \ " } \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " expecting attribute name \ " , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 600 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Create a document given a collection named < em > products < / em > . Note that the revision identifier might or might not by equal to the auto - generated key . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1278939270 \ " \ nlocation : / _db / _system / _api / document / products / 1278939270 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1278939270 \ " , \ n \ " _rev \ " : \ " 1278939270 \ " , \ n \ " _key \ " : \ " 1278939270 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a collection named < em > products < / em > with a collection - level < em > waitForSync < / em > value of < em > false < / em > . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1279463558 \ " \ nlocation : / _db / _system / _api / document / products / 1279463558 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1279463558 \ " , \ n \ " _rev \ " : \ " 1279463558 \ " , \ n \ " _key \ " : \ " 1279463558 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a collection with a collection - level < em > waitForSync < / em > value of < em > false < / em > , but using the < em > waitForSync < / em > URL parameter . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? 
collection = products & waitForSync = true \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1279987846 \ " \ nlocation : / _db / _system / _api / document / products / 1279987846 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1279987846 \ " , \ n \ " _rev \ " : \ " 1279987846 \ " , \ n \ " _key \ " : \ " 1279987846 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Create a document in a new , named collection < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products & createCollection = true \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1280512134 \ " \ nlocation : / _db / _system / _api / document / products / 1280512134 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1280512134 \ " , \ n \ " _rev \ " : \ " 1280512134 \ " , \ n \ " _key \ " : \ " 1280512134 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown collection name : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { \ " Hello \ " : \ " World \ " } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " collection ' products ' not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 \ n } \ n < / code > < / pre > < br > < br > < br > Illegal document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document ? collection = products \ n { 1 : \ " World \ " } \ n \ nHTTP / 1 . 1 400 Bad Request \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " expecting attribute name \ " , \ n \ " code \ " : 400 , \ n \ " errorNum \ " : 600 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateDocument " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns the document identified by < em > document - handle < / em > . The returned document contains two special attributes : < em > _id < / em > containing the document handle and < em > _rev < / em > containing the revision . < br > < br > " , <nl> " summary " : " Read document " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Use a document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1281435735 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1281435735 \ " \ n \ n { \ n \ " hello \ " : \ " world \ " , \ n \ " _id \ " : \ " products / 1281435735 \ " , \ n \ " _rev \ " : \ " 1281435735 \ " , \ n \ " _key \ " : \ " 1281435735 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Use a document handle and an etag : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - header ' If - None - Match : \ " 1282025559 \ " ' - - dump - http : / / localhost : 8529 / _api / document / products / 1282025559 \ n \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / unknownhandle \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / unknownhandle not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Use a document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1281036422 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1281036422 \ " \ n \ n { \ n \ " hello \ " : \ " world \ " , \ n \ " _id \ " : \ " products / 1281036422 \ " , \ n \ " _rev \ " : \ " 1281036422 \ " , \ n \ " _key \ " : \ " 1281036422 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Use a document handle and an etag : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - header ' If - None - Match : \ " 1281626246 \ " ' - - dump - http : / / localhost : 8529 / _api / document / products / 1281626246 \ n \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / unknownhandle \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / unknownhandle not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadDocument " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns a list of all keys , ids , or URI paths for all documents in the collection identified by < em > collection < / em > . The type of the result list is determined by the < em > type < / em > attribute . < br > < br > Note that the results have no defined order and thus the order should not be relied on . < br > < br > " , <nl> " summary " : " Read all documents " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Returns all document paths < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = products \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " documents \ " : [ \ n \ " / _api / document / products / 1283336279 \ " , \ n \ " / _api / document / products / 1282680919 \ " , \ n \ " / _api / document / products / 1283008599 \ " \ n ] \ n } \ n < / code > < / pre > < br > < br > < br > Returns all document keys < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = products & type = key \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " documents \ " : [ \ n \ " 1284253783 \ " , \ n \ " 1283926103 \ " , \ n \ " 1284581463 \ " \ n ] \ n } \ n < / code > < / pre > < br > < br > < br > Collection does not exist . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = doesnotexist \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " collection ' doesnotexist ' not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Returns all document paths < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = products \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " documents \ " : [ \ n \ " / _api / document / products / 1282281606 \ " , \ n \ " / _api / document / products / 1282609286 \ " , \ n \ " / _api / document / products / 1282936966 \ " \ n ] \ n } \ n < / code > < / pre > < br > < br > < br > Returns all document keys < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = products & type = key \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " documents \ " : [ \ n \ " 1283526790 \ " , \ n \ " 1283854470 \ " , \ n \ " 1284182150 \ " \ n ] \ n } \ n < / code > < / pre > < br > < br > < br > Collection does not exist . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / ? collection = doesnotexist \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " collection ' doesnotexist ' not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1203 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadAllDocuments " <nl> } <nl> ] , <nl> <nl> " notes " : " Like < em > GET < / em > , but only returns the header fields and not the body . You can use this call to get the current revision of a document or check if the document was deleted . 
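Sketched in Python under the same assumptions as elsewhere in this section (the third-party requests package and the localhost:8529 server from the examples, with a placeholder document handle), that revision check amounts to reading the ETag header of a HEAD response:
<pre><code class="python">
# Sketch only: probe a document's existence and current revision via HEAD.
# "products/example-key" is a placeholder, not a real handle; real handles
# carry auto-generated keys like the ones in the examples.
import requests

def document_revision(handle):
    r = requests.head("http://localhost:8529/_api/document/" + handle)
    if r.status_code == 404:
        return None                              # deleted or never existed
    r.raise_for_status()
    return r.headers.get("Etag", "").strip('"')  # revision, quoted in the ETag header

print(document_revision("products/example-key"))
</code></pre>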
< br > < br > " , <nl> " summary " : " Read document header " , <nl> " httpMethod " : " HEAD " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X HEAD - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1285171287 \ n \ n < / code > < / pre > < br > @ endDocuBlock @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X HEAD - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1284968582 \ n \ n < / code > < / pre > < br > @ endDocuBlock @ endDocuBlock " , <nl> " nickname " : " ReadDocumentHeader " <nl> } <nl> ] , <nl> <nl> " notes " : " Completely updates ( i . e . replaces ) the document identified by < em > document - handle < / em > . If the document exists and can be updated , then a < em > HTTP 201 < / em > is returned and the \ " ETag \ " header field contains the new revision of the document . < br > < br > If the new document passed in the body of the request contains the < em > document - handle < / em > in the attribute < em > _id < / em > and the revision in < em > _rev < / em > , these attributes will be ignored . Only the URI and the \ " ETag \ " header are relevant in order to avoid confusion when using proxies . < br > < br > Optionally , the URL parameter < em > waitForSync < / em > can be used to force synchronisation of the document replacement operation to disk even in case that the < em > waitForSync < / em > flag had been disabled for the entire collection . Thus , the < em > waitForSync < / em > URL parameter can be used to force synchronisation of just specific operations . To use this , set the < em > waitForSync < / em > parameter to < em > true < / em > . If the < em > waitForSync < / em > parameter is not specified or set to < em > false < / em > , then the collection ' s default < em > waitForSync < / em > behavior is applied . The < em > waitForSync < / em > URL parameter cannot be used to disable synchronisation for collections that have a default < em > waitForSync < / em > value of < em > true < / em > . < br > < br > The body of the response contains a JSON object with the information about the handle and the revision . The attribute < em > _id < / em > contains the known < em > document - handle < / em > of the updated document , the attribute < em > _rev < / em > contains the new document revision . < br > < br > If the document does not exist , then a < em > HTTP 404 < / em > is returned and the body of the response contains an error document . < br > < br > There are two ways for specifying the targeted document revision id for conditional replacements ( i . e . replacements that will only be executed if the revision id found in the database matches the document revision id specified in the request ) : < ul class = \ " swagger - list \ " > < li > specifying the target revision in the < em > rev < / em > URL query parameter < li > specifying the target revision in the < em > if - match < / em > HTTP header < / ul > Specifying a target revision is optional , however , if done , only one of the described mechanisms must be used ( either the < em > rev < / em > URL parameter or the < em > if - match < / em > HTTP header ) . Regardless which mechanism is used , the parameter needs to contain the target document revision id as returned in the < em > _rev < / em > attribute of a document or by an HTTP < em > etag < / em > header . 
< br > < br > For example , to conditionally replace a document based on a specific revision id , you can use the following request : < br > < br > < em > PUT / _api / document / document - handle ? rev = etag < / em > < br > < br > If a target revision id is provided in the request ( e . g . via the < em > etag < / em > value in the < em > rev < / em > URL query parameter above ) , ArangoDB will check that the revision id of the document found in the database is equal to the target revision id provided in the request . If there is a mismatch between the revision id , then by default a < em > HTTP 412 < / em > conflict is returned and no replacement is performed . < br > < br > The conditional update behavior can be overriden with the < em > policy < / em > URL query parameter : < br > < br > < em > PUT / _api / document / document - handle ? policy = policy < / em > < br > < br > If < em > policy < / em > is set to < em > error < / em > , then the behavior is as before : replacements will fail if the revision id found in the database does not match the target revision id specified in the request . < br > < br > If < em > policy < / em > is set to < em > last < / em > , then the replacement will succeed , even if the revision id found in the database does not match the target revision id specified in the request . You can use the < em > last < / em > * policy * to force replacements . < br > < br > " , <nl> " summary " : " Replace document " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Using document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1285761111 \ n { \ " Hello \ " : \ " you \ " } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1286088791 \ " \ nlocation : / _db / _system / _api / document / products / 1285761111 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1285761111 \ " , \ n \ " _rev \ " : \ " 1286088791 \ " , \ n \ " _key \ " : \ " 1285761111 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1286613079 \ n { } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / 1286613079 not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > < br > < br > Produce a revision conflict : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' If - Match : \ " 1287858263 \ " ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1287530583 \ n { \ " other \ " : \ " content \ " } \ n \ nHTTP / 1 . 
1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1287530583 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1287530583 \ " , \ n \ " _rev \ " : \ " 1287530583 \ " , \ n \ " _key \ " : \ " 1287530583 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Last write wins : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' If - Match : \ " 1288972375 \ " ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1288644695 ? policy = last \ n { } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1289234519 \ " \ nlocation : / _db / _system / _api / document / products / 1288644695 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1288644695 \ " , \ n \ " _rev \ " : \ " 1289234519 \ " , \ n \ " _key \ " : \ " 1288644695 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Alternative to header field : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1289758807 ? rev = 1290086487 \ n { \ " other \ " : \ " content \ " } \ n \ nHTTP / 1 . 1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1289758807 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1289758807 \ " , \ n \ " _rev \ " : \ " 1289758807 \ " , \ n \ " _key \ " : \ " 1289758807 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Using document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1285558406 \ n { \ " Hello \ " : \ " you \ " } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1285886086 \ " \ nlocation : / _db / _system / _api / document / products / 1285558406 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1285558406 \ " , \ n \ " _rev \ " : \ " 1285886086 \ " , \ n \ " _key \ " : \ " 1285558406 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1286410374 \ n { } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / 1286410374 not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > < br > < br > Produce a revision conflict : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' If - Match : \ " 1287655558 \ " ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1287327878 \ n { \ " other \ " : \ " content \ " } \ n \ nHTTP / 1 . 
1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1287327878 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1287327878 \ " , \ n \ " _rev \ " : \ " 1287327878 \ " , \ n \ " _key \ " : \ " 1287327878 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Last write wins : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - header ' If - Match : \ " 1288769670 \ " ' - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1288441990 ? policy = last \ n { } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1289031814 \ " \ nlocation : / _db / _system / _api / document / products / 1288441990 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1288441990 \ " , \ n \ " _rev \ " : \ " 1289031814 \ " , \ n \ " _key \ " : \ " 1288441990 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Alternative to header field : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1289556102 ? rev = 1289883782 \ n { \ " other \ " : \ " content \ " } \ n \ nHTTP / 1 . 1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1289556102 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1289556102 \ " , \ n \ " _rev \ " : \ " 1289556102 \ " , \ n \ " _key \ " : \ " 1289556102 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReplaceDocument " <nl> } <nl> ] , <nl> <nl> " notes " : " Partially updates the document identified by < em > document - handle < / em > . The body of the request must contain a JSON document with the attributes to patch ( the patch document ) . All attributes from the patch document will be added to the existing document if they do not yet exist , and overwritten in the existing document if they do exist there . < br > < br > Setting an attribute value to < em > null < / em > in the patch document will cause a value of < em > null < / em > be saved for the attribute by default . < br > < br > Optionally , the URL parameter < em > waitForSync < / em > can be used to force synchronisation of the document update operation to disk even in case that the < em > waitForSync < / em > flag had been disabled for the entire collection . Thus , the < em > waitForSync < / em > URL parameter can be used to force synchronisation of just specific operations . To use this , set the < em > waitForSync < / em > parameter to < em > true < / em > . If the < em > waitForSync < / em > parameter is not specified or set to < em > false < / em > , then the collection ' s default < em > waitForSync < / em > behavior is applied . The < em > waitForSync < / em > URL parameter cannot be used to disable synchronisation for collections that have a default < em > waitForSync < / em > value of < em > true < / em > . < br > < br > The body of the response contains a JSON object with the information about the handle and the revision . 
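Read back from Python (same assumptions as the previous sketches: the requests package, the localhost:8529 server, a placeholder handle), that response object is where the new revision comes from after a patch:
<pre><code class="python">
# Sketch: partial update via PATCH; keepNull=false drops attributes that the
# patch document sets to null, matching the example output below.
import requests

def patch_document(handle, patch, keep_null=True):
    r = requests.patch(
        "http://localhost:8529/_api/document/" + handle,
        params={"keepNull": str(keep_null).lower()},
        json=patch,
    )
    r.raise_for_status()
    return r.json()["_rev"]   # the new document revision reported by the server

# e.g. patch_document("products/example-key", {"hello": None}, keep_null=False)
</code></pre>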
The attribute < em > _id < / em > contains the known < em > document - handle < / em > of the updated document , the attribute < em > _rev < / em > contains the new document revision . < br > < br > If the document does not exist , then a < em > HTTP 404 < / em > is returned and the body of the response contains an error document . < br > < br > You can conditionally update a document based on a target revision id by using either the < em > rev < / em > URL parameter or the < em > if - match < / em > HTTP header . To control the update behavior in case there is a revision mismatch , you can use the < em > policy < / em > parameter . This is the same as when replacing documents ( see replacing documents for details ) . < br > < br > " , <nl> " summary " : " Patch document " , <nl> " httpMethod " : " PATCH " , <nl> - " examples " : " < br > < br > patches an existing document with new content . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290872919 \ n { \ n \ " hello \ " : \ " world \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1291200599 \ " \ nlocation : / _db / _system / _api / document / products / 1290872919 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290872919 \ " , \ n \ " _rev \ " : \ " 1291200599 \ " , \ n \ " _key \ " : \ " 1290872919 \ " \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290872919 \ n { \ n \ " numbers \ " : { \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 , \ n \ " empty \ " : null \ n } \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1291790423 \ " \ nlocation : / _db / _system / _api / document / products / 1290872919 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290872919 \ " , \ n \ " _rev \ " : \ " 1291790423 \ " , \ n \ " _key \ " : \ " 1290872919 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290872919 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1291790423 \ " \ n \ n { \ n \ " one \ " : \ " world \ " , \ n \ " hello \ " : \ " world \ " , \ n \ " numbers \ " : { \ n \ " empty \ " : null , \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 \ n } , \ n \ " _id \ " : \ " products / 1290872919 \ " , \ n \ " _rev \ " : \ " 1291790423 \ " , \ n \ " _key \ " : \ " 1290872919 \ " \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290872919 ? keepNull = false \ n { \ n \ " hello \ " : null , \ n \ " numbers \ " : { \ n \ " four \ " : 4 \ n } \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1292249175 \ " \ nlocation : / _db / _system / _api / document / products / 1290872919 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290872919 \ " , \ n \ " _rev \ " : \ " 1292249175 \ " , \ n \ " _key \ " : \ " 1290872919 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290872919 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1292249175 \ " \ n \ n { \ n \ " one \ " : \ " world \ " , \ n \ " numbers \ " : { \ n \ " empty \ " : null , \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 , \ n \ " four \ " : 4 \ n } , \ n \ " _id \ " : \ " products / 1290872919 \ " , \ n \ " _rev \ " : \ " 1292249175 \ " , \ n \ " _key \ " : \ " 1290872919 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > patches an existing document with new content . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290670214 \ n { \ n \ " hello \ " : \ " world \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1290997894 \ " \ nlocation : / _db / _system / _api / document / products / 1290670214 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290670214 \ " , \ n \ " _rev \ " : \ " 1290997894 \ " , \ n \ " _key \ " : \ " 1290670214 \ " \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290670214 \ n { \ n \ " numbers \ " : { \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 , \ n \ " empty \ " : null \ n } \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1291587718 \ " \ nlocation : / _db / _system / _api / document / products / 1290670214 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290670214 \ " , \ n \ " _rev \ " : \ " 1291587718 \ " , \ n \ " _key \ " : \ " 1290670214 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290670214 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1291587718 \ " \ n \ n { \ n \ " one \ " : \ " world \ " , \ n \ " hello \ " : \ " world \ " , \ n \ " numbers \ " : { \ n \ " empty \ " : null , \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 \ n } , \ n \ " _id \ " : \ " products / 1290670214 \ " , \ n \ " _rev \ " : \ " 1291587718 \ " , \ n \ " _key \ " : \ " 1290670214 \ " \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290670214 ? keepNull = false \ n { \ n \ " hello \ " : null , \ n \ " numbers \ " : { \ n \ " four \ " : 4 \ n } \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1292046470 \ " \ nlocation : / _db / _system / _api / document / products / 1290670214 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1290670214 \ " , \ n \ " _rev \ " : \ " 1292046470 \ " , \ n \ " _key \ " : \ " 1290670214 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1290670214 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1292046470 \ " \ n \ n { \ n \ " one \ " : \ " world \ " , \ n \ " numbers \ " : { \ n \ " empty \ " : null , \ n \ " one \ " : 1 , \ n \ " two \ " : 2 , \ n \ " three \ " : 3 , \ n \ " four \ " : 4 \ n } , \ n \ " _id \ " : \ " products / 1290670214 \ " , \ n \ " _rev \ " : \ " 1292046470 \ " , \ n \ " _key \ " : \ " 1290670214 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " PatchDocument " <nl> } <nl> ] , <nl> <nl> " notes " : " The body of the response contains a JSON object with the information about the handle and the revision . The attribute < em > _id < / em > contains the known < em > document - handle < / em > of the deleted document , the attribute < em > _rev < / em > contains the document revision . < br > < br > If the < em > waitForSync < / em > parameter is not specified or set to < em > false < / em > , then the collection ' s default < em > waitForSync < / em > behavior is applied . The < em > waitForSync < / em > URL parameter cannot be used to disable synchronisation for collections that have a default < em > waitForSync < / em > value of < em > true < / em > . < br > < br > " , <nl> " summary " : " Deletes document " , <nl> " httpMethod " : " DELETE " , <nl> - " examples " : " < br > < br > Using document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1292838999 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1292838999 \ " , \ n \ " _rev \ " : \ " 1292838999 \ " , \ n \ " _key \ " : \ " 1292838999 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1293559895 \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / 1293559895 not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > < br > < br > Revision conflict : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - header ' If - Match : \ " 1294739543 \ " ' - - dump - http : / / localhost : 8529 / _api / document / products / 1294411863 \ n \ nHTTP / 1 . 1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1294411863 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1294411863 \ " , \ n \ " _rev \ " : \ " 1294411863 \ " , \ n \ " _key \ " : \ " 1294411863 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Using document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1292636294 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " products / 1292636294 \ " , \ n \ " _rev \ " : \ " 1292636294 \ " , \ n \ " _key \ " : \ " 1292636294 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Unknown document handle : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / document / products / 1293357190 \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " errorMessage \ " : \ " document / _api / document / products / 1293357190 not found \ " , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 1202 \ n } \ n < / code > < / pre > < br > < br > < br > Revision conflict : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - header ' If - Match : \ " 1294536838 \ " ' - - dump - http : / / localhost : 8529 / _api / document / products / 1294209158 \ n \ nHTTP / 1 . 1 412 Precondition Failed \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1294209158 \ " \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 412 , \ n \ " errorNum \ " : 1200 , \ n \ " errorMessage \ " : \ " precondition failed \ " , \ n \ " _id \ " : \ " products / 1294209158 \ " , \ n \ " _rev \ " : \ " 1294209158 \ " , \ n \ " _key \ " : \ " 1294209158 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " DeletesDocument " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / edge . json <nl> ppp b / js / apps / system / aardvark / api - docs / edge . json <nl> <nl> " notes " : " Creates a new edge document in the collection named < em > collection < / em > . A JSON representation of the document must be passed as the body of the POST request . < br > < br > The < em > from < / em > and < em > to < / em > handles are immutable once the edge has been created . < br > < br > In all other respects the method works like < em > POST / document < / em > . < br > < br > " , <nl> " summary " : " Create edge " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Create an edge and read it back : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edge / ? collection = edges & from = vertices / 1 & to = vertices / 2 \ n { \ n \ " name \ " : \ " Emil \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1297229911 \ " \ nlocation : / _db / _system / _api / document / edges / 1297229911 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " edges / 1297229911 \ " , \ n \ " _rev \ " : \ " 1297229911 \ " , \ n \ " _key \ " : \ " 1297229911 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edge / edges / 1297229911 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1297229911 \ " \ n \ n { \ n \ " name \ " : \ " Emil \ " , \ n \ " _id \ " : \ " edges / 1297229911 \ " , \ n \ " _rev \ " : \ " 1297229911 \ " , \ n \ " _key \ " : \ " 1297229911 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 2 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Create an edge and read it back : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edge / ? collection = edges & from = vertices / 1 & to = vertices / 2 \ n { \ n \ " name \ " : \ " Emil \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1296830598 \ " \ nlocation : / _db / _system / _api / document / edges / 1296830598 \ n \ n { \ n \ " error \ " : false , \ n \ " _id \ " : \ " edges / 1296830598 \ " , \ n \ " _rev \ " : \ " 1296830598 \ " , \ n \ " _key \ " : \ " 1296830598 \ " \ n } \ nshell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edge / edges / 1296830598 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : \ " 1296830598 \ " \ n \ n { \ n \ " name \ " : \ " Emil \ " , \ n \ " _id \ " : \ " edges / 1296830598 \ " , \ n \ " _rev \ " : \ " 1296830598 \ " , \ n \ " _key \ " : \ " 1296830598 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 2 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateEdge " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / edges . json <nl> ppp b / js / apps / system / aardvark / api - docs / edges . json <nl> <nl> " notes " : " Returns the list of edges starting or ending in the vertex identified by < em > vertex - handle < / em > . < br > < br > " , <nl> " summary " : " Read in - or outbound edges " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Any direction < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 6 \ " , \ n \ " _key \ " : \ " 6 \ " , \ n \ " _rev \ " : \ " 969025623 \ " , \ n \ " _from \ " : \ " vertices / 2 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v2 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 7 \ " , \ n \ " _key \ " : \ " 7 \ " , \ n \ " _rev \ " : \ " 969549911 \ " , \ n \ " _from \ " : \ " vertices / 4 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v4 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 5 \ " , \ n \ " _key \ " : \ " 5 \ " , \ n \ " _rev \ " : \ " 968501335 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 3 \ " , \ n \ " $ label \ " : \ " v1 - > v3 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > In edges < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 & direction = in \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 6 \ " , \ n \ " _key \ " : \ " 6 \ " , \ n \ " _rev \ " : \ " 973940823 \ " , \ n \ " _from \ " : \ " vertices / 2 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v2 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 7 \ " , \ n \ " _key \ " : \ " 7 \ " , \ n \ " _rev \ " : \ " 974465111 \ " , \ n \ " _from \ " : \ " vertices / 4 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v4 - > v1 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Out edges < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 & direction = out \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 5 \ " , \ n \ " _key \ " : \ " 5 \ " , \ n \ " _rev \ " : \ " 978135127 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 3 \ " , \ n \ " $ label \ " : \ " v1 - > v3 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Any direction < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 6 \ " , \ n \ " _key \ " : \ " 6 \ " , \ n \ " _rev \ " : \ " 971247750 \ " , \ n \ " _from \ " : \ " vertices / 2 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v2 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 7 \ " , \ n \ " _key \ " : \ " 7 \ " , \ n \ " _rev \ " : \ " 971772038 \ " , \ n \ " _from \ " : \ " vertices / 4 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v4 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 5 \ " , \ n \ " _key \ " : \ " 5 \ " , \ n \ " _rev \ " : \ " 970723462 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 3 \ " , \ n \ " $ label \ " : \ " v1 - > v3 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > In edges < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 & direction = in \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 6 \ " , \ n \ " _key \ " : \ " 6 \ " , \ n \ " _rev \ " : \ " 975966342 \ " , \ n \ " _from \ " : \ " vertices / 2 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v2 - > v1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / 7 \ " , \ n \ " _key \ " : \ " 7 \ " , \ n \ " _rev \ " : \ " 976490630 \ " , \ n \ " _from \ " : \ " vertices / 4 \ " , \ n \ " _to \ " : \ " vertices / 1 \ " , \ n \ " $ label \ " : \ " v4 - > v1 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Out edges < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / edges / edges ? vertex = vertices / 1 & direction = out \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " edges / 5 \ " , \ n \ " _key \ " : \ " 5 \ " , \ n \ " _rev \ " : \ " 980160646 \ " , \ n \ " _from \ " : \ " vertices / 1 \ " , \ n \ " _to \ " : \ " vertices / 3 \ " , \ n \ " $ label \ " : \ " v1 - > v3 \ " \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadIn - OrOutboundEdges " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / endpoint . json <nl> ppp b / js / apps / system / aardvark / api - docs / endpoint . json <nl> <nl> " notes " : " Returns a list of all configured endpoints the server is listening on . For each endpoint , the list of allowed databases is returned too if set . < br > < br > The result is a JSON hash which has the endpoints as keys , and the list of mapped database names as values for each endpoint . < br > < br > If a list of mapped databases is empty , it means that all databases can be accessed via the endpoint . If a list of mapped databases contains more than one database name , this means that any of the databases might be accessed via the endpoint , and the first database in the list will be treated as the default database for the endpoint . The default database will be used when an incoming request does not specify a database name in the request explicitly . < br > < br > < em > * Note * < / em > : retrieving the list of all endpoints is allowed in the system database only . Calling this action in any other database will make the server return an error . < br > < br > " , <nl> " summary " : " Return list of all endpoints " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / endpoint \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n [ \ n { \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 30795 \ " , \ n \ " databases \ " : [ ] \ n } , \ n { \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8532 \ " , \ n \ " databases \ " : [ \ n \ " mydb1 \ " , \ n \ " mydb2 \ " \ n ] \ n } \ n ] \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / endpoint \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n [ \ n { \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 33382 \ " , \ n \ " databases \ " : [ ] \ n } , \ n { \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8532 \ " , \ n \ " databases \ " : [ \ n \ " mydb1 \ " , \ n \ " mydb2 \ " \ n ] \ n } \ n ] \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnListOfAllEndpoints " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / graph . json <nl> ppp b / js / apps / system / aardvark / api - docs / graph . json <nl> <nl> " notes " : " Creates a new graph . < br > < br > Returns an object with an attribute < em > graph < / em > containing a list of all graph properties . < br > < br > " , <nl> " summary " : " create graph " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / \ n { \ n \ " _key \ " : \ " graph \ " , \ n \ " vertices \ " : \ " vertices \ " , \ n \ " edges \ " : \ " edges \ " \ n } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : 983705687 \ n \ n { \ n \ " graph \ " : { \ n \ " _id \ " : \ " _graphs / graph \ " , \ n \ " _key \ " : \ " graph \ " , \ n \ " _rev \ " : \ " 983705687 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges \ " , \ n \ " from \ " : [ \ n \ " vertices \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices \ " \ n ] \ n } \ n ] \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / \ n { \ n \ " _key \ " : \ " graph \ " , \ n \ " vertices \ " : \ " vertices \ " , \ n \ " edges \ " : \ " edges \ " \ n } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ netag : 983502982 \ n \ n { \ n \ " graph \ " : { \ n \ " _id \ " : \ " _graphs / graph \ " , \ n \ " _key \ " : \ " graph \ " , \ n \ " _rev \ " : \ " 983502982 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges \ " , \ n \ " from \ " : [ \ n \ " vertices \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices \ " \ n ] \ n } \ n ] \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " createGraph " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > If < em > graph - name < / em > is specified , returns an object with an attribute < em > graph < / em > containing a JSON hash with all properties of the specified graph . < br > < br > If < em > graph - name < / em > is not specified , returns a list of graph objects . < br > < br > " , <nl> " summary " : " get the properties of a specific or all graphs " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 985016407 \ n \ n { \ n \ " graph \ " : { \ n \ " _id \ " : \ " _graphs / graph \ " , \ n \ " _key \ " : \ " graph \ " , \ n \ " _rev \ " : \ " 985016407 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges \ " , \ n \ " from \ " : [ \ n \ " vertices \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices \ " \ n ] \ n } \ n ] \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > get all graphs < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " graphs \ " : [ \ n { \ n \ " _id \ " : \ " _graphs / graph2 \ " , \ n \ " _key \ " : \ " graph2 \ " , \ n \ " _rev \ " : \ " 987703383 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges2 \ " , \ n \ " from \ " : [ \ n \ " vertices2 \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices2 \ " \ n ] \ n } \ n ] \ n } , \ n { \ n \ " _id \ " : \ " _graphs / graph1 \ " , \ n \ " _key \ " : \ " graph1 \ " , \ n \ " _rev \ " : \ " 986720343 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges1 \ " , \ n \ " from \ " : [ \ n \ " vertices1 \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices1 \ " \ n ] \ n } \ n ] \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 984813702 \ n \ n { \ n \ " graph \ " : { \ n \ " _id \ " : \ " _graphs / graph \ " , \ n \ " _key \ " : \ " graph \ " , \ n \ " _rev \ " : \ " 984813702 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges \ " , \ n \ " from \ " : [ \ n \ " vertices \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices \ " \ n ] \ n } \ n ] \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > get all graphs < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " graphs \ " : [ \ n { \ n \ " _id \ " : \ " _graphs / graph2 \ " , \ n \ " _key \ " : \ " graph2 \ " , \ n \ " _rev \ " : \ " 987500678 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges2 \ " , \ n \ " from \ " : [ \ n \ " vertices2 \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices2 \ " \ n ] \ n } \ n ] \ n } , \ n { \ n \ " _id \ " : \ " _graphs / graph1 \ " , \ n \ " _key \ " : \ " graph1 \ " , \ n \ " _rev \ " : \ " 986517638 \ " , \ n \ " edgeDefinitions \ " : [ \ n { \ n \ " collection \ " : \ " edges1 \ " , \ n \ " from \ " : [ \ n \ " vertices1 \ " \ n ] , \ n \ " to \ " : [ \ n \ " vertices1 \ " \ n ] \ n } \ n ] \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getThePropertiesOfASpecificOrAllGraphs " <nl> } <nl> ] , <nl> <nl> " notes " : " Creates a vertex in a graph . 
< br > < br > Returns an object with an attribute < em > vertex < / em > containing a list of all vertex properties . < br > < br > " , <nl> " summary " : " create vertex " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex \ n { \ n \ " _key \ " : \ " v1 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 992290903 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 992290903 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex \ n { \ n \ " _key \ " : \ " v1 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 991891590 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 991891590 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " createVertex " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns an object with an attribute < em > vertex < / em > containing a list of all vertex properties . < br > < br > " , <nl> " summary " : " get vertex " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 994125911 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 994125911 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 997003398 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 997003398 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getVertex " <nl> } <nl> ] , <nl> <nl> " notes " : " Replaces the vertex properties . < br > < br > Returns an object with an attribute < em > vertex < / em > containing a list of all vertex properties . < br > < br > " , <nl> " summary " : " update vertex " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : \ " val2 \ " \ n } \ n \ nHTTP / 1 . 
1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 998910039 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 998910039 \ " , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : \ " val2 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1001787526 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1001787526 \ " , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " updateVertex " <nl> } <nl> ] , <nl> <nl> " notes " : " Partially updates the vertex properties . < br > < br > Setting an attribute value to < em > null < / em > in the patch document will cause a value of < em > null < / em > be saved for the attribute by default . If the intention is to delete existing attributes with the patch command , the URL parameter < em > keepNull < / em > can be used with a value of < em > false < / em > . This will modify the behavior of the patch command to remove any attributes from the existing document that are contained in the patch document with an attribute value of < em > null < / em > . < br > < br > Returns an object with an attribute < em > vertex < / em > containing a list of all vertex properties . < br > < br > " , <nl> " summary " : " update vertex " , <nl> " httpMethod " : " PATCH " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : \ " vertexPatch \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1001269335 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1001269335 \ " , \ n \ " optional1 \ " : \ " vertexPatch \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : null \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1001793623 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1001793623 \ " , \ n \ " optional1 \ " : null \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : \ " vertexPatch \ " \ n } \ n \ nHTTP / 1 . 
1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1004146822 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1004146822 \ " , \ n \ " optional1 \ " : \ " vertexPatch \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ nshell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertex / v1 \ n { \ n \ " optional1 \ " : null \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1004671110 \ n \ n { \ n \ " vertex \ " : { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1004671110 \ " , \ n \ " optional1 \ " : null \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " updateVertex " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns a cursor . < br > < br > The call expects a JSON hash array as body to filter the result : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > batchSize < / em > : the batch size of the returned cursor < li > < em > limit < / em > : limit the result size < li > < em > count < / em > : return the total number of results ( default \ " false \ " ) < li > < em > filter < / em > : a optional filter < / ul > The attributes of filter < ul class = \ " swagger - list \ " > < li > < em > properties < / em > : filter by an array of vertex properties < / ul > The attributes of a property filter < ul class = \ " swagger - list \ " > < li > < em > key < / em > : filter the result vertices by a key value pair < li > < em > value < / em > : the value of the < em > key < / em > < li > < em > compare < / em > : a compare operator " , <nl> " summary " : " get vertices " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices \ n { \ n \ " batchSize \ " : 100 \ n } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v3 \ " , \ n \ " _key \ " : \ " v3 \ " , \ n \ " _rev \ " : \ " 1004415063 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v2 \ " , \ n \ " _key \ " : \ " v2 \ " , \ n \ " _rev \ " : \ " 1004021847 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v5 \ " , \ n \ " _key \ " : \ " v5 \ " , \ n \ " _rev \ " : \ " 1005201495 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1004808279 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1003628631 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices \ n { \ n \ " batchSize \ " : 100 \ n } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v3 \ " , \ n \ " _key \ " : \ " v3 \ " , \ n \ " _rev \ " : \ " 1007489158 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v2 \ " , \ n \ " _key \ " : \ " v2 \ " , \ n \ " _rev \ " : \ " 1007095942 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v5 \ " , \ n \ " _key \ " : \ " v5 \ " , \ n \ " _rev \ " : \ " 1008275590 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1007882374 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1006702726 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getVertices " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns a cursor . < br > < br > The call expects a JSON hash array as body to filter the result : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > batchSize < / em > : the batch size of the returned cursor < li > < em > limit < / em > : limit the result size < li > < em > count < / em > : return the total number of results ( default \ " false \ " ) < li > < em > filter < / em > : a optional filter < / ul > The attributes of filter < ul class = \ " swagger - list \ " > < li > < em > direction < / em > : Filter for inbound ( value \ " in \ " ) or outbound ( value \ " out \ " ) neighbors . Default value is \ " any \ " . < li > < em > labels < / em > : filter by an array of edge labels ( empty array means no restriction ) < li > < em > properties < / em > : filter neighbors by an array of edge properties < / ul > The attributes of a property filter < ul class = \ " swagger - list \ " > < li > < em > key < / em > : filter the result vertices by a key value pair < li > < em > value < / em > : the value of the < em > key < / em > < li > < em > compare < / em > : a compare operator " , <nl> " summary " : " get vertices " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " any \ " , \ " properties \ " : [ ] } } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1007233111 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1008412759 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Select vertices by direction and property filter < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " out \ " , \ " properties \ " : [ { \ " key \ " : \ " optional1 \ " , \ " value \ " : \ " val2 \ " , \ " compare \ " : \ " = = \ " } , ] } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1014310999 \ " , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1013131351 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " any \ " , \ " properties \ " : [ ] } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1010307206 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1011486854 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Select vertices by direction and property filter < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / vertices / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " out \ " , \ " properties \ " : [ { \ " key \ " : \ " optional1 \ " , \ " value \ " : \ " val2 \ " , \ " compare \ " : \ " = = \ " } , ] } } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " vertices / v4 \ " , \ n \ " _key \ " : \ " v4 \ " , \ n \ " _rev \ " : \ " 1017385094 \ " , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n { \ n \ " _id \ " : \ " vertices / v1 \ " , \ n \ " _key \ " : \ " v1 \ " , \ n \ " _rev \ " : \ " 1016205446 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getVertices " <nl> } <nl> ] , <nl> <nl> " notes " : " Creates an edge in a graph . < br > < br > The call expects a JSON hash array as body with the edge properties : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > _key < / em > : The name of the edge ( optional , if edge collection allows user defined keys ) . < li > < em > _from < / em > : The name of the from vertex . < li > < em > _to < / em > : The name of the to vertex . < li > < em > $ label < / em > : A label for the edge ( optional ) . < li > further optional attributes . < / ul > Returns an object with an attribute < em > edge < / em > containing the list of all edge properties . < br > < br > " , <nl> " summary " : " create edge " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge \ n { \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _from \ " : \ " vert2 \ " , \ n \ " _to \ " : \ " vert1 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1020274775 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1020274775 \ " , \ n \ " _from \ " : \ " vertices / vert2 \ " , \ n \ " _to \ " : \ " vertices / vert1 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge \ n { \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _from \ " : \ " vert2 \ " , \ n \ " _to \ " : \ " vert1 \ " , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1023152262 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1023152262 \ " , \ n \ " _from \ " : \ " vertices / vert2 \ " , \ n \ " _to \ " : \ " vertices / vert1 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " createEdge " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns an object with an attribute < em > edge < / em > containing a list of all edge properties . < br > < br > " , <nl> " summary " : " get edge " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1023158359 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1023158359 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1026035846 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1026035846 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getEdge " <nl> } <nl> ] , <nl> <nl> " notes " : " Replaces the optional edge properties . < br > < br > The call expects a JSON hash array as body with the new edge properties . < br > < br > Returns an object with an attribute < em > edge < / em > containing a list of all edge properties . < br > < br > " , <nl> " summary " : " update edge " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n { \ n \ " optional1 \ " : \ " val2 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1030170711 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1030170711 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n { \ n \ " optional1 \ " : \ " val2 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1033244806 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1033244806 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val2 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " updateEdge " <nl> } <nl> ] , <nl> <nl> " notes " : " Partially updates the edge properties . < br > < br > Setting an attribute value to < em > null < / em > in the patch document will cause a value of < em > null < / em > be saved for the attribute by default . 
If the intention is to delete existing attributes with the patch command , the URL parameter < em > keepNull < / em > can be used with a value of < em > false < / em > . This will modify the behavior of the patch command to remove any attributes from the existing document that are contained in the patch document with an attribute value of < em > null < / em > . < br > < br > Returns an object with an attribute < em > edge < / em > containing a list of all edge properties . < br > < br > " , <nl> " summary " : " update edge " , <nl> " httpMethod " : " PATCH " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n { \ n \ " optional3 \ " : \ " val3 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1036527703 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1036527703 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " , \ n \ " optional3 \ " : \ " val3 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PATCH - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edge / edge1 \ n { \ n \ " optional3 \ " : \ " val3 \ " \ n } \ n \ nHTTP / 1 . 1 202 Accepted \ ncontent - type : application / json ; charset = utf - 8 \ netag : 1036783750 \ n \ n { \ n \ " edge \ " : { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1036783750 \ " , \ n \ " _from \ " : \ " vertices / vert1 \ " , \ n \ " _to \ " : \ " vertices / vert2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " , \ n \ " optional3 \ " : \ " val3 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 202 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " updateEdge " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns a cursor . < br > < br > The call expects a JSON hash array as body to filter the result : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > batchSize < / em > : the batch size of the returned cursor < li > < em > limit < / em > : limit the result size < li > < em > count < / em > : return the total number of results ( default \ " false \ " ) < li > < em > filter < / em > : a optional filter < / ul > The attributes of filter < ul class = \ " swagger - list \ " > < li > < em > labels < / em > : filter by an array of edge labels < li > < em > properties < / em > : filter by an array of edge properties < / ul > The attributes of a property filter < ul class = \ " swagger - list \ " > < li > < em > key < / em > : filter the result edges by a key value pair < li > < em > value < / em > : the value of the < em > key < / em > < li > < em > compare < / em > : a compare operator " , <nl> " summary " : " get edges " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edges \ n { \ n \ " batchSize \ " : 100 \ n } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " edges / edge2 \ " , \ n \ " _key \ " : \ " edge2 \ " , \ n \ " _rev \ " : \ " 1041442903 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v3 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge3 \ " , \ n \ " _key \ " : \ " edge3 \ " , \ n \ " _rev \ " : \ " 1041967191 \ " , \ n \ " _from \ " : \ " vertices / v2 \ " , \ n \ " _to \ " : \ " vertices / v4 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1040918615 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge4 \ " , \ n \ " _key \ " : \ " edge4 \ " , \ n \ " _rev \ " : \ " 1042491479 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v5 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edges \ n { \ n \ " batchSize \ " : 100 \ n } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " edges / edge2 \ " , \ n \ " _key \ " : \ " edge2 \ " , \ n \ " _rev \ " : \ " 1041436806 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v3 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge3 \ " , \ n \ " _key \ " : \ " edge3 \ " , \ n \ " _rev \ " : \ " 1041961094 \ " , \ n \ " _from \ " : \ " vertices / v2 \ " , \ n \ " _to \ " : \ " vertices / v4 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1040912518 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge4 \ " , \ n \ " _key \ " : \ " edge4 \ " , \ n \ " _rev \ " : \ " 1042485382 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v5 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getEdges " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Returns a cursor . 
< br > < br > The call expects a JSON hash array as body to filter the result : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > batchSize < / em > : the batch size of the returned cursor < li > < em > limit < / em > : limit the result size < li > < em > count < / em > : return the total number of results ( default \ " false \ " ) < li > < em > filter < / em > : a optional filter < / ul > The attributes of filter < ul class = \ " swagger - list \ " > < li > < em > direction < / em > : Filter for inbound ( value \ " in \ " ) or outbound ( value \ " out \ " ) neighbors . Default value is \ " any \ " . < li > < em > labels < / em > : filter by an array of edge labels < li > < em > properties < / em > : filter neighbors by an array of properties < / ul > The attributes of a property filter < ul class = \ " swagger - list \ " > < li > < em > key < / em > : filter the result vertices by a key value pair < li > < em > value < / em > : the value of the < em > key < / em > < li > < em > compare < / em > : a compare operator " , <nl> " summary " : " get edges " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edges / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " any \ " } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1046816855 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge3 \ " , \ n \ " _key \ " : \ " edge3 \ " , \ n \ " _rev \ " : \ " 1047865431 \ " , \ n \ " _from \ " : \ " vertices / v2 \ " , \ n \ " _to \ " : \ " vertices / v4 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / graph / graph / edges / v2 \ n { \ " batchSize \ " : 100 , \ " filter \ " : { \ " direction \ " : \ " any \ " } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " edges / edge1 \ " , \ n \ " _key \ " : \ " edge1 \ " , \ n \ " _rev \ " : \ " 1046810758 \ " , \ n \ " _from \ " : \ " vertices / v1 \ " , \ n \ " _to \ " : \ " vertices / v2 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } , \ n { \ n \ " _id \ " : \ " edges / edge3 \ " , \ n \ " _key \ " : \ " edge3 \ " , \ n \ " _rev \ " : \ " 1047859334 \ " , \ n \ " _from \ " : \ " vertices / v2 \ " , \ n \ " _to \ " : \ " vertices / v4 \ " , \ n \ " $ label \ " : null , \ n \ " optional1 \ " : \ " val1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > " , <nl> " nickname " : " getEdges " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / index . json <nl> ppp b / js / apps / system / aardvark / api - docs / index . 
json <nl> <nl> " notes " : " < br > < br > Creates a cap constraint for the collection < em > collection - name < / em > , if it does not already exist . Expects an object containing the index details . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > type < / em > : must be equal to < em > \ " cap \ " < / em > . < li > < em > size < / em > : The maximal number of documents for the collection . If specified , the value must be greater than zero . < li > < em > byteSize < / em > : The maximal size of the active document data in the collection ( in bytes ) . If specified , the value must be at least 16384 . < / ul > < em > * Note * < / em > : The cap constraint does not index particular attributes of the documents in a collection , but limits the number of documents in the collection to a maximum value . The cap constraint thus does not support attribute names specified in the < em > fields < / em > attribute nor uniqueness of any kind via the < em > unique < / em > attribute . < br > < br > It is allowed to specify either < em > size < / em > or < em > byteSize < / em > , or both at the same time . If both are specified , then the automatic document removal will be triggered by the first non - met constraint . < br > < br > " , <nl> " summary " : " Create cap constraint " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Creating a cap constraint < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " cap \ " , \ " size \ " : 10 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1049962583 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique \ " : false , \ n \ " size \ " : 10 , \ n \ " byteSize \ " : 0 , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Creating a cap constraint < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " cap \ " , \ " size \ " : 10 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1049956486 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique \ " : false , \ n \ " size \ " : 10 , \ n \ " byteSize \ " : 0 , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateCapConstraint " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Creates a geo - spatial index in the collection < em > collection - name < / em > , if it does not already exist . Expects an object containing the index details . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > type < / em > : must be equal to < em > \ " geo \ " < / em > . < li > < em > fields < / em > : A list with one or two attribute paths . If it is a list with one attribute path < em > location < / em > , then a geo - spatial index on all documents is created using < em > location < / em > as path to the coordinates . The value of the attribute must be a list with at least two double values . 
The list must contain the latitude ( first value ) and the longitude ( second value ) . All documents , which do not have the attribute path or with value that are not suitable , are ignored . If it is a list with two attribute paths < em > latitude < / em > and < em > longitude < / em > , then a geo - spatial index on all documents is created using < em > latitude < / em > and < em > longitude < / em > as paths the latitude and the longitude . The value of the attribute < em > latitude < / em > and of the attribute < em > longitude < / em > must a double . All documents , which do not have the attribute paths or which values are not suitable , are ignored . < li > < em > geoJson < / em > : If a geo - spatial index on a < em > location < / em > is constructed and < em > geoJson < / em > is < em > true < / em > , then the order within the list is longitude followed by latitude . This corresponds to the format described in http : / / geojson . org / geojson - spec . html # positions < li > < em > constraint < / em > : If < em > constraint < / em > is < em > true < / em > , then a geo - spatial constraint is created . The constraint is a non - unique variant of the index . < em > * Note * < / em > : It is also possible to set the < em > unique < / em > attribute instead of the < em > constraint < / em > attribute . < li > < em > ignoreNull < / em > : If a geo - spatial constraint is created and < em > ignoreNull < / em > is true , then documents with a null in < em > location < / em > or at least one null in < em > latitude < / em > or < em > longitude < / em > are ignored . < / ul > < em > * Note * < / em > : Unique indexes on non - shard keys are not supported in a cluster . < br > < br > " , <nl> " summary " : " Create geo - spatial index " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Creating a geo index with a location attribute : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " geo \ " , \ " fields \ " : [ \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1050486871 \ " , \ n \ " type \ " : \ " geo1 \ " , \ n \ " unique \ " : false , \ n \ " geoJson \ " : false , \ n \ " constraint \ " : false , \ n \ " ignoreNull \ " : false , \ n \ " fields \ " : [ \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Creating a geo index with latitude and longitude attributes : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " geo \ " , \ " fields \ " : [ \ " e \ " , \ " f \ " ] } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1051011159 \ " , \ n \ " type \ " : \ " geo2 \ " , \ n \ " unique \ " : false , \ n \ " constraint \ " : false , \ n \ " ignoreNull \ " : false , \ n \ " fields \ " : [ \ n \ " e \ " , \ n \ " f \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Creating a geo index with a location attribute : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " geo \ " , \ " fields \ " : [ \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1050480774 \ " , \ n \ " type \ " : \ " geo1 \ " , \ n \ " unique \ " : false , \ n \ " geoJson \ " : false , \ n \ " constraint \ " : false , \ n \ " ignoreNull \ " : false , \ n \ " fields \ " : [ \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Creating a geo index with latitude and longitude attributes : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " geo \ " , \ " fields \ " : [ \ " e \ " , \ " f \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1051005062 \ " , \ n \ " type \ " : \ " geo2 \ " , \ n \ " unique \ " : false , \ n \ " constraint \ " : false , \ n \ " ignoreNull \ " : false , \ n \ " fields \ " : [ \ n \ " e \ " , \ n \ " f \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateGeo - spatialIndex " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Creates a hash index for the collection < em > collection - name < / em > , if it does not already exist . The call expects an object containing the index details . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > type < / em > : must be equal to < em > \ " hash \ " < / em > . < li > < em > fields < / em > : A list of attribute paths . < li > < em > unique < / em > : If < em > true < / em > , then create a unique index . < / ul > < em > * Note * < / em > : unique indexes on non - shard keys are not supported in a cluster . < br > < br > " , <nl> " summary " : " Create hash index " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Creating an unique constraint : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " hash \ " , \ " unique \ " : true , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1051535447 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : true , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Creating a hash index : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " hash \ " , \ " unique \ " : false , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1052059735 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Creating an unique constraint : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " hash \ " , \ " unique \ " : true , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1051529350 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : true , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Creating a hash index : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " hash \ " , \ " unique \ " : false , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1052053638 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateHashIndex " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Creates a skip - list index for the collection < em > collection - name < / em > , if it does not already exist . The call expects an object containing the index details . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > type < / em > : must be equal to < em > \ " skiplist \ " < / em > . < li > < em > fields < / em > : A list of attribute paths . < li > < em > unique < / em > : If < em > true < / em > , then create a unique index . < / ul > < em > * Note * < / em > : unique indexes on non - shard keys are not supported in a cluster . 
< br > < br > " , <nl> " summary " : " Create skip list " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Creating a skiplist : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " skiplist \ " , \ " unique \ " : false , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1052584023 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Creating a skiplist : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " skiplist \ " , \ " unique \ " : false , \ " fields \ " : [ \ " a \ " , \ " b \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1052577926 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateSkipList " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Creates a fulltext index for the collection < em > collection - name < / em > , if it does not already exist . The call expects an object containing the index details . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > type < / em > : must be equal to < em > \ " fulltext \ " < / em > . < li > < em > fields < / em > : A list of attribute names . Currently , the list is limited to exactly one attribute , so the value of < em > fields < / em > should look like this for example : < em > [ \ " text \ " ] < / em > . < li > < em > minLength < / em > : Minimum character length of words to index . Will default to a server - defined value if unspecified . It is thus recommended to set this value explicitly when creating the index . " , <nl> " summary " : " Create fulltext index " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > Creating a fulltext index : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " fulltext \ " , \ " fields \ " : [ \ " text \ " ] } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1053042775 \ " , \ n \ " type \ " : \ " fulltext \ " , \ n \ " unique \ " : false , \ n \ " minLength \ " : 2 , \ n \ " fields \ " : [ \ n \ " text \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Creating a fulltext index : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index ? collection = products \ n { \ " type \ " : \ " fulltext \ " , \ " fields \ " : [ \ " text \ " ] } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1053036678 \ " , \ n \ " type \ " : \ " fulltext \ " , \ n \ " unique \ " : false , \ n \ " minLength \ " : 2 , \ n \ " fields \ " : [ \ n \ " text \ " \ n ] , \ n \ " isNewlyCreated \ " : true , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " CreateFulltextIndex " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Deletes an index with < em > index - handle < / em > . < br > < br > " , <nl> " summary " : " Delete index " , <nl> " httpMethod " : " DELETE " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index / products / 1053567063 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1053567063 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X DELETE - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / index / products / 1053560966 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " id \ " : \ " products / 1053560966 \ " , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " DeleteIndex " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / replication . json <nl> ppp b / js / apps / system / aardvark / api - docs / replication . json <nl> <nl> " notes " : " Returns the current state of the server ' s replication logger . The state will include information about whether the logger is running and about the last logged tick value . This tick value is important for incremental fetching of data . < br > < br > The state API can be called regardless of whether the logger is currently running or not . < br > < br > The body of the response contains a JSON object with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > state < / em > : the current logger state as a JSON hash array with the following sub - attributes : - < em > running < / em > : whether or not the logger is running - < em > lastLogTick < / em > : the tick value of the latest tick the logger has logged . This value can be used for incremental fetching of log data . - < em > totalEvents < / em > : total number of events logged since the server was started . 
The value is not reset between multiple stops and re - starts of the logger . - < em > time < / em > : the current date and time on the logger server < li > < em > server < / em > : a JSON hash with the following sub - attributes : - < em > version < / em > : the logger server ' s version - < em > serverId < / em > : the logger server ' s id " , <nl> " summary " : " Return replication logger state " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Returns the state of the replication logger . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - state \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1312041047 \ " , \ n \ " totalEvents \ " : 8099 , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 36Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } , \ n \ " clients \ " : [ ] \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Returns the state of the replication logger . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - state \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1309937798 \ " , \ n \ " totalEvents \ " : 8094 , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 45Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } , \ n \ " clients \ " : [ ] \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnReplicationLoggerState " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns data from the server ' s replication log . This method can be called by replication clients after an initial synchronization of data . The method will return all \ " recent \ " log entries from the logger server , and the clients can replay and apply these entries locally so they get to the same data state as the logger server . < br > < br > Clients can call this method repeatedly to incrementally fetch all changes from the logger server . In this case , they should provide the < em > from < / em > value so they will only get returned the log events since their last fetch . < br > < br > When the < em > from < / em > URL parameter is not used , the logger server will return log entries starting at the beginning of its replication log . When the < em > from < / em > parameter is used , the logger server will only return log entries which have higher tick values than the specified < em > from < / em > value ( note : the log entry with a tick value equal to < em > from < / em > will be excluded ) . Use the < em > from < / em > value when incrementally fetching log data . < br > < br > The < em > to < / em > URL parameter can be used to optionally restrict the upper bound of the result to a certain tick value . If used , the result will contain only log events with tick values up to ( including ) < em > to < / em > . In incremental fetching , there is no need to use the < em > to < / em > parameter . 
It only makes sense in special situations , when only parts of the change log are required . < br > < br > The < em > chunkSize < / em > URL parameter can be used to control the size of the result . It must be specified in bytes . The < em > chunkSize < / em > value will only be honored approximately . Otherwise a too low < em > chunkSize < / em > value could cause the server to not be able to put just one log entry into the result and return it . Therefore , the < em > chunkSize < / em > value will only be consulted after a log entry has been written into the result . If the result size is then bigger than < em > chunkSize < / em > , the server will respond with as many log entries as there are in the response already . If the result size is still smaller than < em > chunkSize < / em > , the server will try to return more data if there ' s more data left to return . < br > < br > If < em > chunkSize < / em > is not specified , some server - side default value will be used . < br > < br > The < em > Content - Type < / em > of the result is < em > application / x - arango - dump < / em > . This is an easy - to - process format , with all log events going onto separate lines in the response body . Each log event itself is a JSON hash , with at least the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > tick < / em > : the log event tick value < li > < em > type < / em > : the log event type < / ul > Individual log events will also have additional attributes , depending on the event type . A few common attributes which are used for multiple events types are : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > cid < / em > : id of the collection the event was for < li > < em > tid < / em > : id of the transaction the event was contained in < li > < em > key < / em > : document key < li > < em > rev < / em > : document revision id < li > < em > data < / em > : the original document data < / ul > A more detailed description of the individual replication event types and their data structures can be found in the manual . < br > < br > The response will also contain the following HTTP headers : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > x - arango - replication - active < / em > : whether or not the logger is active . Clients can use this flag as an indication for their polling frequency . If the logger is not active and there are no more replication events available , it might be sensible for a client to abort , or to go to sleep for a long time and try again later to check whether the logger has been activated . < li > < em > x - arango - replication - lastincluded < / em > : the tick value of the last included value in the result . In incremental log fetching , this value can be used as the < em > from < / em > value for the following request . < em > * Note * < / em > that if the result is empty , the value will be < em > 0 < / em > . This value should not be used as < em > from < / em > value by clients in the next request ( otherwise the server would return the log events from the start of the log again ) . < li > < em > x - arango - replication - lasttick < / em > : the last tick value the logger server has logged ( not necessarily included in the result ) . By comparing the the last tick and last included tick values , clients have an approximate indication of how many events there are still left to fetch . 
< li > < em > x - arango - replication - checkmore < / em > : whether or not there already exists more log data which the client could fetch immediately . If there is more log data available , the client could call < em > logger - follow < / em > again with an adjusted < em > from < / em > value to fetch remaining log entries until there are no more . If there isn ' t any more log data to fetch , the client might decide to go to sleep for a while before calling the logger again . < / ul > < em > * Note * < / em > : this method is not supported on a coordinator in a cluster . < br > < br > " , <nl> " summary " : " Returns log entries " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > No log events available : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1312041047 \ n \ nHTTP / 1 . 1 204 No Content \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 0 \ nx - arango - replication - lasttick : 1312041047 \ n \ n < / code > < / pre > < br > < br > < br > A few log events : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1312237655 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 1313679447 \ nx - arango - replication - lasttick : 1313679447 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1312368727 \ \ \ " , \ \ \ " type \ \ \ " : 2000 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " collection \ \ \ " : { \ \ \ " version \ \ \ " : 5 , \ \ \ " type \ \ \ " : 2 , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " deleted \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " maximalSize \ \ \ " : 1048576 , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " waitForSync \ \ \ " : false } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1312696407 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312630871 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1312630871 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " flux compensator \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313024087 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312958551 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1312958551 \ \ \ " , \ \ \ " hp \ \ \ " : 5100 , \ \ \ " name \ \ \ " : \ \ \ " hybrid hovercraft \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313220695 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " database \ \ \ " : \ \ \ " 
75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1313155159 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313417303 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1313351767 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1313351767 \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313482839 \ \ \ " , \ \ \ " type \ \ \ " : 2001 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1312303191 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313613911 \ \ \ " , \ \ \ " type \ \ \ " : 2200 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 1313548375 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313679447 \ \ \ " , \ \ \ " type \ \ \ " : 2201 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 1313548375 \ \ \ " } \ \ n \ " \ n < / code > < / pre > < br > < br > < br > More events than would fit into the response : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1313679447 & chunkSize = 400 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : true \ nx - arango - replication - lastincluded : 1314138199 \ nx - arango - replication - lasttick : 1315121239 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1313810519 \ \ \ " , \ \ \ " type \ \ \ " : 2000 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1313744983 \ \ \ " , \ \ \ " collection \ \ \ " : { \ \ \ " version \ \ \ " : 5 , \ \ \ " type \ \ \ " : 2 , \ \ \ " cid \ \ \ " : \ \ \ " 1313744983 \ \ \ " , \ \ \ " deleted \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " maximalSize \ \ \ " : 1048576 , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " waitForSync \ \ \ " : false } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1314138199 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 75863 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1313744983 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1314072663 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1314072663 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " flux compensator \ \ \ " } } \ \ n \ " \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > No log events available : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1311641734 \ n \ nHTTP / 1 . 
1 204 No Content \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 0 \ nx - arango - replication - lasttick : 1311641734 \ n \ n < / code > < / pre > < br > < br > < br > A few log events : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1311641734 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 1313280134 \ nx - arango - replication - lasttick : 1313280134 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1311772806 \ \ \ " , \ \ \ " type \ \ \ " : 2000 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311707270 \ \ \ " , \ \ \ " collection \ \ \ " : { \ \ \ " version \ \ \ " : 5 , \ \ \ " type \ \ \ " : 2 , \ \ \ " cid \ \ \ " : \ \ \ " 1311707270 \ \ \ " , \ \ \ " deleted \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " maximalSize \ \ \ " : 1048576 , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " waitForSync \ \ \ " : false } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1311838342 \ \ \ " , \ \ \ " type \ \ \ " : 2001 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311707270 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1311969414 \ \ \ " , \ \ \ " type \ \ \ " : 2000 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " collection \ \ \ " : { \ \ \ " version \ \ \ " : 5 , \ \ \ " type \ \ \ " : 2 , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " deleted \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " maximalSize \ \ \ " : 1048576 , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " waitForSync \ \ \ " : false } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1312297094 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312231558 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1312231558 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " flux compensator \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1312624774 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312559238 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1312559238 \ \ \ " , \ \ \ " hp \ \ \ " : 5100 , \ \ \ " name \ \ \ " : \ \ \ " hybrid hovercraft \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1312821382 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312755846 \ \ \ " } 
\ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313017990 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1312952454 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p2 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1312952454 \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313083526 \ \ \ " , \ \ \ " type \ \ \ " : 2001 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1311903878 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313214598 \ \ \ " , \ \ \ " type \ \ \ " : 2200 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 1313149062 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313280134 \ \ \ " , \ \ \ " type \ \ \ " : 2201 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 1313149062 \ \ \ " } \ \ n \ " \ n < / code > < / pre > < br > < br > < br > More events than would fit into the response : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / logger - follow ? from = 1313280134 & chunkSize = 400 \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - active : true \ nx - arango - replication - checkmore : true \ nx - arango - replication - lastincluded : 1313738886 \ nx - arango - replication - lasttick : 1314721926 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1313411206 \ \ \ " , \ \ \ " type \ \ \ " : 2000 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1313345670 \ \ \ " , \ \ \ " collection \ \ \ " : { \ \ \ " version \ \ \ " : 5 , \ \ \ " type \ \ \ " : 2 , \ \ \ " cid \ \ \ " : \ \ \ " 1313345670 \ \ \ " , \ \ \ " deleted \ \ \ " : false , \ \ \ " doCompact \ \ \ " : true , \ \ \ " maximalSize \ \ \ " : 1048576 , \ \ \ " name \ \ \ " : \ \ \ " products \ \ \ " , \ \ \ " isVolatile \ \ \ " : false , \ \ \ " waitForSync \ \ \ " : false } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1313738886 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " database \ \ \ " : \ \ \ " 69766 \ \ \ " , \ \ \ " cid \ \ \ " : \ \ \ " 1313345670 \ \ \ " , \ \ \ " tid \ \ \ " : \ \ \ " 0 \ \ \ " , \ \ \ " key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1313673350 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " p1 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1313673350 \ \ \ " , \ \ \ " name \ \ \ " : \ \ \ " flux compensator \ \ \ " } } \ \ n \ " \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnsLogEntries " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns the list of collections and indexes available on the server . This list can be used by replication clients to initiate an initial sync with the server . < br > < br > The response will contain a JSON hash array with the < em > collection < / em > and < em > state < / em > and < em > tick < / em > attributes . < br > < br > < em > collections < / em > is a list of collections with the following sub - attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > parameters < / em > : the collection properties < li > < em > indexes < / em > : a list of the indexes of a the collection . Primary indexes and edges indexes are not included in this list . 
< / ul > The < em > state < / em > attribute contains the current state of the replication logger . It contains the following sub - attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > running < / em > : whether or not the replication logger is currently active < li > < em > lastLogTick < / em > : the value of the last tick the replication logger has written < li > < em > time < / em > : the current time on the server < / ul > Replication clients should note the < em > lastLogTick < / em > value returned . They can then fetch collections ' data using the dump method up to the value of lastLogTick , and query the continuous replication log for log events after this tick value . < br > < br > To create a full copy of the collections on the logger server , a replication client can execute these steps : < br > < br > < ul class = \ " swagger - list \ " > < li > call the < em > / inventory < / em > API method . This returns the < em > lastLogTick < / em > value and the list of collections and indexes from the logger server . < li > for each collection returned by < em > / inventory < / em > , create the collection locally and call < em > / dump < / em > to stream the collection data to the client , up to the value of < em > lastLogTick < / em > . After that , the client can create the indexes on the collections as they were reported by < em > / inventory < / em > . < / ul > If the clients wants to continuously stream replication log events from the logger server , the following additional steps need to be carried out : < br > < br > < ul class = \ " swagger - list \ " > < li > the client should call < em > / logger - follow < / em > initially to fetch the first batch of replication events that were logged after the client ' s call to < em > / inventory < / em > . The call to < em > / logger - follow < / em > should use a < em > from < / em > parameter with the value of the < em > lastLogTick < / em > as reported by < em > / inventory < / em > . The call to < em > / logger - follow < / em > will return the < em > x - arango - replication - lastincluded < / em > which will contain the last tick value included in the response . < li > the client can then continuously call < em > / logger - follow < / em > to incrementally fetch new replication events that occurred after the last transfer . Calls should use a < em > from < / em > parameter with the value of the < em > x - arango - replication - lastincluded < / em > header of the previous response . If there are no more replication events , the response will be empty and clients can go to sleep for a while and try again later . < / ul > < em > * Note * < / em > : on a coordinator , this request must have the URL parameter < em > DBserver < / em > which must be an ID of a DBserver . The very same request is forwarded synchronously to that DBserver . It is an error if this attribute is not bound in the coordinator case . < br > < br > " , <nl> " summary " : " Return inventory of collections and indexes " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / inventory \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 832710743 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " animals \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 646195287 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " better - example \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742533207 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Company \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742271063 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Customer \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 831793239 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " demo \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742795351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1275013207 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " female \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184901207 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184639063 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742664279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , 
\ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1275144279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " male \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1210722391 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " otherVertices \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 26159191 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " sessions \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 986130519 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " vertices1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 987506775 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " edges2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185032279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742402135 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1184770135 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742926423 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " 
: \ " 1185163351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1275275351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " relation \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } \ n ] , \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1315121239 \ " , \ n \ " totalEvents \ " : 8123 , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 38Z \ " \ n } , \ n \ " tick \ " : \ " 1315121239 \ " \ n } \ n < / code > < / pre > < br > < br > < br > With some additional indexes : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / inventory \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 832710743 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " animals \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 646195287 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " better - example \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742533207 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Company \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742271063 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Customer \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 831793239 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " demo \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742795351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1275013207 \ " 
, \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " female \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184901207 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184639063 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742664279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1315186775 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " IndexedCollection1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ \ n { \ n \ " id \ " : \ " 1315448919 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " name \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1315776599 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : true , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1315973207 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique \ " : false , \ n \ " size \ " : 500 , \ n \ " byteSize \ " : 0 \ n } \ n ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1316169815 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " IndexedCollection2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ \ n { \ n \ " id \ " : \ " 1316431959 \ " , \ n \ " type \ " : \ " fulltext \ " , \ n \ " unique \ " : false , \ n \ " minLength \ " : 10 , \ n \ " fields \ " : [ \ n \ " text \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1316694103 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1316890711 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique \ " : false , \ n \ " size \ " : 0 , \ n \ " byteSize \ " : 1048576 \ n } \ n ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1275144279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " male \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ 
n \ " cid \ " : \ " 1210722391 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " otherVertices \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 26159191 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " sessions \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 986130519 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " vertices1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 987506775 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " edges2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185032279 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742402135 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1184770135 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742926423 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185163351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1275275351 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " relation \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " 
: [ ] \ n } \ n ] , \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1315121239 \ " , \ n \ " totalEvents \ " : 8136 , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 38Z \ " \ n } , \ n \ " tick \ " : \ " 1317021783 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / inventory \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 832508038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " animals \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 646451334 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " better - example \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742527110 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Company \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742264966 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Customer \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 831590534 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " demo \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742789254 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1274810502 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " female \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184895110 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " 
parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184632966 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742658182 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1274941574 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " male \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1210519686 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " otherVertices \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 26153094 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " sessions \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 985927814 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " vertices1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 987304070 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " edges2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185026182 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742396038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1184764038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isVolatile \ " : false , \ n 
\ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742920326 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185157254 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1275072646 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " relation \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } \ n ] , \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1314721926 \ " , \ n \ " totalEvents \ " : 8118 , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 47Z \ " \ n } , \ n \ " tick \ " : \ " 1314721926 \ " \ n } \ n < / code > < / pre > < br > < br > < br > With some additional indexes : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / inventory \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " collections \ " : [ \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 832508038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " animals \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 646451334 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " better - example \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742527110 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Company \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742264966 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Customer \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 831590534 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " demo \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : 
false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742789254 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Electronics \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1274810502 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " female \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184895110 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1184632966 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanCity \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 742658182 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " Groceries \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1314787462 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " IndexedCollection1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ \ n { \ n \ " id \ " : \ " 1315049606 \ " , \ n \ " type \ " : \ " hash \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " name \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1315377286 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : true , \ n \ " fields \ " : [ \ n \ " a \ " , \ n \ " b \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1315573894 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique \ " : false , \ n \ " size \ " : 500 , \ n \ " byteSize \ " : 0 \ n } \ n ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1315770502 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " IndexedCollection2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ \ n { \ n \ " id \ " : \ " 1316032646 \ " , \ n \ " type \ " : \ " fulltext \ " , \ n \ " unique \ " : false , \ n \ " minLength \ " : 10 , \ n \ " fields \ " : [ \ n \ " text \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1316294790 \ " , \ n \ " type \ " : \ " skiplist \ " , \ n \ " unique \ " : false , \ n \ " fields \ " : [ \ n \ " a \ " \ n ] \ n } , \ n { \ n \ " id \ " : \ " 1316491398 \ " , \ n \ " type \ " : \ " cap \ " , \ n \ " unique 
\ " : false , \ n \ " size \ " : 0 , \ n \ " byteSize \ " : 1048576 \ n } \ n ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1274941574 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " male \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 1210519686 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " otherVertices \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 26153094 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " sessions \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 2 , \ n \ " cid \ " : \ " 985927814 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " vertices1 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 987304070 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " edges2 \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185026182 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " frenchHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742396038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " friend_of \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1184764038 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " germanHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 742920326 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " has_bought \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1185157254 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " 
maximalSize \ " : 1048576 , \ n \ " name \ " : \ " internationalHighway \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } , \ n { \ n \ " parameters \ " : { \ n \ " version \ " : 5 , \ n \ " type \ " : 3 , \ n \ " cid \ " : \ " 1275072646 \ " , \ n \ " deleted \ " : false , \ n \ " doCompact \ " : true , \ n \ " maximalSize \ " : 1048576 , \ n \ " name \ " : \ " relation \ " , \ n \ " isVolatile \ " : false , \ n \ " waitForSync \ " : false \ n } , \ n \ " indexes \ " : [ ] \ n } \ n ] , \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastLogTick \ " : \ " 1314721926 \ " , \ n \ " totalEvents \ " : 8131 , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 47Z \ " \ n } , \ n \ " tick \ " : \ " 1316622470 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnInventoryOfCollectionsAndIndexes " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns the data from the collection for the requested range . < br > < br > When the < em > from < / em > URL parameter is not used , collection events are returned from the beginning . When the < em > from < / em > parameter is used , the result will only contain collection entries which have higher tick values than the specified < em > from < / em > value ( note : the log entry with a tick value equal to < em > from < / em > will be excluded ) . < br > < br > The < em > to < / em > URL parameter can be used to optionally restrict the upper bound of the result to a certain tick value . If used , the result will only contain collection entries with tick values up to ( including ) < em > to < / em > . < br > < br > The < em > chunkSize < / em > URL parameter can be used to control the size of the result . It must be specified in bytes . The < em > chunkSize < / em > value will only be honored approximately . Otherwise a too low < em > chunkSize < / em > value could cause the server to not be able to put just one entry into the result and return it . Therefore , the < em > chunkSize < / em > value will only be consulted after an entry has been written into the result . If the result size is then bigger than < em > chunkSize < / em > , the server will respond with as many entries as there are in the response already . If the result size is still smaller than < em > chunkSize < / em > , the server will try to return more data if there ' s more data left to return . < br > < br > If < em > chunkSize < / em > is not specified , some server - side default value will be used . < br > < br > The < em > Content - Type < / em > of the result is < em > application / x - arango - dump < / em > . This is an easy - to - process format , with all entries going onto separate lines in the response body . < br > < br > Each line itself is a JSON hash , with at least the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > tick < / em > : the operation ' s tick attribute < li > < em > key < / em > : the key of the document / edge or the key used in the deletion operation < li > < em > rev < / em > : the revision id of the document / edge or the deletion operation < li > < em > data < / em > : the actual document / edge data for types 2300 and 2301 . The full document / edge data will be returned even for updates . < li > < em > type < / em > : the type of entry . 
Possible values for < em > type < / em > are : - 2300 : document insertion / update - 2301 : edge insertion / update - 2302 : document / edge deletion < / ul > < em > * Note * < / em > : there will be no distinction between inserts and updates when calling this method . < br > < br > " , <nl> " summary " : " Return data of a collection " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Empty collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / dump ? collection = testCollection \ n \ nHTTP / 1 . 1 204 No Content \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 0 \ n \ n < / code > < / pre > < br > < br > < br > Non - empty collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / dump ? collection = testCollection \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 1320888407 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1320233047 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " key \ \ \ " : \ \ \ " 123456 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1320167511 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " 123456 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1320167511 \ \ \ " , \ \ \ " c \ \ \ " : false , \ \ \ " b \ \ \ " : 1 , \ \ \ " d \ \ \ " : \ \ \ " additional value \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1320691799 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " key \ \ \ " : \ \ \ " foobar \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1320626263 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1320888407 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " key \ \ \ " : \ \ \ " abcdef \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1320822871 \ \ \ " } \ \ n \ " \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Empty collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / dump ? collection = testCollection \ n \ nHTTP / 1 . 1 204 No Content \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 0 \ n \ n < / code > < / pre > < br > < br > < br > Non - empty collection : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / dump ? collection = testCollection \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / x - arango - dump ; charset = utf - 8 \ nx - arango - replication - checkmore : false \ nx - arango - replication - lastincluded : 1319440518 \ n \ n \ " { \ \ \ " tick \ \ \ " : \ \ \ " 1318785158 \ \ \ " , \ \ \ " type \ \ \ " : 2300 , \ \ \ " key \ \ \ " : \ \ \ " 123456 \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1318719622 \ \ \ " , \ \ \ " data \ \ \ " : { \ \ \ " _key \ \ \ " : \ \ \ " 123456 \ \ \ " , \ \ \ " _rev \ \ \ " : \ \ \ " 1318719622 \ \ \ " , \ \ \ " c \ \ \ " : false , \ \ \ " b \ \ \ " : 1 , \ \ \ " d \ \ \ " : \ \ \ " additional value \ \ \ " } } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1319243910 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " key \ \ \ " : \ \ \ " foobar \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1319178374 \ \ \ " } \ \ n { \ \ \ " tick \ \ \ " : \ \ \ " 1319440518 \ \ \ " , \ \ \ " type \ \ \ " : 2302 , \ \ \ " key \ \ \ " : \ \ \ " abcdef \ \ \ " , \ \ \ " rev \ \ \ " : \ \ \ " 1319374982 \ \ \ " } \ \ n \ " \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnDataOfACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns the servers id . The id is also returned by other replication API methods , and this method is an easy means of determining a server ' s id . < br > < br > The body of the response is a JSON hash with the attribute < em > serverId < / em > . The server id is returned as a string . < br > < br > " , <nl> " summary " : " Return server id " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / server - id \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / server - id \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnServerId " <nl> } <nl> ] , <nl> <nl> " notes " : " Starts the replication applier . This will return immediately if the replication applier is already running . < br > < br > If the replication applier is not already running , the applier configuration will be checked , and if it is complete , the applier will be started in a background thread . This means that even if the applier will encounter any errors while running , they will not be reported in the response to this method . < br > < br > To detect replication applier errors after the applier was started , use the < em > / _api / replication / applier - state < / em > API instead . < br > < br > " , <nl> " summary " : " Start replication applier " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / replication / applier - start \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 38 : 26Z \ " , \ n \ " message \ " : \ " applier created \ " , \ n \ " failedConnects \ " : 0 \ n } , \ n \ " totalRequests \ " : 0 , \ n \ " totalFailedConnects \ " : 0 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " errorNum \ " : 0 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / replication / applier - start \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 03 : 38Z \ " , \ n \ " message \ " : \ " applier created \ " , \ n \ " failedConnects \ " : 0 \ n } , \ n \ " totalRequests \ " : 0 , \ n \ " totalFailedConnects \ " : 0 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " errorNum \ " : 0 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " StartReplicationApplier " <nl> } <nl> ] , <nl> <nl> " notes " : " Stops the replication applier . This will return immediately if the replication applier is not running . < br > < br > " , <nl> " summary " : " Stop replication applier " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / replication / applier - stop \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : false , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " , \ n \ " message \ " : \ " applier shut down \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 2 , \ n \ " totalFailedConnects \ " : 2 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " , \ n \ " errorMessage \ " : \ " could not connect to master at tcp : / / 127 . 0 . 0 . 1 : 8529 : Could not connect \ " , \ n \ " errorNum \ " : 1412 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 
0 - alpha4 \ " , \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - dump - http : / / localhost : 8529 / _api / replication / applier - stop \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : false , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " , \ n \ " message \ " : \ " applier shut down \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 2 , \ n \ " totalFailedConnects \ " : 2 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " , \ n \ " errorMessage \ " : \ " could not connect to master at tcp : / / 127 . 0 . 0 . 1 : 8529 : Could not connect \ " , \ n \ " errorNum \ " : 1412 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " StopReplicationApplier " <nl> } <nl> ] , <nl> <nl> " notes " : " Returns the state of the replication applier , regardless of whether the applier is currently running or not . < br > < br > The response is a JSON hash with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > state < / em > : a JSON hash with the following sub - attributes : - < em > running < / em > : whether or not the applier is active and running - < em > lastAppliedContinuousTick < / em > : the last tick value from the continuous replication log the applier has applied . - < em > lastProcessedContinuousTick < / em > : the last tick value from the continuous replication log the applier has processed . Regularly , the last applied and last processed tick values should be identical . For transactional operations , the replication applier will first process incoming log events before applying them , so the processed tick value might be higher than the applied tick value . This will be the case until the applier encounters the < em > transaction commit < / em > log event for the transaction . - < em > lastAvailableContinuousTick < / em > : the last tick value the logger server can provide . - < em > time < / em > : the time on the applier server . - < em > totalRequests < / em > : the total number of requests the applier has made to the endpoint . - < em > totalFailedConnects < / em > : the total number of failed connection attempts the applier has made . - < em > totalEvents < / em > : the total number of log events the applier has processed . - < em > progress < / em > : a JSON hash with details about the replication applier progress . 
It contains the following sub - attributes if there is progress to report : - < em > message < / em > : a textual description of the progress - < em > time < / em > : the date and time the progress was logged - < em > failedConnects < / em > : the current number of failed connection attempts - < em > lastError < / em > : a JSON hash with details about the last error that happened on the applier . It contains the following sub - attributes if there was an error : - < em > errorNum < / em > : a numerical error code - < em > errorMessage < / em > : a textual error description - < em > time < / em > : the date and time the error occurred In case no error has occurred , < em > lastError < / em > will be empty . < li > < em > server < / em > : a JSON hash with the following sub - attributes : - < em > version < / em > : the applier server ' s version - < em > serverId < / em > : the applier server ' s id < li > < em > endpoint < / em > : the endpoint the applier is connected to ( if applier is active ) or will connect to ( if applier is currently inactive ) < li > < em > database < / em > : the name of the database the applier is connected to ( if applier is active ) or will connect to ( if applier is currently inactive ) " , <nl> " summary " : " State of the replication applier " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Fetching the state of an inactive applier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / applier - state \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : false , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " , \ n \ " message \ " : \ " applier shut down \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 2 , \ n \ " totalFailedConnects \ " : 2 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " , \ n \ " errorMessage \ " : \ " could not connect to master at tcp : / / 127 . 0 . 0 . 1 : 8529 : Could not connect \ " , \ n \ " errorNum \ " : 1412 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > < br > < br > Fetching the state of an active applier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / applier - state \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " , \ n \ " message \ " : \ " fetching master state information \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 3 , \ n \ " totalFailedConnects \ " : 3 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " errorNum \ " : 0 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 02T16 : 39 : 43Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " serverId \ " : \ " 160810523825680 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Fetching the state of an inactive applier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / applier - state \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : false , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " , \ n \ " message \ " : \ " applier shut down \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 2 , \ n \ " totalFailedConnects \ " : 2 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " , \ n \ " errorMessage \ " : \ " could not connect to master at tcp : / / 127 . 0 . 0 . 1 : 8529 : Could not connect \ " , \ n \ " errorNum \ " : 1412 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > < br > < br > Fetching the state of an active applier : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / replication / applier - state \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " state \ " : { \ n \ " running \ " : true , \ n \ " lastAppliedContinuousTick \ " : null , \ n \ " lastProcessedContinuousTick \ " : null , \ n \ " lastAvailableContinuousTick \ " : null , \ n \ " progress \ " : { \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " , \ n \ " message \ " : \ " applier shut down \ " , \ n \ " failedConnects \ " : 1 \ n } , \ n \ " totalRequests \ " : 2 , \ n \ " totalFailedConnects \ " : 2 , \ n \ " totalEvents \ " : 0 , \ n \ " lastError \ " : { \ n \ " errorNum \ " : 0 \ n } , \ n \ " time \ " : \ " 2014 - 10 - 03T08 : 04 : 52Z \ " \ n } , \ n \ " server \ " : { \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " serverId \ " : \ " 165579403628604 \ " \ n } , \ n \ " endpoint \ " : \ " tcp : / / 127 . 0 . 0 . 
1 : 8529 \ " , \ n \ " database \ " : \ " _system \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " StateOfTheReplicationApplier " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / simple . json <nl> ppp b / js / apps / system / aardvark / api - docs / simple . json <nl> <nl> " notes " : " < br > < br > Returns all documents of a collections . The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > skip < / em > : The number of documents to skip in the query ( optional ) . < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . ( optional ) < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . < br > < br > " , <nl> " summary " : " Return all " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Limit the amount of documents using < em > limit < / em > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all \ n { \ " collection \ " : \ " products \ " , \ " skip \ " : 2 , \ " limit \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1055402071 \ " , \ n \ " _key \ " : \ " 1055402071 \ " , \ n \ " _rev \ " : \ " 1055402071 \ " , \ n \ " Hello4 \ " : \ " World4 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1054222423 \ " , \ n \ " _key \ " : \ " 1054222423 \ " , \ n \ " _rev \ " : \ " 1054222423 \ " , \ n \ " Hello1 \ " : \ " World1 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Using a < em > batchSize < / em > value < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all \ n { \ " collection \ " : \ " products \ " , \ " batchSize \ " : 3 } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1056385111 \ " , \ n \ " _key \ " : \ " 1056385111 \ " , \ n \ " _rev \ " : \ " 1056385111 \ " , \ n \ " Hello1 \ " : \ " World1 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1057040471 \ " , \ n \ " _key \ " : \ " 1057040471 \ " , \ n \ " _rev \ " : \ " 1057040471 \ " , \ n \ " Hello3 \ " : \ " World3 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1057368151 \ " , \ n \ " _key \ " : \ " 1057368151 \ " , \ n \ " _rev \ " : \ " 1057368151 \ " , \ n \ " Hello4 \ " : \ " World4 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 1057892439 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Limit the amount of documents using < em > limit < / em > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all \ n { \ " collection \ " : \ " products \ " , \ " skip \ " : 2 , \ " limit \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1054544006 \ " , \ n \ " _key \ " : \ " 1054544006 \ " , \ n \ " _rev \ " : \ " 1054544006 \ " , \ n \ " Hello2 \ " : \ " World2 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1055199366 \ " , \ n \ " _key \ " : \ " 1055199366 \ " , \ n \ " _rev \ " : \ " 1055199366 \ " , \ n \ " Hello4 \ " : \ " World4 \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Using a < em > batchSize < / em > value < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / all \ n { \ " collection \ " : \ " products \ " , \ " batchSize \ " : 3 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1057165446 \ " , \ n \ " _key \ " : \ " 1057165446 \ " , \ n \ " _rev \ " : \ " 1057165446 \ " , \ n \ " Hello4 \ " : \ " World4 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1056837766 \ " , \ n \ " _key \ " : \ " 1056837766 \ " , \ n \ " _rev \ " : \ " 1056837766 \ " , \ n \ " Hello3 \ " : \ " World3 \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1056510086 \ " , \ n \ " _key \ " : \ " 1056510086 \ " , \ n \ " _rev \ " : \ " 1056510086 \ " , \ n \ " Hello2 \ " : \ " World2 \ " \ n } \ n ] , \ n \ " hasMore \ " : true , \ n \ " id \ " : \ " 1057689734 \ " , \ n \ " count \ " : 5 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnAll " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > Returns a random document from a collection . The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The identifier or name of the collection to query . < / ul > Returns a JSON object with the document stored in the attribute < em > document < / em > if the collection contains at least one document . 
If the collection is empty , the < em > document < / em > attrbute contains null . < br > < br > " , <nl> " summary " : " Random document " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / any \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " document \ " : { \ n \ " _id \ " : \ " products / 1058678871 \ " , \ n \ " _key \ " : \ " 1058678871 \ " , \ n \ " _rev \ " : \ " 1058678871 \ " , \ n \ " Hello2 \ " : \ " World2 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / any \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " document \ " : { \ n \ " _id \ " : \ " products / 1059590278 \ " , \ n \ " _key \ " : \ " 1059590278 \ " , \ n \ " _rev \ " : \ " 1059590278 \ " , \ n \ " Hello5 \ " : \ " World5 \ " \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " RandomDocument " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > The default will find at most 100 documents near the given coordinate . The returned list is sorted according to the distance , with the nearest document being first in the list . If there are near documents of equal distance , documents are chosen randomly from this set until the limit is reached . < br > < br > In order to use the < em > near < / em > operator , a geo index must be defined for the collection . This index also defines which attribute holds the coordinates for the document . If you have more then one geo - spatial index , you can use the < em > geo < / em > field to select a particular index . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > latitude < / em > : The latitude of the coordinate . < li > < em > longitude < / em > : The longitude of the coordinate . < li > < em > distance < / em > : If given , the attribute key used to return the distance to the given coordinate . ( optional ) . If specified , distances are returned in meters . < li > < em > skip < / em > : The number of documents to skip in the query . ( optional ) < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . The default is 100 . ( optional ) < li > < em > geo < / em > : If given , the identifier of the geo - index to use . ( optional ) < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . 
< br > < br > " , <nl> " summary " : " Near query " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Without distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1061759063 \ " , \ n \ " _key \ " : \ " 1061759063 \ " , \ n \ " _rev \ " : \ " 1061759063 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] \ n } , \ n { \ n \ " _id \ " : \ " products / 1061365847 \ " , \ n \ " _key \ " : \ " 1061365847 \ " , \ n \ " _rev \ " : \ " 1061365847 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > With distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 3 , \ " distance \ " : \ " distance \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1064380503 \ " , \ n \ " _key \ " : \ " 1064380503 \ " , \ n \ " _rev \ " : \ " 1064380503 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1064773719 \ " , \ n \ " _key \ " : \ " 1064773719 \ " , \ n \ " _rev \ " : \ " 1064773719 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1064183895 \ " , \ n \ " _key \ " : \ " 1064183895 \ " , \ n \ " _rev \ " : \ " 1064183895 \ " , \ n \ " name \ " : \ " Name / - 0 . 004 / \ " , \ n \ " loc \ " : [ \ n - 0 . 004 , \ n 0 \ n ] , \ n \ " distance \ " : 444 . 779706578235 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Without distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 2 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1061752966 \ " , \ n \ " _key \ " : \ " 1061752966 \ " , \ n \ " _rev \ " : \ " 1061752966 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 
002 , \ n 0 \ n ] \ n } , \ n { \ n \ " _id \ " : \ " products / 1061359750 \ " , \ n \ " _key \ " : \ " 1061359750 \ " , \ n \ " _rev \ " : \ " 1061359750 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > With distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 3 , \ " distance \ " : \ " distance \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1064374406 \ " , \ n \ " _key \ " : \ " 1064374406 \ " , \ n \ " _rev \ " : \ " 1064374406 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1064767622 \ " , \ n \ " _key \ " : \ " 1064767622 \ " , \ n \ " _rev \ " : \ " 1064767622 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1064177798 \ " , \ n \ " _key \ " : \ " 1064177798 \ " , \ n \ " _rev \ " : \ " 1064177798 \ " , \ n \ " name \ " : \ " Name / - 0 . 004 / \ " , \ n \ " loc \ " : [ \ n - 0 . 004 , \ n 0 \ n ] , \ n \ " distance \ " : 444 . 779706578235 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " NearQuery " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will find all documents within a given radius around the coordinate ( < em > latitude < / em > , < em > longitude < / em > ) . The returned list is sorted by distance . < br > < br > In order to use the < em > within < / em > operator , a geo index must be defined for the collection . This index also defines which attribute holds the coordinates for the document . If you have more then one geo - spatial index , you can use the < em > geo < / em > field to select a particular index . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > latitude < / em > : The latitude of the coordinate . < li > < em > longitude < / em > : The longitude of the coordinate . < li > < em > radius < / em > : The maximal radius ( in meters ) . < li > < em > distance < / em > : If given , the attribute key used to return the distance to the given coordinate . ( optional ) . If specified , distances are returned in meters . < li > < em > skip < / em > : The number of documents to skip in the query . ( optional ) < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . The default is 100 . ( optional ) < li > < em > geo < / em > : If given , the identifier of the geo - index to use . 
( optional ) < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . < br > < br > " , <nl> " summary " : " Within query " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Without distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 2 , \ " radius \ " : 500 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1067984983 \ " , \ n \ " _key \ " : \ " 1067984983 \ " , \ n \ " _rev \ " : \ " 1067984983 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] \ n } , \ n { \ n \ " _id \ " : \ " products / 1067591767 \ " , \ n \ " _key \ " : \ " 1067591767 \ " , \ n \ " _rev \ " : \ " 1067591767 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > With distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 3 , \ " distance \ " : \ " distance \ " , \ " radius \ " : 300 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1070606423 \ " , \ n \ " _key \ " : \ " 1070606423 \ " , \ n \ " _rev \ " : \ " 1070606423 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1070999639 \ " , \ n \ " _key \ " : \ " 1070999639 \ " , \ n \ " _rev \ " : \ " 1070999639 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1070409815 \ " , \ n \ " _key \ " : \ " 1070409815 \ " , \ n \ " _rev \ " : \ " 1070409815 \ " , \ n \ " name \ " : \ " Name / - 0 . 004 / \ " , \ n \ " loc \ " : [ \ n - 0 . 004 , \ n 0 \ n ] , \ n \ " distance \ " : 444 . 779706578235 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Without distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 2 , \ " radius \ " : 500 } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1067782278 \ " , \ n \ " _key \ " : \ " 1067782278 \ " , \ n \ " _rev \ " : \ " 1067782278 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] \ n } , \ n { \ n \ " _id \ " : \ " products / 1067389062 \ " , \ n \ " _key \ " : \ " 1067389062 \ " , \ n \ " _rev \ " : \ " 1067389062 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > With distance : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / near \ n { \ " collection \ " : \ " products \ " , \ " latitude \ " : 0 , \ " longitude \ " : 0 , \ " skip \ " : 1 , \ " limit \ " : 3 , \ " distance \ " : \ " distance \ " , \ " radius \ " : 300 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1070403718 \ " , \ n \ " _key \ " : \ " 1070403718 \ " , \ n \ " _rev \ " : \ " 1070403718 \ " , \ n \ " name \ " : \ " Name / - 0 . 002 / \ " , \ n \ " loc \ " : [ \ n - 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1070993542 \ " , \ n \ " _key \ " : \ " 1070993542 \ " , \ n \ " _rev \ " : \ " 1070993542 \ " , \ n \ " name \ " : \ " Name / 0 . 002 / \ " , \ n \ " loc \ " : [ \ n 0 . 002 , \ n 0 \ n ] , \ n \ " distance \ " : 222 . 38985328911744 \ n } , \ n { \ n \ " _id \ " : \ " products / 1070207110 \ " , \ n \ " _key \ " : \ " 1070207110 \ " , \ n \ " _rev \ " : \ " 1070207110 \ " , \ n \ " name \ " : \ " Name / - 0 . 004 / \ " , \ n \ " loc \ " : [ \ n - 0 . 004 , \ n 0 \ n ] , \ n \ " distance \ " : 444 . 779706578235 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 3 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " WithinQuery " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will find all documents from the collection that match the fulltext query specified in < em > query < / em > . < br > < br > In order to use the < em > fulltext < / em > operator , a fulltext index must be defined for the collection and the specified attribute . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > attribute < / em > : The attribute that contains the texts . < li > < em > query < / em > : The fulltext query . < li > < em > skip < / em > : The number of documents to skip in the query ( optional ) . < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . ( optional ) < li > < em > index < / em > : The identifier of the fulltext - index to use . < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . 
< br > < br > " , <nl> " summary " : " Fulltext index query " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / fulltext \ n { \ " collection \ " : \ " products \ " , \ " attribute \ " : \ " text \ " , \ " query \ " : \ " word \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1072506967 \ " , \ n \ " _key \ " : \ " 1072506967 \ " , \ n \ " _rev \ " : \ " 1072506967 \ " , \ n \ " text \ " : \ " this text contains word \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1072703575 \ " , \ n \ " _key \ " : \ " 1072703575 \ " , \ n \ " _rev \ " : \ " 1072703575 \ " , \ n \ " text \ " : \ " this text also has a word \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / fulltext \ n { \ " collection \ " : \ " products \ " , \ " attribute \ " : \ " text \ " , \ " query \ " : \ " word \ " } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1072697478 \ " , \ n \ " _key \ " : \ " 1072697478 \ " , \ n \ " _rev \ " : \ " 1072697478 \ " , \ n \ " text \ " : \ " this text also has a word \ " \ n } , \ n { \ n \ " _id \ " : \ " products / 1072500870 \ " , \ n \ " _key \ " : \ " 1072500870 \ " , \ n \ " _rev \ " : \ " 1072500870 \ " , \ n \ " text \ " : \ " this text contains word \ " \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " FulltextIndexQuery " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will find all documents matching a given example . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > example < / em > : The example document . < li > < em > skip < / em > : The number of documents to skip in the query ( optional ) . < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . ( optional ) < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . < br > < br > " , <nl> " summary " : " Simple query by - example " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Matching an attribute : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " i \ " : 1 } } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1074866263 \ " , \ n \ " _key \ " : \ " 1074866263 \ " , \ n \ " _rev \ " : \ " 1074866263 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1074407511 \ " , \ n \ " _key \ " : \ " 1074407511 \ " , \ n \ " _rev \ " : \ " 1074407511 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1074669655 \ " , \ n \ " _key \ " : \ " 1074669655 \ " , \ n \ " _rev \ " : \ " 1074669655 \ " , \ n \ " i \ " : 1 \ n } , \ n { \ n \ " _id \ " : \ " products / 1074079831 \ " , \ n \ " _key \ " : \ " 1074079831 \ " , \ n \ " _rev \ " : \ " 1074079831 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 4 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Matching an attribute which is a sub - document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " a . j \ " : 1 } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1076373591 \ " , \ n \ " _key \ " : \ " 1076373591 \ " , \ n \ " _rev \ " : \ " 1076373591 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1076045911 \ " , \ n \ " _key \ " : \ " 1076045911 \ " , \ n \ " _rev \ " : \ " 1076045911 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Matching an attribute within a sub - document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " a \ " : { \ " j \ " : 1 } } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1078339671 \ " , \ n \ " _key \ " : \ " 1078339671 \ " , \ n \ " _rev \ " : \ " 1078339671 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 1 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Matching an attribute : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " i \ " : 1 } } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1074860166 \ " , \ n \ " _key \ " : \ " 1074860166 \ " , \ n \ " _rev \ " : \ " 1074860166 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1074401414 \ " , \ n \ " _key \ " : \ " 1074401414 \ " , \ n \ " _rev \ " : \ " 1074401414 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1074073734 \ " , \ n \ " _key \ " : \ " 1074073734 \ " , \ n \ " _rev \ " : \ " 1074073734 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1074663558 \ " , \ n \ " _key \ " : \ " 1074663558 \ " , \ n \ " _rev \ " : \ " 1074663558 \ " , \ n \ " i \ " : 1 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 4 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Matching an attribute which is a sub - document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " a . j \ " : 1 } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1076367494 \ " , \ n \ " _key \ " : \ " 1076367494 \ " , \ n \ " _rev \ " : \ " 1076367494 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1076039814 \ " , \ n \ " _key \ " : \ " 1076039814 \ " , \ n \ " _rev \ " : \ " 1076039814 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > < br > < br > Matching an attribute within a sub - document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / by - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " a \ " : { \ " j \ " : 1 } } } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1078333574 \ " , \ n \ " _key \ " : \ " 1078333574 \ " , \ n \ " _rev \ " : \ " 1078333574 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 1 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " SimpleQueryBy - example " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will return the first document matching a given example . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > example < / em > : The example document . < / ul > Returns a result containing the document or < em > HTTP 404 < / em > if no document matched the example . 
< br > < br > If more than one document in the collection matches the specified example , only one of these documents will be returned , and it is undefined which of the matching documents is returned . < br > < br > " , <nl> " summary " : " Document matching an example " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > If a matching document was found : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " i \ " : 1 } } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " document \ " : { \ n \ " _id \ " : \ " products / 1080764503 \ " , \ n \ " _key \ " : \ " 1080764503 \ " , \ n \ " _rev \ " : \ " 1080764503 \ " , \ n \ " i \ " : 1 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > If no document was found : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " l \ " : 1 } } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 404 , \ n \ " errorMessage \ " : \ " no match \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > If a matching document was found : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " i \ " : 1 } } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " document \ " : { \ n \ " _id \ " : \ " products / 1080561798 \ " , \ n \ " _key \ " : \ " 1080561798 \ " , \ n \ " _rev \ " : \ " 1080561798 \ " , \ n \ " i \ " : 1 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > If no document was found : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first - example \ n { \ " collection \ " : \ " products \ " , \ " example \ " : { \ " l \ " : 1 } } \ n \ nHTTP / 1 . 1 404 Not Found \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 404 , \ n \ " errorNum \ " : 404 , \ n \ " errorMessage \ " : \ " no match \ " \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " DocumentMatchingAnExample " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will return the first document ( s ) from the collection , in the order of insertion / update time . When the < em > count < / em > argument is supplied , the result will be a list of documents , with the \ " oldest \ " document being first in the result list . If the < em > count < / em > argument is not supplied , the result is the \ " oldest \ " document of the collection , or < em > null < / em > if the collection is empty . 
< br > < br > The request body must be a JSON object with the following attributes : < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : the name of the collection < li > < em > count < / em > : the number of documents to return at most . Specifiying count is optional . If it is not specified , it defaults to 1 . < / ul > Note : this method is not supported for sharded collections with more than one shard . < br > < br > " , <nl> " summary " : " First document of a collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Retrieving the first n documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first \ n { \ " collection \ " : \ " products \ " , \ " count \ " : 2 } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1083975767 \ " , \ n \ " _key \ " : \ " 1083975767 \ " , \ n \ " _rev \ " : \ " 1083975767 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1084303447 \ " , \ n \ " _key \ " : \ " 1084303447 \ " , \ n \ " _rev \ " : \ " 1084303447 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the first document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " _id \ " : \ " products / 1085614167 \ " , \ n \ " _key \ " : \ " 1085614167 \ " , \ n \ " _rev \ " : \ " 1085614167 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Retrieving the first n documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first \ n { \ " collection \ " : \ " products \ " , \ " count \ " : 2 } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1083773062 \ " , \ n \ " _key \ " : \ " 1083773062 \ " , \ n \ " _rev \ " : \ " 1083773062 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1084100742 \ " , \ n \ " _key \ " : \ " 1084100742 \ " , \ n \ " _rev \ " : \ " 1084100742 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " j \ " : 1 \ n } \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the first document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / first \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " _id \ " : \ " products / 1085608070 \ " , \ n \ " _key \ " : \ " 1085608070 \ " , \ n \ " _rev \ " : \ " 1085608070 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 1 , \ n \ " j \ " : 1 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " FirstDocumentOfACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will return the last documents from the collection , in the order of insertion / update time . When the < em > count < / em > argument is supplied , the result will be a list of documents , with the \ " latest \ " document being first in the result list . < br > < br > The request body must be a JSON object with the following attributes : < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : the name of the collection < li > < em > count < / em > : the number of documents to return at most . Specifiying count is optional . If it is not specified , it defaults to 1 . < / ul > If the < em > count < / em > argument is not supplied , the result is the \ " latest \ " document of the collection , or < em > null < / em > if the collection is empty . < br > < br > Note : this method is not supported for sharded collections with more than one shard . < br > < br > " , <nl> " summary " : " Last document of a collection " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > Retrieving the last n documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / last \ n { \ " collection \ " : \ " products \ " , \ " count \ " : 2 } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1088038999 \ " , \ n \ " _key \ " : \ " 1088038999 \ " , \ n \ " _rev \ " : \ " 1088038999 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1087842391 \ " , \ n \ " _key \ " : \ " 1087842391 \ " , \ n \ " _rev \ " : \ " 1087842391 \ " , \ n \ " i \ " : 1 \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the first document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / last \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " _id \ " : \ " products / 1089677399 \ " , \ n \ " _key \ " : \ " 1089677399 \ " , \ n \ " _rev \ " : \ " 1089677399 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Retrieving the last n documents : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / last \ n { \ " collection \ " : \ " products \ " , \ " count \ " : 2 } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1088032902 \ " , \ n \ " _key \ " : \ " 1088032902 \ " , \ n \ " _rev \ " : \ " 1088032902 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n { \ n \ " _id \ " : \ " products / 1087836294 \ " , \ n \ " _key \ " : \ " 1087836294 \ " , \ n \ " _rev \ " : \ " 1087836294 \ " , \ n \ " i \ " : 1 \ n } \ n ] , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Retrieving the first document : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / last \ n { \ " collection \ " : \ " products \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " _id \ " : \ " products / 1089671302 \ " , \ n \ " _key \ " : \ " 1089671302 \ " , \ n \ " _rev \ " : \ " 1089671302 \ " , \ n \ " i \ " : 1 , \ n \ " a \ " : { \ n \ " k \ " : 2 , \ n \ " j \ " : 2 \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " LastDocumentOfACollection " <nl> } <nl> ] , <nl> <nl> " notes " : " < br > < br > This will find all documents within a given range . In order to execute a range query , a skip - list index on the queried attribute must be present . < br > < br > The call expects a JSON object as body with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > collection < / em > : The name of the collection to query . < li > < em > attribute < / em > : The attribute path to check . < li > < em > left < / em > : The lower bound . < li > < em > right < / em > : The upper bound . < li > < em > closed < / em > : If < em > true < / em > , use interval including < em > left < / em > and < em > right < / em > , otherwise exclude < em > right < / em > , but include < em > left < / em > . < li > < em > skip < / em > : The number of documents to skip in the query ( optional ) . < li > < em > limit < / em > : The maximal amount of documents to return . The < em > skip < / em > is applied before the < em > limit < / em > restriction . ( optional ) < / ul > Returns a cursor containing the result , see [ Http Cursor ] ( . . / HttpAqlQueryCursor / README . md ) for details . < br > < br > " , <nl> " summary " : " Simple range query " , <nl> " httpMethod " : " PUT " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / range \ n { \ " collection \ " : \ " products \ " , \ " attribute \ " : \ " i \ " , \ " left \ " : 2 , \ " right \ " : 4 } \ n \ nHTTP / 1 . 
1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1090660439 \ " , \ n \ " _key \ " : \ " 1090660439 \ " , \ n \ " _rev \ " : \ " 1090660439 \ " , \ n \ " i \ " : 2 \ n } , \ n { \ n \ " _id \ " : \ " products / 1090857047 \ " , \ n \ " _key \ " : \ " 1090857047 \ " , \ n \ " _rev \ " : \ " 1090857047 \ " , \ n \ " i \ " : 3 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X PUT - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / simple / range \ n { \ " collection \ " : \ " products \ " , \ " attribute \ " : \ " i \ " , \ " left \ " : 2 , \ " right \ " : 4 } \ n \ nHTTP / 1 . 1 201 Created \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : [ \ n { \ n \ " _id \ " : \ " products / 1090654342 \ " , \ n \ " _key \ " : \ " 1090654342 \ " , \ n \ " _rev \ " : \ " 1090654342 \ " , \ n \ " i \ " : 2 \ n } , \ n { \ n \ " _id \ " : \ " products / 1090850950 \ " , \ n \ " _key \ " : \ " 1090850950 \ " , \ n \ " _rev \ " : \ " 1090850950 \ " , \ n \ " i \ " : 3 \ n } \ n ] , \ n \ " hasMore \ " : false , \ n \ " count \ " : 2 , \ n \ " error \ " : false , \ n \ " code \ " : 201 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " SimpleRangeQuery " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / system . json <nl> ppp b / js / apps / system / aardvark / api - docs / system . json <nl> <nl> " notes " : " < br > < br > Returns the statistics information . The returned object contains the statistics figures grouped together according to the description returned by < em > _admin / statistics - description < / em > . For instance , to access a figure < em > userTime < / em > from the group < em > system < / em > , you first select the sub - object describing the group stored in < em > system < / em > and in that sub - object the value for < em > userTime < / em > is stored in the attribute of the same name . < br > < br > In case of a distribution , the returned object contains the total count in < em > count < / em > and the distribution list in < em > counts < / em > . The sum ( or total ) of the individual values is returned in < em > sum < / em > . < br > < br > " , <nl> " summary " : " Read the statistics " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _admin / statistics \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " time \ " : 1412267966 . 886969 , \ n \ " system \ " : { \ n \ " minorPageFaults \ " : 180828 , \ n \ " majorPageFaults \ " : 2197 , \ n \ " userTime \ " : 13 . 425886 , \ n \ " systemTime \ " : 5 . 1362 , \ n \ " numberOfThreads \ " : 19 , \ n \ " residentSize \ " : 269860864 , \ n \ " residentSizePercent \ " : 0 . 06283187866210938 , \ n \ " virtualSize \ " : 5288067072 \ n } , \ n \ " client \ " : { \ n \ " httpConnections \ " : 1 , \ n \ " connectionTime \ " : { \ n \ " sum \ " : 0 . 0004680156707763672 , \ n \ " count \ " : 1 , \ n \ " counts \ " : [ \ n 1 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " totalTime \ " : { \ n \ " sum \ " : 48 . 
10051465034485 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8095 , \ n 565 , \ n 63 , \ n 44 , \ n 8 , \ n 5 , \ n 4 \ n ] \ n } , \ n \ " requestTime \ " : { \ n \ " sum \ " : 46 . 95299553871155 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8110 , \ n 551 , \ n 62 , \ n 44 , \ n 8 , \ n 5 , \ n 4 \ n ] \ n } , \ n \ " queueTime \ " : { \ n \ " sum \ " : 0 . 1864941120147705 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8784 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " ioTime \ " : { \ n \ " sum \ " : 0 . 9610249996185303 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8784 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " bytesSent \ " : { \ n \ " sum \ " : 3262584 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 2667 , \ n 5978 , \ n 9 , \ n 8 , \ n 122 , \ n 0 \ n ] \ n } , \ n \ " bytesReceived \ " : { \ n \ " sum \ " : 2079017 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 6564 , \ n 2220 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } \ n } , \ n \ " http \ " : { \ n \ " requestsTotal \ " : 8784 , \ n \ " requestsAsync \ " : 0 , \ n \ " requestsGet \ " : 3650 , \ n \ " requestsHead \ " : 295 , \ n \ " requestsPost \ " : 3165 , \ n \ " requestsPut \ " : 685 , \ n \ " requestsPatch \ " : 31 , \ n \ " requestsDelete \ " : 958 , \ n \ " requestsOptions \ " : 0 , \ n \ " requestsOther \ " : 0 \ n } , \ n \ " server \ " : { \ n \ " uptime \ " : 60 . 220277070999146 , \ n \ " physicalMemory \ " : 4294967296 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _admin / statistics \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " time \ " : 1412323477 . 41357 , \ n \ " system \ " : { \ n \ " minorPageFaults \ " : 191415 , \ n \ " majorPageFaults \ " : 2162 , \ n \ " userTime \ " : 11 . 70447 , \ n \ " systemTime \ " : 4 . 978205 , \ n \ " numberOfThreads \ " : 23 , \ n \ " residentSize \ " : 288788480 , \ n \ " residentSizePercent \ " : 0 . 06723880767822266 , \ n \ " virtualSize \ " : 5336215552 \ n } , \ n \ " client \ " : { \ n \ " httpConnections \ " : 1 , \ n \ " connectionTime \ " : { \ n \ " sum \ " : 0 . 0004870891571044922 , \ n \ " count \ " : 1 , \ n \ " counts \ " : [ \ n 1 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " totalTime \ " : { \ n \ " sum \ " : 46 . 77754878997803 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8147 , \ n 516 , \ n 54 , \ n 50 , \ n 9 , \ n 4 , \ n 4 \ n ] \ n } , \ n \ " requestTime \ " : { \ n \ " sum \ " : 45 . 6920006275177 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8165 , \ n 498 , \ n 54 , \ n 50 , \ n 9 , \ n 4 , \ n 4 \ n ] \ n } , \ n \ " queueTime \ " : { \ n \ " sum \ " : 0 . 1780388355255127 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8784 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " ioTime \ " : { \ n \ " sum \ " : 0 . 
9075093269348145 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 8784 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } , \ n \ " bytesSent \ " : { \ n \ " sum \ " : 3262597 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 2667 , \ n 5978 , \ n 9 , \ n 8 , \ n 122 , \ n 0 \ n ] \ n } , \ n \ " bytesReceived \ " : { \ n \ " sum \ " : 2079017 , \ n \ " count \ " : 8784 , \ n \ " counts \ " : [ \ n 6564 , \ n 2220 , \ n 0 , \ n 0 , \ n 0 , \ n 0 \ n ] \ n } \ n } , \ n \ " http \ " : { \ n \ " requestsTotal \ " : 8784 , \ n \ " requestsAsync \ " : 0 , \ n \ " requestsGet \ " : 3650 , \ n \ " requestsHead \ " : 295 , \ n \ " requestsPost \ " : 3165 , \ n \ " requestsPut \ " : 685 , \ n \ " requestsPatch \ " : 31 , \ n \ " requestsDelete \ " : 958 , \ n \ " requestsOptions \ " : 0 , \ n \ " requestsOther \ " : 0 \ n } , \ n \ " server \ " : { \ n \ " uptime \ " : 58 . 685256004333496 , \ n \ " physicalMemory \ " : 4294967296 \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReadTheStatistics " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / traversal . json <nl> ppp b / js / apps / system / aardvark / api - docs / traversal . json <nl> <nl> " notes " : " Starts a traversal starting from a given vertex and following . edges contained in a given edgeCollection . The request must contain the following attributes . < br > < br > < ul class = \ " swagger - list \ " > < li > < em > startVertex < / em > : id of the startVertex , e . g . < em > \ " users / foo \ " < / em > . < li > < em > edgeCollection < / em > : < em > * Deprecated * < / em > name of the collection that contains the edges . < li > < em > graphName < / em > : name of the graph that contains the edges . < li > < em > filter < / em > ( optional , default is to include all nodes ) : body ( JavaScript code ) of custom filter function function signature : ( config , vertex , path ) - > mixed can return four different string values : - < em > \ " exclude \ " < / em > - > this vertex will not be visited . - < em > \ " prune \ " < / em > - > the edges of this vertex will not be followed . - < em > \ " \ " < / em > or < em > undefined < / em > - > visit the vertex and follow it ' s edges . - < em > Array < / em > - > containing any combination of the above . If there is at least one < em > \ " exclude \ " < / em > or < em > \ " prune \ " < / em > respectivly is contained , it ' s effect will occur . < li > < em > minDepth < / em > ( optional , ANDed with any existing filters ) : visits only nodes in at least the given depth < li > < em > maxDepth < / em > ( optional , ANDed with any existing filters ) : visits only nodes in at most the given depth < li > < em > visitor < / em > ( optional ) : body ( JavaScript ) code of custom visitor function function signature : ( config , result , vertex , path ) - > void visitor function can do anything , but its return value is ignored . 
To populate a result , use the < em > result < / em > variable by reference < li > < em > direction < / em > ( optional ) : direction for traversal - < em > if set < / em > , must be either < em > \ " outbound \ " < / em > , < em > \ " inbound \ " < / em > , or < em > \ " any \ " < / em > - < em > if not set < / em > , the < em > expander < / em > attribute must be specified < li > < em > init < / em > ( optional ) : body ( JavaScript ) code of custom result initialisation function function signature : ( config , result ) - > void initialise any values in result with what is required < li > < em > expander < / em > ( optional ) : body ( JavaScript ) code of custom expander function < em > must < / em > be set if < em > direction < / em > attribute is < em > * not * < / em > set function signature : ( config , vertex , path ) - > array expander must return an array of the connections for < em > vertex < / em > each connection is an object with the attributes < em > edge < / em > and < em > vertex < / em > < li > < em > sort < / em > ( optional ) : body ( JavaScript ) code of a custom comparison function for the edges . The signature of this function is ( l , r ) - > integer ( where l and r are edges ) and must return - 1 if l is smaller than , + 1 if l is greater than , and 0 if l and r are equal . The reason for this is the following : The order of edges returned for a certain vertex is undefined . This is because there is no natural order of edges for a vertex with multiple connected edges . To explicitly define the order in which edges on the vertex are followed , you can specify an edge comparator function with this attribute . Note that the value here has to be a string to conform to the JSON standard , which in turn is parsed as function body on the server side . Furthermore note that this attribute is only used for the standard expanders . If you use your custom expander you have to do the sorting yourself within the expander code . < li > < em > strategy < / em > ( optional ) : traversal strategy can be < em > \ " depthfirst \ " < / em > or < em > \ " breadthfirst \ " < / em > < li > < em > order < / em > ( optional ) : traversal order can be < em > \ " preorder \ " < / em > or < em > \ " postorder \ " < / em > < li > < em > itemOrder < / em > ( optional ) : item iteration order can be < em > \ " forward \ " < / em > or < em > \ " backward \ " < / em > < li > < em > uniqueness < / em > ( optional ) : specifies uniqueness for vertices and edges visited if set , must be an object like this : < em > \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " | \ " global \ " | path \ " , \ " edges \ " : \ " none \ " | \ " global \ " | \ " path \ " } < / em > < li > < em > maxIterations < / em > ( optional ) : Maximum number of iterations in each traversal . This number can be set to prevent endless loops in traversal of cyclic graphs . When a traversal performs as many iterations as the < em > maxIterations < / em > value , the traversal will abort with an error . If < em > maxIterations < / em > is not set , a server - defined value may be used . < / ul > If the Traversal is successfully executed < em > HTTP 200 < / em > will be returned . Additionally the < em > result < / em > object will be returned by the traversal . 
< br > < br > For successful traversals , the returned JSON object has the following properties : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > error < / em > : boolean flag to indicate if an error occurred ( < em > false < / em > in this case ) < li > < em > code < / em > : the HTTP status code < li > < em > result < / em > : the return value of the traversal < / ul > If the traversal specification is either missing or malformed , the server will respond with < em > HTTP 400 < / em > . < br > < br > The body of the response will then contain a JSON object with additional error details . The object has the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > error < / em > : boolean flag to indicate that an error occurred ( < em > true < / em > in this case ) < li > < em > code < / em > : the HTTP status code < li > < em > errorNum < / em > : the server error number < li > < em > errorMessage < / em > : a descriptive error message " , <nl> " summary " : " executes a traversal " , <nl> " httpMethod " : " POST " , <nl> - " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1110124631 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110321239 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1110583383 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1110779991 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1110124631 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111238743 \ " , \ n \ " _key \ " : \ " 1111238743 \ " , \ n \ " _rev \ " : \ " 1111238743 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1110124631 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110321239 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111238743 \ " , \ n \ " _key \ " : \ " 1111238743 \ " , \ n \ " _rev \ " : \ " 1111238743 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1111435351 \ " , \ n \ " _key \ " : \ " 1111435351 \ " , \ n \ " _rev \ " : \ " 1111435351 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " 
_to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1110124631 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110321239 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1110583383 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111238743 \ " , \ n \ " _key \ " : \ " 1111238743 \ " , \ n \ " _rev \ " : \ " 1111238743 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1111631959 \ " , \ n \ " _key \ " : \ " 1111631959 \ " , \ n \ " _rev \ " : \ " 1111631959 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1110124631 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110321239 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1110779991 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Follow only inbound edges : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " inbound \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114581079 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1115433047 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114581079 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1116285015 \ " , \ n \ " _key \ " : \ " 1116285015 \ " , \ n \ " _rev \ " : \ " 1116285015 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114581079 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1115433047 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Follow any direction of edges : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " , \ " edges \ " : \ " global \ " } } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1118971991 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1119234135 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1119430743 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1120479319 \ " , \ n \ " _key \ " : \ " 1120479319 \ " , \ n \ " _rev \ " : \ " 1120479319 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1120479319 \ " , \ n \ " _key \ " : \ " 1120479319 \ " , \ n \ " _rev \ " : \ " 1120479319 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120675927 \ " , \ n \ " _key \ " : \ " 1120675927 \ " , \ n \ " _rev \ " : \ " 1120675927 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1118971991 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1120479319 \ " , \ n \ " _key \ " : \ " 1120479319 \ " , \ n \ " _rev \ " : \ " 1120479319 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120675927 \ " , \ n \ " _key \ " : \ " 1120675927 \ " , \ n \ " _rev \ " : \ " 1120675927 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 
1119889495 \ " , \ n \ " _key \ " : \ " 1119889495 \ " , \ n \ " _rev \ " : \ " 1119889495 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1118971991 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1120479319 \ " , \ n \ " _key \ " : \ " 1120479319 \ " , \ n \ " _rev \ " : \ " 1120479319 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120675927 \ " , \ n \ " _key \ " : \ " 1120675927 \ " , \ n \ " _rev \ " : \ " 1120675927 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120086103 \ " , \ n \ " _key \ " : \ " 1120086103 \ " , \ n \ " _rev \ " : \ " 1120086103 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1118971991 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1119234135 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1120479319 \ " , \ n \ " _key \ " : \ " 1120479319 \ " , \ n \ " _rev \ " : \ " 1120479319 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120675927 \ " , \ n \ " _key \ " : \ " 1120675927 \ " , \ n \ " _rev \ " : \ " 1120675927 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1120282711 \ " , \ n \ " _key \ " : \ " 1120282711 \ " , \ n \ " _rev \ " : \ " 1120282711 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1118775383 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1119627351 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1118971991 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons 
/ dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1119430743 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Excluding < em > Charlie < / em > and < em > Bob < / em > : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " , \ " filter \ " : \ " if ( vertex . name = = = \ \ \ " Bob \ \ \ " | | vertex . name = = = \ \ \ " Charlie \ \ \ " ) { return \ \ \ " exclude \ \ \ " ; } return ; \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1123952727 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1124608087 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1123952727 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1125066839 \ " , \ n \ " _key \ " : \ " 1125066839 \ " , \ n \ " _rev \ " : \ " 1125066839 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1125460055 \ " , \ n \ " _key \ " : \ " 1125460055 \ " , \ n \ " _rev \ " : \ " 1125460055 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1123952727 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1124149335 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1124608087 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Do not follow edges from < em > Bob < / em > : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " , \ " filter \ " : \ " if ( vertex . name = = = \ \ \ " Bob \ \ \ " ) { return \ \ \ " prune \ \ \ " ; } return ; \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1128605783 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1128802391 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1128605783 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1129719895 \ " , \ n \ " _key \ " : \ " 1129719895 \ " , \ n \ " _rev \ " : \ " 1129719895 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1128605783 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1128802391 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Visit only nodes in a depth of at least 2 : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " , \ " minDepth \ " : 2 } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1133193303 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1133389911 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1133848663 \ " , \ n \ " _key \ " : \ " 1133848663 \ " , \ n \ " _rev \ " : \ " 1133848663 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1134045271 \ " , \ n \ " _key \ " : \ " 1134045271 \ " , \ n \ " _rev \ " : \ " 1134045271 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1132734551 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1132931159 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1133193303 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1133848663 \ " , \ n \ " _key \ " : \ " 1133848663 \ " , \ n \ " _rev \ " : \ " 1133848663 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1134241879 \ " , \ n \ " _key \ " : \ " 1134241879 \ " , \ n \ " _rev \ " : \ " 1134241879 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1132734551 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1132931159 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1133389911 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Visit only nodes in a depth of at most 1 : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " , \ " maxDepth \ " : 1 } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1137190999 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1137387607 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1137190999 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1138305111 \ " , \ n \ " _key \ " : \ " 1138305111 \ " , \ n \ " _rev \ " : \ " 1138305111 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1137190999 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1137387607 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Count all visited nodes and return a list of nodes only : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " , \ " init \ " : \ " result . visited = 0 ; result . myVertices = [ ] ; \ " , \ " visitor \ " : \ " result . visited + + ; result . myVertices . push ( vertex ) ; \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : 4 , \ n \ " myVertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1141319767 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1141516375 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1141778519 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1141975127 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Expand only inbound edges of < em > Alice < / em > and outbound edges of < em > Eve < / em > : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " expander \ " : \ " var connections = [ ] ; if ( vertex . name = = = \ \ \ " Alice \ \ \ " ) { config . datasource . getInEdges ( vertex ) . forEach ( function ( e ) { connections . 
push ( { vertex : require ( \ \ \ " internal \ \ \ " ) . db . _document ( e . _from ) , edge : e } ) ; } ) ; } if ( vertex . name = = = \ \ \ " Eve \ \ \ " ) { config . datasource . getOutEdges ( vertex ) . forEach ( function ( e ) { connections . push ( { vertex : require ( \ \ \ " internal \ \ \ " ) . db . _document ( e . _to ) , edge : e } ) ; } ) ; } return connections ; \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1145776215 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1146628183 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1145972823 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1145776215 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1147480151 \ " , \ n \ " _key \ " : \ " 1147480151 \ " , \ n \ " _rev \ " : \ " 1147480151 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1145776215 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1146628183 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1147480151 \ " , \ n \ " _key \ " : \ " 1147480151 \ " , \ n \ " _rev \ " : \ " 1147480151 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1147676759 \ " , \ n \ " _key \ " : \ " 1147676759 \ " , \ n \ " _rev \ " : \ " 1147676759 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1145776215 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1146628183 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1145972823 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Follow the < em > depthfirst < / em > strategy : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " strategy \ " : \ " depthfirst \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1150560343 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1150756951 \ " , \ n \ " name \ " : \ " Dave \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1150560343 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1150756951 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " 
persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151412311 \ " , \ n \ " _key \ " : \ " 1151412311 \ " , \ n \ " _rev \ " : \ " 1151412311 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1150560343 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ 
" _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151608919 \ " , \ n \ " _key \ " : \ " 1151608919 \ " , \ n \ " _rev \ " : \ " 1151608919 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1150756951 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1152002135 \ " , \ n \ " _key \ " : \ " 1152002135 \ " , \ n \ " _rev \ " : \ " 1152002135 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151805527 \ " , \ n \ " _key \ " : \ " 1151805527 \ " , \ n \ " _rev \ " : \ " 1151805527 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " 
_key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1150953559 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151412311 \ " , \ n \ " _key \ " : \ " 1151412311 \ " , \ n \ " _rev \ " : \ " 1151412311 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1150560343 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1151215703 \ " , \ n \ " _key \ " : \ " 1151215703 \ " , \ n \ " _rev \ " : \ " 1151215703 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1151608919 \ " , \ n \ " _key \ " : \ " 1151608919 \ " , \ n \ " _rev \ " : \ " 1151608919 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1150101591 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1150298199 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1150756951 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using < em > postorder < / em > ordering : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " order \ " : \ " postorder \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1156720727 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1156917335 \ " , \ n \ " name \ " : \ " Dave \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1156720727 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1156917335 \ " , \ n \ " name \ " : \ " Dave \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ 
" _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157572695 \ " , \ n \ " _key \ " : \ " 1157572695 \ " , \ n \ " _rev \ " : \ " 1157572695 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1156720727 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157769303 \ " , \ n \ " _key \ " : \ " 1157769303 \ " , \ n \ " _rev \ " : \ " 1157769303 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1156917335 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ 
" , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157965911 \ " , \ n \ " _key \ " : \ " 1157965911 \ " , \ n \ " _rev \ " : \ " 1157965911 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1158162519 \ " , \ n \ " _key \ " : \ " 1158162519 \ " , \ n \ " _rev \ " : \ " 1158162519 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1157113943 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157572695 \ " , \ n \ " _key \ " : \ " 1157572695 \ " , \ n \ " _rev \ " : \ " 1157572695 \ " , \ n \ " _from \ " : \ " persons / 
bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1156720727 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1157769303 \ " , \ n \ " _key \ " : \ " 1157769303 \ " , \ n \ " _rev \ " : \ " 1157769303 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1156917335 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1157376087 \ " , \ n \ " _key \ " : \ " 1157376087 \ " , \ n \ " _rev \ " : \ " 1157376087 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1156458583 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1156261975 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Using < em > backward < / em > item - ordering : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " itemOrder \ " : \ " backward \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1163274327 \ " , \ n \ " name \ " : \ " Dave \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1163077719 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1163274327 \ " , \ n \ " name \ " : \ " Dave \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1163077719 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164126295 \ " , \ n \ " _key \ " : \ " 1164126295 \ " , \ n \ " _rev \ " : \ " 1164126295 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " 
persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1163274327 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1163929687 \ " , \ n \ " _key \ " : \ " 1163929687 \ " , \ n \ " _rev \ " : \ " 1163929687 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1163077719 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ 
" \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164126295 \ " , \ n \ " _key \ " : \ " 1164126295 \ " , \ n \ " _rev \ " : \ " 1164126295 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1163274327 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 
1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1163929687 \ " , \ n \ " _key \ " : \ " 1163929687 \ " , \ n \ " _rev \ " : \ " 1163929687 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1163077719 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1164322903 \ " , \ n \ " _key \ " : \ " 1164322903 \ " , \ n \ " _rev \ " : \ " 1164322903 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1164519511 \ " , \ n \ " _key \ " : \ " 1164519511 \ " , \ n \ " _rev \ " : \ " 1164519511 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1163733079 \ " , \ n \ " _key \ " : \ " 1163733079 \ " , \ n \ " _rev \ " : \ " 1163733079 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1163470935 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1162815575 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1162618967 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Edges should only be included once globally , but nodes are included every time they are visited : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " , \ " edges \ " : \ " global \ " } } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168975959 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1169238103 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1169434711 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170483287 \ " , \ n \ " _key \ " : \ " 1170483287 \ " , \ n \ " _rev \ " : \ " 1170483287 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170483287 \ " , \ n \ " _key \ " : \ " 1170483287 \ " , \ n \ " _rev \ " : \ " 1170483287 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170679895 \ " , \ n \ " _key \ " : \ " 1170679895 \ " , \ n \ " _rev \ " : \ " 1170679895 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168975959 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170483287 \ " , \ n \ " _key \ " : \ " 1170483287 \ " , \ n \ " _rev \ " : \ " 1170483287 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170679895 \ " , \ n \ " _key \ " : \ " 1170679895 \ " , \ n \ " _rev \ " : \ " 1170679895 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 
1169893463 \ " , \ n \ " _key \ " : \ " 1169893463 \ " , \ n \ " _rev \ " : \ " 1169893463 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168975959 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170483287 \ " , \ n \ " _key \ " : \ " 1170483287 \ " , \ n \ " _rev \ " : \ " 1170483287 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170679895 \ " , \ n \ " _key \ " : \ " 1170679895 \ " , \ n \ " _rev \ " : \ " 1170679895 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170090071 \ " , \ n \ " _key \ " : \ " 1170090071 \ " , \ n \ " _rev \ " : \ " 1170090071 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168975959 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1169238103 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170483287 \ " , \ n \ " _key \ " : \ " 1170483287 \ " , \ n \ " _rev \ " : \ " 1170483287 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170679895 \ " , \ n \ " _key \ " : \ " 1170679895 \ " , \ n \ " _rev \ " : \ " 1170679895 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170286679 \ " , \ n \ " _key \ " : \ " 1170286679 \ " , \ n \ " _rev \ " : \ " 1170286679 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168779351 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169631319 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168975959 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons 
/ dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1169434711 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > If the underlying graph is cyclic , < em > maxIterations < / em > should be set : < br > < br > The underlying graph has two vertices < em > Alice < / em > and < em > Bob < / em > . With the directed edges : < ul class = \ " swagger - list \ " > < li > < em > Alice < / em > knows < em > Bob < / em > < / ul > _ < em > Bob < / em > knows < em > Alice < / em > < br > < br > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " , \ " edges \ " : \ " none \ " } , \ " maxIterations \ " : 5 } \ n \ nHTTP / 1 . 1 500 Internal Error \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 500 , \ n \ " errorNum \ " : 1909 , \ n \ " errorMessage \ " : \ " too many iterations \ " \ n } \ n < / code > < / pre > < br > < br > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " outbound \ " } \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1109921926 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110118534 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1110380678 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1110577286 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1109921926 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111036038 \ " , \ n \ " _key \ " : \ " 1111036038 \ " , \ n \ " _rev \ " : \ " 1111036038 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1109921926 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110118534 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111036038 \ " , \ n \ " _key \ " : \ " 1111036038 \ " , \ n \ " _rev \ " : \ " 1111036038 \ " 
, \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1111232646 \ " , \ n \ " _key \ " : \ " 1111232646 \ " , \ n \ " _rev \ " : \ " 1111232646 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1109921926 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110118534 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1110380678 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1111036038 \ " , \ n \ " _key \ " : \ " 1111036038 \ " , \ n \ " _rev \ " : \ " 1111036038 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1111429254 \ " , \ n \ " _key \ " : \ " 1111429254 \ " , \ n \ " _rev \ " : \ " 1111429254 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1109921926 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1110118534 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1110577286 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Follow only inbound edges : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " inbound \ " } \ n \ nHTTP / 1 . 
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114574982 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1115426950 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114574982 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1116278918 \ " , \ n \ " _key \ " : \ " 1116278918 \ " , \ n \ " _rev \ " : \ " 1116278918 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1114574982 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1115426950 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > Follow any direction of edges : < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " , \ " edges \ " : \ " global \ " } } \ n \ nHTTP / 1 . 
Excluding Charlie and Bob:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "outbound", "filter": "if (vertex.name === \"Bob\" || vertex.name === \"Charlie\") { return \"exclude\"; } return;"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice and Dave; paths [Alice] and [Alice, Bob, Dave]; "error": false, "code": 200)

Do not follow edges from Bob:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "outbound", "filter": "if (vertex.name === \"Bob\") { return \"prune\"; } return;"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice and Bob; paths [Alice] and [Alice, Bob]; "error": false, "code": 200)
Visit only nodes in a depth of at least 2:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "outbound", "minDepth": 2}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Charlie and Dave; paths [Alice, Bob, Charlie] and [Alice, Bob, Dave]; "error": false, "code": 200)
Visit only nodes in a depth of at most 1:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "outbound", "maxDepth": 1}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice and Bob; paths [Alice] and [Alice, Bob]; "error": false, "code": 200)
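What minDepth and maxDepth do in the two preceding examples can be reproduced with a plain depth-bounded walk. The sketch below is a stand-alone illustration on a hard-coded copy of the outbound knows edges (alice to bob, bob to charlie, bob to dave), not ArangoDB code:

// Stand-alone illustration of minDepth / maxDepth on the outbound edges
// used by the examples.
const outbound = {
  alice: ["bob"],
  bob: ["charlie", "dave"],
  charlie: [],
  dave: []
};

function visit(start, minDepth, maxDepth) {
  const visited = [];
  (function walk(vertex, depth) {
    if (depth >= minDepth) {
      visited.push(vertex);   // only emit vertices inside the depth window
    }
    if (depth >= maxDepth) {
      return;                 // do not expand deeper than maxDepth
    }
    for (const next of outbound[vertex]) {
      walk(next, depth + 1);
    }
  })(start, 0);
  return visited;
}

console.log(visit("alice", 2, Infinity)); // [ 'charlie', 'dave' ], as in the minDepth example
console.log(visit("alice", 0, 1));        // [ 'alice', 'bob' ], as in the maxDepth example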
Count all visited nodes and return a list of nodes only:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "outbound", "init": "result.visited = 0; result.myVertices = [ ];", "visitor": "result.visited++; result.myVertices.push(vertex);"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: "result" contains "visited": 4 and "myVertices" with the documents for Alice, Bob, Charlie and Dave; "error": false, "code": 200)
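The init and visitor attributes of the counting example are plain JavaScript strings as well; laid out here only for readability (result and vertex are provided by the traversal, nothing below is new API):

// init: runs once before the traversal starts and prepares the custom result.
result.visited = 0;
result.myVertices = [];

// visitor: runs for every visited vertex and fills the custom result, which is
// why the response above contains "visited": 4 and "myVertices" instead of the
// default vertices/paths structure.
result.visited++;
result.myVertices.push(vertex);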
Expand only inbound edges of Alice and outbound edges of Eve:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "expander": "var connections = [ ]; if (vertex.name === \"Alice\") { config.datasource.getInEdges(vertex).forEach(function (e) { connections.push({ vertex: require(\"internal\").db._document(e._from), edge: e }); }); } if (vertex.name === \"Eve\") { config.datasource.getOutEdges(vertex).forEach(function (e) { connections.push({ vertex: require(\"internal\").db._document(e._to), edge: e }); }); } return connections;"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice, Eve and Bob; paths [Alice], [Alice, Eve], [Alice, Eve, Bob]; "error": false, "code": 200)
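The expander string is the same kind of server-side JavaScript; it is reformatted below purely for readability (config, vertex and require("internal") are the objects the example itself uses):

var connections = [];
if (vertex.name === "Alice") {
  // for Alice, follow only edges pointing to Alice (inbound)
  config.datasource.getInEdges(vertex).forEach(function (e) {
    connections.push({ vertex: require("internal").db._document(e._from), edge: e });
  });
}
if (vertex.name === "Eve") {
  // for Eve, follow only edges pointing away from Eve (outbound)
  config.datasource.getOutEdges(vertex).forEach(function (e) {
    connections.push({ vertex: require("internal").db._document(e._to), edge: e });
  });
}
return connections;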
Follow the depthfirst strategy:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "any", "strategy": "depthfirst"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice, Eve, Bob, Alice, Charlie, Dave, Bob, Eve, Alice, Charlie, Dave in that order, together with the corresponding paths; "error": false, "code": 200)
Using postorder ordering:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "any", "order": "postorder"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice, Charlie, Dave, Bob, Eve, Alice, Eve, Charlie, Dave, Bob, Alice in that order, together with the corresponding paths; "error": false, "code": 200)
Using backward item-ordering:

shell> curl -X POST --data-binary @- --dump - http://localhost:8529/_api/traversal
{"startVertex": "persons/alice", "graphName": "knows_graph", "direction": "any", "itemOrder": "backward"}

HTTP/1.1 200 OK
content-type: application/json; charset=utf-8

(response body abbreviated: visits Alice, Bob, Dave, Charlie, Eve, Alice, Eve, Bob, Dave, Charlie, Alice in that order, together with the corresponding paths; "error": false, "code": 200)
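itemOrder only changes the order in which the connections of each vertex are worked off, which is why the backward run above reaches Bob before Eve and Dave before Charlie. A minimal stand-alone illustration (the array is a hypothetical stand-in for Bob's outbound edges):

// "forward" processes a vertex's connections as returned, "backward" in reverse.
const bobsEdges = ["charlie", "dave"];          // hypothetical stand-in data

const forwardOrder = [...bobsEdges];            // [ 'charlie', 'dave' ]
const backwardOrder = [...bobsEdges].reverse(); // [ 'dave', 'charlie' ]

console.log(forwardOrder, backwardOrder);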
1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " result \ " : { \ n \ " visited \ " : { \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168773254 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1169035398 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1169232006 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] , \ n \ " paths \ " : [ \ n { \ n \ " edges \ " : [ ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170280582 \ " , \ n \ " _key \ " : \ " 1170280582 \ " , \ n \ " _rev \ " : \ " 1170280582 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170280582 \ " , \ n \ " _key \ " : \ " 1170280582 \ " , \ n \ " _rev \ " : \ " 1170280582 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170477190 \ " , \ n \ " _key \ " : \ " 1170477190 \ " , \ n \ " _rev \ " : \ " 1170477190 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168773254 \ " , \ n \ " name \ " : \ " Bob \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170280582 \ " , \ n \ " _key \ " : \ " 1170280582 \ " , \ n \ " _rev \ " : \ " 1170280582 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170477190 \ " , \ n \ " _key \ " : \ " 1170477190 \ " , \ n \ " _rev \ " : \ " 1170477190 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 
1169690758 \ " , \ n \ " _key \ " : \ " 1169690758 \ " , \ n \ " _rev \ " : \ " 1169690758 \ " , \ n \ " _from \ " : \ " persons / alice \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168773254 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170280582 \ " , \ n \ " _key \ " : \ " 1170280582 \ " , \ n \ " _rev \ " : \ " 1170280582 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170477190 \ " , \ n \ " _key \ " : \ " 1170477190 \ " , \ n \ " _rev \ " : \ " 1170477190 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1169887366 \ " , \ n \ " _key \ " : \ " 1169887366 \ " , \ n \ " _rev \ " : \ " 1169887366 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / charlie \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168773254 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / charlie \ " , \ n \ " _key \ " : \ " charlie \ " , \ n \ " _rev \ " : \ " 1169035398 \ " , \ n \ " name \ " : \ " Charlie \ " \ n } \ n ] \ n } , \ n { \ n \ " edges \ " : [ \ n { \ n \ " _id \ " : \ " knows / 1170280582 \ " , \ n \ " _key \ " : \ " 1170280582 \ " , \ n \ " _rev \ " : \ " 1170280582 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / alice \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170477190 \ " , \ n \ " _key \ " : \ " 1170477190 \ " , \ n \ " _rev \ " : \ " 1170477190 \ " , \ n \ " _from \ " : \ " persons / eve \ " , \ n \ " _to \ " : \ " persons / bob \ " \ n } , \ n { \ n \ " _id \ " : \ " knows / 1170083974 \ " , \ n \ " _key \ " : \ " 1170083974 \ " , \ n \ " _rev \ " : \ " 1170083974 \ " , \ n \ " _from \ " : \ " persons / bob \ " , \ n \ " _to \ " : \ " persons / dave \ " \ n } \ n ] , \ n \ " vertices \ " : [ \ n { \ n \ " _id \ " : \ " persons / alice \ " , \ n \ " _key \ " : \ " alice \ " , \ n \ " _rev \ " : \ " 1168576646 \ " , \ n \ " name \ " : \ " Alice \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / eve \ " , \ n \ " _key \ " : \ " eve \ " , \ n \ " _rev \ " : \ " 1169428614 \ " , \ n \ " name \ " : \ " Eve \ " \ n } , \ n { \ n \ " _id \ " : \ " persons / bob \ " , \ n \ " _key \ " : \ " bob \ " , \ n \ " _rev \ " : \ " 1168773254 \ " , \ n \ " name \ " : \ " Bob \ " \ n } , \ n { \ n \ " _id \ " : \ " persons 
/ dave \ " , \ n \ " _key \ " : \ " dave \ " , \ n \ " _rev \ " : \ " 1169232006 \ " , \ n \ " name \ " : \ " Dave \ " \ n } \ n ] \ n } \ n ] \ n } \ n } , \ n \ " error \ " : false , \ n \ " code \ " : 200 \ n } \ n < / code > < / pre > < br > < br > < br > If the underlying graph is cyclic , < em > maxIterations < / em > should be set : < br > < br > The underlying graph has two vertices < em > Alice < / em > and < em > Bob < / em > . With the directed edges : < ul class = \ " swagger - list \ " > < li > < em > Alice < / em > knows < em > Bob < / em > < / ul > _ < em > Bob < / em > knows < em > Alice < / em > < br > < br > < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - X POST - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / traversal \ n { \ " startVertex \ " : \ " persons / alice \ " , \ " graphName \ " : \ " knows_graph \ " , \ " direction \ " : \ " any \ " , \ " uniqueness \ " : { \ " vertices \ " : \ " none \ " , \ " edges \ " : \ " none \ " } , \ " maxIterations \ " : 5 } \ n \ nHTTP / 1 . 1 500 Internal Error \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " error \ " : true , \ n \ " code \ " : 500 , \ n \ " errorNum \ " : 1909 , \ n \ " errorMessage \ " : \ " too many iterations \ " \ n } \ n < / code > < / pre > < br > < br > < br > @ endDocuBlock " , <nl> " nickname " : " executesATraversal " <nl> } <nl> ] , <nl> mmm a / js / apps / system / aardvark / api - docs / version . json <nl> ppp b / js / apps / system / aardvark / api - docs / version . json <nl> <nl> " notes " : " Returns the server name and version number . The response is a JSON object with the following attributes : < br > < br > < ul class = \ " swagger - list \ " > < li > < em > server < / em > : will always contain < em > arango < / em > < li > < em > version < / em > : the server version string . The string has the format \ " < em > major < / em > . * minor < em > . * sub < / em > \ " . < em > major < / em > and < em > minor < / em > will be numeric , and < em > sub < / em > may contain a number or a textual version . < li > < em > details < / em > : an optional JSON object with additional details . This is returned only if the < em > details < / em > URL parameter is set to < em > true < / em > in the request . " , <nl> " summary " : " Return server version " , <nl> " httpMethod " : " GET " , <nl> - " examples " : " < br > < br > Returns the version information . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / version \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " server \ " : \ " arango \ " , \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Returns the version information with details . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / version ? details = true \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " server \ " : \ " arango \ " , \ n \ " version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " details \ " : { \ n \ " build - date \ " : \ " 2014 - 10 - 02 18 : 29 : 32 \ " , \ n \ " configure \ " : \ " ' . 
/ configure ' ' - - enable - all - in - one - v8 ' ' - - enable - all - in - one - libev ' ' - - enable - all - in - one - icu ' ' - - enable - maintainer - mode ' ' - - disable - mruby ' \ " , \ n \ " icu - version \ " : \ " 52 . 1 \ " , \ n \ " libev - version \ " : \ " 4 . 11 \ " , \ n \ " openssl - version \ " : \ " OpenSSL 0 . 9 . 8y 5 Feb 2013 \ " , \ n \ " repository - version \ " : \ " heads / 2 . 3 - 0 - g6e754299829f024a979aa86d07c608ba3b2dbca9 - dirty \ " , \ n \ " server - version \ " : \ " 2 . 3 . 0 - alpha4 \ " , \ n \ " sizeof int \ " : \ " 4 \ " , \ n \ " sizeof void * \ " : \ " 8 \ " , \ n \ " v8 - version \ " : \ " 3 . 16 . 14 \ " \ n } \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> + " examples " : " < br > < br > Returns the version information . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / version \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " server \ " : \ " arango \ " , \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " \ n } \ n < / code > < / pre > < br > < br > < br > Returns the version information with details . < br > < br > < br > < br > < pre > < code class = \ " json \ " > shell > curl - - data - binary @ - - - dump - http : / / localhost : 8529 / _api / version ? details = true \ n \ nHTTP / 1 . 1 200 OK \ ncontent - type : application / json ; charset = utf - 8 \ n \ n { \ n \ " server \ " : \ " arango \ " , \ n \ " version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " details \ " : { \ n \ " build - date \ " : \ " 2014 - 10 - 03 09 : 54 : 13 \ " , \ n \ " configure \ " : \ " ' . / configure ' ' - - enable - all - in - one - v8 ' ' - - enable - all - in - one - libev ' ' - - enable - all - in - one - icu ' ' - - enable - maintainer - mode ' ' - - disable - mruby ' \ " , \ n \ " icu - version \ " : \ " 52 . 1 \ " , \ n \ " libev - version \ " : \ " 4 . 11 \ " , \ n \ " openssl - version \ " : \ " OpenSSL 0 . 9 . 8y 5 Feb 2013 \ " , \ n \ " repository - version \ " : \ " heads / 2 . 3 - 0 - g4f28c5e3e8dab3894ec0afdc657f27864551cec5 - dirty \ " , \ n \ " server - version \ " : \ " 2 . 3 . 0 - alpha5 \ " , \ n \ " sizeof int \ " : \ " 4 \ " , \ n \ " sizeof void * \ " : \ " 8 \ " , \ n \ " v8 - version \ " : \ " 3 . 16 . 14 \ " \ n } \ n } \ n < / code > < / pre > < br > @ endDocuBlock " , <nl> " nickname " : " ReturnServerVersion " <nl> } <nl> ] , <nl>
release version 2 . 3 . 0 - alpha5
arangodb/arangodb
1492e9f90e4cf9da54816b0997b75a64e7319b78
2014-10-03T08:04:56Z
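
The traversal examples in the release above hinge on two settings: with "edges": "global" each edge is expanded at most once for the whole traversal while vertices may be reported every time they are reached, and on a cyclic graph with no uniqueness the server aborts with errorNum 1909 ("too many iterations") once maxIterations is exceeded. The sketch below is an illustrative C++ rendering of those two rules only; the Traverse function, the Edge struct and the outbound-only expansion are assumptions of this sketch, not ArangoDB source.

    #include <cstddef>
    #include <cstdio>
    #include <queue>
    #include <set>
    #include <string>
    #include <vector>

    struct Edge { std::string from, to; };

    // "edges": "global" -> remember every expanded edge for the whole traversal;
    // "vertices": "none" -> keep no vertex set, so vertices can be revisited.
    bool Traverse(const std::vector<Edge>& edges, const std::string& start,
                  std::size_t max_iterations) {
      std::set<std::size_t> seen_edges;
      std::queue<std::string> frontier;
      frontier.push(start);
      std::size_t iterations = 0;
      while (!frontier.empty()) {
        if (++iterations > max_iterations) {
          std::puts("too many iterations");   // the condition behind errorNum 1909
          return false;
        }
        std::string v = frontier.front();
        frontier.pop();
        std::printf("visit %s\n", v.c_str());
        for (std::size_t i = 0; i < edges.size(); ++i) {
          if (edges[i].from == v && seen_edges.insert(i).second) {
            frontier.push(edges[i].to);       // the target vertex may be enqueued again
          }
        }
      }
      return true;
    }

    int main() {
      // The cyclic two-vertex example: Alice knows Bob, Bob knows Alice.
      std::vector<Edge> knows = {{"alice", "bob"}, {"bob", "alice"}};
      return Traverse(knows, "alice", 5) ? 0 : 1;
    }

Run against the Alice/Bob cycle, this terminates after three expansion steps because the global edge set blocks re-expansion; dropping the seen_edges check reproduces the unbounded walk that maxIterations is there to catch.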
mmm a / src / base / overflowing - math . h <nl> ppp b / src / base / overflowing - math . h <nl> inline float RecipSqrt ( float a ) { <nl> return - std : : numeric_limits < float > : : infinity ( ) ; <nl> } <nl> <nl> + template < typename T > <nl> + inline T RoundingAverageUnsigned ( T a , T b ) { <nl> + static_assert ( std : : is_unsigned < T > : : value , " Only for unsiged types " ) ; <nl> + static_assert ( sizeof ( T ) < sizeof ( uint64_t ) , " Must be smaller than uint64_t " ) ; <nl> + return ( static_cast < uint64_t > ( a ) + static_cast < uint64_t > ( b ) + 1 ) > > 1 ; <nl> + } <nl> + <nl> } / / namespace base <nl> } / / namespace v8 <nl> <nl> mmm a / src / codegen / x64 / macro - assembler - x64 . h <nl> ppp b / src / codegen / x64 / macro - assembler - x64 . h <nl> class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { <nl> AVX_OP ( Pxor , pxor ) <nl> AVX_OP ( Psubd , psubd ) <nl> AVX_OP ( Pslld , pslld ) <nl> + AVX_OP ( Pavgb , pavgb ) <nl> + AVX_OP ( Pavgw , pavgw ) <nl> AVX_OP ( Psrad , psrad ) <nl> AVX_OP ( Psrld , psrld ) <nl> AVX_OP ( Paddd , paddd ) <nl> mmm a / src / codegen / x64 / sse - instr . h <nl> ppp b / src / codegen / x64 / sse - instr . h <nl> <nl> V ( psllw , 66 , 0F , F1 ) \ <nl> V ( pslld , 66 , 0F , F2 ) \ <nl> V ( psllq , 66 , 0F , F3 ) \ <nl> + V ( pavgb , 66 , 0F , E0 ) \ <nl> V ( psraw , 66 , 0F , E1 ) \ <nl> V ( psrad , 66 , 0F , E2 ) \ <nl> + V ( pavgw , 66 , 0F , E3 ) \ <nl> V ( psrlw , 66 , 0F , D1 ) \ <nl> V ( psrld , 66 , 0F , D2 ) \ <nl> V ( psrlq , 66 , 0F , D3 ) \ <nl> mmm a / src / compiler / backend / instruction - selector . cc <nl> ppp b / src / compiler / backend / instruction - selector . cc <nl> void InstructionSelector : : VisitNode ( Node * node ) { <nl> return MarkAsSimd128 ( node ) , VisitI16x8GtU ( node ) ; <nl> case IrOpcode : : kI16x8GeU : <nl> return MarkAsSimd128 ( node ) , VisitI16x8GeU ( node ) ; <nl> + case IrOpcode : : kI16x8RoundingAverageU : <nl> + return MarkAsSimd128 ( node ) , VisitI16x8RoundingAverageU ( node ) ; <nl> case IrOpcode : : kI8x16Splat : <nl> return MarkAsSimd128 ( node ) , VisitI8x16Splat ( node ) ; <nl> case IrOpcode : : kI8x16ExtractLaneU : <nl> void InstructionSelector : : VisitNode ( Node * node ) { <nl> return MarkAsSimd128 ( node ) , VisitI8x16GtU ( node ) ; <nl> case IrOpcode : : kI8x16GeU : <nl> return MarkAsSimd128 ( node ) , VisitI8x16GeU ( node ) ; <nl> + case IrOpcode : : kI8x16RoundingAverageU : <nl> + return MarkAsSimd128 ( node ) , VisitI8x16RoundingAverageU ( node ) ; <nl> case IrOpcode : : kS128Zero : <nl> return MarkAsSimd128 ( node ) , VisitS128Zero ( node ) ; <nl> case IrOpcode : : kS128And : <nl> void InstructionSelector : : VisitF64x2SConvertI64x2 ( Node * node ) { <nl> void InstructionSelector : : VisitF64x2UConvertI64x2 ( Node * node ) { <nl> UNIMPLEMENTED ( ) ; <nl> } <nl> + void InstructionSelector : : VisitI16x8RoundingAverageU ( Node * node ) { <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> + void InstructionSelector : : VisitI8x16RoundingAverageU ( Node * node ) { <nl> + UNIMPLEMENTED ( ) ; <nl> + } <nl> # if ! V8_TARGET_ARCH_ARM64 <nl> # if ! V8_TARGET_ARCH_ARM <nl> void InstructionSelector : : VisitLoadTransform ( Node * node ) { UNIMPLEMENTED ( ) ; } <nl> mmm a / src / compiler / backend / x64 / code - generator - x64 . cc <nl> ppp b / src / compiler / backend / x64 / code - generator - x64 . 
cc <nl> CodeGenerator : : CodeGenResult CodeGenerator : : AssembleArchInstruction ( <nl> __ pcmpeqw ( dst , src ) ; <nl> break ; <nl> } <nl> + case kX64I16x8RoundingAverageU : { <nl> + __ Pavgw ( i . OutputSimd128Register ( ) , i . InputSimd128Register ( 1 ) ) ; <nl> + break ; <nl> + } <nl> case kX64I8x16Splat : { <nl> CpuFeatureScope sse_scope ( tasm ( ) , SSSE3 ) ; <nl> XMMRegister dst = i . OutputSimd128Register ( ) ; <nl> CodeGenerator : : CodeGenResult CodeGenerator : : AssembleArchInstruction ( <nl> __ pcmpeqb ( dst , src ) ; <nl> break ; <nl> } <nl> + case kX64I8x16RoundingAverageU : { <nl> + __ Pavgb ( i . OutputSimd128Register ( ) , i . InputSimd128Register ( 1 ) ) ; <nl> + break ; <nl> + } <nl> case kX64S128And : { <nl> __ pand ( i . OutputSimd128Register ( ) , i . InputSimd128Register ( 1 ) ) ; <nl> break ; <nl> mmm a / src / compiler / backend / x64 / instruction - codes - x64 . h <nl> ppp b / src / compiler / backend / x64 / instruction - codes - x64 . h <nl> namespace compiler { <nl> V ( X64I16x8MaxU ) \ <nl> V ( X64I16x8GtU ) \ <nl> V ( X64I16x8GeU ) \ <nl> + V ( X64I16x8RoundingAverageU ) \ <nl> V ( X64I8x16Splat ) \ <nl> V ( X64I8x16ExtractLaneU ) \ <nl> V ( X64I8x16ExtractLaneS ) \ <nl> namespace compiler { <nl> V ( X64I8x16MaxU ) \ <nl> V ( X64I8x16GtU ) \ <nl> V ( X64I8x16GeU ) \ <nl> + V ( X64I8x16RoundingAverageU ) \ <nl> V ( X64S128Zero ) \ <nl> V ( X64S128Not ) \ <nl> V ( X64S128And ) \ <nl> mmm a / src / compiler / backend / x64 / instruction - scheduler - x64 . cc <nl> ppp b / src / compiler / backend / x64 / instruction - scheduler - x64 . cc <nl> int InstructionScheduler : : GetTargetInstructionFlags ( <nl> case kX64I16x8MaxU : <nl> case kX64I16x8GtU : <nl> case kX64I16x8GeU : <nl> + case kX64I16x8RoundingAverageU : <nl> case kX64I8x16Splat : <nl> case kX64I8x16ExtractLaneU : <nl> case kX64I8x16ExtractLaneS : <nl> int InstructionScheduler : : GetTargetInstructionFlags ( <nl> case kX64I8x16MaxU : <nl> case kX64I8x16GtU : <nl> case kX64I8x16GeU : <nl> + case kX64I8x16RoundingAverageU : <nl> case kX64S128And : <nl> case kX64S128Or : <nl> case kX64S128Xor : <nl> mmm a / src / compiler / backend / x64 / instruction - selector - x64 . cc <nl> ppp b / src / compiler / backend / x64 / instruction - selector - x64 . cc <nl> VISIT_ATOMIC_BINOP ( Xor ) <nl> V ( I16x8MinU ) \ <nl> V ( I16x8MaxU ) \ <nl> V ( I16x8GeU ) \ <nl> + V ( I16x8RoundingAverageU ) \ <nl> V ( I8x16SConvertI16x8 ) \ <nl> V ( I8x16Add ) \ <nl> V ( I8x16AddSaturateS ) \ <nl> VISIT_ATOMIC_BINOP ( Xor ) <nl> V ( I8x16MinU ) \ <nl> V ( I8x16MaxU ) \ <nl> V ( I8x16GeU ) \ <nl> + V ( I8x16RoundingAverageU ) \ <nl> V ( S128And ) \ <nl> V ( S128Or ) \ <nl> V ( S128Xor ) <nl> mmm a / src / compiler / machine - operator . cc <nl> ppp b / src / compiler / machine - operator . 
cc <nl> MachineType AtomicOpType ( Operator const * op ) { <nl> V ( I16x8MaxU , Operator : : kCommutative , 2 , 0 , 1 ) \ <nl> V ( I16x8GtU , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> V ( I16x8GeU , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> + V ( I16x8RoundingAverageU , Operator : : kCommutative , 2 , 0 , 1 ) \ <nl> V ( I8x16Splat , Operator : : kNoProperties , 1 , 0 , 1 ) \ <nl> V ( I8x16Neg , Operator : : kNoProperties , 1 , 0 , 1 ) \ <nl> V ( I8x16Shl , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> MachineType AtomicOpType ( Operator const * op ) { <nl> V ( I8x16MaxU , Operator : : kCommutative , 2 , 0 , 1 ) \ <nl> V ( I8x16GtU , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> V ( I8x16GeU , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> + V ( I8x16RoundingAverageU , Operator : : kCommutative , 2 , 0 , 1 ) \ <nl> V ( S128Load , Operator : : kNoProperties , 2 , 0 , 1 ) \ <nl> V ( S128Store , Operator : : kNoProperties , 3 , 0 , 1 ) \ <nl> V ( S128Zero , Operator : : kNoProperties , 0 , 0 , 1 ) \ <nl> mmm a / src / compiler / machine - operator . h <nl> ppp b / src / compiler / machine - operator . h <nl> class V8_EXPORT_PRIVATE MachineOperatorBuilder final <nl> const Operator * I16x8MaxU ( ) ; <nl> const Operator * I16x8GtU ( ) ; <nl> const Operator * I16x8GeU ( ) ; <nl> + const Operator * I16x8RoundingAverageU ( ) ; <nl> <nl> const Operator * I8x16Splat ( ) ; <nl> const Operator * I8x16ExtractLaneU ( int32_t ) ; <nl> class V8_EXPORT_PRIVATE MachineOperatorBuilder final <nl> const Operator * I8x16MaxU ( ) ; <nl> const Operator * I8x16GtU ( ) ; <nl> const Operator * I8x16GeU ( ) ; <nl> + const Operator * I8x16RoundingAverageU ( ) ; <nl> <nl> const Operator * S128Load ( ) ; <nl> const Operator * S128Store ( ) ; <nl> mmm a / src / compiler / opcodes . h <nl> ppp b / src / compiler / opcodes . h <nl> <nl> V ( I16x8LeU ) \ <nl> V ( I16x8GtU ) \ <nl> V ( I16x8GeU ) \ <nl> + V ( I16x8RoundingAverageU ) \ <nl> V ( I8x16Splat ) \ <nl> V ( I8x16ExtractLaneU ) \ <nl> V ( I8x16ExtractLaneS ) \ <nl> <nl> V ( I8x16LeU ) \ <nl> V ( I8x16GtU ) \ <nl> V ( I8x16GeU ) \ <nl> + V ( I8x16RoundingAverageU ) \ <nl> V ( S128Load ) \ <nl> V ( S128Store ) \ <nl> V ( S128Zero ) \ <nl> mmm a / src / compiler / wasm - compiler . cc <nl> ppp b / src / compiler / wasm - compiler . 
cc <nl> Node * WasmGraphBuilder : : SimdOp ( wasm : : WasmOpcode opcode , Node * const * inputs ) { <nl> case wasm : : kExprI16x8GeU : <nl> return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > I16x8GeU ( ) , inputs [ 0 ] , <nl> inputs [ 1 ] ) ; <nl> + case wasm : : kExprI16x8RoundingAverageU : <nl> + return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > I16x8RoundingAverageU ( ) , <nl> + inputs [ 0 ] , inputs [ 1 ] ) ; <nl> case wasm : : kExprI8x16Splat : <nl> return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > I8x16Splat ( ) , inputs [ 0 ] ) ; <nl> case wasm : : kExprI8x16Neg : <nl> Node * WasmGraphBuilder : : SimdOp ( wasm : : WasmOpcode opcode , Node * const * inputs ) { <nl> case wasm : : kExprI8x16GeU : <nl> return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > I8x16GeU ( ) , inputs [ 0 ] , <nl> inputs [ 1 ] ) ; <nl> + case wasm : : kExprI8x16RoundingAverageU : <nl> + return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > I8x16RoundingAverageU ( ) , <nl> + inputs [ 0 ] , inputs [ 1 ] ) ; <nl> case wasm : : kExprS128And : <nl> return graph ( ) - > NewNode ( mcgraph ( ) - > machine ( ) - > S128And ( ) , inputs [ 0 ] , <nl> inputs [ 1 ] ) ; <nl> mmm a / src / diagnostics / x64 / disasm - x64 . cc <nl> ppp b / src / diagnostics / x64 / disasm - x64 . cc <nl> int DisassemblerX64 : : TwoByteOpcodeInstruction ( byte * data ) { <nl> mnemonic = " paddusw " ; <nl> } else if ( opcode = = 0xDE ) { <nl> mnemonic = " pmaxub " ; <nl> + } else if ( opcode = = 0xE0 ) { <nl> + mnemonic = " pavgb " ; <nl> } else if ( opcode = = 0xE1 ) { <nl> mnemonic = " psraw " ; <nl> } else if ( opcode = = 0xE2 ) { <nl> mnemonic = " psrad " ; <nl> + } else if ( opcode = = 0xE3 ) { <nl> + mnemonic = " pavgw " ; <nl> } else if ( opcode = = 0xE8 ) { <nl> mnemonic = " psubsb " ; <nl> } else if ( opcode = = 0xE9 ) { <nl> mmm a / src / wasm / wasm - interpreter . cc <nl> ppp b / src / wasm / wasm - interpreter . cc <nl> class ThreadImpl { <nl> BINOP_CASE ( I16x8AddSaturateU , i16x8 , int8 , 8 , SaturateAdd < uint16_t > ( a , b ) ) <nl> BINOP_CASE ( I16x8SubSaturateS , i16x8 , int8 , 8 , SaturateSub < int16_t > ( a , b ) ) <nl> BINOP_CASE ( I16x8SubSaturateU , i16x8 , int8 , 8 , SaturateSub < uint16_t > ( a , b ) ) <nl> + BINOP_CASE ( I16x8RoundingAverageU , i16x8 , int8 , 8 , <nl> + base : : RoundingAverageUnsigned < uint16_t > ( a , b ) ) <nl> BINOP_CASE ( I8x16Add , i8x16 , int16 , 16 , base : : AddWithWraparound ( a , b ) ) <nl> BINOP_CASE ( I8x16Sub , i8x16 , int16 , 16 , base : : SubWithWraparound ( a , b ) ) <nl> BINOP_CASE ( I8x16Mul , i8x16 , int16 , 16 , base : : MulWithWraparound ( a , b ) ) <nl> class ThreadImpl { <nl> BINOP_CASE ( I8x16SubSaturateS , i8x16 , int16 , 16 , SaturateSub < int8_t > ( a , b ) ) <nl> BINOP_CASE ( I8x16SubSaturateU , i8x16 , int16 , 16 , <nl> SaturateSub < uint8_t > ( a , b ) ) <nl> + BINOP_CASE ( I8x16RoundingAverageU , i8x16 , int16 , 16 , <nl> + base : : RoundingAverageUnsigned < uint8_t > ( a , b ) ) <nl> # undef BINOP_CASE <nl> # define UNOP_CASE ( op , name , stype , count , expr ) \ <nl> case kExpr # # op : { \ <nl> mmm a / src / wasm / wasm - opcodes . cc <nl> ppp b / src / wasm / wasm - opcodes . cc <nl> const char * WasmOpcodes : : OpcodeName ( WasmOpcode opcode ) { <nl> CASE_I64x2_OP ( Load32x2S , " load32x2_s " ) <nl> CASE_I64x2_OP ( Load32x2U , " load32x2_u " ) <nl> <nl> + CASE_I8x16_OP ( RoundingAverageU , " avgr_u " ) <nl> + CASE_I16x8_OP ( RoundingAverageU , " avgr_u " ) <nl> + <nl> / / Atomic operations . 
<nl> CASE_OP ( AtomicNotify , " atomic . notify " ) <nl> CASE_INT_OP ( AtomicWait , " atomic . wait " ) <nl> mmm a / src / wasm / wasm - opcodes . h <nl> ppp b / src / wasm / wasm - opcodes . h <nl> bool IsJSCompatibleSignature ( const FunctionSig * sig , const WasmFeatures & ) ; <nl> V ( I32x4Load16x4U , 0xfdd5 , s_s ) \ <nl> V ( I64x2Load32x2S , 0xfdd6 , s_s ) \ <nl> V ( I64x2Load32x2U , 0xfdd7 , s_s ) \ <nl> + V ( I8x16RoundingAverageU , 0xfdd9 , s_ss ) \ <nl> + V ( I16x8RoundingAverageU , 0xfdda , s_ss ) \ <nl> V ( I16x8AddHoriz , 0xfdbd , s_ss ) \ <nl> V ( I32x4AddHoriz , 0xfdbe , s_ss ) \ <nl> V ( F32x4AddHoriz , 0xfdbf , s_ss ) \ <nl> mmm a / test / cctest / wasm / test - run - wasm - simd . cc <nl> ppp b / test / cctest / wasm / test - run - wasm - simd . cc <nl> WASM_SIMD_TEST ( I16x8Neg ) { <nl> base : : NegateWithWraparound ) ; <nl> } <nl> <nl> + template < typename T = int16_t , typename OpType = T ( * ) ( T , T ) > <nl> void RunI16x8BinOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> - WasmOpcode opcode , Int16BinOp expected_op ) { <nl> - WasmRunner < int32_t , int32_t , int32_t > r ( execution_tier , lower_simd ) ; <nl> + WasmOpcode opcode , OpType expected_op ) { <nl> + WasmRunner < int32_t , T , T > r ( execution_tier , lower_simd ) ; <nl> / / Global to hold output . <nl> - int16_t * g = r . builder ( ) . AddGlobal < int16_t > ( kWasmS128 ) ; <nl> + T * g = r . builder ( ) . template AddGlobal < T > ( kWasmS128 ) ; <nl> / / Build fn to splat test values , perform binop , and write the result . <nl> byte value1 = 0 , value2 = 1 ; <nl> byte temp1 = r . AllocateLocal ( kWasmS128 ) ; <nl> void RunI16x8BinOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> WASM_GET_LOCAL ( temp2 ) ) ) , <nl> WASM_ONE ) ; <nl> <nl> - FOR_INT16_INPUTS ( x ) { <nl> - FOR_INT16_INPUTS ( y ) { <nl> + for ( T x : compiler : : ValueHelper : : GetVector < T > ( ) ) { <nl> + for ( T y : compiler : : ValueHelper : : GetVector < T > ( ) ) { <nl> r . Call ( x , y ) ; <nl> - int16_t expected = expected_op ( x , y ) ; <nl> + T expected = expected_op ( x , y ) ; <nl> for ( int i = 0 ; i < 8 ; i + + ) { <nl> - CHECK_EQ ( expected , ReadLittleEndianValue < int16_t > ( & g [ i ] ) ) ; <nl> + CHECK_EQ ( expected , ReadLittleEndianValue < T > ( & g [ i ] ) ) ; <nl> } <nl> } <nl> } <nl> WASM_SIMD_TEST ( I16x8LeU ) { <nl> UnsignedLessEqual ) ; <nl> } <nl> <nl> + # if V8_TARGET_ARCH_X64 <nl> + WASM_SIMD_TEST_NO_LOWERING ( I16x8RoundingAverageU ) { <nl> + RunI16x8BinOpTest < uint16_t > ( execution_tier , lower_simd , <nl> + kExprI16x8RoundingAverageU , <nl> + base : : RoundingAverageUnsigned ) ; <nl> + } <nl> + # endif / / V8_TARGET_ARCH_X64 <nl> + <nl> void RunI16x8ShiftOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> WasmOpcode opcode , Int16ShiftOp expected_op ) { <nl> / / Intentionally shift by 16 , should be no - op . <nl> WASM_SIMD_TEST ( I8x16ConvertI16x8 ) { <nl> } <nl> } <nl> <nl> + template < typename T = int8_t , typename OpType = T ( * ) ( T , T ) > <nl> void RunI8x16BinOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> - WasmOpcode opcode , Int8BinOp expected_op ) { <nl> - WasmRunner < int32_t , int32_t , int32_t > r ( execution_tier , lower_simd ) ; <nl> + WasmOpcode opcode , OpType expected_op ) { <nl> + WasmRunner < int32_t , T , T > r ( execution_tier , lower_simd ) ; <nl> / / Global to hold output . <nl> - int8_t * g = r . builder ( ) . AddGlobal < int8_t > ( kWasmS128 ) ; <nl> + T * g = r . builder ( ) . 
template AddGlobal < T > ( kWasmS128 ) ; <nl> / / Build fn to splat test values , perform binop , and write the result . <nl> byte value1 = 0 , value2 = 1 ; <nl> byte temp1 = r . AllocateLocal ( kWasmS128 ) ; <nl> void RunI8x16BinOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> WASM_GET_LOCAL ( temp2 ) ) ) , <nl> WASM_ONE ) ; <nl> <nl> - FOR_INT8_INPUTS ( x ) { <nl> - FOR_INT8_INPUTS ( y ) { <nl> + for ( T x : compiler : : ValueHelper : : GetVector < T > ( ) ) { <nl> + for ( T y : compiler : : ValueHelper : : GetVector < T > ( ) ) { <nl> r . Call ( x , y ) ; <nl> - int8_t expected = expected_op ( x , y ) ; <nl> + T expected = expected_op ( x , y ) ; <nl> for ( int i = 0 ; i < 16 ; i + + ) { <nl> - CHECK_EQ ( expected , ReadLittleEndianValue < int8_t > ( & g [ i ] ) ) ; <nl> + CHECK_EQ ( expected , ReadLittleEndianValue < T > ( & g [ i ] ) ) ; <nl> } <nl> } <nl> } <nl> WASM_SIMD_TEST ( I8x16Mul ) { <nl> base : : MulWithWraparound ) ; <nl> } <nl> <nl> + # if V8_TARGET_ARCH_X64 <nl> + WASM_SIMD_TEST_NO_LOWERING ( I8x16RoundingAverageU ) { <nl> + RunI8x16BinOpTest < uint8_t > ( execution_tier , lower_simd , <nl> + kExprI8x16RoundingAverageU , <nl> + base : : RoundingAverageUnsigned ) ; <nl> + } <nl> + # endif / / V8_TARGET_ARCH_X64 <nl> + <nl> void RunI8x16ShiftOpTest ( ExecutionTier execution_tier , LowerSimd lower_simd , <nl> WasmOpcode opcode , Int8ShiftOp expected_op ) { <nl> / / Intentionally shift by 8 , should be no - op . <nl>
[ wasm - simd ] Implement rounding average on x64 and interpreter
v8/v8
90b42052c6b8f0df8a09d96680721f938c8cd3eb
2019-12-19T22:40:59Z
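
The commit above wires wasm avgr_u through three layers: pavgb/pavgw on x64, new machine operators in the compiler, and base::RoundingAverageUnsigned for the interpreter. The snippet below is a self-contained restatement of the scalar lane semantics from that diff — widen to uint64_t so a + b + 1 cannot overflow, then shift right by one, which rounds halves up; the main() checks use illustrative values, not ones taken from the test suite.

    #include <cassert>
    #include <cstdint>
    #include <type_traits>

    // Mirrors base::RoundingAverageUnsigned from the diff: only narrow unsigned
    // lane types are allowed, and the widening makes the +1 rounding bias safe.
    template <typename T>
    T RoundingAverageUnsigned(T a, T b) {
      static_assert(std::is_unsigned<T>::value, "only for unsigned lane types");
      static_assert(sizeof(T) < sizeof(uint64_t), "must be narrower than uint64_t");
      return static_cast<T>(
          (static_cast<uint64_t>(a) + static_cast<uint64_t>(b) + 1) >> 1);
    }

    int main() {
      // avgr_u(1, 2) rounds upward: (1 + 2 + 1) >> 1 == 2.
      assert(RoundingAverageUnsigned<uint8_t>(1, 2) == 2);
      // Top of the 8-bit range does not overflow: (255 + 255 + 1) >> 1 == 255.
      assert(RoundingAverageUnsigned<uint8_t>(255, 255) == 255);
      // 16-bit lanes behave the same way: (65535 + 1 + 1) >> 1 == 32768.
      assert(RoundingAverageUnsigned<uint16_t>(65535, 1) == 32768);
      return 0;
    }

The widening is the whole point of the static_assert on the type size: for uint8_t and uint16_t lanes the sum always fits in 64 bits, so no saturation logic is needed.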
mmm a / docs / process . md <nl> ppp b / docs / process . md <nl> How : <nl> 2 . Tag the emscripten repo on the emscripten commit used by that release ( which <nl> you can tell from the DEPS file ) , using something like <nl> ` git checkout [ COMMIT ] ` ; ` git tag [ VERSION ] ` ; ` git push - - tags ` . <nl> - 3 . Update <nl> + 3 . Tag the emsdk repo as well , on the commit that does the update , after it <nl> + lands on master . <nl> + 4 . Update <nl> [ emscripten - version . txt ] ( https : / / github . com / emscripten - core / emscripten / blob / master / emscripten - version . txt ) <nl> in the emscripten repo . This is a delayed update , in that the tag will refer <nl> to the actual release , but the update to emscripten - version . txt is a new <nl>
Tag emsdk repo too [ ci skip ] ( )
emscripten-core/emscripten
687e0aec29c0e7510e9b943fe74ae261bc1652e7
2020-04-21T23:11:11Z
mmm a / DeepSpeech . py <nl> ppp b / DeepSpeech . py <nl> def end ( self , session ) : <nl> <nl> except tf . errors . InvalidArgumentError as e : <nl> log_error ( str ( e ) ) <nl> - log_error ( " Provide a - - checkpoint_dir argument to work with models of different shapes . " ) <nl> + log_error ( ' The checkpoint in { 0 } does not match the shapes of the model . ' <nl> + ' Did you change alphabet . txt or the - - n_hidden parameter ' <nl> + ' between train runs using the same checkpoint dir ? Try moving ' <nl> + ' or removing the contents of { 0 } . ' . format ( FLAGS . checkpoint_dir ) ) <nl> sys . exit ( 1 ) <nl> <nl> <nl> mmm a / data / lm / trie <nl> ppp b / data / lm / trie <nl> <nl> version https : / / git - lfs . github . com / spec / v1 <nl> - oid sha256 : 55da7b52ddb19f46301a31d56aff35ed1508fd9bd1e04d907114d89771892219 <nl> - size 43550329 <nl> + oid sha256 : c2f8f1d721eed0ae621160626e803925efa481c8156bb97e72013c0fbf879b75 <nl> + size 43550345 <nl> mmm a / native_client / deepspeech . cc <nl> ppp b / native_client / deepspeech . cc <nl> Model : : Model ( const char * aModelPath , int aNCep , int aNContext , <nl> <nl> Status status = NewSession ( SessionOptions ( ) , & mPriv - > session ) ; <nl> if ( ! status . ok ( ) ) { <nl> + std : : cerr < < status . ToString ( ) < < std : : endl ; <nl> return ; <nl> } <nl> <nl> Model : : Model ( const char * aModelPath , int aNCep , int aNContext , <nl> if ( ! status . ok ( ) ) { <nl> mPriv - > session - > Close ( ) ; <nl> mPriv - > session = NULL ; <nl> + std : : cerr < < status . ToString ( ) < < std : : endl ; <nl> return ; <nl> } <nl> <nl> Model : : Model ( const char * aModelPath , int aNCep , int aNContext , <nl> if ( ! status . ok ( ) ) { <nl> mPriv - > session - > Close ( ) ; <nl> mPriv - > session = NULL ; <nl> + std : : cerr < < status . ToString ( ) < < std : : endl ; <nl> return ; <nl> } <nl> <nl> Model : : Model ( const char * aModelPath , int aNCep , int aNContext , <nl> <nl> mPriv - > alphabet = new Alphabet ( aAlphabetConfigPath ) ; <nl> <nl> + for ( int i = 0 ; i < mPriv - > graph_def . node_size ( ) ; + + i ) { <nl> + NodeDef node = mPriv - > graph_def . node ( i ) ; <nl> + if ( node . name ( ) = = " logits / shape / 2 " ) { <nl> + int final_dim_size = node . attr ( ) . at ( " value " ) . tensor ( ) . int_val ( 0 ) - 1 ; <nl> + if ( final_dim_size ! = mPriv - > alphabet - > GetSize ( ) ) { <nl> + std : : cerr < < " Error : Alphabet size does not match loaded model : alphabet " <nl> + < < " has size " < < mPriv - > alphabet - > GetSize ( ) <nl> + < < " , but model has " < < final_dim_size <nl> + < < " classes in its output . Make sure you ' re passing an alphabet " <nl> + < < " file with the same size as the one used for training . " <nl> + < < std : : endl ; <nl> + mPriv - > session - > Close ( ) ; <nl> + mPriv - > session = NULL ; <nl> + return ; <nl> + } <nl> + break ; <nl> + } <nl> + } <nl> + <nl> mPriv - > scorer = NULL ; <nl> mPriv - > beam_width = 0 ; <nl> } <nl> mmm a / native_client / trie_node . h <nl> ppp b / native_client / trie_node . h <nl> limitations under the License . 
<nl> <nl> class TrieNode { <nl> public : <nl> + static const int MAGIC = ' TRIE ' ; <nl> + static const int FILE_VERSION = 1 ; <nl> + <nl> TrieNode ( int vocab_size ) <nl> : vocab_size_ ( vocab_size ) <nl> , prefixCount_ ( 0 ) <nl> class TrieNode { <nl> } <nl> <nl> void WriteToStream ( std : : ostream & os ) const { <nl> - WriteNode ( os ) ; <nl> - for ( int i = 0 ; i < vocab_size_ ; i + + ) { <nl> - if ( children_ [ i ] = = nullptr ) { <nl> - os < < - 1 < < std : : endl ; <nl> - } else { <nl> - / / Recursive call <nl> - children_ [ i ] - > WriteToStream ( os ) ; <nl> - } <nl> - } <nl> + os < < MAGIC < < std : : endl < < FILE_VERSION < < std : : endl < < vocab_size_ < < std : : endl ; <nl> + WriteNodeAndChildren ( os ) ; <nl> } <nl> <nl> static void ReadFromStream ( std : : istream & is , TrieNode * & obj , int vocab_size ) { <nl> - int prefixCount ; <nl> - is > > prefixCount ; <nl> + int magic ; <nl> + is > > magic ; <nl> + if ( magic ! = MAGIC ) { <nl> + std : : cerr < < " Error : Can ' t parse trie file , invalid header . Try updating " <nl> + " your trie file . " < < std : : endl ; <nl> + obj = nullptr ; <nl> + return ; <nl> + } <nl> <nl> - if ( prefixCount = = - 1 ) { <nl> - / / This is an undefined child <nl> + int version ; <nl> + is > > version ; <nl> + if ( version ! = FILE_VERSION ) { <nl> + std : : cerr < < " Error : Trie file version mismatch . Update your trie file . " <nl> + < < std : : endl ; <nl> obj = nullptr ; <nl> return ; <nl> } <nl> <nl> - obj = new TrieNode ( vocab_size ) ; <nl> - obj - > ReadNode ( is , prefixCount ) ; <nl> - for ( int i = 0 ; i < vocab_size ; i + + ) { <nl> - ReadFromStream ( is , obj - > children_ [ i ] , vocab_size ) ; <nl> + int fileVocabSize ; <nl> + is > > fileVocabSize ; <nl> + if ( fileVocabSize ! = vocab_size ) { <nl> + std : : cerr < < " Error : Mismatching alphabet size in trie file and alphabet " <nl> + " file . Trie file will not be loaded . " < < std : : endl ; <nl> + obj = nullptr ; <nl> + return ; <nl> } <nl> + <nl> + ReadPrefixAndNode ( is , obj , vocab_size ) ; <nl> } <nl> <nl> void Insert ( const std : : string & word , std : : function < int ( const std : : string & ) > translator , <nl> class TrieNode { <nl> is > > min_unigram_score_ ; <nl> } <nl> <nl> + void WriteNodeAndChildren ( std : : ostream & os ) const { <nl> + WriteNode ( os ) ; <nl> + for ( int i = 0 ; i < vocab_size_ ; i + + ) { <nl> + if ( children_ [ i ] = = nullptr ) { <nl> + os < < - 1 < < std : : endl ; <nl> + } else { <nl> + / / Recursive call <nl> + children_ [ i ] - > WriteNodeAndChildren ( os ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + static void ReadPrefixAndNode ( std : : istream & is , TrieNode * & obj , int vocab_size ) { <nl> + int prefixCount ; <nl> + is > > prefixCount ; <nl> + <nl> + if ( prefixCount = = - 1 ) { <nl> + / / This is an undefined child <nl> + obj = nullptr ; <nl> + return ; <nl> + } <nl> + <nl> + obj = new TrieNode ( vocab_size ) ; <nl> + obj - > ReadNode ( is , prefixCount ) ; <nl> + for ( int i = 0 ; i < vocab_size ; i + + ) { <nl> + ReadPrefixAndNode ( is , obj - > children_ [ i ] , vocab_size ) ; <nl> + } <nl> + } <nl> } ; <nl> <nl> # endif / / TRIE_NODE_H <nl> mmm a / util / feeding . py <nl> ppp b / util / feeding . py <nl> def _populate_batch_queue ( self , session , coord ) : <nl> source_len = len ( source ) <nl> target = text_to_char_array ( transcript , self . _alphabet ) <nl> target_len = len ( target ) <nl> + if source_len < target_len : <nl> + raise ValueError ( ' Error : Audio file { } is too short for transcription . ' . 
format ( wav_file ) ) <nl> try : <nl> session . run ( self . _enqueue_op , feed_dict = { self . _model_feeder . ph_x : source , <nl> self . _model_feeder . ph_x_length : source_len , <nl>
Merge pull request from mozilla / error - messages
mozilla/DeepSpeech
41636f031371f8eab8de08bde6311b0ca55cbdd5
2017-11-02T13:32:03Z
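
Besides the friendlier checkpoint and short-audio errors, the merge above gives the trie file a small self-describing header (magic word, file version, alphabet size), rejects any file whose header disagrees with the loaded alphabet, and cross-checks the alphabet size against the model's logits dimension. Below is a minimal sketch of that header pattern under stated assumptions: the names WriteHeader/ReadHeader, kMagic, kFileVersion and the 28-entry alphabet are illustrative, not the exact DeepSpeech layout.

    #include <iostream>
    #include <sstream>

    static const int kMagic = 'TRIE';  // multi-character literal, as in the diff;
                                       // implementation-defined value, but consistent
                                       // between write and read in one build
    static const int kFileVersion = 1;

    void WriteHeader(std::ostream& os, int vocab_size) {
      os << kMagic << std::endl << kFileVersion << std::endl << vocab_size << std::endl;
    }

    bool ReadHeader(std::istream& is, int expected_vocab_size) {
      int magic = 0, version = 0, vocab_size = 0;
      is >> magic >> version >> vocab_size;
      if (magic != kMagic) {
        std::cerr << "Error: invalid header, regenerate the trie file." << std::endl;
        return false;
      }
      if (version != kFileVersion) {
        std::cerr << "Error: trie file version mismatch." << std::endl;
        return false;
      }
      if (vocab_size != expected_vocab_size) {
        std::cerr << "Error: alphabet size does not match the trie file." << std::endl;
        return false;
      }
      return true;
    }

    int main() {
      std::stringstream buf;
      WriteHeader(buf, 28);                // e.g. a 28-character alphabet
      return ReadHeader(buf, 28) ? 0 : 1;  // succeeds; a mismatched size would fail
    }

Putting the version and vocabulary size ahead of the payload is what lets the loader fail with a clear message instead of silently mis-parsing an old or mismatched trie.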
mmm a / LICENSE <nl> ppp b / LICENSE <nl> This license applies to all parts of V8 that are not externally <nl> maintained libraries . The externally maintained libraries used by V8 <nl> are : <nl> <nl> - - Jscre , located under third_party / jscre . This code is copyrighted <nl> - by the University of Cambridge and Apple Inc . and released under a <nl> - 2 - clause BSD license . <nl> - <nl> - PCRE test suite , located in test / mjsunit / regexp - pcre . js . This is <nl> based on the test suite from PCRE - 7 . 3 , which is copyrighted by the <nl> - University of Cambridge and Google , Inc . The copyright notive and <nl> + University of Cambridge and Google , Inc . The copyright notice and <nl> license are embedded in regexp - pcre . js . <nl> <nl> - Dtoa , located under third_party / dtoa . This code is copyrighted by <nl> mmm a / SConstruct <nl> ppp b / SConstruct <nl> MKSNAPSHOT_EXTRA_FLAGS = { <nl> } <nl> <nl> <nl> - JSCRE_EXTRA_FLAGS = { <nl> - ' gcc ' : { <nl> - ' all ' : { <nl> - ' CPPDEFINES ' : [ ' SUPPORT_UTF8 ' , ' NO_RECURSE ' , ' SUPPORT_UCP ' ] , <nl> - ' WARNINGFLAGS ' : [ ' - w ' ] <nl> - } , <nl> - } , <nl> - ' msvc ' : { <nl> - ' all ' : { <nl> - ' CPPDEFINES ' : [ ' SUPPORT_UTF8 ' , ' NO_RECURSE ' , ' SUPPORT_UCP ' ] , <nl> - ' WARNINGFLAGS ' : [ ' / W3 ' , ' / WX ' , ' / wd4355 ' , ' / wd4800 ' ] <nl> - } , <nl> - ' library : shared ' : { <nl> - ' CPPDEFINES ' : [ ' BUILDING_V8_SHARED ' ] <nl> - } <nl> - } <nl> - } <nl> - <nl> - <nl> DTOA_EXTRA_FLAGS = { <nl> ' gcc ' : { <nl> ' all ' : { <nl> def BuildSpecific ( env , mode , env_overrides ) : <nl> library_flags = context . AddRelevantFlags ( os . environ , LIBRARY_FLAGS ) <nl> v8_flags = context . AddRelevantFlags ( library_flags , V8_EXTRA_FLAGS ) <nl> mksnapshot_flags = context . AddRelevantFlags ( library_flags , MKSNAPSHOT_EXTRA_FLAGS ) <nl> - jscre_flags = context . AddRelevantFlags ( library_flags , JSCRE_EXTRA_FLAGS ) <nl> dtoa_flags = context . AddRelevantFlags ( library_flags , DTOA_EXTRA_FLAGS ) <nl> cctest_flags = context . AddRelevantFlags ( v8_flags , CCTEST_EXTRA_FLAGS ) <nl> sample_flags = context . AddRelevantFlags ( os . environ , SAMPLE_FLAGS ) <nl> def BuildSpecific ( env , mode , env_overrides ) : <nl> context . flags = { <nl> ' v8 ' : v8_flags , <nl> ' mksnapshot ' : mksnapshot_flags , <nl> - ' jscre ' : jscre_flags , <nl> ' dtoa ' : dtoa_flags , <nl> ' cctest ' : cctest_flags , <nl> ' sample ' : sample_flags , <nl> mmm a / src / SConscript <nl> ppp b / src / SConscript <nl> regexp - delay . js <nl> ' ' ' . split ( ) <nl> <nl> <nl> - JSCRE_FILES = ' ' ' <nl> - pcre_compile . cpp <nl> - pcre_exec . cpp <nl> - pcre_tables . cpp <nl> - pcre_ucp_searchfuncs . cpp <nl> - pcre_xclass . cpp <nl> - ' ' ' . split ( ) <nl> - <nl> - <nl> def Abort ( message ) : <nl> print message <nl> sys . exit ( 1 ) <nl> def ConfigureObjectFiles ( ) : <nl> libraries_src , libraries_empty_src = env . JS2C ( [ ' libraries . cc ' , ' libraries - empty . cc ' ] , library_files , TYPE = ' CORE ' ) <nl> libraries_obj = context . ConfigureObject ( env , libraries_src , CPPPATH = [ ' . ' ] ) <nl> <nl> - # Build JSCRE . <nl> - jscre_env = env . Copy ( ) <nl> - jscre_env . Replace ( * * context . flags [ ' jscre ' ] ) <nl> - jscre_files = [ join ( ' third_party ' , ' jscre ' , s ) for s in JSCRE_FILES ] <nl> - jscre_obj = context . ConfigureObject ( jscre_env , jscre_files ) <nl> - <nl> # Build dtoa . <nl> dtoa_env = env . Copy ( ) <nl> dtoa_env . Replace ( * * context . 
flags [ ' dtoa ' ] ) <nl> def ConfigureObjectFiles ( ) : <nl> dtoa_obj = context . ConfigureObject ( dtoa_env , dtoa_files ) <nl> <nl> source_objs = context . ConfigureObject ( env , source_files ) <nl> - non_snapshot_files = [ jscre_obj , dtoa_obj , source_objs ] <nl> + non_snapshot_files = [ dtoa_obj , source_objs ] <nl> <nl> # Create snapshot if necessary . <nl> empty_snapshot_obj = context . ConfigureObject ( env , ' snapshot - empty . cc ' ) <nl> mmm a / src / flag - definitions . h <nl> ppp b / src / flag - definitions . h <nl> DEFINE_bool ( usage_computation , true , " compute variable usage counts " ) <nl> DEFINE_bool ( preemption , false , <nl> " activate a 100ms timer that switches between V8 threads " ) <nl> <nl> - / / Irregexp <nl> - DEFINE_bool ( irregexp , true , " new regular expression code " ) <nl> - DEFINE_bool ( trace_regexps , false , " trace Irregexp execution " ) <nl> - DEFINE_bool ( irregexp_native , true , " use native code Irregexp implementation ( IA32 only ) " ) <nl> - DEFINE_bool ( irregexp_optimization , true , " generate optimized regexp code " ) <nl> + / / Regexp <nl> + DEFINE_bool ( trace_regexps , false , " trace regexp execution " ) <nl> + DEFINE_bool ( regexp_native , true , " use native code regexp implementation ( IA32 only ) " ) <nl> + DEFINE_bool ( regexp_optimization , true , " generate optimized regexp code " ) <nl> <nl> / / Testing flags test / cctest / test - { flags , api , serialization } . cc <nl> DEFINE_bool ( testing_bool_flag , true , " testing_bool_flag " ) <nl> DEFINE_bool ( collect_heap_spill_statistics , false , <nl> " report heap spill statistics along with heap_stats " <nl> " ( requires heap_stats ) " ) <nl> <nl> - / / irregexp <nl> - DEFINE_bool ( trace_regexp_bytecodes , false , " trace Irregexp bytecode execution " ) <nl> - DEFINE_bool ( trace_regexp_assembler , false , <nl> - " trace Irregexp macro assembler calls . " ) <nl> + / / Regexp <nl> + DEFINE_bool ( trace_regexp_bytecodes , false , " trace regexp bytecode execution " ) <nl> + DEFINE_bool ( trace_regexp_assembler , <nl> + false , <nl> + " trace regexp macro assembler calls . " ) <nl> <nl> / / <nl> / / Logging and profiling only flags <nl> mmm a / src / jsregexp . cc <nl> ppp b / src / jsregexp . cc <nl> <nl> <nl> # include " interpreter - irregexp . h " <nl> <nl> - / / Including pcre . h undefines DEBUG to avoid getting debug output from <nl> - / / the JSCRE implementation . Make sure to redefine it in debug mode <nl> - / / after having included the header file . <nl> - # ifdef DEBUG <nl> - # include " third_party / jscre / pcre . h " <nl> - # define DEBUG <nl> - # else <nl> - # include " third_party / jscre / pcre . h " <nl> - # endif <nl> - <nl> <nl> namespace v8 { namespace internal { <nl> <nl> <nl> - static Failure * malloc_failure ; <nl> - <nl> - static void * JSREMalloc ( size_t size ) { <nl> - Object * obj = Heap : : AllocateByteArray ( size ) ; <nl> - <nl> - / / If allocation failed , return a NULL pointer to JSRE , and jsRegExpCompile <nl> - / / will return NULL to the caller , performs GC there . <nl> - / / Also pass failure information to the caller . <nl> - if ( obj - > IsFailure ( ) ) { <nl> - malloc_failure = Failure : : cast ( obj ) ; <nl> - return NULL ; <nl> - } <nl> - <nl> - / / Note : object is unrooted , the caller of jsRegExpCompile must <nl> - / / create a handle for the return value before doing heap allocation . 
<nl> - return reinterpret_cast < void * > ( ByteArray : : cast ( obj ) - > GetDataStartAddress ( ) ) ; <nl> - } <nl> - <nl> - <nl> - static void JSREFree ( void * p ) { <nl> - USE ( p ) ; / / Do nothing , memory is garbage collected . <nl> - } <nl> - <nl> - <nl> String * RegExpImpl : : last_ascii_string_ = NULL ; <nl> String * RegExpImpl : : two_byte_cached_string_ = NULL ; <nl> <nl> Handle < Object > RegExpImpl : : Compile ( Handle < JSRegExp > re , <nl> Vector < const uc16 > atom_pattern = atom - > data ( ) ; <nl> Handle < String > atom_string = Factory : : NewStringFromTwoByte ( atom_pattern ) ; <nl> result = AtomCompile ( re , pattern , flags , atom_string ) ; <nl> - } else if ( FLAG_irregexp ) { <nl> - result = IrregexpPrepare ( re , pattern , flags ) ; <nl> } else { <nl> - result = JscrePrepare ( re , pattern , flags ) ; <nl> + result = IrregexpPrepare ( re , pattern , flags ) ; <nl> } <nl> Object * data = re - > data ( ) ; <nl> if ( data - > IsFixedArray ( ) ) { <nl> Handle < Object > RegExpImpl : : Exec ( Handle < JSRegExp > regexp , <nl> ASSERT ( ! result . is_null ( ) | | Top : : has_pending_exception ( ) ) ; <nl> return result ; <nl> } <nl> - case JSRegExp : : JSCRE : <nl> - return JscreExec ( regexp , subject , index ) ; <nl> default : <nl> UNREACHABLE ( ) ; <nl> return Handle < Object > : : null ( ) ; <nl> Handle < Object > RegExpImpl : : ExecGlobal ( Handle < JSRegExp > regexp , <nl> ASSERT ( ! result . is_null ( ) | | Top : : has_pending_exception ( ) ) ; <nl> return result ; <nl> } <nl> - case JSRegExp : : JSCRE : <nl> - return JscreExecGlobal ( regexp , subject ) ; <nl> default : <nl> UNREACHABLE ( ) ; <nl> return Handle < Object > : : null ( ) ; <nl> Handle < Object > RegExpImpl : : AtomExecGlobal ( Handle < JSRegExp > re , <nl> } <nl> <nl> <nl> - / / JSCRE implementation . <nl> - <nl> - <nl> - int RegExpImpl : : JscreNumberOfCaptures ( Handle < JSRegExp > re ) { <nl> - FixedArray * value = FixedArray : : cast ( re - > DataAt ( JSRegExp : : kJscreDataIndex ) ) ; <nl> - return Smi : : cast ( value - > get ( kJscreNumberOfCapturesIndex ) ) - > value ( ) ; <nl> - } <nl> - <nl> - <nl> - ByteArray * RegExpImpl : : JscreInternal ( Handle < JSRegExp > re ) { <nl> - FixedArray * value = FixedArray : : cast ( re - > DataAt ( JSRegExp : : kJscreDataIndex ) ) ; <nl> - return ByteArray : : cast ( value - > get ( kJscreInternalIndex ) ) ; <nl> - } <nl> - <nl> - <nl> - Handle < Object > RegExpImpl : : JscrePrepare ( Handle < JSRegExp > re , <nl> - Handle < String > pattern , <nl> - JSRegExp : : Flags flags ) { <nl> - Handle < Object > value ( Heap : : undefined_value ( ) ) ; <nl> - Factory : : SetRegExpData ( re , JSRegExp : : JSCRE , pattern , flags , value ) ; <nl> - return re ; <nl> - } <nl> - <nl> - <nl> - static inline Object * JscreDoCompile ( String * pattern , <nl> - JSRegExp : : Flags flags , <nl> - unsigned * number_of_captures , <nl> - const char * * error_message , <nl> - v8 : : jscre : : JscreRegExp * * code ) { <nl> - v8 : : jscre : : JSRegExpIgnoreCaseOption case_option = flags . is_ignore_case ( ) <nl> - ? v8 : : jscre : : JSRegExpIgnoreCase <nl> - : v8 : : jscre : : JSRegExpDoNotIgnoreCase ; <nl> - v8 : : jscre : : JSRegExpMultilineOption multiline_option = flags . is_multiline ( ) <nl> - ? 
v8 : : jscre : : JSRegExpMultiline <nl> - : v8 : : jscre : : JSRegExpSingleLine ; <nl> - * error_message = NULL ; <nl> - malloc_failure = Failure : : Exception ( ) ; <nl> - * code = v8 : : jscre : : jsRegExpCompile ( pattern - > GetTwoByteData ( ) , <nl> - pattern - > length ( ) , <nl> - case_option , <nl> - multiline_option , <nl> - number_of_captures , <nl> - error_message , <nl> - & JSREMalloc , <nl> - & JSREFree ) ; <nl> - if ( * code = = NULL & & ( malloc_failure - > IsRetryAfterGC ( ) | | <nl> - malloc_failure - > IsOutOfMemoryFailure ( ) ) ) { <nl> - return malloc_failure ; <nl> - } else { <nl> - / / It doesn ' t matter which object we return here , we just need to return <nl> - / / a non - failure to indicate to the GC - retry code that there was no <nl> - / / allocation failure . <nl> - return pattern ; <nl> - } <nl> - } <nl> - <nl> - <nl> - static void JscreCompileWithRetryAfterGC ( Handle < String > pattern , <nl> - JSRegExp : : Flags flags , <nl> - unsigned * number_of_captures , <nl> - const char * * error_message , <nl> - v8 : : jscre : : JscreRegExp * * code ) { <nl> - CALL_HEAP_FUNCTION_VOID ( JscreDoCompile ( * pattern , <nl> - flags , <nl> - number_of_captures , <nl> - error_message , <nl> - code ) ) ; <nl> - } <nl> - <nl> - <nl> - Handle < Object > RegExpImpl : : JscreCompile ( Handle < JSRegExp > re ) { <nl> - ASSERT_EQ ( re - > TypeTag ( ) , JSRegExp : : JSCRE ) ; <nl> - ASSERT ( re - > DataAt ( JSRegExp : : kJscreDataIndex ) - > IsUndefined ( ) ) ; <nl> - <nl> - Handle < String > pattern ( re - > Pattern ( ) ) ; <nl> - JSRegExp : : Flags flags = re - > GetFlags ( ) ; <nl> - <nl> - Handle < String > two_byte_pattern = StringToTwoByte ( pattern ) ; <nl> - <nl> - unsigned number_of_captures ; <nl> - const char * error_message = NULL ; <nl> - <nl> - v8 : : jscre : : JscreRegExp * code = NULL ; <nl> - FlattenString ( pattern ) ; <nl> - <nl> - JscreCompileWithRetryAfterGC ( two_byte_pattern , <nl> - flags , <nl> - & number_of_captures , <nl> - & error_message , <nl> - & code ) ; <nl> - <nl> - if ( code = = NULL ) { <nl> - / / Throw an exception . <nl> - Handle < JSArray > array = Factory : : NewJSArray ( 2 ) ; <nl> - SetElement ( array , 0 , pattern ) ; <nl> - const char * message = <nl> - ( error_message = = NULL ) ? " Unknown regexp error " : error_message ; <nl> - SetElement ( array , 1 , Factory : : NewStringFromUtf8 ( CStrVector ( message ) ) ) ; <nl> - Handle < Object > regexp_err = <nl> - Factory : : NewSyntaxError ( " malformed_regexp " , array ) ; <nl> - Top : : Throw ( * regexp_err ) ; <nl> - return Handle < Object > ( ) ; <nl> - } <nl> - <nl> - / / Convert the return address to a ByteArray pointer . 
<nl> - Handle < ByteArray > internal ( <nl> - ByteArray : : FromDataStartAddress ( reinterpret_cast < Address > ( code ) ) ) ; <nl> - <nl> - Handle < FixedArray > value = Factory : : NewFixedArray ( kJscreDataLength ) ; <nl> - value - > set ( kJscreNumberOfCapturesIndex , Smi : : FromInt ( number_of_captures ) ) ; <nl> - value - > set ( kJscreInternalIndex , * internal ) ; <nl> - Factory : : SetRegExpData ( re , JSRegExp : : JSCRE , pattern , flags , value ) ; <nl> - <nl> - return re ; <nl> - } <nl> - <nl> - <nl> - Handle < Object > RegExpImpl : : JscreExec ( Handle < JSRegExp > regexp , <nl> - Handle < String > subject , <nl> - Handle < Object > index ) { <nl> - ASSERT_EQ ( regexp - > TypeTag ( ) , JSRegExp : : JSCRE ) ; <nl> - if ( regexp - > DataAt ( JSRegExp : : kJscreDataIndex ) - > IsUndefined ( ) ) { <nl> - Handle < Object > compile_result = JscreCompile ( regexp ) ; <nl> - if ( compile_result . is_null ( ) ) return compile_result ; <nl> - } <nl> - ASSERT ( regexp - > DataAt ( JSRegExp : : kJscreDataIndex ) - > IsFixedArray ( ) ) ; <nl> - <nl> - int num_captures = JscreNumberOfCaptures ( regexp ) ; <nl> - <nl> - OffsetsVector offsets ( ( num_captures + 1 ) * 3 ) ; <nl> - <nl> - int previous_index = static_cast < int > ( DoubleToInteger ( index - > Number ( ) ) ) ; <nl> - <nl> - Handle < String > subject16 = CachedStringToTwoByte ( subject ) ; <nl> - <nl> - return JscreExecOnce ( regexp , <nl> - num_captures , <nl> - subject , <nl> - previous_index , <nl> - subject16 - > GetTwoByteData ( ) , <nl> - offsets . vector ( ) , <nl> - offsets . length ( ) ) ; <nl> - } <nl> - <nl> - <nl> - Handle < Object > RegExpImpl : : JscreExecOnce ( Handle < JSRegExp > regexp , <nl> - int num_captures , <nl> - Handle < String > subject , <nl> - int previous_index , <nl> - const uc16 * two_byte_subject , <nl> - int * offsets_vector , <nl> - int offsets_vector_length ) { <nl> - int rc ; <nl> - { <nl> - AssertNoAllocation a ; <nl> - ByteArray * internal = JscreInternal ( regexp ) ; <nl> - const v8 : : jscre : : JscreRegExp * js_regexp = <nl> - reinterpret_cast < v8 : : jscre : : JscreRegExp * > ( <nl> - internal - > GetDataStartAddress ( ) ) ; <nl> - <nl> - rc = v8 : : jscre : : jsRegExpExecute ( js_regexp , <nl> - two_byte_subject , <nl> - subject - > length ( ) , <nl> - previous_index , <nl> - offsets_vector , <nl> - offsets_vector_length ) ; <nl> - } <nl> - <nl> - / / The KJS JavaScript engine returns null ( ie , a failed match ) when <nl> - / / JSRE ' s internal match limit is exceeded . We duplicate that behavior here . <nl> - if ( rc = = v8 : : jscre : : JSRegExpErrorNoMatch <nl> - | | rc = = v8 : : jscre : : JSRegExpErrorHitLimit ) { <nl> - return Factory : : null_value ( ) ; <nl> - } <nl> - <nl> - / / Other JSRE errors : <nl> - if ( rc < 0 ) { <nl> - / / Throw an exception . <nl> - Handle < Object > code ( Smi : : FromInt ( rc ) ) ; <nl> - Handle < Object > args [ 2 ] = { Factory : : LookupAsciiSymbol ( " jsre_exec " ) , code } ; <nl> - Handle < Object > regexp_err ( <nl> - Factory : : NewTypeError ( " jsre_error " , HandleVector ( args , 2 ) ) ) ; <nl> - return Handle < Object > ( Top : : Throw ( * regexp_err ) ) ; <nl> - } <nl> - <nl> - Handle < FixedArray > array = Factory : : NewFixedArray ( 2 * ( num_captures + 1 ) ) ; <nl> - / / The captures come in ( start , end + 1 ) pairs . 
<nl> - for ( int i = 0 ; i < 2 * ( num_captures + 1 ) ; i + = 2 ) { <nl> - array - > set ( i , Smi : : FromInt ( offsets_vector [ i ] ) ) ; <nl> - array - > set ( i + 1 , Smi : : FromInt ( offsets_vector [ i + 1 ] ) ) ; <nl> - } <nl> - return Factory : : NewJSArrayWithElements ( array ) ; <nl> - } <nl> - <nl> - <nl> - Handle < Object > RegExpImpl : : JscreExecGlobal ( Handle < JSRegExp > regexp , <nl> - Handle < String > subject ) { <nl> - ASSERT_EQ ( regexp - > TypeTag ( ) , JSRegExp : : JSCRE ) ; <nl> - if ( regexp - > DataAt ( JSRegExp : : kJscreDataIndex ) - > IsUndefined ( ) ) { <nl> - Handle < Object > compile_result = JscreCompile ( regexp ) ; <nl> - if ( compile_result . is_null ( ) ) return compile_result ; <nl> - } <nl> - ASSERT ( regexp - > DataAt ( JSRegExp : : kJscreDataIndex ) - > IsFixedArray ( ) ) ; <nl> - <nl> - / / Prepare space for the return values . <nl> - int num_captures = JscreNumberOfCaptures ( regexp ) ; <nl> - <nl> - OffsetsVector offsets ( ( num_captures + 1 ) * 3 ) ; <nl> - <nl> - int previous_index = 0 ; <nl> - <nl> - Handle < JSArray > result = Factory : : NewJSArray ( 0 ) ; <nl> - int i = 0 ; <nl> - Handle < Object > matches ; <nl> - <nl> - Handle < String > subject16 = CachedStringToTwoByte ( subject ) ; <nl> - <nl> - do { <nl> - if ( previous_index > subject - > length ( ) | | previous_index < 0 ) { <nl> - / / Per ECMA - 262 15 . 10 . 6 . 2 , if the previous index is greater than the <nl> - / / string length , there is no match . <nl> - matches = Factory : : null_value ( ) ; <nl> - } else { <nl> - matches = JscreExecOnce ( regexp , <nl> - num_captures , <nl> - subject , <nl> - previous_index , <nl> - subject16 - > GetTwoByteData ( ) , <nl> - offsets . vector ( ) , <nl> - offsets . length ( ) ) ; <nl> - <nl> - if ( matches - > IsJSArray ( ) ) { <nl> - SetElement ( result , i , matches ) ; <nl> - i + + ; <nl> - previous_index = offsets . vector ( ) [ 1 ] ; <nl> - if ( offsets . vector ( ) [ 0 ] = = offsets . vector ( ) [ 1 ] ) { <nl> - previous_index + + ; <nl> - } <nl> - } <nl> - } <nl> - } while ( matches - > IsJSArray ( ) ) ; <nl> - <nl> - / / If we exited the loop with an exception , throw it . <nl> - if ( matches - > IsNull ( ) ) { <nl> - / / Exited loop normally . <nl> - return result ; <nl> - } else { <nl> - / / Exited loop with the exception in matches . <nl> - return matches ; <nl> - } <nl> - } <nl> - <nl> - <nl> / / Irregexp implementation . <nl> <nl> <nl> RegExpNode : : LimitResult RegExpNode : : LimitVersions ( RegExpCompiler * compiler , <nl> / / We are being asked to make a non - generic version . Keep track of how many <nl> / / non - generic versions we generate so as not to overdo it . <nl> trace_count_ + + ; <nl> - if ( FLAG_irregexp_optimization & & <nl> + if ( FLAG_regexp_optimization & & <nl> trace_count_ < kMaxCopiesCodeGenerated & & <nl> compiler - > recursion_depth ( ) < = RegExpCompiler : : kMaxRecursion ) { <nl> return CONTINUE ; <nl> void ChoiceNode : : Emit ( RegExpCompiler * compiler , Trace * trace ) { <nl> if ( not_at_start_ ) new_trace . set_at_start ( Trace : : FALSE ) ; <nl> alt_gen - > expects_preload = preload_is_current ; <nl> bool generate_full_check_inline = false ; <nl> - if ( FLAG_irregexp_optimization & & <nl> + if ( FLAG_regexp_optimization & & <nl> try_to_emit_quick_check_for_alternative ( i ) & & <nl> alternative . node ( ) - > EmitQuickCheck ( compiler , <nl> & new_trace , <nl> RegExpNode * RegExpQuantifier : : ToNode ( int min , <nl> bool needs_capture_clearing = ! capture_registers . 
is_empty ( ) ; <nl> if ( body_can_be_empty ) { <nl> body_start_reg = compiler - > AllocateRegister ( ) ; <nl> - } else if ( FLAG_irregexp_optimization & & ! needs_capture_clearing ) { <nl> + } else if ( FLAG_regexp_optimization & & ! needs_capture_clearing ) { <nl> / / Only unroll if there are no captures and the body can ' t be <nl> / / empty . <nl> if ( min > 0 & & min < = kMaxUnrolledMinMatches ) { <nl> Handle < FixedArray > RegExpEngine : : Compile ( RegExpCompileData * data , <nl> <nl> NodeInfo info = * node - > info ( ) ; <nl> <nl> - if ( FLAG_irregexp_native ) { <nl> + if ( FLAG_regexp_native ) { <nl> # ifdef ARM <nl> / / Unimplemented , fall - through to bytecode implementation . <nl> # else / / IA32 <nl> mmm a / src / jsregexp . h <nl> ppp b / src / jsregexp . h <nl> class RegExpImpl { <nl> static Handle < Object > ExecGlobal ( Handle < JSRegExp > regexp , <nl> Handle < String > subject ) ; <nl> <nl> - / / Stores an uncompiled RegExp pattern in the JSRegExp object . <nl> - / / It will be compiled by JSCRE when first executed . <nl> - static Handle < Object > JscrePrepare ( Handle < JSRegExp > re , <nl> - Handle < String > pattern , <nl> - JSRegExp : : Flags flags ) ; <nl> - <nl> / / Prepares a JSRegExp object with Irregexp - specific data . <nl> static Handle < Object > IrregexpPrepare ( Handle < JSRegExp > re , <nl> Handle < String > pattern , <nl> JSRegExp : : Flags flags ) ; <nl> <nl> <nl> - / / Compile the pattern using JSCRE and store the result in the <nl> - / / JSRegExp object . <nl> - static Handle < Object > JscreCompile ( Handle < JSRegExp > re ) ; <nl> - <nl> static Handle < Object > AtomCompile ( Handle < JSRegExp > re , <nl> Handle < String > pattern , <nl> JSRegExp : : Flags flags , <nl> class RegExpImpl { <nl> static Handle < Object > AtomExecGlobal ( Handle < JSRegExp > regexp , <nl> Handle < String > subject ) ; <nl> <nl> - static Handle < Object > JscreCompile ( Handle < JSRegExp > re , <nl> - Handle < String > pattern , <nl> - JSRegExp : : Flags flags ) ; <nl> - <nl> - / / Execute a compiled JSCRE pattern . <nl> - static Handle < Object > JscreExec ( Handle < JSRegExp > regexp , <nl> - Handle < String > subject , <nl> - Handle < Object > index ) ; <nl> - <nl> / / Execute an Irregexp bytecode pattern . 
<nl> static Handle < Object > IrregexpExec ( Handle < JSRegExp > regexp , <nl> Handle < String > subject , <nl> Handle < Object > index ) ; <nl> <nl> - static Handle < Object > JscreExecGlobal ( Handle < JSRegExp > regexp , <nl> - Handle < String > subject ) ; <nl> - <nl> static Handle < Object > IrregexpExecGlobal ( Handle < JSRegExp > regexp , <nl> Handle < String > subject ) ; <nl> <nl> class RegExpImpl { <nl> static const int kIrregexpCodeIndex = 3 ; <nl> static const int kIrregexpDataLength = 4 ; <nl> <nl> - static const int kJscreNumberOfCapturesIndex = 0 ; <nl> - static const int kJscreInternalIndex = 1 ; <nl> - static const int kJscreDataLength = 2 ; <nl> - <nl> private : <nl> static String * last_ascii_string_ ; <nl> static String * two_byte_cached_string_ ; <nl> <nl> - static int JscreNumberOfCaptures ( Handle < JSRegExp > re ) ; <nl> - static ByteArray * JscreInternal ( Handle < JSRegExp > re ) ; <nl> - <nl> static int IrregexpNumberOfCaptures ( Handle < FixedArray > re ) ; <nl> static int IrregexpNumberOfRegisters ( Handle < FixedArray > re ) ; <nl> static Handle < ByteArray > IrregexpByteCode ( Handle < FixedArray > re ) ; <nl> static Handle < Code > IrregexpNativeCode ( Handle < FixedArray > re ) ; <nl> <nl> - / / Call jsRegExpExecute once <nl> - static Handle < Object > JscreExecOnce ( Handle < JSRegExp > regexp , <nl> - int num_captures , <nl> - Handle < String > subject , <nl> - int previous_index , <nl> - const uc16 * utf8_subject , <nl> - int * ovector , <nl> - int ovector_length ) ; <nl> - <nl> / / On a successful match , the result is a JSArray containing <nl> / / captured positions . On a failure , the result is the null value . <nl> / / Returns an empty handle in case of an exception . <nl> mmm a / src / objects - debug . cc <nl> ppp b / src / objects - debug . cc <nl> void JSRegExp : : JSRegExpVerify ( ) { <nl> ASSERT ( arr - > get ( JSRegExp : : kAtomPatternIndex ) - > IsString ( ) ) ; <nl> break ; <nl> } <nl> - case JSRegExp : : JSCRE : { <nl> - FixedArray * arr = FixedArray : : cast ( data ( ) ) ; <nl> - Object * jscre_data = arr - > get ( JSRegExp : : kJscreDataIndex ) ; <nl> - ASSERT ( jscre_data - > IsFixedArray ( ) | | jscre_data - > IsUndefined ( ) ) ; <nl> - break ; <nl> - } <nl> case JSRegExp : : IRREGEXP : { <nl> FixedArray * arr = FixedArray : : cast ( data ( ) ) ; <nl> - Object * jscre_data = arr - > get ( JSRegExp : : kJscreDataIndex ) ; <nl> - ASSERT ( jscre_data - > IsFixedArray ( ) ) ; <nl> + Object * irregexp_data = arr - > get ( JSRegExp : : kIrregexpDataIndex ) ; <nl> + ASSERT ( irregexp_data - > IsFixedArray ( ) ) ; <nl> break ; <nl> } <nl> default : <nl> mmm a / src / objects . h <nl> ppp b / src / objects . h <nl> class JSRegExp : public JSObject { <nl> public : <nl> / / Meaning of Type : <nl> / / NOT_COMPILED : Initial value . No data has been stored in the JSRegExp yet . <nl> - / / JSCRE : A complex RegExp for JSCRE <nl> / / ATOM : A simple string to match against using an indexOf operation . <nl> / / IRREGEXP : Compiled with Irregexp . <nl> / / IRREGEXP_NATIVE : Compiled to native code with Irregexp . 
<nl> - enum Type { NOT_COMPILED , JSCRE , ATOM , IRREGEXP } ; <nl> + enum Type { NOT_COMPILED , ATOM , IRREGEXP } ; <nl> enum Flag { NONE = 0 , GLOBAL = 1 , IGNORE_CASE = 2 , MULTILINE = 4 } ; <nl> <nl> class Flags { <nl> class JSRegExp : public JSObject { <nl> static const int kTagIndex = 0 ; <nl> static const int kSourceIndex = kTagIndex + 1 ; <nl> static const int kFlagsIndex = kSourceIndex + 1 ; <nl> - / / These three are the same since the same entry is shared for <nl> + / / These two are the same since the same entry is shared for <nl> / / different purposes in different types of regexps . <nl> static const int kAtomPatternIndex = kFlagsIndex + 1 ; <nl> - static const int kJscreDataIndex = kFlagsIndex + 1 ; <nl> static const int kIrregexpDataIndex = kFlagsIndex + 1 ; <nl> static const int kDataSize = kAtomPatternIndex + 1 ; <nl> } ; <nl> deleted file mode 100644 <nl> index 40547d2b564 . . 00000000000 <nl> mmm a / src / third_party / jscre / ASCIICType . h <nl> ppp / dev / null <nl> <nl> - / * <nl> - * Copyright ( C ) 2007 , 2008 Apple Inc . All rights reserved . <nl> - * <nl> - * Redistribution and use in source and binary forms , with or without <nl> - * modification , are permitted provided that the following conditions <nl> - * are met : <nl> - * <nl> - * 1 . Redistributions of source code must retain the above copyright <nl> - * notice , this list of conditions and the following disclaimer . <nl> - * 2 . Redistributions in binary form must reproduce the above copyright <nl> - * notice , this list of conditions and the following disclaimer in the <nl> - * documentation and / or other materials provided with the distribution . <nl> - * 3 . Neither the name of Apple Computer , Inc . ( " Apple " ) nor the names of <nl> - * its contributors may be used to endorse or promote products derived <nl> - * from this software without specific prior written permission . <nl> - * <nl> - * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS " AS IS " AND ANY <nl> - * EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED <nl> - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE <nl> - * DISCLAIMED . IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY <nl> - * DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES <nl> - * ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; <nl> - * LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND <nl> - * ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> - * ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF <nl> - * THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> - * / <nl> - <nl> - # ifndef WTF_ASCIICType_h <nl> - # define WTF_ASCIICType_h <nl> - <nl> - / / The behavior of many of the functions in the < ctype . h > header is dependent <nl> - / / on the current locale . But in the WebKit project , all uses of those functions <nl> - / / are in code processing something that ' s not locale - specific . These equivalents <nl> - / / for some of the < ctype . h > functions are named more explicitly , not dependent <nl> - / / on the C library locale , and we should also optimize them as needed . <nl> - <nl> - / / All functions return false or leave the character unchanged if passed a character <nl> - / / that is outside the range 0 - 7F . 
So they can be used on Unicode strings or <nl> - / / characters if the intent is to do processing only if the character is ASCII . <nl> - <nl> - inline bool isASCIIAlpha ( char c ) { return ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - inline bool isASCIIAlpha ( unsigned short c ) { return ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - inline bool isASCIIAlpha ( int c ) { return ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - <nl> - inline bool isASCIIAlphanumeric ( char c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - inline bool isASCIIAlphanumeric ( unsigned short c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - inline bool isASCIIAlphanumeric ( int c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ; } <nl> - <nl> - inline bool isASCIIDigit ( char c ) { return ( c > = ' 0 ' ) & ( c < = ' 9 ' ) ; } <nl> - inline bool isASCIIDigit ( unsigned short c ) { return ( c > = ' 0 ' ) & ( c < = ' 9 ' ) ; } <nl> - inline bool isASCIIDigit ( int c ) { return ( c > = ' 0 ' ) & ( c < = ' 9 ' ) ; } <nl> - <nl> - inline bool isASCIIHexDigit ( char c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' f ' ; } <nl> - inline bool isASCIIHexDigit ( unsigned short c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' f ' ; } <nl> - inline bool isASCIIHexDigit ( int c ) { return c > = ' 0 ' & & c < = ' 9 ' | | ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' f ' ; } <nl> - <nl> - inline bool isASCIILower ( char c ) { return c > = ' a ' & & c < = ' z ' ; } <nl> - inline bool isASCIILower ( unsigned short c ) { return c > = ' a ' & & c < = ' z ' ; } <nl> - inline bool isASCIILower ( int c ) { return c > = ' a ' & & c < = ' z ' ; } <nl> - <nl> - inline bool isASCIIUpper ( char c ) { return c > = ' A ' & & c < = ' Z ' ; } <nl> - inline bool isASCIIUpper ( unsigned short c ) { return c > = ' A ' & & c < = ' Z ' ; } <nl> - inline bool isASCIIUpper ( int c ) { return c > = ' A ' & & c < = ' Z ' ; } <nl> - <nl> - / * <nl> - Statistics from a run of Apple ' s page load test for callers of isASCIISpace : <nl> - <nl> - character count <nl> - mmmmmmmmm mmm - - <nl> - non - spaces 689383 <nl> - 20 space 294720 <nl> - 0A \ n 89059 <nl> - 09 \ t 28320 <nl> - 0D \ r 0 <nl> - 0C \ f 0 <nl> - 0B \ v 0 <nl> - * / <nl> - inline bool isASCIISpace ( char c ) { return c < = ' ' & & ( c = = ' ' | | ( c < = 0xD & & c > = 0x9 ) ) ; } <nl> - inline bool isASCIISpace ( unsigned short c ) { return c < = ' ' & & ( c = = ' ' | | ( c < = 0xD & & c > = 0x9 ) ) ; } <nl> - inline bool isASCIISpace ( int c ) { return c < = ' ' & & ( c = = ' ' | | ( c < = 0xD & & c > = 0x9 ) ) ; } <nl> - <nl> - inline char toASCIILower ( char c ) { return c | ( ( c > = ' A ' & & c < = ' Z ' ) < < 5 ) ; } <nl> - inline unsigned short toASCIILower ( unsigned short c ) { return c | ( ( c > = ' A ' & & c < = ' Z ' ) < < 5 ) ; } <nl> - inline int toASCIILower ( int c ) { return c | ( ( c > = ' A ' & & c < = ' Z ' ) < < 5 ) ; } <nl> - <nl> - inline char toASCIIUpper ( char c ) { return static_cast < char > ( c & ~ ( ( c > = ' a ' & & c < = ' z ' ) < < 5 ) ) ; } <nl> - inline unsigned short toASCIIUpper ( unsigned short c ) { return static_cast < unsigned short > ( c & ~ ( ( c > = ' a ' & & c < = ' z ' ) < < 5 ) ) ; } <nl> - inline int toASCIIUpper ( int c ) { return static_cast < int > ( c & ~ ( ( c > 
= ' a ' & & c < = ' z ' ) < < 5 ) ) ; } <nl> - <nl> - inline int toASCIIHexValue ( char c ) { ASSERT ( isASCIIHexDigit ( c ) ) ; return c < ' A ' ? c - ' 0 ' : ( c - ' A ' + 10 ) & 0xF ; } <nl> - inline int toASCIIHexValue ( unsigned short c ) { ASSERT ( isASCIIHexDigit ( c ) ) ; return c < ' A ' ? c - ' 0 ' : ( c - ' A ' + 10 ) & 0xF ; } <nl> - inline int toASCIIHexValue ( int c ) { ASSERT ( isASCIIHexDigit ( c ) ) ; return c < ' A ' ? c - ' 0 ' : ( c - ' A ' + 10 ) & 0xF ; } <nl> - <nl> - # endif <nl> deleted file mode 100644 <nl> index dbac2a54834 . . 00000000000 <nl> mmm a / src / third_party / jscre / AUTHORS <nl> ppp / dev / null <nl> <nl> - Originally written by : Philip Hazel <nl> - Email local part : ph10 <nl> - Email domain : cam . ac . uk <nl> - <nl> - University of Cambridge Computing Service , <nl> - Cambridge , England . Phone : + 44 1223 334714 . <nl> - <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge . All rights reserved . <nl> - <nl> - Adapted for JavaScriptCore and WebKit by Apple Inc . <nl> - <nl> - Copyright ( c ) 2005 , 2006 , 2007 Apple Inc . All rights reserved . <nl> deleted file mode 100644 <nl> index 6ffdc24342d . . 00000000000 <nl> mmm a / src / third_party / jscre / COPYING <nl> ppp / dev / null <nl> <nl> - PCRE is a library of functions to support regular expressions whose syntax <nl> - and semantics are as close as possible to those of the Perl 5 language . <nl> - <nl> - This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . <nl> - <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge . All rights reserved . <nl> - <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the name of Apple <nl> - Inc . nor the names of their contributors may be used to endorse or <nl> - promote products derived from this software without specific prior <nl> - written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> deleted file mode 100644 <nl> index 020c45ff883 . . 
00000000000 <nl> mmm a / src / third_party / jscre / LICENSE <nl> ppp / dev / null <nl> <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - The following license text is extracted from the header of the file <nl> - ASCIICType . h and applies only to that file . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - <nl> - Copyright ( C ) 2007 , 2008 Apple Inc . All rights reserved . <nl> - <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions <nl> - are met : <nl> - <nl> - 1 . Redistributions of source code must retain the above copyright <nl> - notice , this list of conditions and the following disclaimer . <nl> - 2 . Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - 3 . Neither the name of Apple Computer , Inc . ( " Apple " ) nor the names of <nl> - its contributors may be used to endorse or promote products derived <nl> - from this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS " AS IS " AND ANY <nl> - EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED <nl> - WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE <nl> - DISCLAIMED . IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY <nl> - DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES <nl> - ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; <nl> - LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND <nl> - ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> - ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF <nl> - THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - The following license text is from the file COPYING and applies to the other <nl> - source files in this directory . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - <nl> - PCRE is a library of functions to support regular expressions whose syntax <nl> - and semantics are as close as possible to those of the Perl 5 language . <nl> - <nl> - This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . <nl> - <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge . All rights reserved . <nl> - <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the name of Apple <nl> - Inc . 
nor the names of their contributors may be used to endorse or <nl> - promote products derived from this software without specific prior <nl> - written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - The following copyright lines are found in individual files other than <nl> - ASCIICType . h <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - <nl> - <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 , 2008 Apple Inc . All rights reserved . <nl> - Copyright ( C ) 2007 Eric Seidel < eric @ webkit . org > <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge . All rights reserved . <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( c ) 2005 , 2006 , 2007 Apple Inc . All rights reserved . <nl> deleted file mode 100644 <nl> index 05914ad4956 . . 00000000000 <nl> mmm a / src / third_party / jscre / config . h <nl> ppp / dev / null <nl> <nl> - <nl> - / * On Unix - like systems config . in is converted by " configure " into config . h . <nl> - Some other environments also support the use of " configure " . PCRE is written in <nl> - Standard C , but there are a few non - standard things it can cope with , allowing <nl> - it to run on SunOS4 and other " close to standard " systems . <nl> - <nl> - On a non - Unix - like system you should just copy this file into config . h , and set <nl> - up the macros the way you need them . You should normally change the definitions <nl> - of HAVE_STRERROR and HAVE_MEMMOVE to 1 . Unfortunately , because of the way <nl> - autoconf works , these cannot be made the defaults . If your system has bcopy ( ) <nl> - and not memmove ( ) , change the definition of HAVE_BCOPY instead of HAVE_MEMMOVE . <nl> - If your system has neither bcopy ( ) nor memmove ( ) , leave them both as 0 ; an <nl> - emulation function will be used . * / <nl> - <nl> - / * If you are compiling for a system that uses EBCDIC instead of ASCII <nl> - character codes , define this macro as 1 . On systems that can use " configure " , <nl> - this can be done via - - enable - ebcdic . * / <nl> - <nl> - # ifndef EBCDIC <nl> - # define EBCDIC 0 <nl> - # endif <nl> - <nl> - / * If you are compiling for a system other than a Unix - like system or Win32 , <nl> - and it needs some magic to be inserted before the definition of a function that <nl> - is exported by the library , define this macro to contain the relevant magic . 
If <nl> - you do not define this macro , it defaults to " extern " for a C compiler and <nl> - " extern C " for a C + + compiler on non - Win32 systems . This macro apears at the <nl> - start of every exported function that is part of the external API . It does not <nl> - appear on functions that are " external " in the C sense , but which are internal <nl> - to the library . * / <nl> - <nl> - / * # define PCRE_DATA_SCOPE * / <nl> - <nl> - / * Define the following macro to empty if the " const " keyword does not work . * / <nl> - <nl> - # undef const <nl> - <nl> - / * Define the following macro to " unsigned " if < stddef . h > does not define <nl> - size_t . * / <nl> - <nl> - # undef size_t <nl> - <nl> - / * The following two definitions are mainly for the benefit of SunOS4 , which <nl> - does not have the strerror ( ) or memmove ( ) functions that should be present in <nl> - all Standard C libraries . The macros HAVE_STRERROR and HAVE_MEMMOVE should <nl> - normally be defined with the value 1 for other systems , but unfortunately we <nl> - cannot make this the default because " configure " files generated by autoconf <nl> - will only change 0 to 1 ; they won ' t change 1 to 0 if the functions are not <nl> - found . * / <nl> - <nl> - # define HAVE_STRERROR 1 <nl> - # define HAVE_MEMMOVE 1 <nl> - <nl> - / * There are some non - Unix - like systems that don ' t even have bcopy ( ) . If this <nl> - macro is false , an emulation is used . If HAVE_MEMMOVE is set to 1 , the value of <nl> - HAVE_BCOPY is not relevant . * / <nl> - <nl> - # define HAVE_BCOPY 0 <nl> - <nl> - / * The value of NEWLINE determines the newline character . The default is to <nl> - leave it up to the compiler , but some sites want to force a particular value . <nl> - On Unix - like systems , " configure " can be used to override this default . * / <nl> - <nl> - # ifndef NEWLINE <nl> - # define NEWLINE ' \ n ' <nl> - # endif <nl> - <nl> - / * The value of LINK_SIZE determines the number of bytes used to store links as <nl> - offsets within the compiled regex . The default is 2 , which allows for compiled <nl> - patterns up to 64K long . This covers the vast majority of cases . However , PCRE <nl> - can also be compiled to use 3 or 4 bytes instead . This allows for longer <nl> - patterns in extreme cases . On systems that support it , " configure " can be used <nl> - to override this default . * / <nl> - <nl> - # ifndef LINK_SIZE <nl> - # define LINK_SIZE 2 <nl> - # endif <nl> - <nl> - / * When calling PCRE via the POSIX interface , additional working storage is <nl> - required for holding the pointers to capturing substrings because PCRE requires <nl> - three integers per substring , whereas the POSIX interface provides only two . If <nl> - the number of expected substrings is small , the wrapper function uses space on <nl> - the stack , because this is faster than using malloc ( ) for each call . The <nl> - threshold above which the stack is no longer used is defined by POSIX_MALLOC_ <nl> - THRESHOLD . On systems that support it , " configure " can be used to override this <nl> - default . * / <nl> - <nl> - # ifndef POSIX_MALLOC_THRESHOLD <nl> - # define POSIX_MALLOC_THRESHOLD 10 <nl> - # endif <nl> - <nl> - / * PCRE uses recursive function calls to handle backtracking while matching . <nl> - This can sometimes be a problem on systems that have stacks of limited size . 
<nl> - Define NO_RECURSE to get a version that doesn ' t use recursion in the match ( ) <nl> - function ; instead it creates its own stack by steam using pcre_recurse_malloc ( ) <nl> - to obtain memory from the heap . For more detail , see the comments and other <nl> - stuff just above the match ( ) function . On systems that support it , " configure " <nl> - can be used to set this in the Makefile ( use - - disable - stack - for - recursion ) . * / <nl> - <nl> - / * # define NO_RECURSE * / <nl> - <nl> - / * The value of MATCH_LIMIT determines the default number of times the internal <nl> - match ( ) function can be called during a single execution of pcre_exec ( ) . There <nl> - is a runtime interface for setting a different limit . The limit exists in order <nl> - to catch runaway regular expressions that take for ever to determine that they <nl> - do not match . The default is set very large so that it does not accidentally <nl> - catch legitimate cases . On systems that support it , " configure " can be used to <nl> - override this default default . * / <nl> - <nl> - # ifndef MATCH_LIMIT <nl> - # define MATCH_LIMIT 10000000 <nl> - # endif <nl> - <nl> - / * The above limit applies to all calls of match ( ) , whether or not they <nl> - increase the recursion depth . In some environments it is desirable to limit the <nl> - depth of recursive calls of match ( ) more strictly , in order to restrict the <nl> - maximum amount of stack ( or heap , if NO_RECURSE is defined ) that is used . The <nl> - value of MATCH_LIMIT_RECURSION applies only to recursive calls of match ( ) . To <nl> - have any useful effect , it must be less than the value of MATCH_LIMIT . There is <nl> - a runtime method for setting a different limit . On systems that support it , <nl> - " configure " can be used to override this default default . * / <nl> - <nl> - # ifndef MATCH_LIMIT_RECURSION <nl> - # define MATCH_LIMIT_RECURSION MATCH_LIMIT <nl> - # endif <nl> - <nl> - / * These three limits are parameterized just in case anybody ever wants to <nl> - change them . Care must be taken if they are increased , because they guard <nl> - against integer overflow caused by enormously large patterns . * / <nl> - <nl> - # ifndef MAX_NAME_SIZE <nl> - # define MAX_NAME_SIZE 32 <nl> - # endif <nl> - <nl> - # ifndef MAX_NAME_COUNT <nl> - # define MAX_NAME_COUNT 10000 <nl> - # endif <nl> - <nl> - # ifndef MAX_DUPLENGTH <nl> - # define MAX_DUPLENGTH 30000 <nl> - # endif <nl> - <nl> - / * End * / <nl> deleted file mode 100644 <nl> index 46380f3f4c5 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre . h <nl> ppp / dev / null <nl> <nl> - / * This is the public header file for JavaScriptCore ' s variant of the PCRE <nl> - library . While this library started out as a copy of PCRE , many of the <nl> - features of PCRE have been removed . This library now supports only the <nl> - regular expression features required by the JavaScript language <nl> - specification , and has only the functions needed by JavaScriptCore and the <nl> - rest of WebKit . <nl> - <nl> - Copyright ( c ) 1997 - 2005 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . 
<nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / / FIXME : This file needs to be renamed to JSRegExp . h ; it ' s no longer PCRE . <nl> - <nl> - # ifndef JSRegExp_h <nl> - # define JSRegExp_h <nl> - <nl> - # include " . . / . . / . . / include / v8 . h " <nl> - <nl> - / / JSCRE is very chatty in debug mode , so in order to keep it slient <nl> - / / while still importing v8 . h correctly ( it contains # ifdef DEBUGs ) <nl> - / / we allow DEBUG to be set and undef it manually . 
<nl> - # undef DEBUG <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - typedef uint16_t UChar ; <nl> - <nl> - struct JSRegExp ; <nl> - typedef struct JSRegExp JscreRegExp ; <nl> - <nl> - enum JSRegExpIgnoreCaseOption { JSRegExpDoNotIgnoreCase , JSRegExpIgnoreCase } ; <nl> - enum JSRegExpMultilineOption { JSRegExpSingleLine , JSRegExpMultiline } ; <nl> - <nl> - / * jsRegExpExecute error codes * / <nl> - const int JSRegExpErrorNoMatch = - 1 ; <nl> - const int JSRegExpErrorHitLimit = - 2 ; <nl> - const int JSRegExpErrorNoMemory = - 3 ; <nl> - const int JSRegExpErrorInternal = - 4 ; <nl> - <nl> - typedef void * malloc_t ( size_t size ) ; <nl> - typedef void free_t ( void * address ) ; <nl> - <nl> - JSRegExp * jsRegExpCompile ( const UChar * pattern , int patternLength , <nl> - JSRegExpIgnoreCaseOption , JSRegExpMultilineOption , <nl> - unsigned * numSubpatterns , const char * * errorMessage , <nl> - malloc_t * allocate_function , free_t * free_function ) ; <nl> - <nl> - int jsRegExpExecute ( const JSRegExp * , <nl> - const UChar * subject , int subjectLength , int startOffset , <nl> - int * offsetsVector , int offsetsVectorLength ) ; <nl> - <nl> - void jsRegExpFree ( JSRegExp * ) ; <nl> - <nl> - } } / / namespace v8 : : jscre <nl> - <nl> - # endif <nl> deleted file mode 100644 <nl> index c5fe2c5ccc0 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_chartables . c <nl> ppp / dev / null <nl> <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Perl - Compatible Regular Expressions * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This file is automatically written by the dftables auxiliary <nl> - program . If you edit it by hand , you might like to edit the Makefile to <nl> - prevent its ever being regenerated . <nl> - <nl> - This file contains the default tables for characters with codes less than <nl> - 128 ( ASCII characters ) . These tables are used when no external tables are <nl> - passed to PCRE . * / <nl> - <nl> - const unsigned char kjs_pcre_default_tables [ 480 ] = { <nl> - <nl> - / * This table is a lower casing table . * / <nl> - <nl> - 0x00 , 0x01 , 0x02 , 0x03 , 0x04 , 0x05 , 0x06 , 0x07 , <nl> - 0x08 , 0x09 , 0x0A , 0x0B , 0x0C , 0x0D , 0x0E , 0x0F , <nl> - 0x10 , 0x11 , 0x12 , 0x13 , 0x14 , 0x15 , 0x16 , 0x17 , <nl> - 0x18 , 0x19 , 0x1A , 0x1B , 0x1C , 0x1D , 0x1E , 0x1F , <nl> - 0x20 , 0x21 , 0x22 , 0x23 , 0x24 , 0x25 , 0x26 , 0x27 , <nl> - 0x28 , 0x29 , 0x2A , 0x2B , 0x2C , 0x2D , 0x2E , 0x2F , <nl> - 0x30 , 0x31 , 0x32 , 0x33 , 0x34 , 0x35 , 0x36 , 0x37 , <nl> - 0x38 , 0x39 , 0x3A , 0x3B , 0x3C , 0x3D , 0x3E , 0x3F , <nl> - 0x40 , 0x61 , 0x62 , 0x63 , 0x64 , 0x65 , 0x66 , 0x67 , <nl> - 0x68 , 0x69 , 0x6A , 0x6B , 0x6C , 0x6D , 0x6E , 0x6F , <nl> - 0x70 , 0x71 , 0x72 , 0x73 , 0x74 , 0x75 , 0x76 , 0x77 , <nl> - 0x78 , 0x79 , 0x7A , 0x5B , 0x5C , 0x5D , 0x5E , 0x5F , <nl> - 0x60 , 0x61 , 0x62 , 0x63 , 0x64 , 0x65 , 0x66 , 0x67 , <nl> - 0x68 , 0x69 , 0x6A , 0x6B , 0x6C , 0x6D , 0x6E , 0x6F , <nl> - 0x70 , 0x71 , 0x72 , 0x73 , 0x74 , 0x75 , 0x76 , 0x77 , <nl> - 0x78 , 0x79 , 0x7A , 0x7B , 0x7C , 0x7D , 0x7E , 0x7F , <nl> - <nl> - / * This table is a case flipping table . 
* / <nl> - <nl> - 0x00 , 0x01 , 0x02 , 0x03 , 0x04 , 0x05 , 0x06 , 0x07 , <nl> - 0x08 , 0x09 , 0x0A , 0x0B , 0x0C , 0x0D , 0x0E , 0x0F , <nl> - 0x10 , 0x11 , 0x12 , 0x13 , 0x14 , 0x15 , 0x16 , 0x17 , <nl> - 0x18 , 0x19 , 0x1A , 0x1B , 0x1C , 0x1D , 0x1E , 0x1F , <nl> - 0x20 , 0x21 , 0x22 , 0x23 , 0x24 , 0x25 , 0x26 , 0x27 , <nl> - 0x28 , 0x29 , 0x2A , 0x2B , 0x2C , 0x2D , 0x2E , 0x2F , <nl> - 0x30 , 0x31 , 0x32 , 0x33 , 0x34 , 0x35 , 0x36 , 0x37 , <nl> - 0x38 , 0x39 , 0x3A , 0x3B , 0x3C , 0x3D , 0x3E , 0x3F , <nl> - 0x40 , 0x61 , 0x62 , 0x63 , 0x64 , 0x65 , 0x66 , 0x67 , <nl> - 0x68 , 0x69 , 0x6A , 0x6B , 0x6C , 0x6D , 0x6E , 0x6F , <nl> - 0x70 , 0x71 , 0x72 , 0x73 , 0x74 , 0x75 , 0x76 , 0x77 , <nl> - 0x78 , 0x79 , 0x7A , 0x5B , 0x5C , 0x5D , 0x5E , 0x5F , <nl> - 0x60 , 0x41 , 0x42 , 0x43 , 0x44 , 0x45 , 0x46 , 0x47 , <nl> - 0x48 , 0x49 , 0x4A , 0x4B , 0x4C , 0x4D , 0x4E , 0x4F , <nl> - 0x50 , 0x51 , 0x52 , 0x53 , 0x54 , 0x55 , 0x56 , 0x57 , <nl> - 0x58 , 0x59 , 0x5A , 0x7B , 0x7C , 0x7D , 0x7E , 0x7F , <nl> - <nl> - / * This table contains bit maps for various character classes . <nl> - Each map is 32 bytes long and the bits run from the least <nl> - significant end of each byte . The classes are : space , digit , word . * / <nl> - <nl> - 0x00 , 0x3E , 0x00 , 0x00 , 0x01 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0xFF , 0x03 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0xFF , 0x03 , <nl> - 0xFE , 0xFF , 0xFF , 0x87 , 0xFE , 0xFF , 0xFF , 0x07 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , <nl> - <nl> - / * This table identifies various classes of character by individual bits : <nl> - 0x01 white space character <nl> - 0x08 hexadecimal digit <nl> - 0x10 alphanumeric or ' _ ' <nl> - * / <nl> - <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * 0 - 7 * / <nl> - 0x00 , 0x01 , 0x01 , 0x01 , 0x01 , 0x01 , 0x00 , 0x00 , / * 8 - 15 * / <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * 16 - 23 * / <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * 24 - 31 * / <nl> - 0x01 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * - ' * / <nl> - 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * ( - / * / <nl> - 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , / * 0 - 7 * / <nl> - 0x18 , 0x18 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , / * 8 - ? * / <nl> - 0x00 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x10 , / * @ - G * / <nl> - 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , / * H - O * / <nl> - 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , / * P - W * / <nl> - 0x10 , 0x10 , 0x10 , 0x00 , 0x00 , 0x00 , 0x00 , 0x10 , / * X - _ * / <nl> - 0x00 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x18 , 0x10 , / * ` - g * / <nl> - 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , / * h - o * / <nl> - 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , 0x10 , / * p - w * / <nl> - 0x10 , 0x10 , 0x10 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 } ; / * x - 127 * / <nl> - <nl> - <nl> - / * End of chartables . 
c * / <nl> deleted file mode 100644 <nl> index 40f55ad79ae . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_compile . cpp <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - Copyright ( C ) 2007 Eric Seidel < eric @ webkit . org > <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * This module contains the external function jsRegExpExecute ( ) , along with <nl> - supporting internal functions that are not used by other modules . * / <nl> - <nl> - # include " config . h " <nl> - <nl> - # include " pcre_internal . h " <nl> - <nl> - # include < string . h > <nl> - # include " ASCIICType . h " <nl> - <nl> - / * Negative values for the firstchar and reqchar variables * / <nl> - <nl> - # define REQ_UNSET ( - 2 ) <nl> - # define REQ_NONE ( - 1 ) <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Code parameters and static tables * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Maximum number of items on the nested bracket stacks at compile time . This <nl> - applies to the nesting of all kinds of parentheses . 
It does not limit <nl> - un - nested , non - capturing parentheses . This number can be made bigger if <nl> - necessary - it is used to dimension one int and one unsigned char vector at <nl> - compile time . * / <nl> - <nl> - # define BRASTACK_SIZE 200 <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - / * Table for handling escaped characters in the range ' 0 ' - ' z ' . Positive returns <nl> - are simple data values ; negative values are for special things like \ d and so <nl> - on . Zero means further processing is needed ( for things like \ x ) , or the escape <nl> - is invalid . * / <nl> - <nl> - static const short escapes [ ] = { <nl> - 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , / * 0 - 7 * / <nl> - 0 , 0 , ' : ' , ' ; ' , ' < ' , ' = ' , ' > ' , ' ? ' , / * 8 - ? * / <nl> - ' @ ' , 0 , - ESC_B , 0 , - ESC_D , 0 , 0 , 0 , / * @ - G * / <nl> - 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , / * H - O * / <nl> - 0 , 0 , 0 , - ESC_S , 0 , 0 , 0 , - ESC_W , / * P - W * / <nl> - 0 , 0 , 0 , ' [ ' , ' \ \ ' , ' ] ' , ' ^ ' , ' _ ' , / * X - _ * / <nl> - ' ` ' , 7 , - ESC_b , 0 , - ESC_d , 0 , ' \ f ' , 0 , / * ` - g * / <nl> - 0 , 0 , 0 , 0 , 0 , 0 , ' \ n ' , 0 , / * h - o * / <nl> - 0 , 0 , ' \ r ' , - ESC_s , ' \ t ' , 0 , ' \ v ' , - ESC_w , / * p - w * / <nl> - 0 , 0 , 0 / * x - z * / <nl> - } ; <nl> - <nl> - / * Error code numbers . They are given names so that they can more easily be <nl> - tracked . * / <nl> - <nl> - enum ErrorCode { <nl> - ERR0 , ERR1 , ERR2 , ERR3 , ERR4 , ERR5 , ERR6 , ERR7 , ERR8 , ERR9 , <nl> - ERR10 , ERR11 , ERR12 , ERR13 , ERR14 , ERR15 , ERR16 , ERR17 <nl> - } ; <nl> - <nl> - / * The texts of compile - time error messages . These are " char * " because they <nl> - are passed to the outside world . * / <nl> - <nl> - static const char * errorText ( ErrorCode code ) <nl> - { <nl> - static const char errorTexts [ ] = <nl> - / * 1 * / <nl> - " \ \ at end of pattern \ 0 " <nl> - " \ \ c at end of pattern \ 0 " <nl> - " character value in \ \ x { . . . } sequence is too large \ 0 " <nl> - " numbers out of order in { } quantifier \ 0 " <nl> - / * 5 * / <nl> - " number too big in { } quantifier \ 0 " <nl> - " missing terminating ] for character class \ 0 " <nl> - " internal error : code overflow \ 0 " <nl> - " range out of order in character class \ 0 " <nl> - " nothing to repeat \ 0 " <nl> - / * 10 * / <nl> - " unmatched parentheses \ 0 " <nl> - " internal error : unexpected repeat \ 0 " <nl> - " unrecognized character after ( ? \ 0 " <nl> - " failed to get memory \ 0 " <nl> - " missing ) \ 0 " <nl> - / * 15 * / <nl> - " reference to non - existent subpattern \ 0 " <nl> - " regular expression too large \ 0 " <nl> - " parentheses nested too deeply " <nl> - ; <nl> - <nl> - int i = code ; <nl> - const char * text = errorTexts ; <nl> - while ( i > 1 ) <nl> - i - = ! * text + + ; <nl> - return text ; <nl> - } <nl> - <nl> - / * Structure for passing " static " information around between the functions <nl> - doing the compiling . 
* / <nl> - <nl> - struct CompileData { <nl> - CompileData ( ) { <nl> - top_backref = 0 ; <nl> - backrefMap = 0 ; <nl> - req_varyopt = 0 ; <nl> - needOuterBracket = false ; <nl> - numCapturingBrackets = 0 ; <nl> - } <nl> - int top_backref ; / * Maximum back reference * / <nl> - unsigned backrefMap ; / * Bitmap of low back refs * / <nl> - int req_varyopt ; / * " After variable item " flag for reqbyte * / <nl> - bool needOuterBracket ; <nl> - int numCapturingBrackets ; <nl> - } ; <nl> - <nl> - / * Definitions to allow mutual recursion * / <nl> - <nl> - static bool compileBracket ( int , int * , unsigned char * * , const UChar * * , const UChar * , ErrorCode * , int , int * , int * , CompileData & ) ; <nl> - static bool bracketIsAnchored ( const unsigned char * code ) ; <nl> - static bool bracketNeedsLineStart ( const unsigned char * code , unsigned captureMap , unsigned backrefMap ) ; <nl> - static int bracketFindFirstAssertedCharacter ( const unsigned char * code , bool inassert ) ; <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Handle escapes * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function is called when a \ has been encountered . It either returns a <nl> - positive value for a simple escape such as \ n , or a negative value which <nl> - encodes one of the more complicated things such as \ d . When UTF - 8 is enabled , <nl> - a positive value greater than 255 may be returned . On entry , ptr is pointing at <nl> - the \ . On exit , it is on the final character of the escape sequence . <nl> - <nl> - Arguments : <nl> - ptrptr points to the pattern position pointer <nl> - errorcodeptr points to the errorcode variable <nl> - bracount number of previous extracting brackets <nl> - options the options bits <nl> - isclass true if inside a character class <nl> - <nl> - Returns : zero or positive = > a data character <nl> - negative = > a special escape sequence <nl> - on error , errorptr is set <nl> - * / <nl> - <nl> - static int checkEscape ( const UChar * * ptrptr , const UChar * patternEnd , ErrorCode * errorcodeptr , int bracount , bool isclass ) <nl> - { <nl> - const UChar * ptr = * ptrptr + 1 ; <nl> - <nl> - / * If backslash is at the end of the pattern , it ' s an error . * / <nl> - if ( ptr = = patternEnd ) { <nl> - * errorcodeptr = ERR1 ; <nl> - * ptrptr = ptr ; <nl> - return 0 ; <nl> - } <nl> - <nl> - int c = * ptr ; <nl> - <nl> - / * Non - alphamerics are literals . For digits or letters , do an initial lookup in <nl> - a table . A non - zero result is something that can be returned immediately . <nl> - Otherwise further processing may be required . * / <nl> - <nl> - if ( c < ' 0 ' | | c > ' z ' ) { / * Not alphameric * / <nl> - } else if ( int escapeValue = escapes [ c - ' 0 ' ] ) { <nl> - c = escapeValue ; <nl> - if ( isclass ) { <nl> - if ( - c = = ESC_b ) <nl> - c = ' \ b ' ; / * \ b is backslash in a class * / <nl> - else if ( - c = = ESC_B ) <nl> - c = ' B ' ; / * and \ B is a capital B in a class ( in browsers event though ECMAScript 15 . 10 . 2 . 19 says it raises an error ) * / <nl> - } <nl> - / * Escapes that need further processing , or are illegal . 
* / <nl> - <nl> - } else { <nl> - switch ( c ) { <nl> - case ' 1 ' : <nl> - case ' 2 ' : <nl> - case ' 3 ' : <nl> - case ' 4 ' : <nl> - case ' 5 ' : <nl> - case ' 6 ' : <nl> - case ' 7 ' : <nl> - case ' 8 ' : <nl> - case ' 9 ' : <nl> - / * Escape sequences starting with a non - zero digit are backreferences , <nl> - unless there are insufficient brackets , in which case they are octal <nl> - escape sequences . Those sequences end on the first non - octal character <nl> - or when we overflow 0 - 255 , whichever comes first . * / <nl> - <nl> - if ( ! isclass ) { <nl> - const UChar * oldptr = ptr ; <nl> - c - = ' 0 ' ; <nl> - while ( ( ptr + 1 < patternEnd ) & & isASCIIDigit ( ptr [ 1 ] ) & & c < = bracount ) <nl> - c = c * 10 + * ( + + ptr ) - ' 0 ' ; <nl> - if ( c < = bracount ) { <nl> - c = - ( ESC_REF + c ) ; <nl> - break ; <nl> - } <nl> - ptr = oldptr ; / * Put the pointer back and fall through * / <nl> - } <nl> - <nl> - / * Handle an octal number following \ . If the first digit is 8 or 9 , <nl> - this is not octal . * / <nl> - <nl> - if ( ( c = * ptr ) > = ' 8 ' ) <nl> - break ; <nl> - <nl> - / * \ 0 always starts an octal number , but we may drop through to here with a <nl> - larger first octal digit . * / <nl> - <nl> - case ' 0 ' : { <nl> - c - = ' 0 ' ; <nl> - int i ; <nl> - for ( i = 1 ; i < = 2 ; + + i ) { <nl> - if ( ptr + i > = patternEnd | | ptr [ i ] < ' 0 ' | | ptr [ i ] > ' 7 ' ) <nl> - break ; <nl> - int cc = c * 8 + ptr [ i ] - ' 0 ' ; <nl> - if ( cc > 255 ) <nl> - break ; <nl> - c = cc ; <nl> - } <nl> - ptr + = i - 1 ; <nl> - break ; <nl> - } <nl> - <nl> - case ' x ' : { <nl> - c = 0 ; <nl> - int i ; <nl> - for ( i = 1 ; i < = 2 ; + + i ) { <nl> - if ( ptr + i > = patternEnd | | ! isASCIIHexDigit ( ptr [ i ] ) ) { <nl> - c = ' x ' ; <nl> - i = 1 ; <nl> - break ; <nl> - } <nl> - int cc = ptr [ i ] ; <nl> - if ( cc > = ' a ' ) <nl> - cc - = 32 ; / * Convert to upper case * / <nl> - c = c * 16 + cc - ( ( cc < ' A ' ) ? ' 0 ' : ( ' A ' - 10 ) ) ; <nl> - } <nl> - ptr + = i - 1 ; <nl> - break ; <nl> - } <nl> - <nl> - case ' u ' : { <nl> - c = 0 ; <nl> - int i ; <nl> - for ( i = 1 ; i < = 4 ; + + i ) { <nl> - if ( ptr + i > = patternEnd | | ! isASCIIHexDigit ( ptr [ i ] ) ) { <nl> - c = ' u ' ; <nl> - i = 1 ; <nl> - break ; <nl> - } <nl> - int cc = ptr [ i ] ; <nl> - if ( cc > = ' a ' ) <nl> - cc - = 32 ; / * Convert to upper case * / <nl> - c = c * 16 + cc - ( ( cc < ' A ' ) ? ' 0 ' : ( ' A ' - 10 ) ) ; <nl> - } <nl> - ptr + = i - 1 ; <nl> - break ; <nl> - } <nl> - <nl> - case ' c ' : <nl> - if ( + + ptr = = patternEnd ) { <nl> - * errorcodeptr = ERR2 ; <nl> - return 0 ; <nl> - } <nl> - c = * ptr ; <nl> - <nl> - / * A letter is upper - cased ; then the 0x40 bit is flipped . This coding <nl> - is ASCII - specific , but then the whole concept of \ cx is ASCII - specific . * / <nl> - c = toASCIIUpper ( c ) ^ 0x40 ; <nl> - break ; <nl> - } <nl> - } <nl> - <nl> - * ptrptr = ptr ; <nl> - return c ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Check for counted repeat * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function is called when a ' { ' is encountered in a place where it might <nl> - start a quantifier . It looks ahead to see if it really is a quantifier or not . <nl> - It is only a quantifier if it is one of the forms { ddd } { ddd , } or { ddd , ddd } <nl> - where the ddds are digits . 
<nl> - <nl> - Arguments : <nl> - p pointer to the first char after ' { ' <nl> - <nl> - Returns : true or false <nl> - * / <nl> - <nl> - static bool isCountedRepeat ( const UChar * p , const UChar * patternEnd ) <nl> - { <nl> - if ( p > = patternEnd | | ! isASCIIDigit ( * p ) ) <nl> - return false ; <nl> - p + + ; <nl> - while ( p < patternEnd & & isASCIIDigit ( * p ) ) <nl> - p + + ; <nl> - if ( p < patternEnd & & * p = = ' } ' ) <nl> - return true ; <nl> - <nl> - if ( p > = patternEnd | | * p + + ! = ' , ' ) <nl> - return false ; <nl> - if ( p < patternEnd & & * p = = ' } ' ) <nl> - return true ; <nl> - <nl> - if ( p > = patternEnd | | ! isASCIIDigit ( * p ) ) <nl> - return false ; <nl> - p + + ; <nl> - while ( p < patternEnd & & isASCIIDigit ( * p ) ) <nl> - p + + ; <nl> - <nl> - return ( p < patternEnd & & * p = = ' } ' ) ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Read repeat counts * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Read an item of the form { n , m } and return the values . This is called only <nl> - after isCountedRepeat ( ) has confirmed that a repeat - count quantifier exists , <nl> - so the syntax is guaranteed to be correct , but we need to check the values . <nl> - <nl> - Arguments : <nl> - p pointer to first char after ' { ' <nl> - minp pointer to int for min <nl> - maxp pointer to int for max <nl> - returned as - 1 if no max <nl> - errorcodeptr points to error code variable <nl> - <nl> - Returns : pointer to ' } ' on success ; <nl> - current ptr on error , with errorcodeptr set non - zero <nl> - * / <nl> - <nl> - static const UChar * readRepeatCounts ( const UChar * p , int * minp , int * maxp , ErrorCode * errorcodeptr ) <nl> - { <nl> - int min = 0 ; <nl> - int max = - 1 ; <nl> - <nl> - / * Read the minimum value and do a paranoid check : a negative value indicates <nl> - an integer overflow . * / <nl> - <nl> - while ( isASCIIDigit ( * p ) ) <nl> - min = min * 10 + * p + + - ' 0 ' ; <nl> - if ( min < 0 | | min > 65535 ) { <nl> - * errorcodeptr = ERR5 ; <nl> - return p ; <nl> - } <nl> - <nl> - / * Read the maximum value if there is one , and again do a paranoid on its size . <nl> - Also , max must not be less than min . * / <nl> - <nl> - if ( * p = = ' } ' ) <nl> - max = min ; <nl> - else { <nl> - if ( * ( + + p ) ! = ' } ' ) { <nl> - max = 0 ; <nl> - while ( isASCIIDigit ( * p ) ) <nl> - max = max * 10 + * p + + - ' 0 ' ; <nl> - if ( max < 0 | | max > 65535 ) { <nl> - * errorcodeptr = ERR5 ; <nl> - return p ; <nl> - } <nl> - if ( max < min ) { <nl> - * errorcodeptr = ERR4 ; <nl> - return p ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / * Fill in the required variables , and pass back the pointer to the terminating <nl> - ' } ' . * / <nl> - <nl> - * minp = min ; <nl> - * maxp = max ; <nl> - return p ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Find first significant op code * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This is called by several functions that scan a compiled expression looking <nl> - for a fixed first character , or an anchoring op code etc . It skips over things <nl> - that do not influence this . 
<nl> - <nl> - Arguments : <nl> - code pointer to the start of the group <nl> - Returns : pointer to the first significant opcode <nl> - * / <nl> - <nl> - static const unsigned char * firstSignificantOpcode ( const unsigned char * code ) <nl> - { <nl> - while ( * code = = OP_BRANUMBER ) <nl> - code + = 3 ; <nl> - return code ; <nl> - } <nl> - <nl> - static const unsigned char * firstSignificantOpcodeSkippingAssertions ( const unsigned char * code ) <nl> - { <nl> - while ( true ) { <nl> - switch ( * code ) { <nl> - case OP_ASSERT_NOT : <nl> - advanceToEndOfBracket ( code ) ; <nl> - code + = 1 + LINK_SIZE ; <nl> - break ; <nl> - case OP_WORD_BOUNDARY : <nl> - case OP_NOT_WORD_BOUNDARY : <nl> - + + code ; <nl> - break ; <nl> - case OP_BRANUMBER : <nl> - code + = 3 ; <nl> - break ; <nl> - default : <nl> - return code ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Get othercase range * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function is passed the start and end of a class range , in UTF - 8 mode <nl> - with UCP support . It searches up the characters , looking for internal ranges of <nl> - characters in the " other " case . Each call returns the next one , updating the <nl> - start address . <nl> - <nl> - Arguments : <nl> - cptr points to starting character value ; updated <nl> - d end value <nl> - ocptr where to put start of othercase range <nl> - odptr where to put end of othercase range <nl> - <nl> - Yield : true when range returned ; false when no more <nl> - * / <nl> - <nl> - static bool getOthercaseRange ( int * cptr , int d , int * ocptr , int * odptr ) <nl> - { <nl> - int c , othercase = 0 ; <nl> - <nl> - for ( c = * cptr ; c < = d ; c + + ) { <nl> - if ( ( othercase = kjs_pcre_ucp_othercase ( c ) ) > = 0 ) <nl> - break ; <nl> - } <nl> - <nl> - if ( c > d ) <nl> - return false ; <nl> - <nl> - * ocptr = othercase ; <nl> - int next = othercase + 1 ; <nl> - <nl> - for ( + + c ; c < = d ; c + + ) { <nl> - if ( kjs_pcre_ucp_othercase ( c ) ! = next ) <nl> - break ; <nl> - next + + ; <nl> - } <nl> - <nl> - * odptr = next - 1 ; <nl> - * cptr = c ; <nl> - <nl> - return true ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Convert character value to UTF - 8 * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function takes an integer value in the range 0 - 0x7fffffff <nl> - and encodes it as a UTF - 8 character in 0 to 6 bytes . 
<nl> - <nl> - Arguments : <nl> - cvalue the character value <nl> - buffer pointer to buffer for result - at least 6 bytes long <nl> - <nl> - Returns : number of characters placed in the buffer <nl> - * / <nl> - <nl> - static int encodeUTF8 ( int cvalue , unsigned char * buffer ) <nl> - { <nl> - int i ; <nl> - for ( i = 0 ; i < kjs_pcre_utf8_table1_size ; i + + ) <nl> - if ( cvalue < = kjs_pcre_utf8_table1 [ i ] ) <nl> - break ; <nl> - buffer + = i ; <nl> - for ( int j = i ; j > 0 ; j - - ) { <nl> - * buffer - - = 0x80 | ( cvalue & 0x3f ) ; <nl> - cvalue > > = 6 ; <nl> - } <nl> - * buffer = kjs_pcre_utf8_table2 [ i ] | cvalue ; <nl> - return i + 1 ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Compile one branch * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Scan the pattern , compiling it into the code vector . <nl> - <nl> - Arguments : <nl> - options the option bits <nl> - brackets points to number of extracting brackets used <nl> - codeptr points to the pointer to the current code point <nl> - ptrptr points to the current pattern pointer <nl> - errorcodeptr points to error code variable <nl> - firstbyteptr set to initial literal character , or < 0 ( REQ_UNSET , REQ_NONE ) <nl> - reqbyteptr set to the last literal character required , else < 0 <nl> - cd contains pointers to tables etc . <nl> - <nl> - Returns : true on success <nl> - false , with * errorcodeptr set non - zero on error <nl> - * / <nl> - <nl> - static inline bool safelyCheckNextChar ( const UChar * ptr , const UChar * patternEnd , UChar expected ) <nl> - { <nl> - return ( ( ptr + 1 < patternEnd ) & & ptr [ 1 ] = = expected ) ; <nl> - } <nl> - <nl> - static bool <nl> - compileBranch ( int options , int * brackets , unsigned char * * codeptr , <nl> - const UChar * * ptrptr , const UChar * patternEnd , ErrorCode * errorcodeptr , int * firstbyteptr , <nl> - int * reqbyteptr , CompileData & cd ) <nl> - { <nl> - int repeat_type , op_type ; <nl> - int repeat_min = 0 , repeat_max = 0 ; / * To please picky compilers * / <nl> - int bravalue = 0 ; <nl> - int reqvary , tempreqvary ; <nl> - int c ; <nl> - unsigned char * code = * codeptr ; <nl> - unsigned char * tempcode ; <nl> - bool groupsetfirstbyte = false ; <nl> - const UChar * ptr = * ptrptr ; <nl> - const UChar * tempptr ; <nl> - unsigned char * previous = NULL ; <nl> - unsigned char classbits [ 32 ] ; <nl> - <nl> - bool class_utf8 ; <nl> - unsigned char * class_utf8data ; <nl> - unsigned char utf8_char [ 6 ] ; <nl> - <nl> - / * Initialize no first byte , no required byte . REQ_UNSET means " no char <nl> - matching encountered yet " . It gets changed to REQ_NONE if we hit something that <nl> - matches a non - fixed char first char ; reqbyte just remains unset if we never <nl> - find one . <nl> - <nl> - When we hit a repeat whose minimum is zero , we may have to adjust these values <nl> - to take the zero repeat into account . This is implemented by setting them to <nl> - zerofirstbyte and zeroreqbyte when such a repeat is encountered . The individual <nl> - item types that can be repeated set these backoff variables appropriately . 
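For reference, the table-driven encodeUTF8 helper earlier in this hunk produces standard UTF-8, so its output for a few code points can be stated directly; the byte values below follow from the UTF-8 encoding rules rather than from anything specific to this file:

// unsigned char buf[6];
// encodeUTF8(0x41, buf);     // returns 1: buf = { 0x41 }
// encodeUTF8(0xE9, buf);     // returns 2: buf = { 0xC3, 0xA9 }             (U+00E9)
// encodeUTF8(0x2603, buf);   // returns 3: buf = { 0xE2, 0x98, 0x83 }       (U+2603)
// encodeUTF8(0x1F600, buf);  // returns 4: buf = { 0xF0, 0x9F, 0x98, 0x80 } (U+1F600)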
* / <nl> - <nl> - int firstbyte = REQ_UNSET ; <nl> - int reqbyte = REQ_UNSET ; <nl> - int zeroreqbyte = REQ_UNSET ; <nl> - int zerofirstbyte = REQ_UNSET ; <nl> - <nl> - / * The variable req_caseopt contains either the REQ_IGNORE_CASE value or zero , <nl> - according to the current setting of the ignores - case flag . REQ_IGNORE_CASE is a bit <nl> - value > 255 . It is added into the firstbyte or reqbyte variables to record the <nl> - case status of the value . This is used only for ASCII characters . * / <nl> - <nl> - int req_caseopt = ( options & IgnoreCaseOption ) ? REQ_IGNORE_CASE : 0 ; <nl> - <nl> - / * Switch on next character until the end of the branch * / <nl> - <nl> - for ( ; ; ptr + + ) { <nl> - bool negate_class ; <nl> - bool should_flip_negation ; / * If a negative special such as \ S is used , we should negate the whole class to properly support Unicode . * / <nl> - int class_charcount ; <nl> - int class_lastchar ; <nl> - int skipbytes ; <nl> - int subreqbyte ; <nl> - int subfirstbyte ; <nl> - int mclength ; <nl> - unsigned char mcbuffer [ 8 ] ; <nl> - <nl> - / * Next byte in the pattern * / <nl> - <nl> - c = ptr < patternEnd ? * ptr : 0 ; <nl> - <nl> - / * Fill in length of a previous callout , except when the next thing is <nl> - a quantifier . * / <nl> - <nl> - bool is_quantifier = c = = ' * ' | | c = = ' + ' | | c = = ' ? ' | | ( c = = ' { ' & & isCountedRepeat ( ptr + 1 , patternEnd ) ) ; <nl> - <nl> - switch ( c ) { <nl> - / * The branch terminates at end of string , | , or ) . * / <nl> - <nl> - case 0 : <nl> - if ( ptr < patternEnd ) <nl> - goto NORMAL_CHAR ; <nl> - / / End of string ; fall through <nl> - case ' | ' : <nl> - case ' ) ' : <nl> - * firstbyteptr = firstbyte ; <nl> - * reqbyteptr = reqbyte ; <nl> - * codeptr = code ; <nl> - * ptrptr = ptr ; <nl> - return true ; <nl> - <nl> - / * Handle single - character metacharacters . In multiline mode , ^ disables <nl> - the setting of any following char as a first character . * / <nl> - <nl> - case ' ^ ' : <nl> - if ( options & MatchAcrossMultipleLinesOption ) { <nl> - if ( firstbyte = = REQ_UNSET ) <nl> - firstbyte = REQ_NONE ; <nl> - * code + + = OP_BOL ; <nl> - } else <nl> - * code + + = OP_CIRC ; <nl> - previous = NULL ; <nl> - break ; <nl> - <nl> - case ' $ ' : <nl> - previous = NULL ; <nl> - if ( options & MatchAcrossMultipleLinesOption ) <nl> - * code + + = OP_EOL ; <nl> - else <nl> - * code + + = OP_DOLL ; <nl> - break ; <nl> - <nl> - / * There can never be a first char if ' . ' is first , whatever happens about <nl> - repeats . The value of reqbyte doesn ' t change either . * / <nl> - <nl> - case ' . ' : <nl> - if ( firstbyte = = REQ_UNSET ) <nl> - firstbyte = REQ_NONE ; <nl> - zerofirstbyte = firstbyte ; <nl> - zeroreqbyte = reqbyte ; <nl> - previous = code ; <nl> - * code + + = OP_NOT_NEWLINE ; <nl> - break ; <nl> - <nl> - / * Character classes . If the included characters are all < 256 , we build a <nl> - 32 - byte bitmap of the permitted characters , except in the special case <nl> - where there is only one such character . For negated classes , we build the <nl> - map as usual , then invert it at the end . However , we use a different opcode <nl> - so that data characters > 255 can be handled correctly . <nl> - <nl> - If the class contains characters outside the 0 - 255 range , a different <nl> - opcode is compiled . It may optionally have a bit map for characters < 256 , <nl> - but those above are are explicitly listed afterwards . 
A flag byte tells <nl> - whether the bitmap is present , and whether this is a negated class or not . <nl> - * / <nl> - <nl> - case ' [ ' : { <nl> - previous = code ; <nl> - should_flip_negation = false ; <nl> - <nl> - / * PCRE supports POSIX class stuff inside a class . Perl gives an error if <nl> - they are encountered at the top level , so we ' ll do that too . * / <nl> - <nl> - / * If the first character is ' ^ ' , set the negation flag and skip it . * / <nl> - <nl> - if ( ptr + 1 > = patternEnd ) { <nl> - * errorcodeptr = ERR6 ; <nl> - return false ; <nl> - } <nl> - <nl> - if ( ptr [ 1 ] = = ' ^ ' ) { <nl> - negate_class = true ; <nl> - + + ptr ; <nl> - } else <nl> - negate_class = false ; <nl> - <nl> - / * Keep a count of chars with values < 256 so that we can optimize the case <nl> - of just a single character ( as long as it ' s < 256 ) . For higher valued UTF - 8 <nl> - characters , we don ' t yet do any optimization . * / <nl> - <nl> - class_charcount = 0 ; <nl> - class_lastchar = - 1 ; <nl> - <nl> - class_utf8 = false ; / * No chars > = 256 * / <nl> - class_utf8data = code + LINK_SIZE + 34 ; / * For UTF - 8 items * / <nl> - <nl> - / * Initialize the 32 - char bit map to all zeros . We have to build the <nl> - map in a temporary bit of store , in case the class contains only 1 <nl> - character ( < 256 ) , because in that case the compiled code doesn ' t use the <nl> - bit map . * / <nl> - <nl> - memset ( classbits , 0 , 32 * sizeof ( unsigned char ) ) ; <nl> - <nl> - / * Process characters until ] is reached . The first pass <nl> - through the regex checked the overall syntax , so we don ' t need to be very <nl> - strict here . At the start of the loop , c contains the first byte of the <nl> - character . * / <nl> - <nl> - while ( ( + + ptr < patternEnd ) & & ( c = * ptr ) ! = ' ] ' ) { <nl> - / * Backslash may introduce a single character , or it may introduce one <nl> - of the specials , which just set a flag . Escaped items are checked for <nl> - validity in the pre - compiling pass . The sequence \ b is a special case . <nl> - Inside a class ( and only there ) it is treated as backspace . Elsewhere <nl> - it marks a word boundary . Other escapes have preset maps ready to <nl> - or into the one we are building . We assume they have more than one <nl> - character in them , so set class_charcount bigger than one . * / <nl> - <nl> - if ( c = = ' \ \ ' ) { <nl> - c = checkEscape ( & ptr , patternEnd , errorcodeptr , cd . 
numCapturingBrackets , true ) ; <nl> - if ( c < 0 ) { <nl> - class_charcount + = 2 ; / * Greater than 1 is what matters * / <nl> - switch ( - c ) { <nl> - case ESC_d : <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = classBitmapForChar ( c + cbit_digit ) ; <nl> - continue ; <nl> - <nl> - case ESC_D : <nl> - should_flip_negation = true ; <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = ~ classBitmapForChar ( c + cbit_digit ) ; <nl> - continue ; <nl> - <nl> - case ESC_w : <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = classBitmapForChar ( c + cbit_word ) ; <nl> - continue ; <nl> - <nl> - case ESC_W : <nl> - should_flip_negation = true ; <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = ~ classBitmapForChar ( c + cbit_word ) ; <nl> - continue ; <nl> - <nl> - case ESC_s : <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = classBitmapForChar ( c + cbit_space ) ; <nl> - continue ; <nl> - <nl> - case ESC_S : <nl> - should_flip_negation = true ; <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - classbits [ c ] | = ~ classBitmapForChar ( c + cbit_space ) ; <nl> - continue ; <nl> - <nl> - / * Unrecognized escapes are faulted if PCRE is running in its <nl> - strict mode . By default , for compatibility with Perl , they are <nl> - treated as literals . * / <nl> - <nl> - default : <nl> - c = * ptr ; / * The final character * / <nl> - class_charcount - = 2 ; / * Undo the default count from above * / <nl> - } <nl> - } <nl> - <nl> - / * Fall through if we have a single character ( c > = 0 ) . This may be <nl> - > 256 in UTF - 8 mode . * / <nl> - <nl> - } / * End of backslash handling * / <nl> - <nl> - / * A single character may be followed by ' - ' to form a range . However , <nl> - Perl does not permit ' ] ' to be the end of the range . A ' - ' character <nl> - here is treated as a literal . * / <nl> - <nl> - if ( ( ptr + 2 < patternEnd ) & & ptr [ 1 ] = = ' - ' & & ptr [ 2 ] ! = ' ] ' ) { <nl> - ptr + = 2 ; <nl> - <nl> - int d = * ptr ; <nl> - <nl> - / * The second part of a range can be a single - character escape , but <nl> - not any of the other escapes . Perl 5 . 6 treats a hyphen as a literal <nl> - in such circumstances . * / <nl> - <nl> - if ( d = = ' \ \ ' ) { <nl> - const UChar * oldptr = ptr ; <nl> - d = checkEscape ( & ptr , patternEnd , errorcodeptr , cd . numCapturingBrackets , true ) ; <nl> - <nl> - / * \ X is literal X ; any other special means the ' - ' was literal * / <nl> - if ( d < 0 ) { <nl> - ptr = oldptr - 2 ; <nl> - goto LONE_SINGLE_CHARACTER ; / * A few lines below * / <nl> - } <nl> - } <nl> - <nl> - / * The check that the two values are in the correct order happens in <nl> - the pre - pass . Optimize one - character ranges * / <nl> - <nl> - if ( d = = c ) <nl> - goto LONE_SINGLE_CHARACTER ; / * A few lines below * / <nl> - <nl> - / * In UTF - 8 mode , if the upper limit is > 255 , or > 127 for caseless <nl> - matching , we have to use an XCLASS with extra data items . Caseless <nl> - matching for characters > 127 is available only if UCP support is <nl> - available . * / <nl> - <nl> - if ( ( d > 255 | | ( ( options & IgnoreCaseOption ) & & d > 127 ) ) ) { <nl> - class_utf8 = true ; <nl> - <nl> - / * With UCP support , we can find the other case equivalents of <nl> - the relevant characters . There may be several ranges . Optimize how <nl> - they fit with the basic range . 
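The escape cases above fold whole character classes into the 32-byte bitmap by OR-ing precomputed tables (classBitmapForChar(c + cbit_digit) and friends), and handle negated forms such as \D by OR-ing the complemented table and setting should_flip_negation. A minimal self-contained sketch of that bitmap representation, in case the byte/bit indexing is not obvious; ClassBits, setBit, testBit and digitsOnly are illustrative names only:

#include <array>

// A 256-bit membership map, like the 32-byte classbits buffer built above.
using ClassBits = std::array<unsigned char, 32>;

static void setBit(ClassBits& bits, unsigned char c)
{
    bits[c / 8] |= 1 << (c & 7);           // same byte/bit split the compiler uses for classbits
}

static bool testBit(const ClassBits& bits, unsigned char c)
{
    return (bits[c / 8] & (1 << (c & 7))) != 0;
}

// Building the equivalent of [\d] by hand: one bit per digit. The real code instead
// ORs in the precomputed 32-byte digit table, and for \D ORs in its complement while
// remembering that the class as a whole should be treated as negated.
static ClassBits digitsOnly()
{
    ClassBits bits {};                     // value-initialized: all bits clear
    for (unsigned char c = '0'; c <= '9'; ++c)
        setBit(bits, c);
    return bits;                           // testBit(bits, '7') is true, testBit(bits, 'x') is false
}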
* / <nl> - <nl> - if ( options & IgnoreCaseOption ) { <nl> - int occ , ocd ; <nl> - int cc = c ; <nl> - int origd = d ; <nl> - while ( getOthercaseRange ( & cc , origd , & occ , & ocd ) ) { <nl> - if ( occ > = c & & ocd < = d ) <nl> - continue ; / * Skip embedded ranges * / <nl> - <nl> - if ( occ < c & & ocd > = c - 1 ) / * Extend the basic range * / <nl> - { / * if there is overlap , * / <nl> - c = occ ; / * noting that if occ < c * / <nl> - continue ; / * we can ' t have ocd > d * / <nl> - } / * because a subrange is * / <nl> - if ( ocd > d & & occ < = d + 1 ) / * always shorter than * / <nl> - { / * the basic range . * / <nl> - d = ocd ; <nl> - continue ; <nl> - } <nl> - <nl> - if ( occ = = ocd ) <nl> - * class_utf8data + + = XCL_SINGLE ; <nl> - else { <nl> - * class_utf8data + + = XCL_RANGE ; <nl> - class_utf8data + = encodeUTF8 ( occ , class_utf8data ) ; <nl> - } <nl> - class_utf8data + = encodeUTF8 ( ocd , class_utf8data ) ; <nl> - } <nl> - } <nl> - <nl> - / * Now record the original range , possibly modified for UCP caseless <nl> - overlapping ranges . * / <nl> - <nl> - * class_utf8data + + = XCL_RANGE ; <nl> - class_utf8data + = encodeUTF8 ( c , class_utf8data ) ; <nl> - class_utf8data + = encodeUTF8 ( d , class_utf8data ) ; <nl> - <nl> - / * With UCP support , we are done . Without UCP support , there is no <nl> - caseless matching for UTF - 8 characters > 127 ; we can use the bit map <nl> - for the smaller ones . * / <nl> - <nl> - continue ; / * With next character in the class * / <nl> - } <nl> - <nl> - / * We use the bit map for all cases when not in UTF - 8 mode ; else <nl> - ranges that lie entirely within 0 - 127 when there is UCP support ; else <nl> - for partial ranges without UCP support . * / <nl> - <nl> - for ( ; c < = d ; c + + ) { <nl> - classbits [ c / 8 ] | = ( 1 < < ( c & 7 ) ) ; <nl> - if ( options & IgnoreCaseOption ) { <nl> - int uc = flipCase ( c ) ; <nl> - classbits [ uc / 8 ] | = ( 1 < < ( uc & 7 ) ) ; <nl> - } <nl> - class_charcount + + ; / * in case a one - char range * / <nl> - class_lastchar = c ; <nl> - } <nl> - <nl> - continue ; / * Go get the next char in the class * / <nl> - } <nl> - <nl> - / * Handle a lone single character - we can get here for a normal <nl> - non - escape char , or after \ that introduces a single character or for an <nl> - apparent range that isn ' t . * / <nl> - <nl> - LONE_SINGLE_CHARACTER : <nl> - <nl> - / * Handle a character that cannot go in the bit map * / <nl> - <nl> - if ( ( c > 255 | | ( ( options & IgnoreCaseOption ) & & c > 127 ) ) ) { <nl> - class_utf8 = true ; <nl> - * class_utf8data + + = XCL_SINGLE ; <nl> - class_utf8data + = encodeUTF8 ( c , class_utf8data ) ; <nl> - <nl> - if ( options & IgnoreCaseOption ) { <nl> - int othercase ; <nl> - if ( ( othercase = kjs_pcre_ucp_othercase ( c ) ) > = 0 ) { <nl> - * class_utf8data + + = XCL_SINGLE ; <nl> - class_utf8data + = encodeUTF8 ( othercase , class_utf8data ) ; <nl> - } <nl> - } <nl> - } else { <nl> - / * Handle a single - byte character * / <nl> - classbits [ c / 8 ] | = ( 1 < < ( c & 7 ) ) ; <nl> - if ( options & IgnoreCaseOption ) { <nl> - c = flipCase ( c ) ; <nl> - classbits [ c / 8 ] | = ( 1 < < ( c & 7 ) ) ; <nl> - } <nl> - class_charcount + + ; <nl> - class_lastchar = c ; <nl> - } <nl> - } <nl> - <nl> - / * If class_charcount is 1 , we saw precisely one character whose value is <nl> - less than 256 . In non - UTF - 8 mode we can always optimize . 
In UTF - 8 mode , we <nl> - can optimize the negative case only if there were no characters > = 128 <nl> - because OP_NOT and the related opcodes like OP_NOTSTAR operate on <nl> - single - bytes only . This is an historical hangover . Maybe one day we can <nl> - tidy these opcodes to handle multi - byte characters . <nl> - <nl> - The optimization throws away the bit map . We turn the item into a <nl> - 1 - character OP_CHAR [ NC ] if it ' s positive , or OP_NOT if it ' s negative . Note <nl> - that OP_NOT does not support multibyte characters . In the positive case , it <nl> - can cause firstbyte to be set . Otherwise , there can be no first char if <nl> - this item is first , whatever repeat count may follow . In the case of <nl> - reqbyte , save the previous value for reinstating . * / <nl> - <nl> - if ( class_charcount = = 1 & & ( ! class_utf8 & & ( ! negate_class | | class_lastchar < 128 ) ) ) { <nl> - zeroreqbyte = reqbyte ; <nl> - <nl> - / * The OP_NOT opcode works on one - byte characters only . * / <nl> - <nl> - if ( negate_class ) { <nl> - if ( firstbyte = = REQ_UNSET ) <nl> - firstbyte = REQ_NONE ; <nl> - zerofirstbyte = firstbyte ; <nl> - * code + + = OP_NOT ; <nl> - * code + + = class_lastchar ; <nl> - break ; <nl> - } <nl> - <nl> - / * For a single , positive character , get the value into c , and <nl> - then we can handle this with the normal one - character code . * / <nl> - <nl> - c = class_lastchar ; <nl> - goto NORMAL_CHAR ; <nl> - } / * End of 1 - char optimization * / <nl> - <nl> - / * The general case - not the one - char optimization . If this is the first <nl> - thing in the branch , there can be no first char setting , whatever the <nl> - repeat count . Any reqbyte setting must remain unchanged after any kind of <nl> - repeat . * / <nl> - <nl> - if ( firstbyte = = REQ_UNSET ) firstbyte = REQ_NONE ; <nl> - zerofirstbyte = firstbyte ; <nl> - zeroreqbyte = reqbyte ; <nl> - <nl> - / * If there are characters with values > 255 , we have to compile an <nl> - extended class , with its own opcode . If there are no characters < 256 , <nl> - we can omit the bitmap . * / <nl> - <nl> - if ( class_utf8 & & ! should_flip_negation ) { <nl> - * class_utf8data + + = XCL_END ; / * Marks the end of extra data * / <nl> - * code + + = OP_XCLASS ; <nl> - code + = LINK_SIZE ; <nl> - * code = negate_class ? XCL_NOT : 0 ; <nl> - <nl> - / * If the map is required , install it , and move on to the end of <nl> - the extra data * / <nl> - <nl> - if ( class_charcount > 0 ) { <nl> - * code + + | = XCL_MAP ; <nl> - memcpy ( code , classbits , 32 ) ; <nl> - code = class_utf8data ; <nl> - } <nl> - <nl> - / * If the map is not required , slide down the extra data . * / <nl> - <nl> - else { <nl> - int len = class_utf8data - ( code + 33 ) ; <nl> - memmove ( code + 1 , code + 33 , len ) ; <nl> - code + = len + 1 ; <nl> - } <nl> - <nl> - / * Now fill in the complete length of the item * / <nl> - <nl> - putLinkValue ( previous + 1 , code - previous ) ; <nl> - break ; / * End of class handling * / <nl> - } <nl> - <nl> - / * If there are no characters > 255 , negate the 32 - byte map if necessary , <nl> - and copy it into the code vector . If this is the first thing in the branch , <nl> - there can be no first char setting , whatever the repeat count . Any reqbyte <nl> - setting must remain unchanged after any kind of repeat . * / <nl> - <nl> - * code + + = ( negate_class = = should_flip_negation ) ? 
OP_CLASS : OP_NCLASS ; <nl> - if ( negate_class ) <nl> - for ( c = 0 ; c < 32 ; c + + ) <nl> - code [ c ] = ~ classbits [ c ] ; <nl> - else <nl> - memcpy ( code , classbits , 32 ) ; <nl> - code + = 32 ; <nl> - break ; <nl> - } <nl> - <nl> - / * Various kinds of repeat ; ' { ' is not necessarily a quantifier , but this <nl> - has been tested above . * / <nl> - <nl> - case ' { ' : <nl> - if ( ! is_quantifier ) <nl> - goto NORMAL_CHAR ; <nl> - ptr = readRepeatCounts ( ptr + 1 , & repeat_min , & repeat_max , errorcodeptr ) ; <nl> - if ( * errorcodeptr ) <nl> - goto FAILED ; <nl> - goto REPEAT ; <nl> - <nl> - case ' * ' : <nl> - repeat_min = 0 ; <nl> - repeat_max = - 1 ; <nl> - goto REPEAT ; <nl> - <nl> - case ' + ' : <nl> - repeat_min = 1 ; <nl> - repeat_max = - 1 ; <nl> - goto REPEAT ; <nl> - <nl> - case ' ? ' : <nl> - repeat_min = 0 ; <nl> - repeat_max = 1 ; <nl> - <nl> - REPEAT : <nl> - if ( ! previous ) { <nl> - * errorcodeptr = ERR9 ; <nl> - goto FAILED ; <nl> - } <nl> - <nl> - if ( repeat_min = = 0 ) { <nl> - firstbyte = zerofirstbyte ; / * Adjust for zero repeat * / <nl> - reqbyte = zeroreqbyte ; / * Ditto * / <nl> - } <nl> - <nl> - / * Remember whether this is a variable length repeat * / <nl> - <nl> - reqvary = ( repeat_min = = repeat_max ) ? 0 : REQ_VARY ; <nl> - <nl> - op_type = 0 ; / * Default single - char op codes * / <nl> - <nl> - / * Save start of previous item , in case we have to move it up to make space <nl> - for an inserted OP_ONCE for the additional ' + ' extension . * / <nl> - / * FIXME : Probably don ' t need this because we don ' t use OP_ONCE . * / <nl> - <nl> - tempcode = previous ; <nl> - <nl> - / * If the next character is ' + ' , we have a possessive quantifier . This <nl> - implies greediness , whatever the setting of the PCRE_UNGREEDY option . <nl> - If the next character is ' ? ' this is a minimizing repeat , by default , <nl> - but if PCRE_UNGREEDY is set , it works the other way round . We change the <nl> - repeat type to the non - default . * / <nl> - <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' ? ' ) ) { <nl> - repeat_type = 1 ; <nl> - ptr + + ; <nl> - } else <nl> - repeat_type = 0 ; <nl> - <nl> - / * If previous was a character match , abolish the item and generate a <nl> - repeat item instead . If a char item has a minumum of more than one , ensure <nl> - that it is set in reqbyte - it might not be if a sequence such as x { 3 } is <nl> - the first thing in a branch because the x will have gone into firstbyte <nl> - instead . * / <nl> - <nl> - if ( * previous = = OP_CHAR | | * previous = = OP_CHAR_IGNORING_CASE ) { <nl> - / * Deal with UTF - 8 characters that take up more than one byte . It ' s <nl> - easier to write this out separately than try to macrify it . Use c to <nl> - hold the length of the character in bytes , plus 0x80 to flag that it ' s a <nl> - length rather than a small character . * / <nl> - <nl> - if ( code [ - 1 ] & 0x80 ) { <nl> - unsigned char * lastchar = code - 1 ; <nl> - while ( ( * lastchar & 0xc0 ) = = 0x80 ) <nl> - lastchar - - ; <nl> - c = code - lastchar ; / * Length of UTF - 8 character * / <nl> - memcpy ( utf8_char , lastchar , c ) ; / * Save the char * / <nl> - c | = 0x80 ; / * Flag c as a length * / <nl> - } <nl> - else { <nl> - c = code [ - 1 ] ; <nl> - if ( repeat_min > 1 ) <nl> - reqbyte = c | req_caseopt | cd . 
req_varyopt ; <nl> - } <nl> - <nl> - goto OUTPUT_SINGLE_REPEAT ; / * Code shared with single character types * / <nl> - } <nl> - <nl> - else if ( * previous = = OP_ASCII_CHAR | | * previous = = OP_ASCII_LETTER_IGNORING_CASE ) { <nl> - c = previous [ 1 ] ; <nl> - if ( repeat_min > 1 ) <nl> - reqbyte = c | req_caseopt | cd . req_varyopt ; <nl> - goto OUTPUT_SINGLE_REPEAT ; <nl> - } <nl> - <nl> - / * If previous was a single negated character ( [ ^ a ] or similar ) , we use <nl> - one of the special opcodes , replacing it . The code is shared with single - <nl> - character repeats by setting opt_type to add a suitable offset into <nl> - repeat_type . OP_NOT is currently used only for single - byte chars . * / <nl> - <nl> - else if ( * previous = = OP_NOT ) { <nl> - op_type = OP_NOTSTAR - OP_STAR ; / * Use " not " opcodes * / <nl> - c = previous [ 1 ] ; <nl> - goto OUTPUT_SINGLE_REPEAT ; <nl> - } <nl> - <nl> - / * If previous was a character type match ( \ d or similar ) , abolish it and <nl> - create a suitable repeat item . The code is shared with single - character <nl> - repeats by setting op_type to add a suitable offset into repeat_type . * / <nl> - <nl> - else if ( * previous < = OP_NOT_NEWLINE ) { <nl> - op_type = OP_TYPESTAR - OP_STAR ; / * Use type opcodes * / <nl> - c = * previous ; <nl> - <nl> - OUTPUT_SINGLE_REPEAT : <nl> - int prop_type = - 1 ; <nl> - int prop_value = - 1 ; <nl> - <nl> - unsigned char * oldcode = code ; <nl> - code = previous ; / * Usually overwrite previous item * / <nl> - <nl> - / * If the maximum is zero then the minimum must also be zero ; Perl allows <nl> - this case , so we do too - by simply omitting the item altogether . * / <nl> - <nl> - if ( repeat_max = = 0 ) <nl> - goto END_REPEAT ; <nl> - <nl> - / * Combine the op_type with the repeat_type * / <nl> - <nl> - repeat_type + = op_type ; <nl> - <nl> - / * A minimum of zero is handled either as the special case * or ? , or as <nl> - an UPTO , with the maximum given . * / <nl> - <nl> - if ( repeat_min = = 0 ) { <nl> - if ( repeat_max = = - 1 ) <nl> - * code + + = OP_STAR + repeat_type ; <nl> - else if ( repeat_max = = 1 ) <nl> - * code + + = OP_QUERY + repeat_type ; <nl> - else { <nl> - * code + + = OP_UPTO + repeat_type ; <nl> - put2ByteValueAndAdvance ( code , repeat_max ) ; <nl> - } <nl> - } <nl> - <nl> - / * A repeat minimum of 1 is optimized into some special cases . If the <nl> - maximum is unlimited , we use OP_PLUS . Otherwise , the original item it <nl> - left in place and , if the maximum is greater than 1 , we use OP_UPTO with <nl> - one less than the maximum . * / <nl> - <nl> - else if ( repeat_min = = 1 ) { <nl> - if ( repeat_max = = - 1 ) <nl> - * code + + = OP_PLUS + repeat_type ; <nl> - else { <nl> - code = oldcode ; / * leave previous item in place * / <nl> - if ( repeat_max = = 1 ) <nl> - goto END_REPEAT ; <nl> - * code + + = OP_UPTO + repeat_type ; <nl> - put2ByteValueAndAdvance ( code , repeat_max - 1 ) ; <nl> - } <nl> - } <nl> - <nl> - / * The case { n , n } is just an EXACT , while the general case { n , m } is <nl> - handled as an EXACT followed by an UPTO . * / <nl> - <nl> - else { <nl> - * code + + = OP_EXACT + op_type ; / * NB EXACT doesn ' t have repeat_type * / <nl> - put2ByteValueAndAdvance ( code , repeat_min ) ; <nl> - <nl> - / * If the maximum is unlimited , insert an OP_STAR . Before doing so , <nl> - we have to insert the character for the previous code . For a repeated <nl> - Unicode property match , there are two extra bytes that define the <nl> - required property . 
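Putting the single-character repeat cases above together, and ignoring the UTF-8 and ignore-case variants, the rewriting emits compact opcode runs along the following lines. Counts are stored as two-byte values (put2ByteValueAndAdvance); the opcode names are the OP_* constants used in this file, and this is an approximate layout for orientation, not a byte-exact dump:

//   /a{0,3}/  ->  UPTO 3 'a'
//   /a{2}/    ->  EXACT 2 'a'
//   /a{2,}/   ->  EXACT 2 'a'   STAR 'a'
//   /a{2,4}/  ->  EXACT 2 'a'   UPTO 2 'a'    (the UPTO count is max minus min)
//   /a+?/     ->  (PLUS + 1) 'a'              (the trailing '?' sets repeat_type to 1,
//                                              selecting the minimizing form of the opcode)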
In UTF - 8 mode , long characters have their length in <nl> - c , with the 0x80 bit as a flag . * / <nl> - <nl> - if ( repeat_max < 0 ) { <nl> - if ( c > = 128 ) { <nl> - memcpy ( code , utf8_char , c & 7 ) ; <nl> - code + = c & 7 ; <nl> - } else { <nl> - * code + + = c ; <nl> - if ( prop_type > = 0 ) { <nl> - * code + + = prop_type ; <nl> - * code + + = prop_value ; <nl> - } <nl> - } <nl> - * code + + = OP_STAR + repeat_type ; <nl> - } <nl> - <nl> - / * Else insert an UPTO if the max is greater than the min , again <nl> - preceded by the character , for the previously inserted code . * / <nl> - <nl> - else if ( repeat_max ! = repeat_min ) { <nl> - if ( c > = 128 ) { <nl> - memcpy ( code , utf8_char , c & 7 ) ; <nl> - code + = c & 7 ; <nl> - } else <nl> - * code + + = c ; <nl> - if ( prop_type > = 0 ) { <nl> - * code + + = prop_type ; <nl> - * code + + = prop_value ; <nl> - } <nl> - repeat_max - = repeat_min ; <nl> - * code + + = OP_UPTO + repeat_type ; <nl> - put2ByteValueAndAdvance ( code , repeat_max ) ; <nl> - } <nl> - } <nl> - <nl> - / * The character or character type itself comes last in all cases . * / <nl> - <nl> - if ( c > = 128 ) { <nl> - memcpy ( code , utf8_char , c & 7 ) ; <nl> - code + = c & 7 ; <nl> - } else <nl> - * code + + = c ; <nl> - <nl> - / * For a repeated Unicode property match , there are two extra bytes that <nl> - define the required property . * / <nl> - <nl> - if ( prop_type > = 0 ) { <nl> - * code + + = prop_type ; <nl> - * code + + = prop_value ; <nl> - } <nl> - } <nl> - <nl> - / * If previous was a character class or a back reference , we put the repeat <nl> - stuff after it , but just skip the item if the repeat was { 0 , 0 } . * / <nl> - <nl> - else if ( * previous = = OP_CLASS | | <nl> - * previous = = OP_NCLASS | | <nl> - * previous = = OP_XCLASS | | <nl> - * previous = = OP_REF ) <nl> - { <nl> - if ( repeat_max = = 0 ) { <nl> - code = previous ; <nl> - goto END_REPEAT ; <nl> - } <nl> - <nl> - if ( repeat_min = = 0 & & repeat_max = = - 1 ) <nl> - * code + + = OP_CRSTAR + repeat_type ; <nl> - else if ( repeat_min = = 1 & & repeat_max = = - 1 ) <nl> - * code + + = OP_CRPLUS + repeat_type ; <nl> - else if ( repeat_min = = 0 & & repeat_max = = 1 ) <nl> - * code + + = OP_CRQUERY + repeat_type ; <nl> - else { <nl> - * code + + = OP_CRRANGE + repeat_type ; <nl> - put2ByteValueAndAdvance ( code , repeat_min ) ; <nl> - if ( repeat_max = = - 1 ) <nl> - repeat_max = 0 ; / * 2 - byte encoding for max * / <nl> - put2ByteValueAndAdvance ( code , repeat_max ) ; <nl> - } <nl> - } <nl> - <nl> - / * If previous was a bracket group , we may have to replicate it in certain <nl> - cases . * / <nl> - <nl> - else if ( * previous > = OP_BRA ) { <nl> - int ketoffset = 0 ; <nl> - int len = code - previous ; <nl> - unsigned char * bralink = NULL ; <nl> - <nl> - / * If the maximum repeat count is unlimited , find the end of the bracket <nl> - by scanning through from the start , and compute the offset back to it <nl> - from the current code pointer . There may be an OP_OPT setting following <nl> - the final KET , so we can ' t find the end just by going back from the code <nl> - pointer . 
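The class and back-reference branch above takes a different route: it leaves the already-emitted item in place and appends one of the OP_CR* opcodes after it, with an unlimited maximum stored as 0 in the two-byte max field. Approximately, with the 32-byte bitmap and link details omitted:

//   /[abc]+/        ->  CLASS <bitmap>  CRPLUS
//   /[abc]{2,5}/    ->  CLASS <bitmap>  CRRANGE 2 5
//   /[abc]{2,}/     ->  CLASS <bitmap>  CRRANGE 2 0     (0 in the max field means "no limit")
//   /(x)\1{0,3}/    ->  ... REF 1  CRRANGE 0 3          (the same scheme after a back reference)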
* / <nl> - <nl> - if ( repeat_max = = - 1 ) { <nl> - const unsigned char * ket = previous ; <nl> - advanceToEndOfBracket ( ket ) ; <nl> - ketoffset = code - ket ; <nl> - } <nl> - <nl> - / * The case of a zero minimum is special because of the need to stick <nl> - OP_BRAZERO in front of it , and because the group appears once in the <nl> - data , whereas in other cases it appears the minimum number of times . For <nl> - this reason , it is simplest to treat this case separately , as otherwise <nl> - the code gets far too messy . There are several special subcases when the <nl> - minimum is zero . * / <nl> - <nl> - if ( repeat_min = = 0 ) { <nl> - / * If the maximum is also zero , we just omit the group from the output <nl> - altogether . * / <nl> - <nl> - if ( repeat_max = = 0 ) { <nl> - code = previous ; <nl> - goto END_REPEAT ; <nl> - } <nl> - <nl> - / * If the maximum is 1 or unlimited , we just have to stick in the <nl> - BRAZERO and do no more at this point . However , we do need to adjust <nl> - any OP_RECURSE calls inside the group that refer to the group itself or <nl> - any internal group , because the offset is from the start of the whole <nl> - regex . Temporarily terminate the pattern while doing this . * / <nl> - <nl> - if ( repeat_max < = 1 ) { <nl> - * code = OP_END ; <nl> - memmove ( previous + 1 , previous , len ) ; <nl> - code + + ; <nl> - * previous + + = OP_BRAZERO + repeat_type ; <nl> - } <nl> - <nl> - / * If the maximum is greater than 1 and limited , we have to replicate <nl> - in a nested fashion , sticking OP_BRAZERO before each set of brackets . <nl> - The first one has to be handled carefully because it ' s the original <nl> - copy , which has to be moved up . The remainder can be handled by code <nl> - that is common with the non - zero minimum case below . We have to <nl> - adjust the value of repeat_max , since one less copy is required . * / <nl> - <nl> - else { <nl> - * code = OP_END ; <nl> - memmove ( previous + 2 + LINK_SIZE , previous , len ) ; <nl> - code + = 2 + LINK_SIZE ; <nl> - * previous + + = OP_BRAZERO + repeat_type ; <nl> - * previous + + = OP_BRA ; <nl> - <nl> - / * We chain together the bracket offset fields that have to be <nl> - filled in later when the ends of the brackets are reached . * / <nl> - <nl> - int offset = ( ! bralink ) ? 0 : previous - bralink ; <nl> - bralink = previous ; <nl> - putLinkValueAllowZeroAndAdvance ( previous , offset ) ; <nl> - } <nl> - <nl> - repeat_max - - ; <nl> - } <nl> - <nl> - / * If the minimum is greater than zero , replicate the group as many <nl> - times as necessary , and adjust the maximum to the number of subsequent <nl> - copies that we need . If we set a first char from the group , and didn ' t <nl> - set a required char , copy the latter from the former . * / <nl> - <nl> - else { <nl> - if ( repeat_min > 1 ) { <nl> - if ( groupsetfirstbyte & & reqbyte < 0 ) <nl> - reqbyte = firstbyte ; <nl> - for ( int i = 1 ; i < repeat_min ; i + + ) { <nl> - memcpy ( code , previous , len ) ; <nl> - code + = len ; <nl> - } <nl> - } <nl> - if ( repeat_max > 0 ) <nl> - repeat_max - = repeat_min ; <nl> - } <nl> - <nl> - / * This code is common to both the zero and non - zero minimum cases . If <nl> - the maximum is limited , it replicates the group in a nested fashion , <nl> - remembering the bracket starts on a stack . In the case of a zero minimum , <nl> - the first one was set up above . In all cases the repeat_max now specifies <nl> - the number of additional copies needed . 
* / <nl> - <nl> - if ( repeat_max > = 0 ) { <nl> - for ( int i = repeat_max - 1 ; i > = 0 ; i - - ) { <nl> - * code + + = OP_BRAZERO + repeat_type ; <nl> - <nl> - / * All but the final copy start a new nesting , maintaining the <nl> - chain of brackets outstanding . * / <nl> - <nl> - if ( i ! = 0 ) { <nl> - * code + + = OP_BRA ; <nl> - int offset = ( ! bralink ) ? 0 : code - bralink ; <nl> - bralink = code ; <nl> - putLinkValueAllowZeroAndAdvance ( code , offset ) ; <nl> - } <nl> - <nl> - memcpy ( code , previous , len ) ; <nl> - code + = len ; <nl> - } <nl> - <nl> - / * Now chain through the pending brackets , and fill in their length <nl> - fields ( which are holding the chain links pro tem ) . * / <nl> - <nl> - while ( bralink ) { <nl> - int offset = code - bralink + 1 ; <nl> - unsigned char * bra = code - offset ; <nl> - int oldlinkoffset = getLinkValueAllowZero ( bra + 1 ) ; <nl> - bralink = ( ! oldlinkoffset ) ? 0 : bralink - oldlinkoffset ; <nl> - * code + + = OP_KET ; <nl> - putLinkValueAndAdvance ( code , offset ) ; <nl> - putLinkValue ( bra + 1 , offset ) ; <nl> - } <nl> - } <nl> - <nl> - / * If the maximum is unlimited , set a repeater in the final copy . We <nl> - can ' t just offset backwards from the current code point , because we <nl> - don ' t know if there ' s been an options resetting after the ket . The <nl> - correct offset was computed above . * / <nl> - <nl> - else <nl> - code [ - ketoffset ] = OP_KETRMAX + repeat_type ; <nl> - } <nl> - <nl> - / * Else there ' s some kind of shambles * / <nl> - <nl> - else { <nl> - * errorcodeptr = ERR11 ; <nl> - goto FAILED ; <nl> - } <nl> - <nl> - / * In all case we no longer have a previous item . We also set the <nl> - " follows varying string " flag for subsequently encountered reqbytes if <nl> - it isn ' t already set and we have just passed a varying length item . * / <nl> - <nl> - END_REPEAT : <nl> - previous = NULL ; <nl> - cd . req_varyopt | = reqvary ; <nl> - break ; <nl> - <nl> - / * Start of nested bracket sub - expression , or comment or lookahead or <nl> - lookbehind or option setting or condition . First deal with special things <nl> - that can come after a bracket ; all are introduced by ? , and the appearance <nl> - of any of them means that this is not a referencing group . They were <nl> - checked for validity in the first pass over the string , so we don ' t have to <nl> - check for syntax errors here . * / <nl> - <nl> - case ' ( ' : <nl> - skipbytes = 0 ; <nl> - <nl> - if ( * ( + + ptr ) = = ' ? ' ) { <nl> - switch ( * ( + + ptr ) ) { <nl> - case ' : ' : / * Non - extracting bracket * / <nl> - bravalue = OP_BRA ; <nl> - ptr + + ; <nl> - break ; <nl> - <nl> - case ' = ' : / * Positive lookahead * / <nl> - bravalue = OP_ASSERT ; <nl> - ptr + + ; <nl> - break ; <nl> - <nl> - case ' ! ' : / * Negative lookahead * / <nl> - bravalue = OP_ASSERT_NOT ; <nl> - ptr + + ; <nl> - break ; <nl> - <nl> - / * Character after ( ? not specially recognized * / <nl> - <nl> - default : <nl> - * errorcodeptr = ERR12 ; <nl> - goto FAILED ; <nl> - } <nl> - } <nl> - <nl> - / * Else we have a referencing group ; adjust the opcode . If the bracket <nl> - number is greater than EXTRACT_BASIC_MAX , we set the opcode one higher , and <nl> - arrange for the true number to follow later , in an OP_BRANUMBER item . 
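The bracket-repetition logic above either replicates the compiled group or wraps it, depending on the minimum and maximum. Leaving out capture numbering and the link offsets (so "BRA ab KET" below simply stands for the group as already compiled), the emitted shapes are roughly:

//   /(ab){2}/    ->  BRA ab KET   BRA ab KET
//   /(ab)?/      ->  BRAZERO   BRA ab KET
//   /(ab)*/      ->  BRAZERO   BRA ab KETRMAX
//   /(ab){1,3}/  ->  BRA ab KET   BRAZERO   BRA [ BRA ab KET   BRAZERO   BRA ab KET ] KET

The nesting in the bounded case exists so that each BRAZERO can skip its entire remaining tail in one step, and in the unbounded case the group's final KET is rewritten as KETRMAX through the ketoffset computed above.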
* / <nl> - <nl> - else { <nl> - if ( + + ( * brackets ) > EXTRACT_BASIC_MAX ) { <nl> - bravalue = OP_BRA + EXTRACT_BASIC_MAX + 1 ; <nl> - code [ 1 + LINK_SIZE ] = OP_BRANUMBER ; <nl> - put2ByteValue ( code + 2 + LINK_SIZE , * brackets ) ; <nl> - skipbytes = 3 ; <nl> - } <nl> - else <nl> - bravalue = OP_BRA + * brackets ; <nl> - } <nl> - <nl> - / * Process nested bracketed re . Assertions may not be repeated , but other <nl> - kinds can be . We copy code into a non - variable in order to be able <nl> - to pass its address because some compilers complain otherwise . Pass in a <nl> - new setting for the ims options if they have changed . * / <nl> - <nl> - previous = ( bravalue > = OP_BRAZERO ) ? code : 0 ; <nl> - * code = bravalue ; <nl> - tempcode = code ; <nl> - tempreqvary = cd . req_varyopt ; / * Save value before bracket * / <nl> - <nl> - if ( ! compileBracket ( <nl> - options , <nl> - brackets , / * Extracting bracket count * / <nl> - & tempcode , / * Where to put code ( updated ) * / <nl> - & ptr , / * Input pointer ( updated ) * / <nl> - patternEnd , <nl> - errorcodeptr , / * Where to put an error message * / <nl> - skipbytes , / * Skip over OP_BRANUMBER * / <nl> - & subfirstbyte , / * For possible first char * / <nl> - & subreqbyte , / * For possible last char * / <nl> - cd ) ) / * Tables block * / <nl> - goto FAILED ; <nl> - <nl> - / * At the end of compiling , code is still pointing to the start of the <nl> - group , while tempcode has been updated to point past the end of the group <nl> - and any option resetting that may follow it . The pattern pointer ( ptr ) <nl> - is on the bracket . * / <nl> - <nl> - / * Handle updating of the required and first characters . Update for normal <nl> - brackets of all kinds , and conditions with two branches ( see code above ) . <nl> - If the bracket is followed by a quantifier with zero repeat , we have to <nl> - back off . Hence the definition of zeroreqbyte and zerofirstbyte outside the <nl> - main loop so that they can be accessed for the back off . * / <nl> - <nl> - zeroreqbyte = reqbyte ; <nl> - zerofirstbyte = firstbyte ; <nl> - groupsetfirstbyte = false ; <nl> - <nl> - if ( bravalue > = OP_BRA ) { <nl> - / * If we have not yet set a firstbyte in this branch , take it from the <nl> - subpattern , remembering that it was set here so that a repeat of more <nl> - than one can replicate it as reqbyte if necessary . If the subpattern has <nl> - no firstbyte , set " none " for the whole branch . In both cases , a zero <nl> - repeat forces firstbyte to " none " . * / <nl> - <nl> - if ( firstbyte = = REQ_UNSET ) { <nl> - if ( subfirstbyte > = 0 ) { <nl> - firstbyte = subfirstbyte ; <nl> - groupsetfirstbyte = true ; <nl> - } <nl> - else <nl> - firstbyte = REQ_NONE ; <nl> - zerofirstbyte = REQ_NONE ; <nl> - } <nl> - <nl> - / * If firstbyte was previously set , convert the subpattern ' s firstbyte <nl> - into reqbyte if there wasn ' t one , using the vary flag that was in <nl> - existence beforehand . * / <nl> - <nl> - else if ( subfirstbyte > = 0 & & subreqbyte < 0 ) <nl> - subreqbyte = subfirstbyte | tempreqvary ; <nl> - <nl> - / * If the subpattern set a required byte ( or set a first byte that isn ' t <nl> - really the first byte - see above ) , set it . * / <nl> - <nl> - if ( subreqbyte > = 0 ) <nl> - reqbyte = subreqbyte ; <nl> - } <nl> - <nl> - / * For a forward assertion , we take the reqbyte , if set . This can be <nl> - helpful if the pattern that follows the assertion doesn ' t set a different <nl> - char . 
For example , it ' s useful for / ( ? = abcde ) . + / . We can ' t set firstbyte <nl> - for an assertion , however because it leads to incorrect effect for patterns <nl> - such as / ( ? = a ) a . + / when the " real " " a " would then become a reqbyte instead <nl> - of a firstbyte . This is overcome by a scan at the end if there ' s no <nl> - firstbyte , looking for an asserted first char . * / <nl> - <nl> - else if ( bravalue = = OP_ASSERT & & subreqbyte > = 0 ) <nl> - reqbyte = subreqbyte ; <nl> - <nl> - / * Now update the main code pointer to the end of the group . * / <nl> - <nl> - code = tempcode ; <nl> - <nl> - / * Error if hit end of pattern * / <nl> - <nl> - if ( ptr > = patternEnd | | * ptr ! = ' ) ' ) { <nl> - * errorcodeptr = ERR14 ; <nl> - goto FAILED ; <nl> - } <nl> - break ; <nl> - <nl> - / * Check \ for being a real metacharacter ; if not , fall through and handle <nl> - it as a data character at the start of a string . Escape items are checked <nl> - for validity in the pre - compiling pass . * / <nl> - <nl> - case ' \ \ ' : <nl> - tempptr = ptr ; <nl> - c = checkEscape ( & ptr , patternEnd , errorcodeptr , cd . numCapturingBrackets , false ) ; <nl> - <nl> - / * Handle metacharacters introduced by \ . For ones like \ d , the ESC_ values <nl> - are arranged to be the negation of the corresponding OP_values . For the <nl> - back references , the values are ESC_REF plus the reference number . Only <nl> - back references and those types that consume a character may be repeated . <nl> - We can test for values between ESC_b and ESC_w for the latter ; this may <nl> - have to change if any new ones are ever created . * / <nl> - <nl> - if ( c < 0 ) { <nl> - / * For metasequences that actually match a character , we disable the <nl> - setting of a first character if it hasn ' t already been set . * / <nl> - <nl> - if ( firstbyte = = REQ_UNSET & & - c > ESC_b & & - c < = ESC_w ) <nl> - firstbyte = REQ_NONE ; <nl> - <nl> - / * Set values to reset to if this is followed by a zero repeat . * / <nl> - <nl> - zerofirstbyte = firstbyte ; <nl> - zeroreqbyte = reqbyte ; <nl> - <nl> - / * Back references are handled specially * / <nl> - <nl> - if ( - c > = ESC_REF ) { <nl> - int number = - c - ESC_REF ; <nl> - previous = code ; <nl> - * code + + = OP_REF ; <nl> - put2ByteValueAndAdvance ( code , number ) ; <nl> - } <nl> - <nl> - / * For the rest , we can obtain the OP value by negating the escape <nl> - value * / <nl> - <nl> - else { <nl> - previous = ( - c > ESC_b & & - c < = ESC_w ) ? code : NULL ; <nl> - * code + + = - c ; <nl> - } <nl> - continue ; <nl> - } <nl> - <nl> - / * Fall through . * / <nl> - <nl> - / * Handle a literal character . It is guaranteed not to be whitespace or # <nl> - when the extended flag is set . If we are in UTF - 8 mode , it may be a <nl> - multi - byte literal character . * / <nl> - <nl> - default : <nl> - NORMAL_CHAR : <nl> - <nl> - previous = code ; <nl> - <nl> - if ( c < 128 ) { <nl> - mclength = 1 ; <nl> - mcbuffer [ 0 ] = c ; <nl> - <nl> - if ( ( options & IgnoreCaseOption ) & & ( c | 0x20 ) > = ' a ' & & ( c | 0x20 ) < = ' z ' ) { <nl> - * code + + = OP_ASCII_LETTER_IGNORING_CASE ; <nl> - * code + + = c | 0x20 ; <nl> - } else { <nl> - * code + + = OP_ASCII_CHAR ; <nl> - * code + + = c ; <nl> - } <nl> - } else { <nl> - mclength = encodeUTF8 ( c , mcbuffer ) ; <nl> - <nl> - * code + + = ( options & IgnoreCaseOption ) ? 
OP_CHAR_IGNORING_CASE : OP_CHAR ; <nl> - for ( c = 0 ; c < mclength ; c + + ) <nl> - * code + + = mcbuffer [ c ] ; <nl> - } <nl> - <nl> - / * Set the first and required bytes appropriately . If no previous first <nl> - byte , set it from this character , but revert to none on a zero repeat . <nl> - Otherwise , leave the firstbyte value alone , and don ' t change it on a zero <nl> - repeat . * / <nl> - <nl> - if ( firstbyte = = REQ_UNSET ) { <nl> - zerofirstbyte = REQ_NONE ; <nl> - zeroreqbyte = reqbyte ; <nl> - <nl> - / * If the character is more than one byte long , we can set firstbyte <nl> - only if it is not to be matched caselessly . * / <nl> - <nl> - if ( mclength = = 1 | | req_caseopt = = 0 ) { <nl> - firstbyte = mcbuffer [ 0 ] | req_caseopt ; <nl> - if ( mclength ! = 1 ) <nl> - reqbyte = code [ - 1 ] | cd . req_varyopt ; <nl> - } <nl> - else <nl> - firstbyte = reqbyte = REQ_NONE ; <nl> - } <nl> - <nl> - / * firstbyte was previously set ; we can set reqbyte only the length is <nl> - 1 or the matching is caseful . * / <nl> - <nl> - else { <nl> - zerofirstbyte = firstbyte ; <nl> - zeroreqbyte = reqbyte ; <nl> - if ( mclength = = 1 | | req_caseopt = = 0 ) <nl> - reqbyte = code [ - 1 ] | req_caseopt | cd . req_varyopt ; <nl> - } <nl> - <nl> - break ; / * End of literal character handling * / <nl> - } <nl> - } / * end of big loop * / <nl> - <nl> - / * Control never reaches here by falling through , only by a goto for all the <nl> - error states . Pass back the position in the pattern so that it can be displayed <nl> - to the user for diagnosing the error . * / <nl> - <nl> - FAILED : <nl> - * ptrptr = ptr ; <nl> - return false ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Compile sequence of alternatives * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * On entry , ptr is pointing past the bracket character , but on return <nl> - it points to the closing bracket , or vertical bar , or end of string . <nl> - The code variable is pointing at the byte into which the BRA operator has been <nl> - stored . If the ims options are changed at the start ( for a ( ? ims : group ) or <nl> - during any branch , we need to insert an OP_OPT item at the start of every <nl> - following branch to ensure they get set correctly at run time , and also pass <nl> - the new options into every subsequent branch compile . <nl> - <nl> - Argument : <nl> - options option bits , including any changes for this subpattern <nl> - brackets - > int containing the number of extracting brackets used <nl> - codeptr - > the address of the current code pointer <nl> - ptrptr - > the address of the current pattern pointer <nl> - errorcodeptr - > pointer to error code variable <nl> - skipbytes skip this many bytes at start ( for OP_BRANUMBER ) <nl> - firstbyteptr place to put the first required character , or a negative number <nl> - reqbyteptr place to put the last required character , or a negative number <nl> - cd points to the data block with tables pointers etc . 
<nl> - <nl> - Returns : true on success <nl> - * / <nl> - <nl> - static bool <nl> - compileBracket ( int options , int * brackets , unsigned char * * codeptr , <nl> - const UChar * * ptrptr , const UChar * patternEnd , ErrorCode * errorcodeptr , int skipbytes , <nl> - int * firstbyteptr , int * reqbyteptr , CompileData & cd ) <nl> - { <nl> - const UChar * ptr = * ptrptr ; <nl> - unsigned char * code = * codeptr ; <nl> - unsigned char * last_branch = code ; <nl> - unsigned char * start_bracket = code ; <nl> - int firstbyte = REQ_UNSET ; <nl> - int reqbyte = REQ_UNSET ; <nl> - <nl> - / * Offset is set zero to mark that this bracket is still open * / <nl> - <nl> - putLinkValueAllowZero ( code + 1 , 0 ) ; <nl> - code + = 1 + LINK_SIZE + skipbytes ; <nl> - <nl> - / * Loop for each alternative branch * / <nl> - <nl> - while ( true ) { <nl> - / * Now compile the branch * / <nl> - <nl> - int branchfirstbyte ; <nl> - int branchreqbyte ; <nl> - if ( ! compileBranch ( options , brackets , & code , & ptr , patternEnd , errorcodeptr , <nl> - & branchfirstbyte , & branchreqbyte , cd ) ) { <nl> - * ptrptr = ptr ; <nl> - return false ; <nl> - } <nl> - <nl> - / * If this is the first branch , the firstbyte and reqbyte values for the <nl> - branch become the values for the regex . * / <nl> - <nl> - if ( * last_branch ! = OP_ALT ) { <nl> - firstbyte = branchfirstbyte ; <nl> - reqbyte = branchreqbyte ; <nl> - } <nl> - <nl> - / * If this is not the first branch , the first char and reqbyte have to <nl> - match the values from all the previous branches , except that if the previous <nl> - value for reqbyte didn ' t have REQ_VARY set , it can still match , and we set <nl> - REQ_VARY for the regex . * / <nl> - <nl> - else { <nl> - / * If we previously had a firstbyte , but it doesn ' t match the new branch , <nl> - we have to abandon the firstbyte for the regex , but if there was previously <nl> - no reqbyte , it takes on the value of the old firstbyte . * / <nl> - <nl> - if ( firstbyte > = 0 & & firstbyte ! = branchfirstbyte ) { <nl> - if ( reqbyte < 0 ) <nl> - reqbyte = firstbyte ; <nl> - firstbyte = REQ_NONE ; <nl> - } <nl> - <nl> - / * If we ( now or from before ) have no firstbyte , a firstbyte from the <nl> - branch becomes a reqbyte if there isn ' t a branch reqbyte . * / <nl> - <nl> - if ( firstbyte < 0 & & branchfirstbyte > = 0 & & branchreqbyte < 0 ) <nl> - branchreqbyte = branchfirstbyte ; <nl> - <nl> - / * Now ensure that the reqbytes match * / <nl> - <nl> - if ( ( reqbyte & ~ REQ_VARY ) ! = ( branchreqbyte & ~ REQ_VARY ) ) <nl> - reqbyte = REQ_NONE ; <nl> - else <nl> - reqbyte | = branchreqbyte ; / * To " or " REQ_VARY * / <nl> - } <nl> - <nl> - / * Reached end of expression , either ' ) ' or end of pattern . Go back through <nl> - the alternative branches and reverse the chain of offsets , with the field in <nl> - the BRA item now becoming an offset to the first alternative . If there are <nl> - no alternatives , it points to the end of the group . The length in the <nl> - terminating ket is always the length of the whole bracketed item . If any of <nl> - the ims options were changed inside the group , compile a resetting op - code <nl> - following , except at the very end of the pattern . Return leaving the pointer <nl> - at the terminating char . * / <nl> - <nl> - if ( ptr > = patternEnd | | * ptr ! 
= ' | ' ) { <nl> - int length = code - last_branch ; <nl> - do { <nl> - int prev_length = getLinkValueAllowZero ( last_branch + 1 ) ; <nl> - putLinkValue ( last_branch + 1 , length ) ; <nl> - length = prev_length ; <nl> - last_branch - = length ; <nl> - } while ( length > 0 ) ; <nl> - <nl> - / * Fill in the ket * / <nl> - <nl> - * code = OP_KET ; <nl> - putLinkValue ( code + 1 , code - start_bracket ) ; <nl> - code + = 1 + LINK_SIZE ; <nl> - <nl> - / * Set values to pass back * / <nl> - <nl> - * codeptr = code ; <nl> - * ptrptr = ptr ; <nl> - * firstbyteptr = firstbyte ; <nl> - * reqbyteptr = reqbyte ; <nl> - return true ; <nl> - } <nl> - <nl> - / * Another branch follows ; insert an " or " node . Its length field points back <nl> - to the previous branch while the bracket remains open . At the end the chain <nl> - is reversed . It ' s done like this so that the start of the bracket has a <nl> - zero offset until it is closed , making it possible to detect recursion . * / <nl> - <nl> - * code = OP_ALT ; <nl> - putLinkValue ( code + 1 , code - last_branch ) ; <nl> - last_branch = code ; <nl> - code + = 1 + LINK_SIZE ; <nl> - ptr + + ; <nl> - } <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Check for anchored expression * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Try to find out if this is an anchored regular expression . Consider each <nl> - alternative branch . If they all start OP_CIRC , or with a bracket <nl> - all of whose alternatives start OP_CIRC ( recurse ad lib ) , then <nl> - it ' s anchored . <nl> - <nl> - Arguments : <nl> - code points to start of expression ( the bracket ) <nl> - captureMap a bitmap of which brackets we are inside while testing ; this <nl> - handles up to substring 31 ; all brackets after that share <nl> - the zero bit <nl> - backrefMap the back reference bitmap <nl> - * / <nl> - <nl> - static bool branchIsAnchored ( const unsigned char * code ) <nl> - { <nl> - const unsigned char * scode = firstSignificantOpcode ( code ) ; <nl> - int op = * scode ; <nl> - <nl> - / * Brackets * / <nl> - if ( op > = OP_BRA | | op = = OP_ASSERT ) <nl> - return bracketIsAnchored ( scode ) ; <nl> - <nl> - / * Check for explicit anchoring * / <nl> - return op = = OP_CIRC ; <nl> - } <nl> - <nl> - static bool bracketIsAnchored ( const unsigned char * code ) <nl> - { <nl> - do { <nl> - if ( ! branchIsAnchored ( code + 1 + LINK_SIZE ) ) <nl> - return false ; <nl> - code + = getLinkValue ( code + 1 ) ; <nl> - } while ( * code = = OP_ALT ) ; / * Loop for each alternative * / <nl> - return true ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Check for starting with ^ or . * * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This is called to find out if every branch starts with ^ or . * so that <nl> - " first char " processing can be done to speed things up in multiline <nl> - matching and for non - DOTALL patterns that start with . * ( which must start at <nl> - the beginning or after \ n ) <nl> - <nl> - Except when the . * appears inside capturing parentheses , and there is a <nl> - subsequent back reference to those parentheses . 
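branchIsAnchored and bracketIsAnchored above implement a simple recursive rule: the expression is anchored only if every alternative either starts with OP_CIRC or starts with a bracket that is itself anchored in all of its alternatives. For example, with the multiline option off so that '^' compiles to OP_CIRC:

//   /^foo|^bar/       anchored      (both alternatives start with OP_CIRC)
//   /(^foo|^bar)baz/  anchored      (the leading bracket is anchored in every alternative)
//   /^foo|bar/        not anchored  (the second alternative has no leading OP_CIRC)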
By keeping a bitmap of the <nl> - first 31 back references , we can catch some of the more common cases more <nl> - precisely ; all the greater back references share a single bit . <nl> - <nl> - Arguments : <nl> - code points to start of expression ( the bracket ) <nl> - captureMap a bitmap of which brackets we are inside while testing ; this <nl> - handles up to substring 31 ; all brackets after that share <nl> - the zero bit <nl> - backrefMap the back reference bitmap <nl> - * / <nl> - <nl> - static bool branchNeedsLineStart ( const unsigned char * code , unsigned captureMap , unsigned backrefMap ) <nl> - { <nl> - const unsigned char * scode = firstSignificantOpcode ( code ) ; <nl> - int op = * scode ; <nl> - <nl> - / * Capturing brackets * / <nl> - if ( op > OP_BRA ) { <nl> - int captureNum = op - OP_BRA ; <nl> - if ( captureNum > EXTRACT_BASIC_MAX ) <nl> - captureNum = get2ByteValue ( scode + 2 + LINK_SIZE ) ; <nl> - int bracketMask = ( captureNum < 32 ) ? ( 1 < < captureNum ) : 1 ; <nl> - return bracketNeedsLineStart ( scode , captureMap | bracketMask , backrefMap ) ; <nl> - } <nl> - <nl> - / * Other brackets * / <nl> - if ( op = = OP_BRA | | op = = OP_ASSERT ) <nl> - return bracketNeedsLineStart ( scode , captureMap , backrefMap ) ; <nl> - <nl> - / * . * means " start at start or after \ n " if it isn ' t in brackets that <nl> - may be referenced . * / <nl> - <nl> - if ( op = = OP_TYPESTAR | | op = = OP_TYPEMINSTAR ) <nl> - return scode [ 1 ] = = OP_NOT_NEWLINE & & ! ( captureMap & backrefMap ) ; <nl> - <nl> - / * Explicit ^ * / <nl> - return op = = OP_CIRC | | op = = OP_BOL ; <nl> - } <nl> - <nl> - static bool bracketNeedsLineStart ( const unsigned char * code , unsigned captureMap , unsigned backrefMap ) <nl> - { <nl> - do { <nl> - if ( ! branchNeedsLineStart ( code + 1 + LINK_SIZE , captureMap , backrefMap ) ) <nl> - return false ; <nl> - code + = getLinkValue ( code + 1 ) ; <nl> - } while ( * code = = OP_ALT ) ; / * Loop for each alternative * / <nl> - return true ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Check for asserted fixed first char * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * During compilation , the " first char " settings from forward assertions are <nl> - discarded , because they can cause conflicts with actual literals that follow . <nl> - However , if we end up without a first char setting for an unanchored pattern , <nl> - it is worth scanning the regex to see if there is an initial asserted first <nl> - char . If all branches start with the same asserted char , or with a bracket all <nl> - of whose alternatives start with the same asserted char ( recurse ad lib ) , then <nl> - we return that char , otherwise - 1 . 
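The needs-line-start scan above is the multiline counterpart: it lets the matcher restart only at line boundaries when every alternative begins with '^' or with '.*', unless that '.*' sits inside capturing parentheses that are back-referenced later, which is when captureMap & backrefMap becomes non-zero. Concretely:

//   /^foo|.*bar/   qualifies: one alternative starts with OP_CIRC (or OP_BOL in multiline mode),
//                  the other with TYPESTAR over NOT_NEWLINE
//   /(.*)a\1/      does not qualify: the .* is inside capture 1 and \1 is used later, so the
//                  capture may have to start mid-line and restarting only at line starts could
//                  miss matches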
<nl> - <nl> - Arguments : <nl> - code points to start of expression ( the bracket ) <nl> - options pointer to the options ( used to check casing changes ) <nl> - inassert true if in an assertion <nl> - <nl> - Returns : - 1 or the fixed first char <nl> - * / <nl> - <nl> - static int branchFindFirstAssertedCharacter ( const unsigned char * code , bool inassert ) <nl> - { <nl> - const unsigned char * scode = firstSignificantOpcodeSkippingAssertions ( code ) ; <nl> - int op = * scode ; <nl> - <nl> - if ( op > = OP_BRA ) <nl> - op = OP_BRA ; <nl> - <nl> - switch ( op ) { <nl> - default : <nl> - return - 1 ; <nl> - <nl> - case OP_BRA : <nl> - case OP_ASSERT : <nl> - return bracketFindFirstAssertedCharacter ( scode , op = = OP_ASSERT ) ; <nl> - <nl> - case OP_EXACT : <nl> - scode + = 2 ; <nl> - / * Fall through * / <nl> - <nl> - case OP_CHAR : <nl> - case OP_CHAR_IGNORING_CASE : <nl> - case OP_ASCII_CHAR : <nl> - case OP_ASCII_LETTER_IGNORING_CASE : <nl> - case OP_PLUS : <nl> - case OP_MINPLUS : <nl> - if ( ! inassert ) <nl> - return - 1 ; <nl> - return scode [ 1 ] ; <nl> - } <nl> - } <nl> - <nl> - static int bracketFindFirstAssertedCharacter ( const unsigned char * code , bool inassert ) <nl> - { <nl> - int c = - 1 ; <nl> - do { <nl> - int d = branchFindFirstAssertedCharacter ( code + 1 + LINK_SIZE , inassert ) ; <nl> - if ( d < 0 ) <nl> - return - 1 ; <nl> - if ( c < 0 ) <nl> - c = d ; <nl> - else if ( c ! = d ) <nl> - return - 1 ; <nl> - code + = getLinkValue ( code + 1 ) ; <nl> - } while ( * code = = OP_ALT ) ; <nl> - return c ; <nl> - } <nl> - <nl> - static inline int multiplyWithOverflowCheck ( int a , int b ) <nl> - { <nl> - if ( ! a | | ! b ) <nl> - return 0 ; <nl> - if ( a > MAX_PATTERN_SIZE / b ) <nl> - return - 1 ; <nl> - return a * b ; <nl> - } <nl> - <nl> - static int calculateCompiledPatternLength ( const UChar * pattern , int patternLength , JSRegExpIgnoreCaseOption ignoreCase , <nl> - CompileData & cd , ErrorCode & errorcode ) <nl> - { <nl> - / * Make a pass over the pattern to compute the <nl> - amount of store required to hold the compiled code . This does not have to be <nl> - perfect as long as errors are overestimates . * / <nl> - <nl> - if ( patternLength > MAX_PATTERN_SIZE ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - <nl> - int length = 1 + LINK_SIZE ; / * For initial BRA plus length * / <nl> - int branch_extra = 0 ; <nl> - int lastitemlength = 0 ; <nl> - unsigned brastackptr = 0 ; <nl> - int brastack [ BRASTACK_SIZE ] ; <nl> - unsigned char bralenstack [ BRASTACK_SIZE ] ; <nl> - int bracount = 0 ; <nl> - <nl> - const UChar * ptr = ( const UChar * ) ( pattern - 1 ) ; <nl> - const UChar * patternEnd = ( const UChar * ) ( pattern + patternLength ) ; <nl> - <nl> - while ( + + ptr < patternEnd ) { <nl> - int minRepeats = 0 , maxRepeats = 0 ; <nl> - int c = * ptr ; <nl> - <nl> - switch ( c ) { <nl> - / * A backslashed item may be an escaped data character or it may be a <nl> - character type . * / <nl> - <nl> - case ' \ \ ' : <nl> - c = checkEscape ( & ptr , patternEnd , & errorcode , cd . numCapturingBrackets , false ) ; <nl> - if ( errorcode ! 
= 0 ) <nl> - return - 1 ; <nl> - <nl> - lastitemlength = 1 ; / * Default length of last item for repeats * / <nl> - <nl> - if ( c > = 0 ) { / * Data character * / <nl> - length + = 2 ; / * For a one - byte character * / <nl> - <nl> - if ( c > 127 ) { <nl> - int i ; <nl> - for ( i = 0 ; i < kjs_pcre_utf8_table1_size ; i + + ) <nl> - if ( c < = kjs_pcre_utf8_table1 [ i ] ) break ; <nl> - length + = i ; <nl> - lastitemlength + = i ; <nl> - } <nl> - <nl> - continue ; <nl> - } <nl> - <nl> - / * Other escapes need one byte * / <nl> - <nl> - length + + ; <nl> - <nl> - / * A back reference needs an additional 2 bytes , plus either one or 5 <nl> - bytes for a repeat . We also need to keep the value of the highest <nl> - back reference . * / <nl> - <nl> - if ( c < = - ESC_REF ) { <nl> - int refnum = - c - ESC_REF ; <nl> - cd . backrefMap | = ( refnum < 32 ) ? ( 1 < < refnum ) : 1 ; <nl> - if ( refnum > cd . top_backref ) <nl> - cd . top_backref = refnum ; <nl> - length + = 2 ; / * For single back reference * / <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' { ' ) & & isCountedRepeat ( ptr + 2 , patternEnd ) ) { <nl> - ptr = readRepeatCounts ( ptr + 2 , & minRepeats , & maxRepeats , & errorcode ) ; <nl> - if ( errorcode ) <nl> - return - 1 ; <nl> - if ( ( minRepeats = = 0 & & ( maxRepeats = = 1 | | maxRepeats = = - 1 ) ) | | <nl> - ( minRepeats = = 1 & & maxRepeats = = - 1 ) ) <nl> - length + + ; <nl> - else <nl> - length + = 5 ; <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' ? ' ) ) <nl> - ptr + + ; <nl> - } <nl> - } <nl> - continue ; <nl> - <nl> - case ' ^ ' : / * Single - byte metacharacters * / <nl> - case ' . ' : <nl> - case ' $ ' : <nl> - length + + ; <nl> - lastitemlength = 1 ; <nl> - continue ; <nl> - <nl> - case ' * ' : / * These repeats won ' t be after brackets ; * / <nl> - case ' + ' : / * those are handled separately * / <nl> - case ' ? ' : <nl> - length + + ; <nl> - goto POSSESSIVE ; <nl> - <nl> - / * This covers the cases of braced repeats after a single char , metachar , <nl> - class , or back reference . * / <nl> - <nl> - case ' { ' : <nl> - if ( ! isCountedRepeat ( ptr + 1 , patternEnd ) ) <nl> - goto NORMAL_CHAR ; <nl> - ptr = readRepeatCounts ( ptr + 1 , & minRepeats , & maxRepeats , & errorcode ) ; <nl> - if ( errorcode ! = 0 ) <nl> - return - 1 ; <nl> - <nl> - / * These special cases just insert one extra opcode * / <nl> - <nl> - if ( ( minRepeats = = 0 & & ( maxRepeats = = 1 | | maxRepeats = = - 1 ) ) | | <nl> - ( minRepeats = = 1 & & maxRepeats = = - 1 ) ) <nl> - length + + ; <nl> - <nl> - / * These cases might insert additional copies of a preceding character . * / <nl> - <nl> - else { <nl> - if ( minRepeats ! = 1 ) { <nl> - length - = lastitemlength ; / * Uncount the original char or metachar * / <nl> - if ( minRepeats > 0 ) <nl> - length + = 3 + lastitemlength ; <nl> - } <nl> - length + = lastitemlength + ( ( maxRepeats > 0 ) ? 3 : 1 ) ; <nl> - } <nl> - <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' ? ' ) ) <nl> - ptr + + ; / * Needs no extra length * / <nl> - <nl> - POSSESSIVE : / * Test for possessive quantifier * / <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' + ' ) ) { <nl> - ptr + + ; <nl> - length + = 2 + 2 * LINK_SIZE ; / * Allow for atomic brackets * / <nl> - } <nl> - continue ; <nl> - <nl> - / * An alternation contains an offset to the next branch or ket . 
If any ims <nl> - options changed in the previous branch ( es ) , and / or if we are in a <nl> - lookbehind assertion , extra space will be needed at the start of the <nl> - branch . This is handled by branch_extra . * / <nl> - <nl> - case ' | ' : <nl> - if ( brastackptr = = 0 ) <nl> - cd . needOuterBracket = true ; <nl> - length + = 1 + LINK_SIZE + branch_extra ; <nl> - continue ; <nl> - <nl> - / * A character class uses 33 characters provided that all the character <nl> - values are less than 256 . Otherwise , it uses a bit map for low valued <nl> - characters , and individual items for others . Don ' t worry about character <nl> - types that aren ' t allowed in classes - they ' ll get picked up during the <nl> - compile . A character class that contains only one single - byte character <nl> - uses 2 or 3 bytes , depending on whether it is negated or not . Notice this <nl> - where we can . ( In UTF - 8 mode we can do this only for chars < 128 . ) * / <nl> - <nl> - case ' [ ' : { <nl> - int class_optcount ; <nl> - if ( * ( + + ptr ) = = ' ^ ' ) { <nl> - class_optcount = 10 ; / * Greater than one * / <nl> - ptr + + ; <nl> - } <nl> - else <nl> - class_optcount = 0 ; <nl> - <nl> - bool class_utf8 = false ; <nl> - <nl> - for ( ; ptr < patternEnd & & * ptr ! = ' ] ' ; + + ptr ) { <nl> - / * Check for escapes * / <nl> - <nl> - if ( * ptr = = ' \ \ ' ) { <nl> - c = checkEscape ( & ptr , patternEnd , & errorcode , cd . numCapturingBrackets , true ) ; <nl> - if ( errorcode ! = 0 ) <nl> - return - 1 ; <nl> - <nl> - / * Handle escapes that turn into characters * / <nl> - <nl> - if ( c > = 0 ) <nl> - goto NON_SPECIAL_CHARACTER ; <nl> - <nl> - / * Escapes that are meta - things . The normal ones just affect the <nl> - bit map , but Unicode properties require an XCLASS extended item . * / <nl> - <nl> - else <nl> - class_optcount = 10 ; / * \ d , \ s etc ; make sure > 1 * / <nl> - } <nl> - <nl> - / * Anything else increments the possible optimization count . We have to <nl> - detect ranges here so that we can compute the number of extra ranges for <nl> - caseless wide characters when UCP support is available . If there are wide <nl> - characters , we are going to have to use an XCLASS , even for single <nl> - characters . * / <nl> - <nl> - else { <nl> - c = * ptr ; <nl> - <nl> - / * Come here from handling \ above when it escapes to a char value * / <nl> - <nl> - NON_SPECIAL_CHARACTER : <nl> - class_optcount + + ; <nl> - <nl> - int d = - 1 ; <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' - ' ) ) { <nl> - UChar const * hyptr = ptr + + ; <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' \ \ ' ) ) { <nl> - ptr + + ; <nl> - d = checkEscape ( & ptr , patternEnd , & errorcode , cd . numCapturingBrackets , true ) ; <nl> - if ( errorcode ! = 0 ) <nl> - return - 1 ; <nl> - } <nl> - else if ( ( ptr + 1 < patternEnd ) & & ptr [ 1 ] ! = ' ] ' ) <nl> - d = * + + ptr ; <nl> - if ( d < 0 ) <nl> - ptr = hyptr ; / * go back to hyphen as data * / <nl> - } <nl> - <nl> - / * If d > = 0 we have a range . In UTF - 8 mode , if the end is > 255 , or > <nl> - 127 for caseless matching , we will need to use an XCLASS . * / <nl> - <nl> - if ( d > = 0 ) { <nl> - class_optcount = 10 ; / * Ensure > 1 * / <nl> - if ( d < c ) { <nl> - errorcode = ERR8 ; <nl> - return - 1 ; <nl> - } <nl> - <nl> - if ( ( d > 255 | | ( ignoreCase & & d > 127 ) ) ) { <nl> - unsigned char buffer [ 6 ] ; <nl> - if ( ! 
class_utf8 ) / * Allow for XCLASS overhead * / <nl> - { <nl> - class_utf8 = true ; <nl> - length + = LINK_SIZE + 2 ; <nl> - } <nl> - <nl> - / * If we have UCP support , find out how many extra ranges are <nl> - needed to map the other case of characters within this range . We <nl> - have to mimic the range optimization here , because extending the <nl> - range upwards might push d over a boundary that makes it use <nl> - another byte in the UTF - 8 representation . * / <nl> - <nl> - if ( ignoreCase ) { <nl> - int occ , ocd ; <nl> - int cc = c ; <nl> - int origd = d ; <nl> - while ( getOthercaseRange ( & cc , origd , & occ , & ocd ) ) { <nl> - if ( occ > = c & & ocd < = d ) <nl> - continue ; / * Skip embedded * / <nl> - <nl> - if ( occ < c & & ocd > = c - 1 ) / * Extend the basic range * / <nl> - { / * if there is overlap , * / <nl> - c = occ ; / * noting that if occ < c * / <nl> - continue ; / * we can ' t have ocd > d * / <nl> - } / * because a subrange is * / <nl> - if ( ocd > d & & occ < = d + 1 ) / * always shorter than * / <nl> - { / * the basic range . * / <nl> - d = ocd ; <nl> - continue ; <nl> - } <nl> - <nl> - / * An extra item is needed * / <nl> - <nl> - length + = 1 + encodeUTF8 ( occ , buffer ) + <nl> - ( ( occ = = ocd ) ? 0 : encodeUTF8 ( ocd , buffer ) ) ; <nl> - } <nl> - } <nl> - <nl> - / * The length of the ( possibly extended ) range * / <nl> - <nl> - length + = 1 + encodeUTF8 ( c , buffer ) + encodeUTF8 ( d , buffer ) ; <nl> - } <nl> - <nl> - } <nl> - <nl> - / * We have a single character . There is nothing to be done unless we <nl> - are in UTF - 8 mode . If the char is > 255 , or 127 when caseless , we must <nl> - allow for an XCL_SINGLE item , doubled for caselessness if there is UCP <nl> - support . * / <nl> - <nl> - else { <nl> - if ( ( c > 255 | | ( ignoreCase & & c > 127 ) ) ) { <nl> - unsigned char buffer [ 6 ] ; <nl> - class_optcount = 10 ; / * Ensure > 1 * / <nl> - if ( ! class_utf8 ) / * Allow for XCLASS overhead * / <nl> - { <nl> - class_utf8 = true ; <nl> - length + = LINK_SIZE + 2 ; <nl> - } <nl> - length + = ( ignoreCase ? 2 : 1 ) * ( 1 + encodeUTF8 ( c , buffer ) ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> - if ( ptr > = patternEnd ) { / * Missing terminating ' ] ' * / <nl> - errorcode = ERR6 ; <nl> - return - 1 ; <nl> - } <nl> - <nl> - / * We can optimize when there was only one optimizable character . <nl> - Note that this does not detect the case of a negated single character . <nl> - In that case we do an incorrect length computation , but it ' s not a serious <nl> - problem because the computed length is too large rather than too small . * / <nl> - <nl> - if ( class_optcount = = 1 ) <nl> - goto NORMAL_CHAR ; <nl> - <nl> - / * Here , we handle repeats for the class opcodes . * / <nl> - { <nl> - length + = 33 ; <nl> - <nl> - / * A repeat needs either 1 or 5 bytes . If it is a possessive quantifier , <nl> - we also need extra for wrapping the whole thing in a sub - pattern . * / <nl> - <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' { ' ) & & isCountedRepeat ( ptr + 2 , patternEnd ) ) { <nl> - ptr = readRepeatCounts ( ptr + 2 , & minRepeats , & maxRepeats , & errorcode ) ; <nl> - if ( errorcode ! 
= 0 ) <nl> - return - 1 ; <nl> - if ( ( minRepeats = = 0 & & ( maxRepeats = = 1 | | maxRepeats = = - 1 ) ) | | <nl> - ( minRepeats = = 1 & & maxRepeats = = - 1 ) ) <nl> - length + + ; <nl> - else <nl> - length + = 5 ; <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' + ' ) ) { <nl> - ptr + + ; <nl> - length + = 2 + 2 * LINK_SIZE ; <nl> - } else if ( safelyCheckNextChar ( ptr , patternEnd , ' ? ' ) ) <nl> - ptr + + ; <nl> - } <nl> - } <nl> - continue ; <nl> - } <nl> - <nl> - / * Brackets may be genuine groups or special things * / <nl> - <nl> - case ' ( ' : { <nl> - int branch_newextra = 0 ; <nl> - int bracket_length = 1 + LINK_SIZE ; <nl> - bool capturing = false ; <nl> - <nl> - / * Handle special forms of bracket , which all start ( ? * / <nl> - <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' ? ' ) ) { <nl> - switch ( c = ( ptr + 2 < patternEnd ? ptr [ 2 ] : 0 ) ) { <nl> - / * Non - referencing groups and lookaheads just move the pointer on , and <nl> - then behave like a non - special bracket , except that they don ' t increment <nl> - the count of extracting brackets . Ditto for the " once only " bracket , <nl> - which is in Perl from version 5 . 005 . * / <nl> - <nl> - case ' : ' : <nl> - case ' = ' : <nl> - case ' ! ' : <nl> - ptr + = 2 ; <nl> - break ; <nl> - <nl> - / * Else loop checking valid options until ) is met . Anything else is an <nl> - error . If we are without any brackets , i . e . at top level , the settings <nl> - act as if specified in the options , so massage the options immediately . <nl> - This is for backward compatibility with Perl 5 . 004 . * / <nl> - <nl> - default : <nl> - errorcode = ERR12 ; <nl> - return - 1 ; <nl> - } <nl> - } else <nl> - capturing = 1 ; <nl> - <nl> - / * Capturing brackets must be counted so we can process escapes in a <nl> - Perlish way . If the number exceeds EXTRACT_BASIC_MAX we are going to need <nl> - an additional 3 bytes of memory per capturing bracket . * / <nl> - <nl> - if ( capturing ) { <nl> - bracount + + ; <nl> - if ( bracount > EXTRACT_BASIC_MAX ) <nl> - bracket_length + = 3 ; <nl> - } <nl> - <nl> - / * Save length for computing whole length at end if there ' s a repeat that <nl> - requires duplication of the group . Also save the current value of <nl> - branch_extra , and start the new group with the new value . If non - zero , this <nl> - will either be 2 for a ( ? imsx : group , or 3 for a lookbehind assertion . * / <nl> - <nl> - if ( brastackptr > = sizeof ( brastack ) / sizeof ( int ) ) { <nl> - errorcode = ERR17 ; <nl> - return - 1 ; <nl> - } <nl> - <nl> - bralenstack [ brastackptr ] = branch_extra ; <nl> - branch_extra = branch_newextra ; <nl> - <nl> - brastack [ brastackptr + + ] = length ; <nl> - length + = bracket_length ; <nl> - continue ; <nl> - } <nl> - <nl> - / * Handle ket . Look for subsequent maxRepeats / minRepeats ; for certain sets of values we <nl> - have to replicate this bracket up to that many times . If brastackptr is <nl> - 0 this is an unmatched bracket which will generate an error , but take care <nl> - not to try to access brastack [ - 1 ] when computing the length and restoring <nl> - the branch_extra value . 
* / <nl> - <nl> - case ' ) ' : { <nl> - int duplength ; <nl> - length + = 1 + LINK_SIZE ; <nl> - if ( brastackptr > 0 ) { <nl> - duplength = length - brastack [ - - brastackptr ] ; <nl> - branch_extra = bralenstack [ brastackptr ] ; <nl> - } <nl> - else <nl> - duplength = 0 ; <nl> - <nl> - / * Leave ptr at the final char ; for readRepeatCounts this happens <nl> - automatically ; for the others we need an increment . * / <nl> - <nl> - if ( ( ptr + 1 < patternEnd ) & & ( c = ptr [ 1 ] ) = = ' { ' & & isCountedRepeat ( ptr + 2 , patternEnd ) ) { <nl> - ptr = readRepeatCounts ( ptr + 2 , & minRepeats , & maxRepeats , & errorcode ) ; <nl> - if ( errorcode ) <nl> - return - 1 ; <nl> - } else if ( c = = ' * ' ) { <nl> - minRepeats = 0 ; <nl> - maxRepeats = - 1 ; <nl> - ptr + + ; <nl> - } else if ( c = = ' + ' ) { <nl> - minRepeats = 1 ; <nl> - maxRepeats = - 1 ; <nl> - ptr + + ; <nl> - } else if ( c = = ' ? ' ) { <nl> - minRepeats = 0 ; <nl> - maxRepeats = 1 ; <nl> - ptr + + ; <nl> - } else { <nl> - minRepeats = 1 ; <nl> - maxRepeats = 1 ; <nl> - } <nl> - <nl> - / * If the minimum is zero , we have to allow for an OP_BRAZERO before the <nl> - group , and if the maximum is greater than zero , we have to replicate <nl> - maxval - 1 times ; each replication acquires an OP_BRAZERO plus a nesting <nl> - bracket set . * / <nl> - <nl> - int repeatsLength ; <nl> - if ( minRepeats = = 0 ) { <nl> - length + + ; <nl> - if ( maxRepeats > 0 ) { <nl> - repeatsLength = multiplyWithOverflowCheck ( maxRepeats - 1 , duplength + 3 + 2 * LINK_SIZE ) ; <nl> - if ( repeatsLength < 0 ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - length + = repeatsLength ; <nl> - if ( length > MAX_PATTERN_SIZE ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / * When the minimum is greater than zero , we have to replicate up to <nl> - minval - 1 times , with no additions required in the copies . Then , if there <nl> - is a limited maximum we have to replicate up to maxval - 1 times allowing <nl> - for a BRAZERO item before each optional copy and nesting brackets for all <nl> - but one of the optional copies . * / <nl> - <nl> - else { <nl> - repeatsLength = multiplyWithOverflowCheck ( minRepeats - 1 , duplength ) ; <nl> - if ( repeatsLength < 0 ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - length + = repeatsLength ; <nl> - if ( maxRepeats > minRepeats ) { / * Need this test as maxRepeats = - 1 means no limit * / <nl> - repeatsLength = multiplyWithOverflowCheck ( maxRepeats - minRepeats , duplength + 3 + 2 * LINK_SIZE ) ; <nl> - if ( repeatsLength < 0 ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - length + = repeatsLength - ( 2 + 2 * LINK_SIZE ) ; <nl> - } <nl> - if ( length > MAX_PATTERN_SIZE ) { <nl> - errorcode = ERR16 ; <nl> - return - 1 ; <nl> - } <nl> - } <nl> - <nl> - / * Allow space for once brackets for " possessive quantifier " * / <nl> - <nl> - if ( safelyCheckNextChar ( ptr , patternEnd , ' + ' ) ) { <nl> - ptr + + ; <nl> - length + = 2 + 2 * LINK_SIZE ; <nl> - } <nl> - continue ; <nl> - } <nl> - <nl> - / * Non - special character . It won ' t be space or # in extended mode , so it is <nl> - always a genuine character . If we are in a \ Q . . . \ E sequence , check for the <nl> - end ; if not , we have a literal . 
* / <nl> - <nl> - default : <nl> - NORMAL_CHAR : <nl> - length + = 2 ; / * For a one - byte character * / <nl> - lastitemlength = 1 ; / * Default length of last item for repeats * / <nl> - <nl> - if ( c > 127 ) { <nl> - int i ; <nl> - for ( i = 0 ; i < kjs_pcre_utf8_table1_size ; i + + ) <nl> - if ( c < = kjs_pcre_utf8_table1 [ i ] ) <nl> - break ; <nl> - length + = i ; <nl> - lastitemlength + = i ; <nl> - } <nl> - <nl> - continue ; <nl> - } <nl> - } <nl> - <nl> - length + = 2 + LINK_SIZE ; / * For final KET and END * / <nl> - <nl> - cd . numCapturingBrackets = bracount ; <nl> - return length ; <nl> - } <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Compile a Regular Expression * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function takes a string and returns a pointer to a block of store <nl> - holding a compiled version of the expression . The original API for this <nl> - function had no error code return variable ; it is retained for backwards <nl> - compatibility . The new function is given a new name . <nl> - <nl> - Arguments : <nl> - pattern the regular expression <nl> - options various option bits <nl> - errorcodeptr pointer to error code variable ( pcre_compile2 ( ) only ) <nl> - can be NULL if you don ' t want a code value <nl> - errorptr pointer to pointer to error text <nl> - erroroffset ptr offset in pattern where error was detected <nl> - tables pointer to character tables or NULL <nl> - <nl> - Returns : pointer to compiled data block , or NULL on error , <nl> - with errorptr and erroroffset set <nl> - * / <nl> - <nl> - static inline JSRegExp * returnError ( ErrorCode errorcode , const char * * errorptr ) <nl> - { <nl> - * errorptr = errorText ( errorcode ) ; <nl> - return 0 ; <nl> - } <nl> - <nl> - JSRegExp * jsRegExpCompile ( const UChar * pattern , int patternLength , <nl> - JSRegExpIgnoreCaseOption ignoreCase , JSRegExpMultilineOption multiline , <nl> - unsigned * numSubpatterns , const char * * errorptr , <nl> - malloc_t * allocate_function , free_t * free_function ) <nl> - { <nl> - / * We can ' t pass back an error message if errorptr is NULL ; I guess the best we <nl> - can do is just return NULL , but we can set a code value if there is a code pointer . * / <nl> - if ( ! errorptr ) <nl> - return 0 ; <nl> - * errorptr = NULL ; <nl> - <nl> - CompileData cd ; <nl> - <nl> - ErrorCode errorcode = ERR0 ; <nl> - / * Call this once just to count the brackets . * / <nl> - calculateCompiledPatternLength ( pattern , patternLength , ignoreCase , cd , errorcode ) ; <nl> - / * Call it again to compute the length . * / <nl> - int length = calculateCompiledPatternLength ( pattern , patternLength , ignoreCase , cd , errorcode ) ; <nl> - if ( errorcode ) <nl> - return returnError ( errorcode , errorptr ) ; <nl> - <nl> - if ( length > MAX_PATTERN_SIZE ) <nl> - return returnError ( ERR16 , errorptr ) ; <nl> - <nl> - size_t size = length + sizeof ( JSRegExp ) ; <nl> - JSRegExp * re = reinterpret_cast < JSRegExp * > ( ( * allocate_function ) ( size ) ) ; <nl> - <nl> - if ( ! re ) <nl> - return returnError ( ERR13 , errorptr ) ; <nl> - <nl> - re - > options = ( ignoreCase ? IgnoreCaseOption : 0 ) | ( multiline ? MatchAcrossMultipleLinesOption : 0 ) ; <nl> - <nl> - / * The starting points of the name / number translation table and of the code are <nl> - passed around in the compile data block . 
* / <nl> - <nl> - const unsigned char * codeStart = ( const unsigned char * ) ( re + 1 ) ; <nl> - <nl> - / * Set up a starting , non - extracting bracket , then compile the expression . On <nl> - error , errorcode will be set non - zero , so we don ' t need to look at the result <nl> - of the function here . * / <nl> - <nl> - const UChar * ptr = ( const UChar * ) pattern ; <nl> - const UChar * patternEnd = pattern + patternLength ; <nl> - unsigned char * code = ( unsigned char * ) codeStart ; <nl> - int firstbyte , reqbyte ; <nl> - int bracketCount = 0 ; <nl> - if ( ! cd . needOuterBracket ) <nl> - compileBranch ( re - > options , & bracketCount , & code , & ptr , patternEnd , & errorcode , & firstbyte , & reqbyte , cd ) ; <nl> - else { <nl> - * code = OP_BRA ; <nl> - compileBracket ( re - > options , & bracketCount , & code , & ptr , patternEnd , & errorcode , 0 , & firstbyte , & reqbyte , cd ) ; <nl> - } <nl> - re - > top_bracket = bracketCount ; <nl> - re - > top_backref = cd . top_backref ; <nl> - <nl> - / * If not reached end of pattern on success , there ' s an excess bracket . * / <nl> - <nl> - if ( errorcode = = 0 & & ptr < patternEnd ) <nl> - errorcode = ERR10 ; <nl> - <nl> - / * Fill in the terminating state and check for disastrous overflow , but <nl> - if debugging , leave the test till after things are printed out . * / <nl> - <nl> - * code + + = OP_END ; <nl> - <nl> - ASSERT ( code - codeStart < = length ) ; <nl> - if ( code - codeStart > length ) <nl> - errorcode = ERR7 ; <nl> - <nl> - / * Give an error if there ' s back reference to a non - existent capturing <nl> - subpattern . * / <nl> - <nl> - if ( re - > top_backref > re - > top_bracket ) <nl> - errorcode = ERR15 ; <nl> - <nl> - / * Failed to compile , or error while post - processing * / <nl> - <nl> - if ( errorcode ! = ERR0 ) { <nl> - ( * free_function ) ( reinterpret_cast < void * > ( re ) ) ; <nl> - return returnError ( errorcode , errorptr ) ; <nl> - } <nl> - <nl> - / * If the anchored option was not passed , set the flag if we can determine that <nl> - the pattern is anchored by virtue of ^ characters or \ A or anything else ( such <nl> - as starting with . * when DOTALL is set ) . <nl> - <nl> - Otherwise , if we know what the first character has to be , save it , because that <nl> - speeds up unanchored matches no end . If not , see if we can set the <nl> - UseMultiLineFirstByteOptimizationOption flag . This is helpful for multiline matches when all branches <nl> - start with ^ . and also when all branches start with . * for non - DOTALL matches . <nl> - * / <nl> - <nl> - if ( cd . needOuterBracket ? bracketIsAnchored ( codeStart ) : branchIsAnchored ( codeStart ) ) <nl> - re - > options | = IsAnchoredOption ; <nl> - else { <nl> - if ( firstbyte < 0 ) { <nl> - firstbyte = ( cd . needOuterBracket <nl> - ? bracketFindFirstAssertedCharacter ( codeStart , false ) <nl> - : branchFindFirstAssertedCharacter ( codeStart , false ) ) <nl> - | ( ( re - > options & IgnoreCaseOption ) ? REQ_IGNORE_CASE : 0 ) ; <nl> - } <nl> - if ( firstbyte > = 0 ) { <nl> - int ch = firstbyte & 255 ; <nl> - if ( ch < 127 ) { <nl> - re - > first_byte = ( ( firstbyte & REQ_IGNORE_CASE ) & & flipCase ( ch ) = = ch ) ? ch : firstbyte ; <nl> - re - > options | = UseFirstByteOptimizationOption ; <nl> - } <nl> - } else { <nl> - if ( cd . needOuterBracket ? bracketNeedsLineStart ( codeStart , 0 , cd . backrefMap ) : branchNeedsLineStart ( codeStart , 0 , cd . 
backrefMap ) ) <nl> - re - > options | = UseMultiLineFirstByteOptimizationOption ; <nl> - } <nl> - } <nl> - <nl> - / * For an anchored pattern , we use the " required byte " only if it follows a <nl> - variable length item in the regex . Remove the caseless flag for non - caseable <nl> - bytes . * / <nl> - <nl> - if ( reqbyte > = 0 & & ( ! ( re - > options & IsAnchoredOption ) | | ( reqbyte & REQ_VARY ) ) ) { <nl> - int ch = reqbyte & 255 ; <nl> - if ( ch < 127 ) { <nl> - re - > req_byte = ( ( reqbyte & REQ_IGNORE_CASE ) & & flipCase ( ch ) = = ch ) ? ( reqbyte & ~ REQ_IGNORE_CASE ) : reqbyte ; <nl> - re - > options | = UseRequiredByteOptimizationOption ; <nl> - } <nl> - } <nl> - <nl> - if ( numSubpatterns ) <nl> - * numSubpatterns = re - > top_bracket ; <nl> - return re ; <nl> - } <nl> - <nl> - void jsRegExpFree ( JSRegExp * re , free_t * free_function ) <nl> - { <nl> - ( * free_function ) ( reinterpret_cast < void * > ( re ) ) ; <nl> - } <nl> - <nl> - } } / / namespace v8 : : jscre <nl> deleted file mode 100644 <nl> index dda25a49d59 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_exec . cpp <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - Copyright ( C ) 2007 Eric Seidel < eric @ webkit . org > <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . 
<nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * This module contains jsRegExpExecute ( ) , the externally visible function <nl> - that does pattern matching using an NFA algorithm , following the rules from <nl> - the JavaScript specification . There are also some supporting functions . * / <nl> - <nl> - # include " config . h " <nl> - <nl> - # include " pcre_internal . h " <nl> - <nl> - # include " ASCIICType . h " <nl> - <nl> - # include < ctype . h > <nl> - # include < limits . h > <nl> - # include < string . h > / * for memcpy * / <nl> - <nl> - # ifdef __GNUC__ <nl> - # define USE_COMPUTED_GOTO_FOR_MATCH_RECURSION <nl> - / / # define USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - # endif <nl> - <nl> - / * Avoid warnings on Windows . * / <nl> - # undef min <nl> - # undef max <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - # ifndef USE_COMPUTED_GOTO_FOR_MATCH_RECURSION <nl> - typedef int ReturnLocation ; <nl> - # else <nl> - typedef void * ReturnLocation ; <nl> - # endif <nl> - <nl> - / * Structure for building a chain of data for holding the values of <nl> - the subject pointer at the start of each bracket , used to detect when <nl> - an empty string has been matched by a bracket to break infinite loops . * / <nl> - struct BracketChainNode { <nl> - BracketChainNode * previousBracket ; <nl> - const UChar * bracketStart ; <nl> - } ; <nl> - <nl> - struct MatchFrame { <nl> - ReturnLocation returnLocation ; <nl> - struct MatchFrame * previousFrame ; <nl> - <nl> - / * Function arguments that may change * / <nl> - struct { <nl> - const UChar * subjectPtr ; <nl> - const unsigned char * instructionPtr ; <nl> - int offsetTop ; <nl> - BracketChainNode * bracketChain ; <nl> - } args ; <nl> - <nl> - <nl> - / * PCRE uses " fake " recursion built off of gotos , thus <nl> - stack - based local variables are not safe to use . Instead we have to <nl> - store local variables on the current MatchFrame . * / <nl> - struct { <nl> - const unsigned char * data ; <nl> - const unsigned char * startOfRepeatingBracket ; <nl> - const UChar * subjectPtrAtStartOfInstruction ; / / Several instrutions stash away a subjectPtr here for later compare <nl> - const unsigned char * instructionPtrAtStartOfOnce ; <nl> - <nl> - int repeatOthercase ; <nl> - <nl> - int ctype ; <nl> - int fc ; <nl> - int fi ; <nl> - int length ; <nl> - int max ; <nl> - int number ; <nl> - int offset ; <nl> - int saveOffset1 ; <nl> - int saveOffset2 ; <nl> - int saveOffset3 ; <nl> - <nl> - BracketChainNode bracketChainNode ; <nl> - } locals ; <nl> - } ; <nl> - <nl> - / * Structure for passing " static " information around between the functions <nl> - doing traditional NFA matching , so that they are thread - safe . * / <nl> - <nl> - struct MatchData { <nl> - int * offsetVector ; / * Offset vector * / <nl> - int offsetEnd ; / * One past the end * / <nl> - int offsetMax ; / * The maximum usable for return data * / <nl> - bool offsetOverflow ; / * Set if too many extractions * / <nl> - const UChar * startSubject ; / * Start of the subject string * / <nl> - const UChar * endSubject ; / * End of the subject string * / <nl> - const UChar * endMatchPtr ; / * Subject position at end match * / <nl> - int endOffsetTop ; / * Highwater mark at end of match * / <nl> - bool multiline ; <nl> - bool ignoreCase ; <nl> - } ; <nl> - <nl> - / * The maximum remaining length of subject we are prepared to search for a <nl> - req_byte match . 
* / <nl> - <nl> - # define REQ_BYTE_MAX 1000 <nl> - <nl> - / * The below limit restricts the number of " recursive " match calls in order to <nl> - avoid spending exponential time on complex regular expressions . * / <nl> - <nl> - static const unsigned matchLimit = 100000 ; <nl> - <nl> - # ifdef DEBUG <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Debugging function to print chars * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Print a sequence of chars in printable format , stopping at the end of the <nl> - subject if the requested . <nl> - <nl> - Arguments : <nl> - p points to characters <nl> - length number to print <nl> - isSubject true if printing from within md . startSubject <nl> - md pointer to matching data block , if isSubject is true <nl> - * / <nl> - <nl> - static void pchars ( const UChar * p , int length , bool isSubject , const MatchData & md ) <nl> - { <nl> - if ( isSubject & & length > md . endSubject - p ) <nl> - length = md . endSubject - p ; <nl> - while ( length - - > 0 ) { <nl> - int c ; <nl> - if ( isprint ( c = * ( p + + ) ) ) <nl> - printf ( " % c " , c ) ; <nl> - else if ( c < 256 ) <nl> - printf ( " \ \ x % 02x " , c ) ; <nl> - else <nl> - printf ( " \ \ x { % x } " , c ) ; <nl> - } <nl> - } <nl> - # endif <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Match a back - reference * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * If a back reference hasn ' t been set , the length that is passed is greater <nl> - than the number of characters left in the string , so the match fails . <nl> - <nl> - Arguments : <nl> - offset index into the offset vector <nl> - subjectPtr points into the subject <nl> - length length to be matched <nl> - md points to match data block <nl> - <nl> - Returns : true if matched <nl> - * / <nl> - <nl> - static bool matchRef ( int offset , const UChar * subjectPtr , int length , const MatchData & md ) <nl> - { <nl> - const UChar * p = md . startSubject + md . offsetVector [ offset ] ; <nl> - <nl> - # ifdef DEBUG <nl> - if ( subjectPtr > = md . endSubject ) <nl> - printf ( " matching subject < null > " ) ; <nl> - else { <nl> - printf ( " matching subject " ) ; <nl> - pchars ( subjectPtr , length , true , md ) ; <nl> - } <nl> - printf ( " against backref " ) ; <nl> - pchars ( p , length , false , md ) ; <nl> - printf ( " \ n " ) ; <nl> - # endif <nl> - <nl> - / * Always fail if not enough characters left * / <nl> - <nl> - if ( length > md . endSubject - subjectPtr ) <nl> - return false ; <nl> - <nl> - / * Separate the caselesss case for speed * / <nl> - <nl> - if ( md . ignoreCase ) { <nl> - while ( length - - > 0 ) { <nl> - UChar c = * p + + ; <nl> - int othercase = kjs_pcre_ucp_othercase ( c ) ; <nl> - UChar d = * subjectPtr + + ; <nl> - if ( c ! = d & & othercase ! = d ) <nl> - return false ; <nl> - } <nl> - } <nl> - else { <nl> - while ( length - - > 0 ) <nl> - if ( * p + + ! = * subjectPtr + + ) <nl> - return false ; <nl> - } <nl> - <nl> - return true ; <nl> - } <nl> - <nl> - # ifndef USE_COMPUTED_GOTO_FOR_MATCH_RECURSION <nl> - <nl> - / * Use numbered labels and switch statement at the bottom of the match function . 
* / <nl> - <nl> - # define RMATCH_WHERE ( num ) num <nl> - # define RRETURN_LABEL RRETURN_SWITCH <nl> - <nl> - # else <nl> - <nl> - / * Use GCC ' s computed goto extension . * / <nl> - <nl> - / * For one test case this is more than 40 % faster than the switch statement . <nl> - We could avoid the use of the num argument entirely by using local labels , <nl> - but using it for the GCC case as well as the non - GCC case allows us to share <nl> - a bit more code and notice if we use conflicting numbers . * / <nl> - <nl> - # define RMATCH_WHERE ( num ) & & RRETURN_ # # num <nl> - # define RRETURN_LABEL * stack . currentFrame - > returnLocation <nl> - <nl> - # endif <nl> - <nl> - # define RECURSIVE_MATCH_COMMON ( num ) \ <nl> - goto RECURSE ; \ <nl> - RRETURN_ # # num : \ <nl> - stack . popCurrentFrame ( ) ; <nl> - <nl> - # define RECURSIVE_MATCH ( num , ra , rb ) \ <nl> - do { \ <nl> - stack . pushNewFrame ( ( ra ) , ( rb ) , RMATCH_WHERE ( num ) ) ; \ <nl> - RECURSIVE_MATCH_COMMON ( num ) \ <nl> - } while ( 0 ) <nl> - <nl> - # define RECURSIVE_MATCH_STARTNG_NEW_GROUP ( num , ra , rb ) \ <nl> - do { \ <nl> - stack . pushNewFrame ( ( ra ) , ( rb ) , RMATCH_WHERE ( num ) ) ; \ <nl> - startNewGroup ( stack . currentFrame ) ; \ <nl> - RECURSIVE_MATCH_COMMON ( num ) \ <nl> - } while ( 0 ) <nl> - <nl> - # define RRETURN goto RRETURN_LABEL <nl> - <nl> - # define RRETURN_NO_MATCH do { isMatch = false ; RRETURN ; } while ( 0 ) <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Match from current position * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * On entry instructionPtr points to the first opcode , and subjectPtr to the first character <nl> - in the subject string , while substringStart holds the value of subjectPtr at the start of the <nl> - last bracketed group - used for breaking infinite loops matching zero - length <nl> - strings . This function is called recursively in many circumstances . Whenever it <nl> - returns a negative ( error ) response , the outer match ( ) call must also return the <nl> - same response . <nl> - <nl> - Arguments : <nl> - subjectPtr pointer in subject <nl> - instructionPtr position in code <nl> - offsetTop current top pointer <nl> - md pointer to " static " info for the match <nl> - <nl> - Returns : 1 if matched ) these values are > = 0 <nl> - 0 if failed to match ) <nl> - a negative error value if aborted by an error condition <nl> - ( e . g . 
stopped by repeated call or recursion limit ) <nl> - * / <nl> - <nl> - static const unsigned FRAMES_ON_STACK = 16 ; <nl> - <nl> - struct MatchStack { <nl> - MatchStack ( ) <nl> - : framesEnd ( frames + FRAMES_ON_STACK ) <nl> - , currentFrame ( frames ) <nl> - , size ( 1 ) / / match ( ) creates accesses the first frame w / o calling pushNewFrame <nl> - { <nl> - ASSERT ( ( sizeof ( frames ) / sizeof ( frames [ 0 ] ) ) = = FRAMES_ON_STACK ) ; <nl> - } <nl> - <nl> - MatchFrame frames [ FRAMES_ON_STACK ] ; <nl> - MatchFrame * framesEnd ; <nl> - MatchFrame * currentFrame ; <nl> - unsigned size ; <nl> - <nl> - inline bool canUseStackBufferForNextFrame ( ) <nl> - { <nl> - return size < FRAMES_ON_STACK ; <nl> - } <nl> - <nl> - inline MatchFrame * allocateNextFrame ( ) <nl> - { <nl> - if ( canUseStackBufferForNextFrame ( ) ) <nl> - return currentFrame + 1 ; <nl> - return new MatchFrame ; <nl> - } <nl> - <nl> - inline void pushNewFrame ( const unsigned char * instructionPtr , BracketChainNode * bracketChain , ReturnLocation returnLocation ) <nl> - { <nl> - MatchFrame * newframe = allocateNextFrame ( ) ; <nl> - newframe - > previousFrame = currentFrame ; <nl> - <nl> - newframe - > args . subjectPtr = currentFrame - > args . subjectPtr ; <nl> - newframe - > args . offsetTop = currentFrame - > args . offsetTop ; <nl> - newframe - > args . instructionPtr = instructionPtr ; <nl> - newframe - > args . bracketChain = bracketChain ; <nl> - newframe - > returnLocation = returnLocation ; <nl> - size + + ; <nl> - <nl> - currentFrame = newframe ; <nl> - } <nl> - <nl> - inline void popCurrentFrame ( ) <nl> - { <nl> - MatchFrame * oldFrame = currentFrame ; <nl> - currentFrame = currentFrame - > previousFrame ; <nl> - if ( size > FRAMES_ON_STACK ) <nl> - delete oldFrame ; <nl> - size - - ; <nl> - } <nl> - <nl> - void popAllFrames ( ) <nl> - { <nl> - while ( size ) <nl> - popCurrentFrame ( ) ; <nl> - } <nl> - } ; <nl> - <nl> - static int matchError ( int errorCode , MatchStack & stack ) <nl> - { <nl> - stack . popAllFrames ( ) ; <nl> - return errorCode ; <nl> - } <nl> - <nl> - / * Get the next UTF - 8 character , not advancing the pointer , incrementing length <nl> - if there are extra bytes . This is called when we know we are in UTF - 8 mode . * / <nl> - <nl> - static inline void getUTF8CharAndIncrementLength ( int & c , const unsigned char * subjectPtr , int & len ) <nl> - { <nl> - c = * subjectPtr ; <nl> - if ( ( c & 0xc0 ) = = 0xc0 ) { <nl> - int gcaa = kjs_pcre_utf8_table4 [ c & 0x3f ] ; / * Number of additional bytes * / <nl> - int gcss = 6 * gcaa ; <nl> - c = ( c & kjs_pcre_utf8_table3 [ gcaa ] ) < < gcss ; <nl> - for ( int gcii = 1 ; gcii < = gcaa ; gcii + + ) { <nl> - gcss - = 6 ; <nl> - c | = ( subjectPtr [ gcii ] & 0x3f ) < < gcss ; <nl> - } <nl> - len + = gcaa ; <nl> - } <nl> - } <nl> - <nl> - static inline void startNewGroup ( MatchFrame * currentFrame ) <nl> - { <nl> - / * At the start of a bracketed group , add the current subject pointer to the <nl> - stack of such pointers , to be re - instated at the end of the group when we hit <nl> - the closing ket . When match ( ) is called in other circumstances , we don ' t add to <nl> - this stack . * / <nl> - <nl> - currentFrame - > locals . bracketChainNode . previousBracket = currentFrame - > args . bracketChain ; <nl> - currentFrame - > locals . bracketChainNode . bracketStart = currentFrame - > args . subjectPtr ; <nl> - currentFrame - > args . bracketChain = & currentFrame - > locals . 
bracketChainNode ; <nl> - } <nl> - <nl> - / / FIXME : " minimize " means " not greedy " , we should invert the callers to ask for " greedy " to be less confusing <nl> - static inline void repeatInformationFromInstructionOffset ( short instructionOffset , bool & minimize , int & minimumRepeats , int & maximumRepeats ) <nl> - { <nl> - / / Instruction offsets are based off of OP_CRSTAR , OP_STAR , OP_TYPESTAR , OP_NOTSTAR <nl> - static const char minimumRepeatsFromInstructionOffset [ ] = { 0 , 0 , 1 , 1 , 0 , 0 } ; <nl> - static const int maximumRepeatsFromInstructionOffset [ ] = { INT_MAX , INT_MAX , INT_MAX , INT_MAX , 1 , 1 } ; <nl> - <nl> - ASSERT ( instructionOffset > = 0 ) ; <nl> - ASSERT ( instructionOffset < = ( OP_CRMINQUERY - OP_CRSTAR ) ) ; <nl> - <nl> - minimize = ( instructionOffset & 1 ) ; / / this assumes ordering : Instruction , MinimizeInstruction , Instruction2 , MinimizeInstruction2 <nl> - minimumRepeats = minimumRepeatsFromInstructionOffset [ instructionOffset ] ; <nl> - maximumRepeats = maximumRepeatsFromInstructionOffset [ instructionOffset ] ; <nl> - } <nl> - <nl> - static int match ( const UChar * subjectPtr , const unsigned char * instructionPtr , int offsetTop , MatchData & md ) <nl> - { <nl> - bool isMatch = false ; <nl> - int min ; <nl> - bool minimize = false ; / * Initialization not really needed , but some compilers think so . * / <nl> - unsigned matchCount = 0 ; <nl> - <nl> - MatchStack stack ; <nl> - <nl> - / * The opcode jump table . * / <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - # define EMIT_JUMP_TABLE_ENTRY ( opcode ) & & LABEL_OP_ # # opcode , <nl> - static void * opcodeJumpTable [ 256 ] = { FOR_EACH_OPCODE ( EMIT_JUMP_TABLE_ENTRY ) } ; <nl> - # undef EMIT_JUMP_TABLE_ENTRY <nl> - # endif <nl> - <nl> - / * One - time setup of the opcode jump table . * / <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - for ( int i = 255 ; ! opcodeJumpTable [ i ] ; i - - ) <nl> - opcodeJumpTable [ i ] = & & CAPTURING_BRACKET ; <nl> - # endif <nl> - <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_RECURSION <nl> - / / Shark shows this as a hot line <nl> - / / Using a static const here makes this line disappear , but makes later access hotter ( not sure why ) <nl> - stack . currentFrame - > returnLocation = & & RETURN ; <nl> - # else <nl> - stack . currentFrame - > returnLocation = 0 ; <nl> - # endif <nl> - stack . currentFrame - > args . subjectPtr = subjectPtr ; <nl> - stack . currentFrame - > args . instructionPtr = instructionPtr ; <nl> - stack . currentFrame - > args . offsetTop = offsetTop ; <nl> - stack . currentFrame - > args . bracketChain = 0 ; <nl> - startNewGroup ( stack . currentFrame ) ; <nl> - <nl> - / * This is where control jumps back to to effect " recursion " * / <nl> - <nl> - RECURSE : <nl> - if ( + + matchCount > matchLimit ) <nl> - return matchError ( JSRegExpErrorHitLimit , stack ) ; <nl> - <nl> - / * Now start processing the operations . * / <nl> - <nl> - # ifndef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - while ( true ) <nl> - # endif <nl> - { <nl> - <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - # define BEGIN_OPCODE ( opcode ) LABEL_OP_ # # opcode <nl> - # define NEXT_OPCODE goto * opcodeJumpTable [ * stack . currentFrame - > args . instructionPtr ] <nl> - # else <nl> - # define BEGIN_OPCODE ( opcode ) case OP_ # # opcode <nl> - # define NEXT_OPCODE continue <nl> - # endif <nl> - <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - NEXT_OPCODE ; <nl> - # else <nl> - switch ( * stack . 
currentFrame - > args . instructionPtr ) <nl> - # endif <nl> - { <nl> - / * Non - capturing bracket : optimized * / <nl> - <nl> - BEGIN_OPCODE ( BRA ) : <nl> - NON_CAPTURING_BRACKET : <nl> - DPRINTF ( ( " start bracket 0 \ n " ) ) ; <nl> - do { <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 2 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . instructionPtr + = getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - } while ( * stack . currentFrame - > args . instructionPtr = = OP_ALT ) ; <nl> - DPRINTF ( ( " bracket 0 failed \ n " ) ) ; <nl> - RRETURN ; <nl> - <nl> - / * Skip over large extraction number data if encountered . * / <nl> - <nl> - BEGIN_OPCODE ( BRANUMBER ) : <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * End of the pattern . * / <nl> - <nl> - BEGIN_OPCODE ( END ) : <nl> - md . endMatchPtr = stack . currentFrame - > args . subjectPtr ; / * Record where we ended * / <nl> - md . endOffsetTop = stack . currentFrame - > args . offsetTop ; / * and how many extracts were taken * / <nl> - isMatch = true ; <nl> - RRETURN ; <nl> - <nl> - / * Assertion brackets . Check the alternative branches in turn - the <nl> - matching won ' t pass the KET for an assertion . If any one branch matches , <nl> - the assertion is true . Lookbehind assertions have an OP_REVERSE item at the <nl> - start of each branch to move the current point backwards , so the code at <nl> - this level is identical to the lookahead case . * / <nl> - <nl> - BEGIN_OPCODE ( ASSERT ) : <nl> - do { <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 6 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , NULL ) ; <nl> - if ( isMatch ) <nl> - break ; <nl> - stack . currentFrame - > args . instructionPtr + = getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - } while ( * stack . currentFrame - > args . instructionPtr = = OP_ALT ) ; <nl> - if ( * stack . currentFrame - > args . instructionPtr = = OP_KET ) <nl> - RRETURN_NO_MATCH ; <nl> - <nl> - / * Continue from after the assertion , updating the offsets high water <nl> - mark , since extracts may have been taken during the assertion . * / <nl> - <nl> - advanceToEndOfBracket ( stack . currentFrame - > args . instructionPtr ) ; <nl> - stack . currentFrame - > args . instructionPtr + = 1 + LINK_SIZE ; <nl> - stack . currentFrame - > args . offsetTop = md . endOffsetTop ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Negative assertion : all branches must fail to match * / <nl> - <nl> - BEGIN_OPCODE ( ASSERT_NOT ) : <nl> - do { <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 7 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , NULL ) ; <nl> - if ( isMatch ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + = getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - } while ( * stack . currentFrame - > args . instructionPtr = = OP_ALT ) ; <nl> - <nl> - stack . currentFrame - > args . instructionPtr + = 1 + LINK_SIZE ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * An alternation is the end of a branch ; scan along to find the end of the <nl> - bracketed group and go to there . * / <nl> - <nl> - BEGIN_OPCODE ( ALT ) : <nl> - advanceToEndOfBracket ( stack . currentFrame - > args . 
instructionPtr ) ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * BRAZERO and BRAMINZERO occur just before a bracket group , indicating <nl> - that it may occur zero times . It may repeat infinitely , or not at all - <nl> - i . e . it could be ( ) * or ( ) ? in the pattern . Brackets with fixed upper <nl> - repeat limits are compiled as a number of copies , with the optional ones <nl> - preceded by BRAZERO or BRAMINZERO . * / <nl> - <nl> - BEGIN_OPCODE ( BRAZERO ) : { <nl> - stack . currentFrame - > locals . startOfRepeatingBracket = stack . currentFrame - > args . instructionPtr + 1 ; <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 14 , stack . currentFrame - > locals . startOfRepeatingBracket , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - advanceToEndOfBracket ( stack . currentFrame - > locals . startOfRepeatingBracket ) ; <nl> - stack . currentFrame - > args . instructionPtr = stack . currentFrame - > locals . startOfRepeatingBracket + 1 + LINK_SIZE ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - BEGIN_OPCODE ( BRAMINZERO ) : { <nl> - stack . currentFrame - > locals . startOfRepeatingBracket = stack . currentFrame - > args . instructionPtr + 1 ; <nl> - advanceToEndOfBracket ( stack . currentFrame - > locals . startOfRepeatingBracket ) ; <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 15 , stack . currentFrame - > locals . startOfRepeatingBracket + 1 + LINK_SIZE , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * End of a group , repeated or non - repeating . If we are at the end of <nl> - an assertion " group " , stop matching and return 1 , but record the <nl> - current high water mark for use by positive assertions . Do this also <nl> - for the " once " ( not - backup up ) groups . * / <nl> - <nl> - BEGIN_OPCODE ( KET ) : <nl> - BEGIN_OPCODE ( KETRMIN ) : <nl> - BEGIN_OPCODE ( KETRMAX ) : <nl> - stack . currentFrame - > locals . instructionPtrAtStartOfOnce = stack . currentFrame - > args . instructionPtr - getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . bracketChain - > bracketStart ; <nl> - <nl> - / * Back up the stack of bracket start pointers . * / <nl> - <nl> - stack . currentFrame - > args . bracketChain = stack . currentFrame - > args . bracketChain - > previousBracket ; <nl> - <nl> - if ( * stack . currentFrame - > locals . instructionPtrAtStartOfOnce = = OP_ASSERT | | * stack . currentFrame - > locals . instructionPtrAtStartOfOnce = = OP_ASSERT_NOT ) { <nl> - md . endOffsetTop = stack . currentFrame - > args . offsetTop ; <nl> - isMatch = true ; <nl> - RRETURN ; <nl> - } <nl> - <nl> - / * In all other cases except a conditional group we have to check the <nl> - group number back at the start and if necessary complete handling an <nl> - extraction by setting the offsets and bumping the high water mark . * / <nl> - <nl> - stack . currentFrame - > locals . number = * stack . currentFrame - > locals . instructionPtrAtStartOfOnce - OP_BRA ; <nl> - <nl> - / * For extended extraction brackets ( large number ) , we have to fish out <nl> - the number from a dummy opcode at the start . * / <nl> - <nl> - if ( stack . currentFrame - > locals . number > EXTRACT_BASIC_MAX ) <nl> - stack . currentFrame - > locals . number = get2ByteValue ( stack . currentFrame - > locals . 
instructionPtrAtStartOfOnce + 2 + LINK_SIZE ) ; <nl> - stack . currentFrame - > locals . offset = stack . currentFrame - > locals . number < < 1 ; <nl> - <nl> - # ifdef DEBUG <nl> - printf ( " end bracket % d " , stack . currentFrame - > locals . number ) ; <nl> - printf ( " \ n " ) ; <nl> - # endif <nl> - <nl> - / * Test for a numbered group . This includes groups called as a result <nl> - of recursion . Note that whole - pattern recursion is coded as a recurse <nl> - into group 0 , so it won ' t be picked up here . Instead , we catch it when <nl> - the OP_END is reached . * / <nl> - <nl> - if ( stack . currentFrame - > locals . number > 0 ) { <nl> - if ( stack . currentFrame - > locals . offset > = md . offsetMax ) <nl> - md . offsetOverflow = true ; <nl> - else { <nl> - md . offsetVector [ stack . currentFrame - > locals . offset ] = <nl> - md . offsetVector [ md . offsetEnd - stack . currentFrame - > locals . number ] ; <nl> - md . offsetVector [ stack . currentFrame - > locals . offset + 1 ] = stack . currentFrame - > args . subjectPtr - md . startSubject ; <nl> - if ( stack . currentFrame - > args . offsetTop < = stack . currentFrame - > locals . offset ) <nl> - stack . currentFrame - > args . offsetTop = stack . currentFrame - > locals . offset + 2 ; <nl> - } <nl> - } <nl> - <nl> - / * For a non - repeating ket , just continue at this level . This also <nl> - happens for a repeating ket if no characters were matched in the group . <nl> - This is the forcible breaking of infinite loops as implemented in Perl <nl> - 5 . 005 . If there is an options reset , it will get obeyed in the normal <nl> - course of events . * / <nl> - <nl> - if ( * stack . currentFrame - > args . instructionPtr = = OP_KET | | stack . currentFrame - > args . subjectPtr = = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) { <nl> - stack . currentFrame - > args . instructionPtr + = 1 + LINK_SIZE ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * The repeating kets try the rest of the pattern or restart from the <nl> - preceding bracket , in the appropriate order . * / <nl> - <nl> - if ( * stack . currentFrame - > args . instructionPtr = = OP_KETRMIN ) { <nl> - RECURSIVE_MATCH ( 16 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 17 , stack . currentFrame - > locals . instructionPtrAtStartOfOnce , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - } else { / * OP_KETRMAX * / <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 18 , stack . currentFrame - > locals . instructionPtrAtStartOfOnce , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - RECURSIVE_MATCH ( 19 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - } <nl> - RRETURN ; <nl> - <nl> - / * Start of subject . * / <nl> - <nl> - BEGIN_OPCODE ( CIRC ) : <nl> - if ( stack . currentFrame - > args . subjectPtr ! = md . startSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * After internal newline if multiline . * / <nl> - <nl> - BEGIN_OPCODE ( BOL ) : <nl> - if ( stack . currentFrame - > args . subjectPtr ! = md . startSubject & & ! isNewline ( stack . currentFrame - > args . 
subjectPtr [ - 1 ] ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * End of subject . * / <nl> - <nl> - BEGIN_OPCODE ( DOLL ) : <nl> - if ( stack . currentFrame - > args . subjectPtr < md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Before internal newline if multiline . * / <nl> - <nl> - BEGIN_OPCODE ( EOL ) : <nl> - if ( stack . currentFrame - > args . subjectPtr < md . endSubject & & ! isNewline ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Word boundary assertions * / <nl> - <nl> - BEGIN_OPCODE ( NOT_WORD_BOUNDARY ) : <nl> - BEGIN_OPCODE ( WORD_BOUNDARY ) : { <nl> - bool currentCharIsWordChar = false ; <nl> - bool previousCharIsWordChar = false ; <nl> - <nl> - if ( stack . currentFrame - > args . subjectPtr > md . startSubject ) <nl> - previousCharIsWordChar = isWordChar ( stack . currentFrame - > args . subjectPtr [ - 1 ] ) ; <nl> - if ( stack . currentFrame - > args . subjectPtr < md . endSubject ) <nl> - currentCharIsWordChar = isWordChar ( * stack . currentFrame - > args . subjectPtr ) ; <nl> - <nl> - / * Now see if the situation is what we want * / <nl> - bool wordBoundaryDesired = ( * stack . currentFrame - > args . instructionPtr + + = = OP_WORD_BOUNDARY ) ; <nl> - if ( wordBoundaryDesired ? currentCharIsWordChar = = previousCharIsWordChar : currentCharIsWordChar ! = previousCharIsWordChar ) <nl> - RRETURN_NO_MATCH ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * Match a single character type ; inline for speed * / <nl> - <nl> - BEGIN_OPCODE ( NOT_NEWLINE ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( isNewline ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( NOT_DIGIT ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( isASCIIDigit ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( DIGIT ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( ! isASCIIDigit ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( NOT_WHITESPACE ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( isSpaceChar ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( WHITESPACE ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( ! isSpaceChar ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( NOT_WORDCHAR ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . 
endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( isWordChar ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - BEGIN_OPCODE ( WORDCHAR ) : <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( ! isWordChar ( * stack . currentFrame - > args . subjectPtr + + ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Match a back reference , possibly repeatedly . Look past the end of the <nl> - item to see if there is repeat information following . The code is similar <nl> - to that for character classes , but repeated for efficiency . Then obey <nl> - similar code to character type repeats - written out again for speed . <nl> - However , if the referenced string is the empty string , always treat <nl> - it as matched , any number of times ( otherwise there could be infinite <nl> - loops ) . * / <nl> - <nl> - BEGIN_OPCODE ( REF ) : <nl> - stack . currentFrame - > locals . offset = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) < < 1 ; / * Doubled ref number * / <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; / * Advance past item * / <nl> - <nl> - / * If the reference is unset , set the length to be longer than the amount <nl> - of subject left ; this ensures that every attempt at a match fails . We <nl> - can ' t just fail here , because of the possibility of quantifiers with zero <nl> - minima . * / <nl> - <nl> - if ( stack . currentFrame - > locals . offset > = stack . currentFrame - > args . offsetTop | | md . offsetVector [ stack . currentFrame - > locals . offset ] < 0 ) <nl> - stack . currentFrame - > locals . length = 0 ; <nl> - else <nl> - stack . currentFrame - > locals . length = md . offsetVector [ stack . currentFrame - > locals . offset + 1 ] - md . offsetVector [ stack . currentFrame - > locals . offset ] ; <nl> - <nl> - / * Set up for repetition , or handle the non - repeated case * / <nl> - <nl> - switch ( * stack . currentFrame - > args . instructionPtr ) { <nl> - case OP_CRSTAR : <nl> - case OP_CRMINSTAR : <nl> - case OP_CRPLUS : <nl> - case OP_CRMINPLUS : <nl> - case OP_CRQUERY : <nl> - case OP_CRMINQUERY : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_CRSTAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - break ; <nl> - <nl> - case OP_CRRANGE : <nl> - case OP_CRMINRANGE : <nl> - minimize = ( * stack . currentFrame - > args . instructionPtr = = OP_CRMINRANGE ) ; <nl> - min = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 3 ) ; <nl> - if ( stack . currentFrame - > locals . max = = 0 ) <nl> - stack . currentFrame - > locals . max = INT_MAX ; <nl> - stack . currentFrame - > args . instructionPtr + = 5 ; <nl> - break ; <nl> - <nl> - default : / * No repeat follows * / <nl> - if ( ! matchRef ( stack . currentFrame - > locals . offset , stack . currentFrame - > args . subjectPtr , stack . currentFrame - > locals . length , md ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . subjectPtr + = stack . currentFrame - > locals . length ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * If the length of the reference is zero , just continue with the <nl> - main loop . 
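The OP_REF handling above re-matches a previously captured substring at the current subject position, with special rules for unset and empty captures. A minimal sketch of just the core comparison, with char16_t standing in for the library's UChar and a hypothetical helper name (subjectHasCaptureHere is not from the deleted sources); the caseless path and the unset-capture rule described in the comments are deliberately omitted:

// Hypothetical helper: does the previously captured text reappear verbatim
// at the current subject position? (Case-insensitive matching and the
// "unset reference" rule from the comments above are left out.)
static bool subjectHasCaptureHere(const char16_t* subjectPtr,
                                  const char16_t* subjectEnd,
                                  const char16_t* captureStart,
                                  int captureLength)
{
    if (subjectEnd - subjectPtr < captureLength)
        return false;                      // not enough subject text left
    for (int i = 0; i < captureLength; ++i) {
        if (subjectPtr[i] != captureStart[i])
            return false;                  // mismatch against captured text
    }
    return true;                           // the back reference matches here
}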
* / <nl> - <nl> - if ( stack . currentFrame - > locals . length = = 0 ) <nl> - NEXT_OPCODE ; <nl> - <nl> - / * First , ensure the minimum number of matches are present . * / <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( ! matchRef ( stack . currentFrame - > locals . offset , stack . currentFrame - > args . subjectPtr , stack . currentFrame - > locals . length , md ) ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . subjectPtr + = stack . currentFrame - > locals . length ; <nl> - } <nl> - <nl> - / * If min = max , continue at the same level without recursion . <nl> - They are not both allowed to be zero . * / <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - / * If minimizing , keep trying and advancing the pointer * / <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 20 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | ! matchRef ( stack . currentFrame - > locals . offset , stack . currentFrame - > args . subjectPtr , stack . currentFrame - > locals . length , md ) ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . subjectPtr + = stack . currentFrame - > locals . length ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * If maximizing , find the longest string and work backwards * / <nl> - <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( ! matchRef ( stack . currentFrame - > locals . offset , stack . currentFrame - > args . subjectPtr , stack . currentFrame - > locals . length , md ) ) <nl> - break ; <nl> - stack . currentFrame - > args . subjectPtr + = stack . currentFrame - > locals . length ; <nl> - } <nl> - while ( stack . currentFrame - > args . subjectPtr > = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) { <nl> - RECURSIVE_MATCH ( 21 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . subjectPtr - = stack . currentFrame - > locals . length ; <nl> - } <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - / * Control never reaches here * / <nl> - <nl> - / * Match a bit - mapped character class , possibly repeatedly . This op code is <nl> - used when all the characters in the class have values in the range 0 - 255 , <nl> - and either the matching is caseful , or the characters are in the range <nl> - 0 - 127 when UTF - 8 processing is enabled . The only difference between <nl> - OP_CLASS and OP_NCLASS occurs when a data character outside the range is <nl> - encountered . <nl> - <nl> - First , look past the end of the item to see if there is repeat information <nl> - following . Then obey similar code to character type repeats - written out <nl> - again for speed . * / <nl> - <nl> - BEGIN_OPCODE ( NCLASS ) : <nl> - BEGIN_OPCODE ( CLASS ) : <nl> - stack . currentFrame - > locals . data = stack . currentFrame - > args . instructionPtr + 1 ; / * Save for matching * / <nl> - stack . currentFrame - > args . 
instructionPtr + = 33 ; / * Advance past the item * / <nl> - <nl> - switch ( * stack . currentFrame - > args . instructionPtr ) { <nl> - case OP_CRSTAR : <nl> - case OP_CRMINSTAR : <nl> - case OP_CRPLUS : <nl> - case OP_CRMINPLUS : <nl> - case OP_CRQUERY : <nl> - case OP_CRMINQUERY : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_CRSTAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - break ; <nl> - <nl> - case OP_CRRANGE : <nl> - case OP_CRMINRANGE : <nl> - minimize = ( * stack . currentFrame - > args . instructionPtr = = OP_CRMINRANGE ) ; <nl> - min = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 3 ) ; <nl> - if ( stack . currentFrame - > locals . max = = 0 ) <nl> - stack . currentFrame - > locals . max = INT_MAX ; <nl> - stack . currentFrame - > args . instructionPtr + = 5 ; <nl> - break ; <nl> - <nl> - default : / * No repeat follows * / <nl> - min = stack . currentFrame - > locals . max = 1 ; <nl> - break ; <nl> - } <nl> - <nl> - / * First , ensure the minimum number of matches are present . * / <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( c > 255 ) { <nl> - if ( stack . currentFrame - > locals . data [ - 1 ] = = OP_CLASS ) <nl> - RRETURN_NO_MATCH ; <nl> - } else { <nl> - if ( ! ( stack . currentFrame - > locals . data [ c / 8 ] & ( 1 < < ( c & 7 ) ) ) ) <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - } <nl> - <nl> - / * If max = = min we can continue with the main loop without the <nl> - need to recurse . * / <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - / * If minimizing , keep testing the rest of the expression and advancing <nl> - the pointer while it matches the class . * / <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 22 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN ; <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( c > 255 ) { <nl> - if ( stack . currentFrame - > locals . data [ - 1 ] = = OP_CLASS ) <nl> - RRETURN ; <nl> - } else { <nl> - if ( ( stack . currentFrame - > locals . data [ c / 8 ] & ( 1 < < ( c & 7 ) ) ) = = 0 ) <nl> - RRETURN ; <nl> - } <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - / * If maximizing , find the longest possible run , then work backwards . * / <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( c > 255 ) { <nl> - if ( stack . currentFrame - > locals . data [ - 1 ] = = OP_CLASS ) <nl> - break ; <nl> - } else { <nl> - if ( ! ( stack . 
currentFrame - > locals . data [ c / 8 ] & ( 1 < < ( c & 7 ) ) ) ) <nl> - break ; <nl> - } <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - for ( ; ; ) { <nl> - RECURSIVE_MATCH ( 24 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > args . subjectPtr - - = = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) <nl> - break ; / * Stop if tried at original pos * / <nl> - } <nl> - <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - <nl> - / * Match an extended character class . * / <nl> - <nl> - BEGIN_OPCODE ( XCLASS ) : <nl> - stack . currentFrame - > locals . data = stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE ; / * Save for matching * / <nl> - stack . currentFrame - > args . instructionPtr + = getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; / * Advance past the item * / <nl> - <nl> - switch ( * stack . currentFrame - > args . instructionPtr ) { <nl> - case OP_CRSTAR : <nl> - case OP_CRMINSTAR : <nl> - case OP_CRPLUS : <nl> - case OP_CRMINPLUS : <nl> - case OP_CRQUERY : <nl> - case OP_CRMINQUERY : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_CRSTAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - break ; <nl> - <nl> - case OP_CRRANGE : <nl> - case OP_CRMINRANGE : <nl> - minimize = ( * stack . currentFrame - > args . instructionPtr = = OP_CRMINRANGE ) ; <nl> - min = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 3 ) ; <nl> - if ( stack . currentFrame - > locals . max = = 0 ) <nl> - stack . currentFrame - > locals . max = INT_MAX ; <nl> - stack . currentFrame - > args . instructionPtr + = 5 ; <nl> - break ; <nl> - <nl> - default : / * No repeat follows * / <nl> - min = stack . currentFrame - > locals . max = 1 ; <nl> - } <nl> - <nl> - / * First , ensure the minimum number of matches are present . * / <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( ! kjs_pcre_xclass ( c , stack . currentFrame - > locals . data ) ) <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - <nl> - / * If max = = min we can continue with the main loop without the <nl> - need to recurse . * / <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - / * If minimizing , keep testing the rest of the expression and advancing <nl> - the pointer while it matches the class . * / <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 26 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN ; <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( ! kjs_pcre_xclass ( c , stack . currentFrame - > locals . 
data ) ) <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * If maximizing , find the longest possible run , then work backwards . * / <nl> - <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( ! kjs_pcre_xclass ( c , stack . currentFrame - > locals . data ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - for ( ; ; ) { <nl> - RECURSIVE_MATCH ( 27 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > args . subjectPtr - - = = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) <nl> - break ; / * Stop if tried at original pos * / <nl> - } <nl> - RRETURN ; <nl> - } <nl> - <nl> - / * Control never reaches here * / <nl> - <nl> - / * Match a single character , casefully * / <nl> - <nl> - BEGIN_OPCODE ( CHAR ) : <nl> - stack . currentFrame - > locals . length = 1 ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - getUTF8CharAndIncrementLength ( stack . currentFrame - > locals . fc , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > locals . length ) ; <nl> - stack . currentFrame - > args . instructionPtr + = stack . currentFrame - > locals . length ; <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( stack . currentFrame - > locals . fc ! = * stack . currentFrame - > args . subjectPtr + + ) <nl> - RRETURN_NO_MATCH ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Match a single character , caselessly * / <nl> - <nl> - BEGIN_OPCODE ( CHAR_IGNORING_CASE ) : { <nl> - stack . currentFrame - > locals . length = 1 ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - getUTF8CharAndIncrementLength ( stack . currentFrame - > locals . fc , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > locals . length ) ; <nl> - stack . currentFrame - > args . instructionPtr + = stack . currentFrame - > locals . length ; <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - int dc = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( stack . currentFrame - > locals . fc ! = dc & & kjs_pcre_ucp_othercase ( stack . currentFrame - > locals . fc ) ! = dc ) <nl> - RRETURN_NO_MATCH ; <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * Match a single ASCII character . * / <nl> - <nl> - BEGIN_OPCODE ( ASCII_CHAR ) : <nl> - if ( md . endSubject = = stack . currentFrame - > args . subjectPtr ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > args . instructionPtr [ 1 ] ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - stack . currentFrame - > args . instructionPtr + = 2 ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Match one of two cases of an ASCII letter . * / <nl> - <nl> - BEGIN_OPCODE ( ASCII_LETTER_IGNORING_CASE ) : <nl> - if ( md . endSubject = = stack . currentFrame - > args . subjectPtr ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( ( * stack . currentFrame - > args . subjectPtr | 0x20 ) ! 
= stack . currentFrame - > args . instructionPtr [ 1 ] ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - stack . currentFrame - > args . instructionPtr + = 2 ; <nl> - NEXT_OPCODE ; <nl> - <nl> - / * Match a single character repeatedly ; different opcodes share code . * / <nl> - <nl> - BEGIN_OPCODE ( EXACT ) : <nl> - min = stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = false ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATCHAR ; <nl> - <nl> - BEGIN_OPCODE ( UPTO ) : <nl> - BEGIN_OPCODE ( MINUPTO ) : <nl> - min = 0 ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = * stack . currentFrame - > args . instructionPtr = = OP_MINUPTO ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATCHAR ; <nl> - <nl> - BEGIN_OPCODE ( STAR ) : <nl> - BEGIN_OPCODE ( MINSTAR ) : <nl> - BEGIN_OPCODE ( PLUS ) : <nl> - BEGIN_OPCODE ( MINPLUS ) : <nl> - BEGIN_OPCODE ( QUERY ) : <nl> - BEGIN_OPCODE ( MINQUERY ) : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_STAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - <nl> - / * Common code for all repeated single - character matches . We can give <nl> - up quickly if there are fewer than the minimum number of characters left in <nl> - the subject . * / <nl> - <nl> - REPEATCHAR : <nl> - <nl> - stack . currentFrame - > locals . length = 1 ; <nl> - getUTF8CharAndIncrementLength ( stack . currentFrame - > locals . fc , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > locals . length ) ; <nl> - if ( min * ( stack . currentFrame - > locals . fc > 0xFFFF ? 2 : 1 ) > md . endSubject - stack . currentFrame - > args . subjectPtr ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + = stack . currentFrame - > locals . length ; <nl> - <nl> - if ( stack . currentFrame - > locals . fc < = 0xFFFF ) { <nl> - int othercase = md . ignoreCase ? kjs_pcre_ucp_othercase ( stack . currentFrame - > locals . fc ) : - 1 ; <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc & & * stack . currentFrame - > args . subjectPtr ! = othercase ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - if ( minimize ) { <nl> - stack . currentFrame - > locals . repeatOthercase = othercase ; <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 28 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN ; <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc & & * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . repeatOthercase ) <nl> - RRETURN ; <nl> - + + stack . currentFrame - > args . 
subjectPtr ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc & & * stack . currentFrame - > args . subjectPtr ! = othercase ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - while ( stack . currentFrame - > args . subjectPtr > = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) { <nl> - RECURSIVE_MATCH ( 29 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - - - stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } else { <nl> - / * No case on surrogate pairs , so no need to bother with " othercase " . * / <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . subjectPtr + = 2 ; <nl> - } <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 30 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN ; <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . subjectPtr + = 2 ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > md . endSubject - 2 ) <nl> - break ; <nl> - if ( * stack . currentFrame - > args . subjectPtr ! = stack . currentFrame - > locals . fc ) <nl> - break ; <nl> - stack . currentFrame - > args . subjectPtr + = 2 ; <nl> - } <nl> - while ( stack . currentFrame - > args . subjectPtr > = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) { <nl> - RECURSIVE_MATCH ( 31 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - stack . currentFrame - > args . subjectPtr - = 2 ; <nl> - } <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - / * Control never reaches here * / <nl> - <nl> - / * Match a negated single one - byte character . * / <nl> - <nl> - BEGIN_OPCODE ( NOT ) : { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > args . instructionPtr + + ; <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( md . 
ignoreCase ) { <nl> - if ( c < 128 ) <nl> - c = toLowerCase ( c ) ; <nl> - if ( toLowerCase ( * stack . currentFrame - > args . instructionPtr + + ) = = c ) <nl> - RRETURN_NO_MATCH ; <nl> - } else { <nl> - if ( * stack . currentFrame - > args . instructionPtr + + = = c ) <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - NEXT_OPCODE ; <nl> - } <nl> - <nl> - / * Match a negated single one - byte character repeatedly . This is almost a <nl> - repeat of the code for a repeated single character , but I haven ' t found a <nl> - nice way of commoning these up that doesn ' t require a test of the <nl> - positive / negative option for each character match . Maybe that wouldn ' t add <nl> - very much to the time taken , but character matching * is * what this is all <nl> - about . . . * / <nl> - <nl> - BEGIN_OPCODE ( NOTEXACT ) : <nl> - min = stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = false ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATNOTCHAR ; <nl> - <nl> - BEGIN_OPCODE ( NOTUPTO ) : <nl> - BEGIN_OPCODE ( NOTMINUPTO ) : <nl> - min = 0 ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = * stack . currentFrame - > args . instructionPtr = = OP_NOTMINUPTO ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATNOTCHAR ; <nl> - <nl> - BEGIN_OPCODE ( NOTSTAR ) : <nl> - BEGIN_OPCODE ( NOTMINSTAR ) : <nl> - BEGIN_OPCODE ( NOTPLUS ) : <nl> - BEGIN_OPCODE ( NOTMINPLUS ) : <nl> - BEGIN_OPCODE ( NOTQUERY ) : <nl> - BEGIN_OPCODE ( NOTMINQUERY ) : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_NOTSTAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - <nl> - / * Common code for all repeated single - byte matches . We can give up quickly <nl> - if there are fewer than the minimum number of bytes left in the <nl> - subject . * / <nl> - <nl> - REPEATNOTCHAR : <nl> - if ( min > md . endSubject - stack . currentFrame - > args . subjectPtr ) <nl> - RRETURN_NO_MATCH ; <nl> - stack . currentFrame - > locals . fc = * stack . currentFrame - > args . instructionPtr + + ; <nl> - <nl> - / * The code is duplicated for the caseless and caseful cases , for speed , <nl> - since matching characters is likely to be quite common . First , ensure the <nl> - minimum number of matches are present . If min = max , continue at the same <nl> - level without recursing . Otherwise , if minimizing , keep trying the rest of <nl> - the expression and advancing one matching character if failing , up to the <nl> - maximum . Alternatively , if maximizing , find the maximum number of <nl> - characters and work backwards . * / <nl> - <nl> - DPRINTF ( ( " negative matching % c { % d , % d } \ n " , stack . currentFrame - > locals . fc , min , stack . currentFrame - > locals . max ) ) ; <nl> - <nl> - if ( md . ignoreCase ) { <nl> - if ( stack . currentFrame - > locals . fc < 128 ) <nl> - stack . currentFrame - > locals . fc = toLowerCase ( stack . currentFrame - > locals . fc ) ; <nl> - <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - int d = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( d < 128 ) <nl> - d = toLowerCase ( d ) ; <nl> - if ( stack . currentFrame - > locals . fc = = d ) <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - <nl> - if ( min = = stack . currentFrame - > locals . 
max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 38 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - int d = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( d < 128 ) <nl> - d = toLowerCase ( d ) ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject | | stack . currentFrame - > locals . fc = = d ) <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * Maximize case * / <nl> - <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int d = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( d < 128 ) <nl> - d = toLowerCase ( d ) ; <nl> - if ( stack . currentFrame - > locals . fc = = d ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - for ( ; ; ) { <nl> - RECURSIVE_MATCH ( 40 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > args . subjectPtr - - = = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) <nl> - break ; / * Stop if tried at original pos * / <nl> - } <nl> - <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * Caseful comparisons * / <nl> - <nl> - else { <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - int d = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( stack . currentFrame - > locals . fc = = d ) <nl> - RRETURN_NO_MATCH ; <nl> - } <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 42 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - int d = * stack . currentFrame - > args . subjectPtr + + ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject | | stack . currentFrame - > locals . fc = = d ) <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * Maximize case * / <nl> - <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; <nl> - <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int d = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( stack . currentFrame - > locals . fc = = d ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - for ( ; ; ) { <nl> - RECURSIVE_MATCH ( 44 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . 
bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > args . subjectPtr - - = = stack . currentFrame - > locals . subjectPtrAtStartOfInstruction ) <nl> - break ; / * Stop if tried at original pos * / <nl> - } <nl> - <nl> - RRETURN ; <nl> - } <nl> - } <nl> - / * Control never reaches here * / <nl> - <nl> - / * Match a single character type repeatedly ; several different opcodes <nl> - share code . This is very similar to the code for single characters , but we <nl> - repeat it in the interests of efficiency . * / <nl> - <nl> - BEGIN_OPCODE ( TYPEEXACT ) : <nl> - min = stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = true ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATTYPE ; <nl> - <nl> - BEGIN_OPCODE ( TYPEUPTO ) : <nl> - BEGIN_OPCODE ( TYPEMINUPTO ) : <nl> - min = 0 ; <nl> - stack . currentFrame - > locals . max = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - minimize = * stack . currentFrame - > args . instructionPtr = = OP_TYPEMINUPTO ; <nl> - stack . currentFrame - > args . instructionPtr + = 3 ; <nl> - goto REPEATTYPE ; <nl> - <nl> - BEGIN_OPCODE ( TYPESTAR ) : <nl> - BEGIN_OPCODE ( TYPEMINSTAR ) : <nl> - BEGIN_OPCODE ( TYPEPLUS ) : <nl> - BEGIN_OPCODE ( TYPEMINPLUS ) : <nl> - BEGIN_OPCODE ( TYPEQUERY ) : <nl> - BEGIN_OPCODE ( TYPEMINQUERY ) : <nl> - repeatInformationFromInstructionOffset ( * stack . currentFrame - > args . instructionPtr + + - OP_TYPESTAR , minimize , min , stack . currentFrame - > locals . max ) ; <nl> - <nl> - / * Common code for all repeated single character type matches . Note that <nl> - in UTF - 8 mode , ' . ' matches a character of any length , but for the other <nl> - character types , the valid characters are all one - byte long . * / <nl> - <nl> - REPEATTYPE : <nl> - stack . currentFrame - > locals . ctype = * stack . currentFrame - > args . instructionPtr + + ; / * Code for the character type * / <nl> - <nl> - / * First , ensure the minimum number of matches are present . Use inline <nl> - code for maximizing the speed , and do the type test once at the start <nl> - ( i . e . keep it out of the loop ) . Also we can test that there are at least <nl> - the minimum number of characters before we start . * / <nl> - <nl> - if ( min > md . endSubject - stack . currentFrame - > args . subjectPtr ) <nl> - RRETURN_NO_MATCH ; <nl> - if ( min > 0 ) { <nl> - switch ( stack . currentFrame - > locals . ctype ) { <nl> - case OP_NOT_NEWLINE : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( isNewline ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_DIGIT : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( isASCIIDigit ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_DIGIT : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( ! isASCIIDigit ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_WHITESPACE : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( isSpaceChar ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . 
currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_WHITESPACE : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( ! isSpaceChar ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_WORDCHAR : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( isWordChar ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_WORDCHAR : <nl> - for ( int i = 1 ; i < = min ; i + + ) { <nl> - if ( ! isWordChar ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - RRETURN_NO_MATCH ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - default : <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - return matchError ( JSRegExpErrorInternal , stack ) ; <nl> - } / * End switch ( stack . currentFrame - > locals . ctype ) * / <nl> - } <nl> - <nl> - / * If min = max , continue at the same level without recursing * / <nl> - <nl> - if ( min = = stack . currentFrame - > locals . max ) <nl> - NEXT_OPCODE ; <nl> - <nl> - / * If minimizing , we have to test the rest of the pattern before each <nl> - subsequent match . * / <nl> - <nl> - if ( minimize ) { <nl> - for ( stack . currentFrame - > locals . fi = min ; ; stack . currentFrame - > locals . fi + + ) { <nl> - RECURSIVE_MATCH ( 48 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > locals . fi > = stack . currentFrame - > locals . max | | stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - RRETURN ; <nl> - <nl> - int c = * stack . currentFrame - > args . subjectPtr + + ; <nl> - switch ( stack . currentFrame - > locals . ctype ) { <nl> - case OP_NOT_NEWLINE : <nl> - if ( isNewline ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_NOT_DIGIT : <nl> - if ( isASCIIDigit ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_DIGIT : <nl> - if ( ! isASCIIDigit ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_NOT_WHITESPACE : <nl> - if ( isSpaceChar ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_WHITESPACE : <nl> - if ( ! isSpaceChar ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_NOT_WORDCHAR : <nl> - if ( isWordChar ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - case OP_WORDCHAR : <nl> - if ( ! isWordChar ( c ) ) <nl> - RRETURN ; <nl> - break ; <nl> - <nl> - default : <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - return matchError ( JSRegExpErrorInternal , stack ) ; <nl> - } <nl> - } <nl> - / * Control never reaches here * / <nl> - } <nl> - <nl> - / * If maximizing it is worth using inline code for speed , doing the type <nl> - test once at the start ( i . e . keep it out of the loop ) . * / <nl> - <nl> - else { <nl> - stack . currentFrame - > locals . subjectPtrAtStartOfInstruction = stack . currentFrame - > args . subjectPtr ; / * Remember where we started * / <nl> - <nl> - switch ( stack . currentFrame - > locals . ctype ) { <nl> - case OP_NOT_NEWLINE : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject | | isNewline ( * stack . currentFrame - > args . subjectPtr ) ) <nl> - break ; <nl> - stack . currentFrame - > args . 
subjectPtr + + ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_DIGIT : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( isASCIIDigit ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_DIGIT : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( ! isASCIIDigit ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_WHITESPACE : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( isSpaceChar ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_WHITESPACE : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( ! isSpaceChar ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_NOT_WORDCHAR : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( isWordChar ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - case OP_WORDCHAR : <nl> - for ( int i = min ; i < stack . currentFrame - > locals . max ; i + + ) { <nl> - if ( stack . currentFrame - > args . subjectPtr > = md . endSubject ) <nl> - break ; <nl> - int c = * stack . currentFrame - > args . subjectPtr ; <nl> - if ( ! isWordChar ( c ) ) <nl> - break ; <nl> - + + stack . currentFrame - > args . subjectPtr ; <nl> - } <nl> - break ; <nl> - <nl> - default : <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - return matchError ( JSRegExpErrorInternal , stack ) ; <nl> - } <nl> - <nl> - / * stack . currentFrame - > args . subjectPtr is now past the end of the maximum run * / <nl> - <nl> - for ( ; ; ) { <nl> - RECURSIVE_MATCH ( 52 , stack . currentFrame - > args . instructionPtr , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - if ( stack . currentFrame - > args . subjectPtr - - = = stack . currentFrame - > locals . 
subjectPtrAtStartOfInstruction ) <nl> - break ; / * Stop if tried at original pos * / <nl> - } <nl> - <nl> - / * Get here if we can ' t make it match with any permitted repetitions * / <nl> - <nl> - RRETURN ; <nl> - } <nl> - / * Control never reaches here * / <nl> - <nl> - BEGIN_OPCODE ( CRMINPLUS ) : <nl> - BEGIN_OPCODE ( CRMINQUERY ) : <nl> - BEGIN_OPCODE ( CRMINRANGE ) : <nl> - BEGIN_OPCODE ( CRMINSTAR ) : <nl> - BEGIN_OPCODE ( CRPLUS ) : <nl> - BEGIN_OPCODE ( CRQUERY ) : <nl> - BEGIN_OPCODE ( CRRANGE ) : <nl> - BEGIN_OPCODE ( CRSTAR ) : <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - return matchError ( JSRegExpErrorInternal , stack ) ; <nl> - <nl> - # ifdef USE_COMPUTED_GOTO_FOR_MATCH_OPCODE_LOOP <nl> - CAPTURING_BRACKET : <nl> - # else <nl> - default : <nl> - # endif <nl> - / * Opening capturing bracket . If there is space in the offset vector , save <nl> - the current subject position in the working slot at the top of the vector . We <nl> - mustn ' t change the current values of the data slot , because they may be set <nl> - from a previous iteration of this group , and be referred to by a reference <nl> - inside the group . <nl> - <nl> - If the bracket fails to match , we need to restore this value and also the <nl> - values of the final offsets , in case they were set by a previous iteration of <nl> - the same bracket . <nl> - <nl> - If there isn ' t enough space in the offset vector , treat this as if it were a <nl> - non - capturing bracket . Don ' t worry about setting the flag for the error case <nl> - here ; that is handled in the code for KET . * / <nl> - <nl> - ASSERT ( * stack . currentFrame - > args . instructionPtr > OP_BRA ) ; <nl> - <nl> - stack . currentFrame - > locals . number = * stack . currentFrame - > args . instructionPtr - OP_BRA ; <nl> - <nl> - / * For extended extraction brackets ( large number ) , we have to fish out the <nl> - number from a dummy opcode at the start . * / <nl> - <nl> - if ( stack . currentFrame - > locals . number > EXTRACT_BASIC_MAX ) <nl> - stack . currentFrame - > locals . number = get2ByteValue ( stack . currentFrame - > args . instructionPtr + 2 + LINK_SIZE ) ; <nl> - stack . currentFrame - > locals . offset = stack . currentFrame - > locals . number < < 1 ; <nl> - <nl> - # ifdef DEBUG <nl> - printf ( " start bracket % d subject = " , stack . currentFrame - > locals . number ) ; <nl> - pchars ( stack . currentFrame - > args . subjectPtr , 16 , true , md ) ; <nl> - printf ( " \ n " ) ; <nl> - # endif <nl> - <nl> - if ( stack . currentFrame - > locals . offset < md . offsetMax ) { <nl> - stack . currentFrame - > locals . saveOffset1 = md . offsetVector [ stack . currentFrame - > locals . offset ] ; <nl> - stack . currentFrame - > locals . saveOffset2 = md . offsetVector [ stack . currentFrame - > locals . offset + 1 ] ; <nl> - stack . currentFrame - > locals . saveOffset3 = md . offsetVector [ md . offsetEnd - stack . currentFrame - > locals . number ] ; <nl> - <nl> - DPRINTF ( ( " saving % d % d % d \ n " , stack . currentFrame - > locals . saveOffset1 , stack . currentFrame - > locals . saveOffset2 , stack . currentFrame - > locals . saveOffset3 ) ) ; <nl> - md . offsetVector [ md . offsetEnd - stack . currentFrame - > locals . number ] = stack . currentFrame - > args . subjectPtr - md . startSubject ; <nl> - <nl> - do { <nl> - RECURSIVE_MATCH_STARTNG_NEW_GROUP ( 1 , stack . currentFrame - > args . instructionPtr + 1 + LINK_SIZE , stack . currentFrame - > args . bracketChain ) ; <nl> - if ( isMatch ) <nl> - RRETURN ; <nl> - stack . 
currentFrame - > args . instructionPtr + = getLinkValue ( stack . currentFrame - > args . instructionPtr + 1 ) ; <nl> - } while ( * stack . currentFrame - > args . instructionPtr = = OP_ALT ) ; <nl> - <nl> - DPRINTF ( ( " bracket % d failed \ n " , stack . currentFrame - > locals . number ) ) ; <nl> - <nl> - md . offsetVector [ stack . currentFrame - > locals . offset ] = stack . currentFrame - > locals . saveOffset1 ; <nl> - md . offsetVector [ stack . currentFrame - > locals . offset + 1 ] = stack . currentFrame - > locals . saveOffset2 ; <nl> - md . offsetVector [ md . offsetEnd - stack . currentFrame - > locals . number ] = stack . currentFrame - > locals . saveOffset3 ; <nl> - <nl> - RRETURN ; <nl> - } <nl> - <nl> - / * Insufficient room for saving captured contents * / <nl> - <nl> - goto NON_CAPTURING_BRACKET ; <nl> - } <nl> - <nl> - / * Do not stick any code in here without much thought ; it is assumed <nl> - that " continue " in the code above comes out to here to repeat the main <nl> - loop . * / <nl> - <nl> - } / * End of main loop * / <nl> - <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - <nl> - # ifndef USE_COMPUTED_GOTO_FOR_MATCH_RECURSION <nl> - <nl> - RRETURN_SWITCH : <nl> - switch ( stack . currentFrame - > returnLocation ) { <nl> - case 0 : goto RETURN ; <nl> - case 1 : goto RRETURN_1 ; <nl> - case 2 : goto RRETURN_2 ; <nl> - case 6 : goto RRETURN_6 ; <nl> - case 7 : goto RRETURN_7 ; <nl> - case 14 : goto RRETURN_14 ; <nl> - case 15 : goto RRETURN_15 ; <nl> - case 16 : goto RRETURN_16 ; <nl> - case 17 : goto RRETURN_17 ; <nl> - case 18 : goto RRETURN_18 ; <nl> - case 19 : goto RRETURN_19 ; <nl> - case 20 : goto RRETURN_20 ; <nl> - case 21 : goto RRETURN_21 ; <nl> - case 22 : goto RRETURN_22 ; <nl> - case 24 : goto RRETURN_24 ; <nl> - case 26 : goto RRETURN_26 ; <nl> - case 27 : goto RRETURN_27 ; <nl> - case 28 : goto RRETURN_28 ; <nl> - case 29 : goto RRETURN_29 ; <nl> - case 30 : goto RRETURN_30 ; <nl> - case 31 : goto RRETURN_31 ; <nl> - case 38 : goto RRETURN_38 ; <nl> - case 40 : goto RRETURN_40 ; <nl> - case 42 : goto RRETURN_42 ; <nl> - case 44 : goto RRETURN_44 ; <nl> - case 48 : goto RRETURN_48 ; <nl> - case 52 : goto RRETURN_52 ; <nl> - } <nl> - <nl> - ASSERT_NOT_REACHED ( ) ; <nl> - return matchError ( JSRegExpErrorInternal , stack ) ; <nl> - <nl> - # endif <nl> - <nl> - RETURN : <nl> - return isMatch ; <nl> - } <nl> - <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Execute a Regular Expression * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function applies a compiled re to a subject string and picks out <nl> - portions of the string if it matches . Two elements in the vector are set for <nl> - each substring : the offsets to the start and end of the substring . 
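The comment above states that two elements in the offsets vector are set for each substring: the offsets to its start and end. A short sketch of how a caller might read those pairs back after a successful jsRegExpExecute call, assuming the usual layout of offsets[2*i] / offsets[2*i + 1] with -1 marking an unset capture; collectCaptures is a hypothetical helper, not part of the API:

#include <string>
#include <vector>

// Hypothetical caller-side helper: turn the (start, end) pairs in the
// offsets vector into substrings. Index 0 is the whole match, index i > 0
// is capture group i; -1 marks a capture that never participated.
static std::vector<std::u16string> collectCaptures(const std::u16string& subject,
                                                   const int* offsets,
                                                   int substringCount)
{
    std::vector<std::u16string> captures;
    for (int i = 0; i < substringCount; ++i) {
        int start = offsets[2 * i];
        int end = offsets[2 * i + 1];
        if (start < 0)
            captures.push_back(std::u16string());                // unset capture
        else
            captures.push_back(subject.substr(start, end - start));
    }
    return captures;
}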
<nl> - <nl> - Arguments : <nl> - re points to the compiled expression <nl> - extra_data points to extra data or is NULL <nl> - subject points to the subject string <nl> - length length of subject string ( may contain binary zeros ) <nl> - start_offset where to start in the subject string <nl> - options option bits <nl> - offsets points to a vector of ints to be filled in with offsets <nl> - offsetcount the number of elements in the vector <nl> - <nl> - Returns : > 0 = > success ; value is the number of elements filled in <nl> - = 0 = > success , but offsets is not big enough <nl> - - 1 = > failed to match <nl> - < - 1 = > some kind of unexpected problem <nl> - * / <nl> - <nl> - static void tryFirstByteOptimization ( const UChar * & subjectPtr , const UChar * endSubject , int first_byte , bool first_byte_caseless , bool useMultiLineFirstCharOptimization , const UChar * originalSubjectStart ) <nl> - { <nl> - / / If first_byte is set , try scanning to the first instance of that byte <nl> - / / no need to try and match against any earlier part of the subject string . <nl> - if ( first_byte > = 0 ) { <nl> - UChar first_char = first_byte ; <nl> - if ( first_byte_caseless ) <nl> - while ( subjectPtr < endSubject ) { <nl> - int c = * subjectPtr ; <nl> - if ( c > 127 ) <nl> - break ; <nl> - if ( toLowerCase ( c ) = = first_char ) <nl> - break ; <nl> - subjectPtr + + ; <nl> - } <nl> - else { <nl> - while ( subjectPtr < endSubject & & * subjectPtr ! = first_char ) <nl> - subjectPtr + + ; <nl> - } <nl> - } else if ( useMultiLineFirstCharOptimization ) { <nl> - / * Or to just after \ n for a multiline match if possible * / <nl> - / / I ' m not sure why this ! = originalSubjectStart check is necessary - - ecs 11 / 18 / 07 <nl> - if ( subjectPtr > originalSubjectStart ) { <nl> - while ( subjectPtr < endSubject & & ! isNewline ( subjectPtr [ - 1 ] ) ) <nl> - subjectPtr + + ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - static bool tryRequiredByteOptimization ( const UChar * & subjectPtr , const UChar * endSubject , int req_byte , int req_byte2 , bool req_byte_caseless , bool hasFirstByte , const UChar * & reqBytePtr ) <nl> - { <nl> - / * If req_byte is set , we know that that character must appear in the subject <nl> - for the match to succeed . If the first character is set , req_byte must be <nl> - later in the subject ; otherwise the test starts at the match point . This <nl> - optimization can save a huge amount of backtracking in patterns with nested <nl> - unlimited repeats that aren ' t going to match . Writing separate code for <nl> - cased / caseless versions makes it go faster , as does using an autoincrement <nl> - and backing off on a match . <nl> - <nl> - HOWEVER : when the subject string is very , very long , searching to its end can <nl> - take a long time , and give bad performance on quite ordinary patterns . This <nl> - showed up when somebody was matching / ^ C / on a 32 - megabyte string . . . so we <nl> - don ' t do this when the string is sufficiently long . <nl> - * / <nl> - <nl> - if ( req_byte > = 0 & & endSubject - subjectPtr < REQ_BYTE_MAX ) { <nl> - const UChar * p = subjectPtr + ( hasFirstByte ? 1 : 0 ) ; <nl> - <nl> - / * We don ' t need to repeat the search if we haven ' t yet reached the <nl> - place we found it at last time . 
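tryFirstByteOptimization above skips ahead to the next occurrence of a known first character so that earlier positions are never offered to the matcher, and tryRequiredByteOptimization performs a similar forward scan for a character that must appear somewhere in any match. A minimal sketch of the first-character scan, with char16_t in place of UChar and only ASCII case folding as in the deleted code; scanToFirstChar is an illustrative name, not the library's, and firstChar is assumed to be pre-lowercased when caseless is true:

// Illustrative helper: advance to the first position whose character could
// begin a match, given a known (possibly caseless) first character.
static const char16_t* scanToFirstChar(const char16_t* p, const char16_t* end,
                                       char16_t firstChar, bool caseless)
{
    while (p < end) {
        char16_t c = *p;
        if (caseless) {
            if (c > 127)
                break;                                     // unknown case mapping; let the matcher decide
            if (c >= 'A' && c <= 'Z')
                c = static_cast<char16_t>(c + ('a' - 'A')); // ASCII-only lowering
        }
        if (c == firstChar)
            break;                                         // candidate start found
        ++p;
    }
    return p;
}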
* / <nl> - <nl> - if ( p > reqBytePtr ) { <nl> - if ( req_byte_caseless ) { <nl> - while ( p < endSubject ) { <nl> - int pp = * p + + ; <nl> - if ( pp = = req_byte | | pp = = req_byte2 ) { <nl> - p - - ; <nl> - break ; <nl> - } <nl> - } <nl> - } else { <nl> - while ( p < endSubject ) { <nl> - if ( * p + + = = req_byte ) { <nl> - p - - ; <nl> - break ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / * If we can ' t find the required character , break the matching loop * / <nl> - <nl> - if ( p > = endSubject ) <nl> - return true ; <nl> - <nl> - / * If we have found the required character , save the point where we <nl> - found it , so that we don ' t search again next time round the loop if <nl> - the start hasn ' t passed this character yet . * / <nl> - <nl> - reqBytePtr = p ; <nl> - } <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> - int jsRegExpExecute ( const JSRegExp * re , <nl> - const UChar * subject , int length , int start_offset , int * offsets , <nl> - int offsetcount ) <nl> - { <nl> - ASSERT ( re ) ; <nl> - ASSERT ( subject ) ; <nl> - ASSERT ( offsetcount > = 0 ) ; <nl> - ASSERT ( offsets | | offsetcount = = 0 ) ; <nl> - <nl> - MatchData matchBlock ; <nl> - matchBlock . startSubject = subject ; <nl> - matchBlock . endSubject = matchBlock . startSubject + length ; <nl> - const UChar * endSubject = matchBlock . endSubject ; <nl> - <nl> - matchBlock . multiline = ( re - > options & MatchAcrossMultipleLinesOption ) ; <nl> - matchBlock . ignoreCase = ( re - > options & IgnoreCaseOption ) ; <nl> - <nl> - / * If the expression has got more back references than the offsets supplied can <nl> - hold , we get a temporary chunk of working store to use during the matching . <nl> - Otherwise , we can use the vector supplied , rounding down its size to a multiple <nl> - of 3 . * / <nl> - <nl> - int ocount = offsetcount - ( offsetcount % 3 ) ; <nl> - <nl> - / / FIXME : This is lame that we have to second - guess our caller here . <nl> - / / The API should change to either fail - hard when we don ' t have enough offset space <nl> - / / or that we shouldn ' t ask our callers to pre - allocate in the first place . <nl> - bool using_temporary_offsets = false ; <nl> - if ( re - > top_backref > 0 & & re - > top_backref > = ocount / 3 ) { <nl> - ocount = re - > top_backref * 3 + 3 ; <nl> - matchBlock . offsetVector = new int [ ocount ] ; <nl> - if ( ! matchBlock . offsetVector ) <nl> - return JSRegExpErrorNoMemory ; <nl> - using_temporary_offsets = true ; <nl> - } else <nl> - matchBlock . offsetVector = offsets ; <nl> - <nl> - matchBlock . offsetEnd = ocount ; <nl> - matchBlock . offsetMax = ( 2 * ocount ) / 3 ; <nl> - matchBlock . offsetOverflow = false ; <nl> - <nl> - / * Compute the minimum number of offsets that we need to reset each time . Doing <nl> - this makes a huge difference to execution time when there aren ' t many brackets <nl> - in the pattern . * / <nl> - <nl> - int resetcount = 2 + re - > top_bracket * 2 ; <nl> - if ( resetcount > offsetcount ) <nl> - resetcount = ocount ; <nl> - <nl> - / * Reset the working variable associated with each extraction . These should <nl> - never be used unless previously set , but they get saved and restored , and so we <nl> - initialize them to avoid reading uninitialized locations . * / <nl> - <nl> - if ( matchBlock . offsetVector ) { <nl> - int * iptr = matchBlock . 
offsetVector + ocount ; <nl> - int * iend = iptr - resetcount / 2 + 1 ; <nl> - while ( - - iptr > = iend ) <nl> - * iptr = - 1 ; <nl> - } <nl> - <nl> - / * Set up the first character to match , if available . The first_byte value is <nl> - never set for an anchored regular expression , but the anchoring may be forced <nl> - at run time , so we have to test for anchoring . The first char may be unset for <nl> - an unanchored pattern , of course . If there ' s no first char and the pattern was <nl> - studied , there may be a bitmap of possible first characters . * / <nl> - <nl> - bool first_byte_caseless = false ; <nl> - int first_byte = - 1 ; <nl> - if ( re - > options & UseFirstByteOptimizationOption ) { <nl> - first_byte = re - > first_byte & 255 ; <nl> - if ( ( first_byte_caseless = ( re - > first_byte & REQ_IGNORE_CASE ) ) ) <nl> - first_byte = toLowerCase ( first_byte ) ; <nl> - } <nl> - <nl> - / * For anchored or unanchored matches , there may be a " last known required <nl> - character " set . * / <nl> - <nl> - bool req_byte_caseless = false ; <nl> - int req_byte = - 1 ; <nl> - int req_byte2 = - 1 ; <nl> - if ( re - > options & UseRequiredByteOptimizationOption ) { <nl> - req_byte = re - > req_byte & 255 ; / / FIXME : This optimization could be made to work for UTF16 chars as well . . . <nl> - req_byte_caseless = ( re - > req_byte & REQ_IGNORE_CASE ) ; <nl> - req_byte2 = flipCase ( req_byte ) ; <nl> - } <nl> - <nl> - / * Loop for handling unanchored repeated matching attempts ; for anchored regexs <nl> - the loop runs just once . * / <nl> - <nl> - const UChar * startMatch = subject + start_offset ; <nl> - const UChar * reqBytePtr = startMatch - 1 ; <nl> - bool useMultiLineFirstCharOptimization = re - > options & UseMultiLineFirstByteOptimizationOption ; <nl> - <nl> - do { <nl> - / * Reset the maximum number of extractions we might see . * / <nl> - if ( matchBlock . offsetVector ) { <nl> - int * iptr = matchBlock . offsetVector ; <nl> - int * iend = iptr + resetcount ; <nl> - while ( iptr < iend ) <nl> - * iptr + + = - 1 ; <nl> - } <nl> - <nl> - tryFirstByteOptimization ( startMatch , endSubject , first_byte , first_byte_caseless , useMultiLineFirstCharOptimization , matchBlock . startSubject + start_offset ) ; <nl> - if ( tryRequiredByteOptimization ( startMatch , endSubject , req_byte , req_byte2 , req_byte_caseless , first_byte > = 0 , reqBytePtr ) ) <nl> - break ; <nl> - <nl> - / * When a match occurs , substrings will be set for all internal extractions ; <nl> - we just need to set up the whole thing as substring 0 before returning . If <nl> - there were too many extractions , set the return code to zero . In the case <nl> - where we had to get some local store to hold offsets for backreferences , copy <nl> - those back references that we can . In this case there need not be overflow <nl> - if certain parts of the pattern were not used . * / <nl> - <nl> - / * The code starts after the JSRegExp block and the capture name table . * / <nl> - const unsigned char * start_code = ( const unsigned char * ) ( re + 1 ) ; <nl> - <nl> - int returnCode = match ( startMatch , start_code , 2 , matchBlock ) ; <nl> - <nl> - / * When the result is no match , advance the pointer to the next character <nl> - and continue . * / <nl> - if ( returnCode = = 0 ) { <nl> - startMatch + + ; <nl> - continue ; <nl> - } <nl> - <nl> - if ( returnCode ! 
= 1 ) { <nl> - ASSERT ( returnCode = = JSRegExpErrorHitLimit | | returnCode = = JSRegExpErrorNoMemory ) ; <nl> - DPRINTF ( ( " > > > > error : returning % d \ n " , returnCode ) ) ; <nl> - return returnCode ; <nl> - } <nl> - <nl> - / * We have a match ! Copy the offset information from temporary store if <nl> - necessary * / <nl> - <nl> - if ( using_temporary_offsets ) { <nl> - if ( offsetcount > = 4 ) { <nl> - memcpy ( offsets + 2 , matchBlock . offsetVector + 2 , ( offsetcount - 2 ) * sizeof ( int ) ) ; <nl> - DPRINTF ( ( " Copied offsets from temporary memory \ n " ) ) ; <nl> - } <nl> - if ( matchBlock . endOffsetTop > offsetcount ) <nl> - matchBlock . offsetOverflow = true ; <nl> - <nl> - DPRINTF ( ( " Freeing temporary memory \ n " ) ) ; <nl> - delete [ ] matchBlock . offsetVector ; <nl> - } <nl> - <nl> - returnCode = matchBlock . offsetOverflow ? 0 : matchBlock . endOffsetTop / 2 ; <nl> - <nl> - if ( offsetcount < 2 ) <nl> - returnCode = 0 ; <nl> - else { <nl> - offsets [ 0 ] = startMatch - matchBlock . startSubject ; <nl> - offsets [ 1 ] = matchBlock . endMatchPtr - matchBlock . startSubject ; <nl> - } <nl> - <nl> - DPRINTF ( ( " > > > > returning % d \ n " , returnCode ) ) ; <nl> - return returnCode ; <nl> - } while ( ! ( re - > options & IsAnchoredOption ) & & startMatch < = endSubject ) ; <nl> - <nl> - if ( using_temporary_offsets ) { <nl> - DPRINTF ( ( " Freeing temporary memory \ n " ) ) ; <nl> - delete [ ] matchBlock . offsetVector ; <nl> - } <nl> - <nl> - DPRINTF ( ( " > > > > returning PCRE_ERROR_NOMATCH \ n " ) ) ; <nl> - return JSRegExpErrorNoMatch ; <nl> - } <nl> - <nl> - } } / / namespace v8 : : jscre <nl> deleted file mode 100644 <nl> index 503005dd1a7 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_internal . h <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . 
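Stepping back for a moment before the internals header: the jsRegExpExecute ( ) loop deleted above implies a small calling contract — offsets are consumed in triples, offsets [ 0 ] / offsets [ 1 ] bound the whole match, a non-negative return counts the filled offset pairs ( 0 on overflow ), and failures come back as the JSRegExpError* codes. The sketch below is an editorial illustration, not jscre code: it assumes pcre . h exposes JSRegExp, UChar ( jscre's 16-bit character type ), jsRegExpExecute ( ) and the error codes ( taken to be negative ) in the v8 :: jscre namespace, and that the pattern was compiled elsewhere; reportFirstMatch and numCaptures are hypothetical names.

#include <cstdio>
#include <vector>
#include "pcre.h"   // assumed to declare JSRegExp, UChar and jsRegExpExecute()

using namespace v8::jscre;   // assumption: the public API lives in this namespace

// Hypothetical helper: report the bounds of the first match of an already
// compiled pattern. numCaptures is assumed to be known by the caller.
static void reportFirstMatch(const JSRegExp* re, const UChar* subject, int length,
                             int numCaptures) {
    // The exec loop rounds offsetcount down to a multiple of 3, so size the
    // vector as 3 ints per capturing group plus 3 for the whole match.
    std::vector<int> offsets(3 * (numCaptures + 1));
    int rc = jsRegExpExecute(re, subject, length, /* start_offset */ 0,
                             offsets.data(), static_cast<int>(offsets.size()));
    if (rc < 0) {
        // JSRegExpErrorNoMatch, JSRegExpErrorHitLimit, JSRegExpErrorNoMemory, ...
        std::printf("no match or error: %d\n", rc);
        return;
    }
    // offsets[0]/offsets[1] are the whole-match bounds; rc is the number of
    // offset pairs that were filled in, or 0 if the offset vector overflowed.
    std::printf("matched [%d, %d), %d pair(s) filled\n", offsets[0], offsets[1], rc);
}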
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * This header contains definitions that are shared between the different <nl> - modules , but which are not relevant to the exported API . This includes some <nl> - functions whose names all begin with " _pcre_ " . * / <nl> - <nl> - # ifndef PCRE_INTERNAL_H <nl> - # define PCRE_INTERNAL_H <nl> - <nl> - / * Bit definitions for entries in the pcre_ctypes table . * / <nl> - <nl> - # define ctype_space 0x01 <nl> - # define ctype_xdigit 0x08 <nl> - # define ctype_word 0x10 / * alphameric or ' _ ' * / <nl> - <nl> - / * Offsets for the bitmap tables in pcre_cbits . Each table contains a set <nl> - of bits for a class map . Some classes are built by combining these tables . * / <nl> - <nl> - # define cbit_space 0 / * \ s * / <nl> - # define cbit_digit 32 / * \ d * / <nl> - # define cbit_word 64 / * \ w * / <nl> - # define cbit_length 96 / * Length of the cbits table * / <nl> - <nl> - / * Offsets of the various tables from the base tables pointer , and <nl> - total length . * / <nl> - <nl> - # define lcc_offset 0 <nl> - # define fcc_offset 128 <nl> - # define cbits_offset 256 <nl> - # define ctypes_offset ( cbits_offset + cbit_length ) <nl> - # define tables_length ( ctypes_offset + 128 ) <nl> - <nl> - # ifndef DFTABLES <nl> - <nl> - / / TODO : Hook this up to something that checks assertions . <nl> - # define ASSERT ( x ) do { } while ( 0 ) <nl> - # define ASSERT_NOT_REACHED ( ) do { } while ( 0 ) <nl> - <nl> - # ifdef WIN32 <nl> - # pragma warning ( disable : 4232 ) <nl> - # pragma warning ( disable : 4244 ) <nl> - # endif <nl> - <nl> - # include " pcre . h " <nl> - <nl> - / * The value of LINK_SIZE determines the number of bytes used to store links as <nl> - offsets within the compiled regex . The default is 2 , which allows for compiled <nl> - patterns up to 64K long . * / <nl> - <nl> - # define LINK_SIZE 2 <nl> - <nl> - / * Define DEBUG to get debugging output on stdout . * / <nl> - <nl> - # if 0 <nl> - # define DEBUG <nl> - # endif <nl> - <nl> - / * Use a macro for debugging printing , ' cause that eliminates the use of # ifdef <nl> - inline , and there are * still * stupid compilers about that don ' t like indented <nl> - pre - processor statements , or at least there were when I first wrote this . After <nl> - all , it had only been about 10 years then . . . * / <nl> - <nl> - # ifdef DEBUG <nl> - # define DPRINTF ( p ) printf p <nl> - # else <nl> - # define DPRINTF ( p ) / * nothing * / <nl> - # endif <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - / * PCRE keeps offsets in its compiled code as 2 - byte quantities ( always stored <nl> - in big - endian order ) by default . These are used , for example , to link from the <nl> - start of a subpattern to its alternatives and its end . 
The use of 2 bytes per <nl> - offset limits the size of the compiled regex to around 64K , which is big enough <nl> - for almost everybody . However , I received a request for an even bigger limit . <nl> - For this reason , and also to make the code easier to maintain , the storing and <nl> - loading of offsets from the byte string is now handled by the functions that are <nl> - defined here . * / <nl> - <nl> - / * PCRE uses some other 2 - byte quantities that do not change when the size of <nl> - offsets changes . There are used for repeat counts and for other things such as <nl> - capturing parenthesis numbers in back references . * / <nl> - <nl> - static inline void put2ByteValue ( unsigned char * opcodePtr , int value ) <nl> - { <nl> - ASSERT ( value > = 0 & & value < = 0xFFFF ) ; <nl> - opcodePtr [ 0 ] = value > > 8 ; <nl> - opcodePtr [ 1 ] = value ; <nl> - } <nl> - <nl> - static inline int get2ByteValue ( const unsigned char * opcodePtr ) <nl> - { <nl> - return ( opcodePtr [ 0 ] < < 8 ) | opcodePtr [ 1 ] ; <nl> - } <nl> - <nl> - static inline void put2ByteValueAndAdvance ( unsigned char * & opcodePtr , int value ) <nl> - { <nl> - put2ByteValue ( opcodePtr , value ) ; <nl> - opcodePtr + = 2 ; <nl> - } <nl> - <nl> - static inline void putLinkValueAllowZero ( unsigned char * opcodePtr , int value ) <nl> - { <nl> - put2ByteValue ( opcodePtr , value ) ; <nl> - } <nl> - <nl> - static inline int getLinkValueAllowZero ( const unsigned char * opcodePtr ) <nl> - { <nl> - return get2ByteValue ( opcodePtr ) ; <nl> - } <nl> - <nl> - # define MAX_PATTERN_SIZE ( 1 < < 16 ) <nl> - <nl> - static inline void putLinkValue ( unsigned char * opcodePtr , int value ) <nl> - { <nl> - ASSERT ( value ) ; <nl> - putLinkValueAllowZero ( opcodePtr , value ) ; <nl> - } <nl> - <nl> - static inline int getLinkValue ( const unsigned char * opcodePtr ) <nl> - { <nl> - int value = getLinkValueAllowZero ( opcodePtr ) ; <nl> - ASSERT ( value ) ; <nl> - return value ; <nl> - } <nl> - <nl> - static inline void putLinkValueAndAdvance ( unsigned char * & opcodePtr , int value ) <nl> - { <nl> - putLinkValue ( opcodePtr , value ) ; <nl> - opcodePtr + = LINK_SIZE ; <nl> - } <nl> - <nl> - static inline void putLinkValueAllowZeroAndAdvance ( unsigned char * & opcodePtr , int value ) <nl> - { <nl> - putLinkValueAllowZero ( opcodePtr , value ) ; <nl> - opcodePtr + = LINK_SIZE ; <nl> - } <nl> - <nl> - / / FIXME : These are really more of a " compiled regexp state " than " regexp options " <nl> - enum RegExpOptions { <nl> - UseFirstByteOptimizationOption = 0x40000000 , / * first_byte is set * / <nl> - UseRequiredByteOptimizationOption = 0x20000000 , / * req_byte is set * / <nl> - UseMultiLineFirstByteOptimizationOption = 0x10000000 , / * start after \ n for multiline * / <nl> - IsAnchoredOption = 0x02000000 , / * can ' t use partial with this regex * / <nl> - IgnoreCaseOption = 0x00000001 , <nl> - MatchAcrossMultipleLinesOption = 0x00000002 <nl> - } ; <nl> - <nl> - / * Flags added to firstbyte or reqbyte ; a " non - literal " item is either a <nl> - variable - length repeat , or a anything other than literal characters . * / <nl> - <nl> - # define REQ_IGNORE_CASE 0x0100 / * indicates should ignore case * / <nl> - # define REQ_VARY 0x0200 / * reqbyte followed non - literal item * / <nl> - <nl> - / * Miscellaneous definitions * / <nl> - <nl> - / * Flag bits and data types for the extended class ( OP_XCLASS ) for classes that <nl> - contain UTF - 8 characters with values greater than 255 . 
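Returning briefly to the link helpers above: the following is a small self-contained illustration ( not jscre code; encode2 / decode2 are local stand-in names ) of the 2-byte big-endian scheme that put2ByteValue ( ) / get2ByteValue ( ) implement for link offsets.

#include <cassert>

// Standalone round-trip of the big-endian 2-byte scheme used for link offsets;
// encode2/decode2 mirror put2ByteValue()/get2ByteValue() from the code above.
static void encode2(unsigned char* p, int value) {
    assert(value >= 0 && value <= 0xFFFF);          // must fit in LINK_SIZE == 2 bytes
    p[0] = static_cast<unsigned char>(value >> 8);  // most significant byte first
    p[1] = static_cast<unsigned char>(value);
}

static int decode2(const unsigned char* p) {
    return (p[0] << 8) | p[1];
}

int main() {
    unsigned char buf[2];
    encode2(buf, 0x1234);
    assert(buf[0] == 0x12 && buf[1] == 0x34);   // stored high byte, then low byte
    assert(decode2(buf) == 0x1234);             // round-trips exactly
    // A 2-byte link caps compiled patterns at 64K, hence MAX_PATTERN_SIZE (1 << 16).
    return 0;
}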
* / <nl> - <nl> - # define XCL_NOT 0x01 / * Flag : this is a negative class * / <nl> - # define XCL_MAP 0x02 / * Flag : a 32 - byte map is present * / <nl> - <nl> - # define XCL_END 0 / * Marks end of individual items * / <nl> - # define XCL_SINGLE 1 / * Single item ( one multibyte char ) follows * / <nl> - # define XCL_RANGE 2 / * A range ( two multibyte chars ) follows * / <nl> - <nl> - / * These are escaped items that aren ' t just an encoding of a particular data <nl> - value such as \ n . They must have non - zero values , as check_escape ( ) returns <nl> - their negation . Also , they must appear in the same order as in the opcode <nl> - definitions below , up to ESC_w . The final one must be <nl> - ESC_REF as subsequent values are used for \ 1 , \ 2 , \ 3 , etc . There is are two <nl> - tests in the code for an escape > ESC_b and < = ESC_w to <nl> - detect the types that may be repeated . These are the types that consume <nl> - characters . If any new escapes are put in between that don ' t consume a <nl> - character , that code will have to change . * / <nl> - <nl> - enum { ESC_B = 1 , ESC_b , ESC_D , ESC_d , ESC_S , ESC_s , ESC_W , ESC_w , ESC_REF } ; <nl> - <nl> - / * Opcode table : OP_BRA must be last , as all values > = it are used for brackets <nl> - that extract substrings . Starting from 1 ( i . e . after OP_END ) , the values up to <nl> - OP_EOD must correspond in order to the list of escapes immediately above . <nl> - Note that whenever this list is updated , the two macro definitions that follow <nl> - must also be updated to match . * / <nl> - <nl> - # define FOR_EACH_OPCODE ( macro ) \ <nl> - macro ( END ) \ <nl> - \ <nl> - macro ( NOT_WORD_BOUNDARY ) \ <nl> - macro ( WORD_BOUNDARY ) \ <nl> - macro ( NOT_DIGIT ) \ <nl> - macro ( DIGIT ) \ <nl> - macro ( NOT_WHITESPACE ) \ <nl> - macro ( WHITESPACE ) \ <nl> - macro ( NOT_WORDCHAR ) \ <nl> - macro ( WORDCHAR ) \ <nl> - \ <nl> - macro ( NOT_NEWLINE ) \ <nl> - \ <nl> - macro ( CIRC ) \ <nl> - macro ( DOLL ) \ <nl> - macro ( BOL ) \ <nl> - macro ( EOL ) \ <nl> - macro ( CHAR ) \ <nl> - macro ( CHAR_IGNORING_CASE ) \ <nl> - macro ( ASCII_CHAR ) \ <nl> - macro ( ASCII_LETTER_IGNORING_CASE ) \ <nl> - macro ( NOT ) \ <nl> - \ <nl> - macro ( STAR ) \ <nl> - macro ( MINSTAR ) \ <nl> - macro ( PLUS ) \ <nl> - macro ( MINPLUS ) \ <nl> - macro ( QUERY ) \ <nl> - macro ( MINQUERY ) \ <nl> - macro ( UPTO ) \ <nl> - macro ( MINUPTO ) \ <nl> - macro ( EXACT ) \ <nl> - \ <nl> - macro ( NOTSTAR ) \ <nl> - macro ( NOTMINSTAR ) \ <nl> - macro ( NOTPLUS ) \ <nl> - macro ( NOTMINPLUS ) \ <nl> - macro ( NOTQUERY ) \ <nl> - macro ( NOTMINQUERY ) \ <nl> - macro ( NOTUPTO ) \ <nl> - macro ( NOTMINUPTO ) \ <nl> - macro ( NOTEXACT ) \ <nl> - \ <nl> - macro ( TYPESTAR ) \ <nl> - macro ( TYPEMINSTAR ) \ <nl> - macro ( TYPEPLUS ) \ <nl> - macro ( TYPEMINPLUS ) \ <nl> - macro ( TYPEQUERY ) \ <nl> - macro ( TYPEMINQUERY ) \ <nl> - macro ( TYPEUPTO ) \ <nl> - macro ( TYPEMINUPTO ) \ <nl> - macro ( TYPEEXACT ) \ <nl> - \ <nl> - macro ( CRSTAR ) \ <nl> - macro ( CRMINSTAR ) \ <nl> - macro ( CRPLUS ) \ <nl> - macro ( CRMINPLUS ) \ <nl> - macro ( CRQUERY ) \ <nl> - macro ( CRMINQUERY ) \ <nl> - macro ( CRRANGE ) \ <nl> - macro ( CRMINRANGE ) \ <nl> - \ <nl> - macro ( CLASS ) \ <nl> - macro ( NCLASS ) \ <nl> - macro ( XCLASS ) \ <nl> - \ <nl> - macro ( REF ) \ <nl> - \ <nl> - macro ( ALT ) \ <nl> - macro ( KET ) \ <nl> - macro ( KETRMAX ) \ <nl> - macro ( KETRMIN ) \ <nl> - \ <nl> - macro ( ASSERT ) \ <nl> - macro ( ASSERT_NOT ) \ <nl> - \ <nl> - macro ( 
BRAZERO ) \ <nl> - macro ( BRAMINZERO ) \ <nl> - macro ( BRANUMBER ) \ <nl> - macro ( BRA ) <nl> - <nl> - # define OPCODE_ENUM_VALUE ( opcode ) OP_ # # opcode , <nl> - enum { FOR_EACH_OPCODE ( OPCODE_ENUM_VALUE ) } ; <nl> - <nl> - / * WARNING WARNING WARNING : There is an implicit assumption in pcre . c and <nl> - study . c that all opcodes are less than 128 in value . This makes handling UTF - 8 <nl> - character sequences easier . * / <nl> - <nl> - / * The highest extraction number before we have to start using additional <nl> - bytes . ( Originally PCRE didn ' t have support for extraction counts higher than <nl> - this number . ) The value is limited by the number of opcodes left after OP_BRA , <nl> - i . e . 255 - OP_BRA . We actually set it a bit lower to leave room for additional <nl> - opcodes . * / <nl> - <nl> - / * FIXME : Note that OP_BRA + 100 is > 128 , so the two comments above <nl> - are in conflict ! * / <nl> - <nl> - # define EXTRACT_BASIC_MAX 100 <nl> - <nl> - / * The index of names and the <nl> - code vector run on as long as necessary after the end . We store an explicit <nl> - offset to the name table so that if a regex is compiled on one host , saved , and <nl> - then run on another where the size of pointers is different , all might still <nl> - be well . For the case of compiled - on - 4 and run - on - 8 , we include an extra <nl> - pointer that is always NULL . <nl> - * / <nl> - <nl> - struct JSRegExp { <nl> - unsigned options ; <nl> - <nl> - unsigned short top_bracket ; <nl> - unsigned short top_backref ; <nl> - <nl> - unsigned short first_byte ; <nl> - unsigned short req_byte ; <nl> - } ; <nl> - <nl> - / * Internal shared data tables . These are tables that are used by more than one <nl> - of the exported public functions . They have to be " external " in the C sense , <nl> - but are not part of the PCRE public API . The data for these tables is in the <nl> - pcre_tables . c module . 
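Returning briefly to the opcode list above: FOR_EACH_OPCODE together with OPCODE_ENUM_VALUE is a classic X-macro — the opcode list is written once and expanded into different artifacts. The demo below is a standalone sketch of the same pattern with made-up names ( DEMO_* ), not jscre's own macros.

#include <cstdio>

// One list, expanded twice: once into an enum, once into a name table.
#define FOR_EACH_DEMO_OPCODE(macro) \
    macro(END)                      \
    macro(CHAR)                     \
    macro(BRA)

#define DEMO_ENUM_VALUE(op) OP_##op,   // OP_END, OP_CHAR, OP_BRA, ...
#define DEMO_NAME_STRING(op) #op,      // "END", "CHAR", "BRA", ...

enum { FOR_EACH_DEMO_OPCODE(DEMO_ENUM_VALUE) };
static const char* const demoOpcodeNames[] = { FOR_EACH_DEMO_OPCODE(DEMO_NAME_STRING) };

int main() {
    // Enum values are assigned in list order, so they index the name table directly.
    std::printf("%d -> %s\n", OP_BRA, demoOpcodeNames[OP_BRA]);   // prints "2 -> BRA"
    return 0;
}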
* / <nl> - <nl> - # define kjs_pcre_utf8_table1_size 6 <nl> - <nl> - extern const int kjs_pcre_utf8_table1 [ 6 ] ; <nl> - extern const int kjs_pcre_utf8_table2 [ 6 ] ; <nl> - extern const int kjs_pcre_utf8_table3 [ 6 ] ; <nl> - extern const unsigned char kjs_pcre_utf8_table4 [ 0x40 ] ; <nl> - <nl> - extern const unsigned char kjs_pcre_default_tables [ tables_length ] ; <nl> - <nl> - static inline unsigned char toLowerCase ( unsigned char c ) <nl> - { <nl> - static const unsigned char * lowerCaseChars = kjs_pcre_default_tables + lcc_offset ; <nl> - return lowerCaseChars [ c ] ; <nl> - } <nl> - <nl> - static inline unsigned char flipCase ( unsigned char c ) <nl> - { <nl> - static const unsigned char * flippedCaseChars = kjs_pcre_default_tables + fcc_offset ; <nl> - return flippedCaseChars [ c ] ; <nl> - } <nl> - <nl> - static inline unsigned char classBitmapForChar ( unsigned char c ) <nl> - { <nl> - static const unsigned char * charClassBitmaps = kjs_pcre_default_tables + cbits_offset ; <nl> - return charClassBitmaps [ c ] ; <nl> - } <nl> - <nl> - static inline unsigned char charTypeForChar ( unsigned char c ) <nl> - { <nl> - const unsigned char * charTypeMap = kjs_pcre_default_tables + ctypes_offset ; <nl> - return charTypeMap [ c ] ; <nl> - } <nl> - <nl> - static inline bool isWordChar ( UChar c ) <nl> - { <nl> - return c < 128 & & ( charTypeForChar ( c ) & ctype_word ) ; <nl> - } <nl> - <nl> - static inline bool isSpaceChar ( UChar c ) <nl> - { <nl> - return ( c < 128 & & ( charTypeForChar ( c ) & ctype_space ) ) ; <nl> - } <nl> - <nl> - static inline bool isNewline ( UChar nl ) <nl> - { <nl> - return ( nl = = 0xA | | nl = = 0xD | | nl = = 0x2028 | | nl = = 0x2029 ) ; <nl> - } <nl> - <nl> - static inline bool isBracketStartOpcode ( unsigned char opcode ) <nl> - { <nl> - if ( opcode > = OP_BRA ) <nl> - return true ; <nl> - switch ( opcode ) { <nl> - case OP_ASSERT : <nl> - case OP_ASSERT_NOT : <nl> - return true ; <nl> - default : <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - static inline void advanceToEndOfBracket ( const unsigned char * & opcodePtr ) <nl> - { <nl> - ASSERT ( isBracketStartOpcode ( * opcodePtr ) | | * opcodePtr = = OP_ALT ) ; <nl> - do <nl> - opcodePtr + = getLinkValue ( opcodePtr + 1 ) ; <nl> - while ( * opcodePtr = = OP_ALT ) ; <nl> - } <nl> - <nl> - / * Internal shared functions . These are functions that are used in more <nl> - that one of the source files . They have to have external linkage , but <nl> - but are not part of the public API and so not exported from the library . * / <nl> - <nl> - extern int kjs_pcre_ucp_othercase ( unsigned ) ; <nl> - extern bool kjs_pcre_xclass ( int , const unsigned char * ) ; <nl> - <nl> - } } / / namespace v8 : : jscre <nl> - # endif <nl> - <nl> - # endif <nl> - <nl> - / * End of pcre_internal . h * / <nl> deleted file mode 100644 <nl> index 3528c7a399b . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_tables . cpp <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . 
<nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * This module contains some fixed tables that are used by more than one of the <nl> - PCRE code modules . * / <nl> - <nl> - # include " pcre_internal . h " <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Tables for UTF - 8 support * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * These are the breakpoints for different numbers of bytes in a UTF - 8 <nl> - character . * / <nl> - <nl> - const int kjs_pcre_utf8_table1 [ 6 ] = <nl> - { 0x7f , 0x7ff , 0xffff , 0x1fffff , 0x3ffffff , 0x7fffffff } ; <nl> - <nl> - / * These are the indicator bits and the mask for the data bits to set in the <nl> - first byte of a character , indexed by the number of additional bytes . * / <nl> - <nl> - const int kjs_pcre_utf8_table2 [ 6 ] = { 0 , 0xc0 , 0xe0 , 0xf0 , 0xf8 , 0xfc } ; <nl> - const int kjs_pcre_utf8_table3 [ 6 ] = { 0xff , 0x1f , 0x0f , 0x07 , 0x03 , 0x01 } ; <nl> - <nl> - / * Table of the number of extra characters , indexed by the first character <nl> - masked with 0x3f . The highest number for a valid UTF - 8 character is in fact <nl> - 0x3d . * / <nl> - <nl> - const unsigned char kjs_pcre_utf8_table4 [ 0x40 ] = { <nl> - 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , <nl> - 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , <nl> - 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , 2 , <nl> - 3 , 3 , 3 , 3 , 3 , 3 , 3 , 3 , 4 , 4 , 4 , 4 , 5 , 5 , 5 , 5 } ; <nl> - <nl> - # include " pcre_chartables . 
c " <nl> - <nl> - } } / / namespace v8 : : jscre <nl> deleted file mode 100644 <nl> index 18059b4aea2 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_ucp_searchfuncs . cpp <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - <nl> - / * This module contains code for searching the table of Unicode character <nl> - properties . * / <nl> - <nl> - # include " pcre_internal . h " <nl> - <nl> - # include " ucpinternal . h " / * Internal table details * / <nl> - # include " ucptable . cpp " / * The table itself * / <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Search table and return other case * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * If the given character is a letter , and there is another case for the <nl> - letter , return the other case . Otherwise , return - 1 . 
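The search function itself follows; as a quick illustration of the contract just stated, the sketch below exercises kjs_pcre_ucp_othercase ( ) on a few ASCII inputs whose expected results can be read off the ucp_table entries further down. It assumes pcre_internal . h is on the include path and pcre_ucp_searchfuncs . cpp is linked in.

#include <cassert>
#include "pcre_internal.h"   // declares kjs_pcre_ucp_othercase(unsigned)

int main() {
    using v8::jscre::kjs_pcre_ucp_othercase;
    // 'A' (U+0041) is a single-character entry with case offset +0x20.
    assert(kjs_pcre_ucp_othercase('A') == 'a');
    // 'a' (U+0061) carries the negative offset back to 'A'.
    assert(kjs_pcre_ucp_othercase('a') == 'A');
    // Digits sit in a range entry, and range entries report no other case.
    assert(kjs_pcre_ucp_othercase('5') == -1);
    return 0;
}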
<nl> - <nl> - Arguments : <nl> - c the character value <nl> - <nl> - Returns : the other case or - 1 if none <nl> - * / <nl> - <nl> - int kjs_pcre_ucp_othercase ( unsigned c ) <nl> - { <nl> - int bot = 0 ; <nl> - int top = sizeof ( ucp_table ) / sizeof ( cnode ) ; <nl> - int mid ; <nl> - <nl> - / * The table is searched using a binary chop . You might think that using <nl> - intermediate variables to hold some of the common expressions would speed <nl> - things up , but tests with gcc 3 . 4 . 4 on Linux showed that , on the contrary , it <nl> - makes things a lot slower . * / <nl> - <nl> - for ( ; ; ) { <nl> - if ( top < = bot ) <nl> - return - 1 ; <nl> - mid = ( bot + top ) > > 1 ; <nl> - if ( c = = ( ucp_table [ mid ] . f0 & f0_charmask ) ) <nl> - break ; <nl> - if ( c < ( ucp_table [ mid ] . f0 & f0_charmask ) ) <nl> - top = mid ; <nl> - else { <nl> - if ( ( ucp_table [ mid ] . f0 & f0_rangeflag ) & & ( c < = ( ucp_table [ mid ] . f0 & f0_charmask ) + ( ucp_table [ mid ] . f1 & f1_rangemask ) ) ) <nl> - break ; <nl> - bot = mid + 1 ; <nl> - } <nl> - } <nl> - <nl> - / * Found an entry in the table . Return - 1 for a range entry . Otherwise return <nl> - the other case if there is one , else - 1 . * / <nl> - <nl> - if ( ucp_table [ mid ] . f0 & f0_rangeflag ) <nl> - return - 1 ; <nl> - <nl> - int offset = ucp_table [ mid ] . f1 & f1_casemask ; <nl> - if ( offset & f1_caseneg ) <nl> - offset | = f1_caseneg ; <nl> - return ! offset ? - 1 : c + offset ; <nl> - } <nl> - <nl> - } } / / namespace v8 : : jscre <nl> deleted file mode 100644 <nl> index d006f4c7dd6 . . 00000000000 <nl> mmm a / src / third_party / jscre / pcre_xclass . cpp <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * This module contains an internal function that is used to match an extended <nl> - class ( one that contains characters whose values are > 255 ) . * / <nl> - <nl> - # include " pcre_internal . h " <nl> - <nl> - namespace v8 { namespace jscre { <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Match character against an XCLASS * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * This function is called to match a character against an extended class that <nl> - might contain values > 255 . <nl> - <nl> - Arguments : <nl> - c the character <nl> - data points to the flag byte of the XCLASS data <nl> - <nl> - Returns : true if character matches , else false <nl> - * / <nl> - <nl> - / * Get the next UTF - 8 character , advancing the pointer . This is called when we <nl> - know we are in UTF - 8 mode . * / <nl> - <nl> - static inline void getUTF8CharAndAdvancePointer ( int & c , const unsigned char * & subjectPtr ) <nl> - { <nl> - c = * subjectPtr + + ; <nl> - if ( ( c & 0xc0 ) = = 0xc0 ) { <nl> - int gcaa = kjs_pcre_utf8_table4 [ c & 0x3f ] ; / * Number of additional bytes * / <nl> - int gcss = 6 * gcaa ; <nl> - c = ( c & kjs_pcre_utf8_table3 [ gcaa ] ) < < gcss ; <nl> - while ( gcaa - - > 0 ) { <nl> - gcss - = 6 ; <nl> - c | = ( * subjectPtr + + & 0x3f ) < < gcss ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - bool kjs_pcre_xclass ( int c , const unsigned char * data ) <nl> - { <nl> - bool negated = ( * data & XCL_NOT ) ; <nl> - <nl> - / * Character values < 256 are matched against a bitmap , if one is present . If <nl> - not , we still carry on , because there may be ranges that start below 256 in the <nl> - additional data . * / <nl> - <nl> - if ( c < 256 ) { <nl> - if ( ( * data & XCL_MAP ) ! = 0 & & ( data [ 1 + c / 8 ] & ( 1 < < ( c & 7 ) ) ) ! = 0 ) <nl> - return ! negated ; / * char found * / <nl> - } <nl> - <nl> - / * First skip the bit map if present . Then match against the list of Unicode <nl> - properties or large chars or ranges that end with a large char . We won ' t ever <nl> - encounter XCL_PROP or XCL_NOTPROP when UCP support is not compiled . * / <nl> - <nl> - if ( ( * data + + & XCL_MAP ) ! = 0 ) <nl> - data + = 32 ; <nl> - <nl> - int t ; <nl> - while ( ( t = * data + + ) ! = XCL_END ) { <nl> - if ( t = = XCL_SINGLE ) { <nl> - int x ; <nl> - getUTF8CharAndAdvancePointer ( x , data ) ; <nl> - if ( c = = x ) <nl> - return ! negated ; <nl> - } <nl> - else if ( t = = XCL_RANGE ) { <nl> - int x , y ; <nl> - getUTF8CharAndAdvancePointer ( x , data ) ; <nl> - getUTF8CharAndAdvancePointer ( y , data ) ; <nl> - if ( c > = x & & c < = y ) <nl> - return ! 
negated ; <nl> - } <nl> - } <nl> - <nl> - return negated ; / * char did not match * / <nl> - } <nl> - <nl> - } } / / namespace v8 : : jscre <nl> deleted file mode 100644 <nl> index c8bc4aab679 . . 00000000000 <nl> mmm a / src / third_party / jscre / ucpinternal . h <nl> ppp / dev / null <nl> <nl> - / * This is JavaScriptCore ' s variant of the PCRE library . While this library <nl> - started out as a copy of PCRE , many of the features of PCRE have been <nl> - removed . This library now supports only the regular expression features <nl> - required by the JavaScript language specification , and has only the functions <nl> - needed by JavaScriptCore and the rest of WebKit . <nl> - <nl> - Originally written by Philip Hazel <nl> - Copyright ( c ) 1997 - 2006 University of Cambridge <nl> - Copyright ( C ) 2002 , 2004 , 2006 , 2007 Apple Inc . All rights reserved . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - Redistribution and use in source and binary forms , with or without <nl> - modification , are permitted provided that the following conditions are met : <nl> - <nl> - * Redistributions of source code must retain the above copyright notice , <nl> - this list of conditions and the following disclaimer . <nl> - <nl> - * Redistributions in binary form must reproduce the above copyright <nl> - notice , this list of conditions and the following disclaimer in the <nl> - documentation and / or other materials provided with the distribution . <nl> - <nl> - * Neither the name of the University of Cambridge nor the names of its <nl> - contributors may be used to endorse or promote products derived from <nl> - this software without specific prior written permission . <nl> - <nl> - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " <nl> - AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE <nl> - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE <nl> - ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE <nl> - LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR <nl> - CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF <nl> - SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS <nl> - INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN <nl> - CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) <nl> - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> - POSSIBILITY OF SUCH DAMAGE . <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - * / <nl> - <nl> - / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - * Unicode Property Table handler * <nl> - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> - <nl> - / * Internal header file defining the layout of the bits in each pair of 32 - bit <nl> - words that form a data item in the table . 
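Before the field-by-field description below, a worked decode of one table entry may help. The constants in this sketch are local copies of the f0_ / f1_ masks defined just below in this header, applied to the entry { 0x21000041 , 0x24000020 } that appears in ucptable . cpp further down; the sketch is illustrative and not part of the original sources.

#include <cassert>

// Local copies of the masks defined in ucpinternal.h (see below).
static const unsigned f0_rangeflag = 0x00f00000;  // tested to detect range entries
static const unsigned f0_charmask  = 0x001fffff;  // code point bits
static const unsigned f1_casemask  = 0x0000ffff;  // signed "other case" offset
static const unsigned f1_caseneg   = 0xffff8000;  // sign-extension bits for the offset

int main() {
    // Entry for U+0041 'A' from ucptable.cpp: { 0x21000041, 0x24000020 }.
    unsigned f0 = 0x21000041, f1 = 0x24000020;

    assert((f0 & f0_charmask) == 0x41);   // the code point itself
    assert((f0 & f0_rangeflag) == 0);     // a single character, not a range

    int offset = static_cast<int>(f1 & f1_casemask);
    if (offset & f1_caseneg)              // negative offsets get sign-extended
        offset |= f1_caseneg;
    assert(0x41 + offset == 0x61);        // the other case is U+0061 'a'
    return 0;
}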
* / <nl> - <nl> - typedef struct cnode { <nl> - unsigned f0 ; <nl> - unsigned f1 ; <nl> - } cnode ; <nl> - <nl> - / * Things for the f0 field * / <nl> - <nl> - # define f0_scriptmask 0xff000000 / * Mask for script field * / <nl> - # define f0_scriptshift 24 / * Shift for script value * / <nl> - # define f0_rangeflag 0x00f00000 / * Flag for a range item * / <nl> - # define f0_charmask 0x001fffff / * Mask for code point value * / <nl> - <nl> - / * Things for the f1 field * / <nl> - <nl> - # define f1_typemask 0xfc000000 / * Mask for char type field * / <nl> - # define f1_typeshift 26 / * Shift for the type field * / <nl> - # define f1_rangemask 0x0000ffff / * Mask for a range offset * / <nl> - # define f1_casemask 0x0000ffff / * Mask for a case offset * / <nl> - # define f1_caseneg 0xffff8000 / * Bits for negation * / <nl> - <nl> - / * The data consists of a vector of structures of type cnode . The two unsigned <nl> - 32 - bit integers are used as follows : <nl> - <nl> - ( f0 ) ( 1 ) The most significant byte holds the script number . The numbers are <nl> - defined by the enum in ucp . h . <nl> - <nl> - ( 2 ) The 0x00800000 bit is set if this entry defines a range of characters . <nl> - It is not set if this entry defines a single character <nl> - <nl> - ( 3 ) The 0x00600000 bits are spare . <nl> - <nl> - ( 4 ) The 0x001fffff bits contain the code point . No Unicode code point will <nl> - ever be greater than 0x0010ffff , so this should be OK for ever . <nl> - <nl> - ( f1 ) ( 1 ) The 0xfc000000 bits contain the character type number . The numbers are <nl> - defined by an enum in ucp . h . <nl> - <nl> - ( 2 ) The 0x03ff0000 bits are spare . <nl> - <nl> - ( 3 ) The 0x0000ffff bits contain EITHER the unsigned offset to the top of <nl> - range if this entry defines a range , OR the * signed * offset to the <nl> - character ' s " other case " partner if this entry defines a single <nl> - character . There is no partner if the value is zero . <nl> - <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - | script ( 8 ) | . | . | . | codepoint ( 21 ) | | type ( 6 ) | . | . | spare ( 8 ) | offset ( 16 ) | <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> - | | | | | <nl> - | | | - > spare | | - > spare <nl> - | | | <nl> - | | - > spare | - > spare <nl> - | <nl> - | - > range flag <nl> - <nl> - The upper / lower casing information is set only for characters that come in <nl> - pairs . The non - one - to - one mappings in the Unicode data are ignored . <nl> - <nl> - When searching the data , proceed as follows : <nl> - <nl> - ( 1 ) Set up for a binary chop search . <nl> - <nl> - ( 2 ) If the top is not greater than the bottom , the character is not in the <nl> - table . Its type must therefore be " Cn " ( " Undefined " ) . <nl> - <nl> - ( 3 ) Find the middle vector element . <nl> - <nl> - ( 4 ) Extract the code point and compare . If equal , we are done . <nl> - <nl> - ( 5 ) If the test character is smaller , set the top to the current point , and <nl> - goto ( 2 ) . <nl> - <nl> - ( 6 ) If the current entry defines a range , compute the last character by adding <nl> - the offset , and see if the test character is within the range . If it is , <nl> - we are done . <nl> - <nl> - ( 7 ) Otherwise , set the bottom to one element past the current point and goto <nl> - ( 2 ) . <nl> - * / <nl> - <nl> - / * End of ucpinternal . h * / <nl> deleted file mode 100644 <nl> index 011f7f57244 . . 
00000000000 <nl> mmm a / src / third_party / jscre / ucptable . cpp <nl> ppp / dev / null <nl> <nl> - / * This source module is automatically generated from the Unicode <nl> - property table . See ucpinternal . h for a description of the layout . * / <nl> - <nl> - static const cnode ucp_table [ ] = { <nl> - { 0x09800000 , 0x0000001f } , <nl> - { 0x09000020 , 0x74000000 } , <nl> - { 0x09800021 , 0x54000002 } , <nl> - { 0x09000024 , 0x5c000000 } , <nl> - { 0x09800025 , 0x54000002 } , <nl> - { 0x09000028 , 0x58000000 } , <nl> - { 0x09000029 , 0x48000000 } , <nl> - { 0x0900002a , 0x54000000 } , <nl> - { 0x0900002b , 0x64000000 } , <nl> - { 0x0900002c , 0x54000000 } , <nl> - { 0x0900002d , 0x44000000 } , <nl> - { 0x0980002e , 0x54000001 } , <nl> - { 0x09800030 , 0x34000009 } , <nl> - { 0x0980003a , 0x54000001 } , <nl> - { 0x0980003c , 0x64000002 } , <nl> - { 0x0980003f , 0x54000001 } , <nl> - { 0x21000041 , 0x24000020 } , <nl> - { 0x21000042 , 0x24000020 } , <nl> - { 0x21000043 , 0x24000020 } , <nl> - { 0x21000044 , 0x24000020 } , <nl> - { 0x21000045 , 0x24000020 } , <nl> - { 0x21000046 , 0x24000020 } , <nl> - { 0x21000047 , 0x24000020 } , <nl> - { 0x21000048 , 0x24000020 } , <nl> - { 0x21000049 , 0x24000020 } , <nl> - { 0x2100004a , 0x24000020 } , <nl> - { 0x2100004b , 0x24000020 } , <nl> - { 0x2100004c , 0x24000020 } , <nl> - { 0x2100004d , 0x24000020 } , <nl> - { 0x2100004e , 0x24000020 } , <nl> - { 0x2100004f , 0x24000020 } , <nl> - { 0x21000050 , 0x24000020 } , <nl> - { 0x21000051 , 0x24000020 } , <nl> - { 0x21000052 , 0x24000020 } , <nl> - { 0x21000053 , 0x24000020 } , <nl> - { 0x21000054 , 0x24000020 } , <nl> - { 0x21000055 , 0x24000020 } , <nl> - { 0x21000056 , 0x24000020 } , <nl> - { 0x21000057 , 0x24000020 } , <nl> - { 0x21000058 , 0x24000020 } , <nl> - { 0x21000059 , 0x24000020 } , <nl> - { 0x2100005a , 0x24000020 } , <nl> - { 0x0900005b , 0x58000000 } , <nl> - { 0x0900005c , 0x54000000 } , <nl> - { 0x0900005d , 0x48000000 } , <nl> - { 0x0900005e , 0x60000000 } , <nl> - { 0x0900005f , 0x40000000 } , <nl> - { 0x09000060 , 0x60000000 } , <nl> - { 0x21000061 , 0x1400ffe0 } , <nl> - { 0x21000062 , 0x1400ffe0 } , <nl> - { 0x21000063 , 0x1400ffe0 } , <nl> - { 0x21000064 , 0x1400ffe0 } , <nl> - { 0x21000065 , 0x1400ffe0 } , <nl> - { 0x21000066 , 0x1400ffe0 } , <nl> - { 0x21000067 , 0x1400ffe0 } , <nl> - { 0x21000068 , 0x1400ffe0 } , <nl> - { 0x21000069 , 0x1400ffe0 } , <nl> - { 0x2100006a , 0x1400ffe0 } , <nl> - { 0x2100006b , 0x1400ffe0 } , <nl> - { 0x2100006c , 0x1400ffe0 } , <nl> - { 0x2100006d , 0x1400ffe0 } , <nl> - { 0x2100006e , 0x1400ffe0 } , <nl> - { 0x2100006f , 0x1400ffe0 } , <nl> - { 0x21000070 , 0x1400ffe0 } , <nl> - { 0x21000071 , 0x1400ffe0 } , <nl> - { 0x21000072 , 0x1400ffe0 } , <nl> - { 0x21000073 , 0x1400ffe0 } , <nl> - { 0x21000074 , 0x1400ffe0 } , <nl> - { 0x21000075 , 0x1400ffe0 } , <nl> - { 0x21000076 , 0x1400ffe0 } , <nl> - { 0x21000077 , 0x1400ffe0 } , <nl> - { 0x21000078 , 0x1400ffe0 } , <nl> - { 0x21000079 , 0x1400ffe0 } , <nl> - { 0x2100007a , 0x1400ffe0 } , <nl> - { 0x0900007b , 0x58000000 } , <nl> - { 0x0900007c , 0x64000000 } , <nl> - { 0x0900007d , 0x48000000 } , <nl> - { 0x0900007e , 0x64000000 } , <nl> - { 0x0980007f , 0x00000020 } , <nl> - { 0x090000a0 , 0x74000000 } , <nl> - { 0x090000a1 , 0x54000000 } , <nl> - { 0x098000a2 , 0x5c000003 } , <nl> - { 0x098000a6 , 0x68000001 } , <nl> - { 0x090000a8 , 0x60000000 } , <nl> - { 0x090000a9 , 0x68000000 } , <nl> - { 0x210000aa , 0x14000000 } , <nl> - { 0x090000ab , 0x50000000 } , <nl> - { 0x090000ac , 0x64000000 
} , <nl> - { 0x090000ad , 0x04000000 } , <nl> - { 0x090000ae , 0x68000000 } , <nl> - { 0x090000af , 0x60000000 } , <nl> - { 0x090000b0 , 0x68000000 } , <nl> - { 0x090000b1 , 0x64000000 } , <nl> - { 0x098000b2 , 0x3c000001 } , <nl> - { 0x090000b4 , 0x60000000 } , <nl> - { 0x090000b5 , 0x140002e7 } , <nl> - { 0x090000b6 , 0x68000000 } , <nl> - { 0x090000b7 , 0x54000000 } , <nl> - { 0x090000b8 , 0x60000000 } , <nl> - { 0x090000b9 , 0x3c000000 } , <nl> - { 0x210000ba , 0x14000000 } , <nl> - { 0x090000bb , 0x4c000000 } , <nl> - { 0x098000bc , 0x3c000002 } , <nl> - { 0x090000bf , 0x54000000 } , <nl> - { 0x210000c0 , 0x24000020 } , <nl> - { 0x210000c1 , 0x24000020 } , <nl> - { 0x210000c2 , 0x24000020 } , <nl> - { 0x210000c3 , 0x24000020 } , <nl> - { 0x210000c4 , 0x24000020 } , <nl> - { 0x210000c5 , 0x24000020 } , <nl> - { 0x210000c6 , 0x24000020 } , <nl> - { 0x210000c7 , 0x24000020 } , <nl> - { 0x210000c8 , 0x24000020 } , <nl> - { 0x210000c9 , 0x24000020 } , <nl> - { 0x210000ca , 0x24000020 } , <nl> - { 0x210000cb , 0x24000020 } , <nl> - { 0x210000cc , 0x24000020 } , <nl> - { 0x210000cd , 0x24000020 } , <nl> - { 0x210000ce , 0x24000020 } , <nl> - { 0x210000cf , 0x24000020 } , <nl> - { 0x210000d0 , 0x24000020 } , <nl> - { 0x210000d1 , 0x24000020 } , <nl> - { 0x210000d2 , 0x24000020 } , <nl> - { 0x210000d3 , 0x24000020 } , <nl> - { 0x210000d4 , 0x24000020 } , <nl> - { 0x210000d5 , 0x24000020 } , <nl> - { 0x210000d6 , 0x24000020 } , <nl> - { 0x090000d7 , 0x64000000 } , <nl> - { 0x210000d8 , 0x24000020 } , <nl> - { 0x210000d9 , 0x24000020 } , <nl> - { 0x210000da , 0x24000020 } , <nl> - { 0x210000db , 0x24000020 } , <nl> - { 0x210000dc , 0x24000020 } , <nl> - { 0x210000dd , 0x24000020 } , <nl> - { 0x210000de , 0x24000020 } , <nl> - { 0x210000df , 0x14000000 } , <nl> - { 0x210000e0 , 0x1400ffe0 } , <nl> - { 0x210000e1 , 0x1400ffe0 } , <nl> - { 0x210000e2 , 0x1400ffe0 } , <nl> - { 0x210000e3 , 0x1400ffe0 } , <nl> - { 0x210000e4 , 0x1400ffe0 } , <nl> - { 0x210000e5 , 0x1400ffe0 } , <nl> - { 0x210000e6 , 0x1400ffe0 } , <nl> - { 0x210000e7 , 0x1400ffe0 } , <nl> - { 0x210000e8 , 0x1400ffe0 } , <nl> - { 0x210000e9 , 0x1400ffe0 } , <nl> - { 0x210000ea , 0x1400ffe0 } , <nl> - { 0x210000eb , 0x1400ffe0 } , <nl> - { 0x210000ec , 0x1400ffe0 } , <nl> - { 0x210000ed , 0x1400ffe0 } , <nl> - { 0x210000ee , 0x1400ffe0 } , <nl> - { 0x210000ef , 0x1400ffe0 } , <nl> - { 0x210000f0 , 0x1400ffe0 } , <nl> - { 0x210000f1 , 0x1400ffe0 } , <nl> - { 0x210000f2 , 0x1400ffe0 } , <nl> - { 0x210000f3 , 0x1400ffe0 } , <nl> - { 0x210000f4 , 0x1400ffe0 } , <nl> - { 0x210000f5 , 0x1400ffe0 } , <nl> - { 0x210000f6 , 0x1400ffe0 } , <nl> - { 0x090000f7 , 0x64000000 } , <nl> - { 0x210000f8 , 0x1400ffe0 } , <nl> - { 0x210000f9 , 0x1400ffe0 } , <nl> - { 0x210000fa , 0x1400ffe0 } , <nl> - { 0x210000fb , 0x1400ffe0 } , <nl> - { 0x210000fc , 0x1400ffe0 } , <nl> - { 0x210000fd , 0x1400ffe0 } , <nl> - { 0x210000fe , 0x1400ffe0 } , <nl> - { 0x210000ff , 0x14000079 } , <nl> - { 0x21000100 , 0x24000001 } , <nl> - { 0x21000101 , 0x1400ffff } , <nl> - { 0x21000102 , 0x24000001 } , <nl> - { 0x21000103 , 0x1400ffff } , <nl> - { 0x21000104 , 0x24000001 } , <nl> - { 0x21000105 , 0x1400ffff } , <nl> - { 0x21000106 , 0x24000001 } , <nl> - { 0x21000107 , 0x1400ffff } , <nl> - { 0x21000108 , 0x24000001 } , <nl> - { 0x21000109 , 0x1400ffff } , <nl> - { 0x2100010a , 0x24000001 } , <nl> - { 0x2100010b , 0x1400ffff } , <nl> - { 0x2100010c , 0x24000001 } , <nl> - { 0x2100010d , 0x1400ffff } , <nl> - { 0x2100010e , 0x24000001 } , <nl> - { 0x2100010f , 0x1400ffff } 
, <nl> - { 0x21000110 , 0x24000001 } , <nl> - { 0x21000111 , 0x1400ffff } , <nl> - { 0x21000112 , 0x24000001 } , <nl> - { 0x21000113 , 0x1400ffff } , <nl> - { 0x21000114 , 0x24000001 } , <nl> - { 0x21000115 , 0x1400ffff } , <nl> - { 0x21000116 , 0x24000001 } , <nl> - { 0x21000117 , 0x1400ffff } , <nl> - { 0x21000118 , 0x24000001 } , <nl> - { 0x21000119 , 0x1400ffff } , <nl> - { 0x2100011a , 0x24000001 } , <nl> - { 0x2100011b , 0x1400ffff } , <nl> - { 0x2100011c , 0x24000001 } , <nl> - { 0x2100011d , 0x1400ffff } , <nl> - { 0x2100011e , 0x24000001 } , <nl> - { 0x2100011f , 0x1400ffff } , <nl> - { 0x21000120 , 0x24000001 } , <nl> - { 0x21000121 , 0x1400ffff } , <nl> - { 0x21000122 , 0x24000001 } , <nl> - { 0x21000123 , 0x1400ffff } , <nl> - { 0x21000124 , 0x24000001 } , <nl> - { 0x21000125 , 0x1400ffff } , <nl> - { 0x21000126 , 0x24000001 } , <nl> - { 0x21000127 , 0x1400ffff } , <nl> - { 0x21000128 , 0x24000001 } , <nl> - { 0x21000129 , 0x1400ffff } , <nl> - { 0x2100012a , 0x24000001 } , <nl> - { 0x2100012b , 0x1400ffff } , <nl> - { 0x2100012c , 0x24000001 } , <nl> - { 0x2100012d , 0x1400ffff } , <nl> - { 0x2100012e , 0x24000001 } , <nl> - { 0x2100012f , 0x1400ffff } , <nl> - { 0x21000130 , 0x2400ff39 } , <nl> - { 0x21000131 , 0x1400ff18 } , <nl> - { 0x21000132 , 0x24000001 } , <nl> - { 0x21000133 , 0x1400ffff } , <nl> - { 0x21000134 , 0x24000001 } , <nl> - { 0x21000135 , 0x1400ffff } , <nl> - { 0x21000136 , 0x24000001 } , <nl> - { 0x21000137 , 0x1400ffff } , <nl> - { 0x21000138 , 0x14000000 } , <nl> - { 0x21000139 , 0x24000001 } , <nl> - { 0x2100013a , 0x1400ffff } , <nl> - { 0x2100013b , 0x24000001 } , <nl> - { 0x2100013c , 0x1400ffff } , <nl> - { 0x2100013d , 0x24000001 } , <nl> - { 0x2100013e , 0x1400ffff } , <nl> - { 0x2100013f , 0x24000001 } , <nl> - { 0x21000140 , 0x1400ffff } , <nl> - { 0x21000141 , 0x24000001 } , <nl> - { 0x21000142 , 0x1400ffff } , <nl> - { 0x21000143 , 0x24000001 } , <nl> - { 0x21000144 , 0x1400ffff } , <nl> - { 0x21000145 , 0x24000001 } , <nl> - { 0x21000146 , 0x1400ffff } , <nl> - { 0x21000147 , 0x24000001 } , <nl> - { 0x21000148 , 0x1400ffff } , <nl> - { 0x21000149 , 0x14000000 } , <nl> - { 0x2100014a , 0x24000001 } , <nl> - { 0x2100014b , 0x1400ffff } , <nl> - { 0x2100014c , 0x24000001 } , <nl> - { 0x2100014d , 0x1400ffff } , <nl> - { 0x2100014e , 0x24000001 } , <nl> - { 0x2100014f , 0x1400ffff } , <nl> - { 0x21000150 , 0x24000001 } , <nl> - { 0x21000151 , 0x1400ffff } , <nl> - { 0x21000152 , 0x24000001 } , <nl> - { 0x21000153 , 0x1400ffff } , <nl> - { 0x21000154 , 0x24000001 } , <nl> - { 0x21000155 , 0x1400ffff } , <nl> - { 0x21000156 , 0x24000001 } , <nl> - { 0x21000157 , 0x1400ffff } , <nl> - { 0x21000158 , 0x24000001 } , <nl> - { 0x21000159 , 0x1400ffff } , <nl> - { 0x2100015a , 0x24000001 } , <nl> - { 0x2100015b , 0x1400ffff } , <nl> - { 0x2100015c , 0x24000001 } , <nl> - { 0x2100015d , 0x1400ffff } , <nl> - { 0x2100015e , 0x24000001 } , <nl> - { 0x2100015f , 0x1400ffff } , <nl> - { 0x21000160 , 0x24000001 } , <nl> - { 0x21000161 , 0x1400ffff } , <nl> - { 0x21000162 , 0x24000001 } , <nl> - { 0x21000163 , 0x1400ffff } , <nl> - { 0x21000164 , 0x24000001 } , <nl> - { 0x21000165 , 0x1400ffff } , <nl> - { 0x21000166 , 0x24000001 } , <nl> - { 0x21000167 , 0x1400ffff } , <nl> - { 0x21000168 , 0x24000001 } , <nl> - { 0x21000169 , 0x1400ffff } , <nl> - { 0x2100016a , 0x24000001 } , <nl> - { 0x2100016b , 0x1400ffff } , <nl> - { 0x2100016c , 0x24000001 } , <nl> - { 0x2100016d , 0x1400ffff } , <nl> - { 0x2100016e , 0x24000001 } , <nl> - { 0x2100016f , 0x1400ffff } , 
<nl> - { 0x21000170 , 0x24000001 } , <nl> - { 0x21000171 , 0x1400ffff } , <nl> - { 0x21000172 , 0x24000001 } , <nl> - { 0x21000173 , 0x1400ffff } , <nl> - { 0x21000174 , 0x24000001 } , <nl> - { 0x21000175 , 0x1400ffff } , <nl> - { 0x21000176 , 0x24000001 } , <nl> - { 0x21000177 , 0x1400ffff } , <nl> - { 0x21000178 , 0x2400ff87 } , <nl> - { 0x21000179 , 0x24000001 } , <nl> - { 0x2100017a , 0x1400ffff } , <nl> - { 0x2100017b , 0x24000001 } , <nl> - { 0x2100017c , 0x1400ffff } , <nl> - { 0x2100017d , 0x24000001 } , <nl> - { 0x2100017e , 0x1400ffff } , <nl> - { 0x2100017f , 0x1400fed4 } , <nl> - { 0x21000180 , 0x14000000 } , <nl> - { 0x21000181 , 0x240000d2 } , <nl> - { 0x21000182 , 0x24000001 } , <nl> - { 0x21000183 , 0x1400ffff } , <nl> - { 0x21000184 , 0x24000001 } , <nl> - { 0x21000185 , 0x1400ffff } , <nl> - { 0x21000186 , 0x240000ce } , <nl> - { 0x21000187 , 0x24000001 } , <nl> - { 0x21000188 , 0x1400ffff } , <nl> - { 0x21000189 , 0x240000cd } , <nl> - { 0x2100018a , 0x240000cd } , <nl> - { 0x2100018b , 0x24000001 } , <nl> - { 0x2100018c , 0x1400ffff } , <nl> - { 0x2100018d , 0x14000000 } , <nl> - { 0x2100018e , 0x2400004f } , <nl> - { 0x2100018f , 0x240000ca } , <nl> - { 0x21000190 , 0x240000cb } , <nl> - { 0x21000191 , 0x24000001 } , <nl> - { 0x21000192 , 0x1400ffff } , <nl> - { 0x21000193 , 0x240000cd } , <nl> - { 0x21000194 , 0x240000cf } , <nl> - { 0x21000195 , 0x14000061 } , <nl> - { 0x21000196 , 0x240000d3 } , <nl> - { 0x21000197 , 0x240000d1 } , <nl> - { 0x21000198 , 0x24000001 } , <nl> - { 0x21000199 , 0x1400ffff } , <nl> - { 0x2100019a , 0x140000a3 } , <nl> - { 0x2100019b , 0x14000000 } , <nl> - { 0x2100019c , 0x240000d3 } , <nl> - { 0x2100019d , 0x240000d5 } , <nl> - { 0x2100019e , 0x14000082 } , <nl> - { 0x2100019f , 0x240000d6 } , <nl> - { 0x210001a0 , 0x24000001 } , <nl> - { 0x210001a1 , 0x1400ffff } , <nl> - { 0x210001a2 , 0x24000001 } , <nl> - { 0x210001a3 , 0x1400ffff } , <nl> - { 0x210001a4 , 0x24000001 } , <nl> - { 0x210001a5 , 0x1400ffff } , <nl> - { 0x210001a6 , 0x240000da } , <nl> - { 0x210001a7 , 0x24000001 } , <nl> - { 0x210001a8 , 0x1400ffff } , <nl> - { 0x210001a9 , 0x240000da } , <nl> - { 0x218001aa , 0x14000001 } , <nl> - { 0x210001ac , 0x24000001 } , <nl> - { 0x210001ad , 0x1400ffff } , <nl> - { 0x210001ae , 0x240000da } , <nl> - { 0x210001af , 0x24000001 } , <nl> - { 0x210001b0 , 0x1400ffff } , <nl> - { 0x210001b1 , 0x240000d9 } , <nl> - { 0x210001b2 , 0x240000d9 } , <nl> - { 0x210001b3 , 0x24000001 } , <nl> - { 0x210001b4 , 0x1400ffff } , <nl> - { 0x210001b5 , 0x24000001 } , <nl> - { 0x210001b6 , 0x1400ffff } , <nl> - { 0x210001b7 , 0x240000db } , <nl> - { 0x210001b8 , 0x24000001 } , <nl> - { 0x210001b9 , 0x1400ffff } , <nl> - { 0x210001ba , 0x14000000 } , <nl> - { 0x210001bb , 0x1c000000 } , <nl> - { 0x210001bc , 0x24000001 } , <nl> - { 0x210001bd , 0x1400ffff } , <nl> - { 0x210001be , 0x14000000 } , <nl> - { 0x210001bf , 0x14000038 } , <nl> - { 0x218001c0 , 0x1c000003 } , <nl> - { 0x210001c4 , 0x24000002 } , <nl> - { 0x210001c5 , 0x2000ffff } , <nl> - { 0x210001c6 , 0x1400fffe } , <nl> - { 0x210001c7 , 0x24000002 } , <nl> - { 0x210001c8 , 0x2000ffff } , <nl> - { 0x210001c9 , 0x1400fffe } , <nl> - { 0x210001ca , 0x24000002 } , <nl> - { 0x210001cb , 0x2000ffff } , <nl> - { 0x210001cc , 0x1400fffe } , <nl> - { 0x210001cd , 0x24000001 } , <nl> - { 0x210001ce , 0x1400ffff } , <nl> - { 0x210001cf , 0x24000001 } , <nl> - { 0x210001d0 , 0x1400ffff } , <nl> - { 0x210001d1 , 0x24000001 } , <nl> - { 0x210001d2 , 0x1400ffff } , <nl> - { 0x210001d3 , 0x24000001 } , 
<nl> - { 0x210001d4 , 0x1400ffff } , <nl> - { 0x210001d5 , 0x24000001 } , <nl> - { 0x210001d6 , 0x1400ffff } , <nl> - { 0x210001d7 , 0x24000001 } , <nl> - { 0x210001d8 , 0x1400ffff } , <nl> - { 0x210001d9 , 0x24000001 } , <nl> - { 0x210001da , 0x1400ffff } , <nl> - { 0x210001db , 0x24000001 } , <nl> - { 0x210001dc , 0x1400ffff } , <nl> - { 0x210001dd , 0x1400ffb1 } , <nl> - { 0x210001de , 0x24000001 } , <nl> - { 0x210001df , 0x1400ffff } , <nl> - { 0x210001e0 , 0x24000001 } , <nl> - { 0x210001e1 , 0x1400ffff } , <nl> - { 0x210001e2 , 0x24000001 } , <nl> - { 0x210001e3 , 0x1400ffff } , <nl> - { 0x210001e4 , 0x24000001 } , <nl> - { 0x210001e5 , 0x1400ffff } , <nl> - { 0x210001e6 , 0x24000001 } , <nl> - { 0x210001e7 , 0x1400ffff } , <nl> - { 0x210001e8 , 0x24000001 } , <nl> - { 0x210001e9 , 0x1400ffff } , <nl> - { 0x210001ea , 0x24000001 } , <nl> - { 0x210001eb , 0x1400ffff } , <nl> - { 0x210001ec , 0x24000001 } , <nl> - { 0x210001ed , 0x1400ffff } , <nl> - { 0x210001ee , 0x24000001 } , <nl> - { 0x210001ef , 0x1400ffff } , <nl> - { 0x210001f0 , 0x14000000 } , <nl> - { 0x210001f1 , 0x24000002 } , <nl> - { 0x210001f2 , 0x2000ffff } , <nl> - { 0x210001f3 , 0x1400fffe } , <nl> - { 0x210001f4 , 0x24000001 } , <nl> - { 0x210001f5 , 0x1400ffff } , <nl> - { 0x210001f6 , 0x2400ff9f } , <nl> - { 0x210001f7 , 0x2400ffc8 } , <nl> - { 0x210001f8 , 0x24000001 } , <nl> - { 0x210001f9 , 0x1400ffff } , <nl> - { 0x210001fa , 0x24000001 } , <nl> - { 0x210001fb , 0x1400ffff } , <nl> - { 0x210001fc , 0x24000001 } , <nl> - { 0x210001fd , 0x1400ffff } , <nl> - { 0x210001fe , 0x24000001 } , <nl> - { 0x210001ff , 0x1400ffff } , <nl> - { 0x21000200 , 0x24000001 } , <nl> - { 0x21000201 , 0x1400ffff } , <nl> - { 0x21000202 , 0x24000001 } , <nl> - { 0x21000203 , 0x1400ffff } , <nl> - { 0x21000204 , 0x24000001 } , <nl> - { 0x21000205 , 0x1400ffff } , <nl> - { 0x21000206 , 0x24000001 } , <nl> - { 0x21000207 , 0x1400ffff } , <nl> - { 0x21000208 , 0x24000001 } , <nl> - { 0x21000209 , 0x1400ffff } , <nl> - { 0x2100020a , 0x24000001 } , <nl> - { 0x2100020b , 0x1400ffff } , <nl> - { 0x2100020c , 0x24000001 } , <nl> - { 0x2100020d , 0x1400ffff } , <nl> - { 0x2100020e , 0x24000001 } , <nl> - { 0x2100020f , 0x1400ffff } , <nl> - { 0x21000210 , 0x24000001 } , <nl> - { 0x21000211 , 0x1400ffff } , <nl> - { 0x21000212 , 0x24000001 } , <nl> - { 0x21000213 , 0x1400ffff } , <nl> - { 0x21000214 , 0x24000001 } , <nl> - { 0x21000215 , 0x1400ffff } , <nl> - { 0x21000216 , 0x24000001 } , <nl> - { 0x21000217 , 0x1400ffff } , <nl> - { 0x21000218 , 0x24000001 } , <nl> - { 0x21000219 , 0x1400ffff } , <nl> - { 0x2100021a , 0x24000001 } , <nl> - { 0x2100021b , 0x1400ffff } , <nl> - { 0x2100021c , 0x24000001 } , <nl> - { 0x2100021d , 0x1400ffff } , <nl> - { 0x2100021e , 0x24000001 } , <nl> - { 0x2100021f , 0x1400ffff } , <nl> - { 0x21000220 , 0x2400ff7e } , <nl> - { 0x21000221 , 0x14000000 } , <nl> - { 0x21000222 , 0x24000001 } , <nl> - { 0x21000223 , 0x1400ffff } , <nl> - { 0x21000224 , 0x24000001 } , <nl> - { 0x21000225 , 0x1400ffff } , <nl> - { 0x21000226 , 0x24000001 } , <nl> - { 0x21000227 , 0x1400ffff } , <nl> - { 0x21000228 , 0x24000001 } , <nl> - { 0x21000229 , 0x1400ffff } , <nl> - { 0x2100022a , 0x24000001 } , <nl> - { 0x2100022b , 0x1400ffff } , <nl> - { 0x2100022c , 0x24000001 } , <nl> - { 0x2100022d , 0x1400ffff } , <nl> - { 0x2100022e , 0x24000001 } , <nl> - { 0x2100022f , 0x1400ffff } , <nl> - { 0x21000230 , 0x24000001 } , <nl> - { 0x21000231 , 0x1400ffff } , <nl> - { 0x21000232 , 0x24000001 } , <nl> - { 0x21000233 , 0x1400ffff } , 
<nl> - { 0x21800234 , 0x14000005 } , <nl> - { 0x2100023a , 0x24000000 } , <nl> - { 0x2100023b , 0x24000001 } , <nl> - { 0x2100023c , 0x1400ffff } , <nl> - { 0x2100023d , 0x2400ff5d } , <nl> - { 0x2100023e , 0x24000000 } , <nl> - { 0x2180023f , 0x14000001 } , <nl> - { 0x21000241 , 0x24000053 } , <nl> - { 0x21800250 , 0x14000002 } , <nl> - { 0x21000253 , 0x1400ff2e } , <nl> - { 0x21000254 , 0x1400ff32 } , <nl> - { 0x21000255 , 0x14000000 } , <nl> - { 0x21000256 , 0x1400ff33 } , <nl> - { 0x21000257 , 0x1400ff33 } , <nl> - { 0x21000258 , 0x14000000 } , <nl> - { 0x21000259 , 0x1400ff36 } , <nl> - { 0x2100025a , 0x14000000 } , <nl> - { 0x2100025b , 0x1400ff35 } , <nl> - { 0x2180025c , 0x14000003 } , <nl> - { 0x21000260 , 0x1400ff33 } , <nl> - { 0x21800261 , 0x14000001 } , <nl> - { 0x21000263 , 0x1400ff31 } , <nl> - { 0x21800264 , 0x14000003 } , <nl> - { 0x21000268 , 0x1400ff2f } , <nl> - { 0x21000269 , 0x1400ff2d } , <nl> - { 0x2180026a , 0x14000004 } , <nl> - { 0x2100026f , 0x1400ff2d } , <nl> - { 0x21800270 , 0x14000001 } , <nl> - { 0x21000272 , 0x1400ff2b } , <nl> - { 0x21800273 , 0x14000001 } , <nl> - { 0x21000275 , 0x1400ff2a } , <nl> - { 0x21800276 , 0x14000009 } , <nl> - { 0x21000280 , 0x1400ff26 } , <nl> - { 0x21800281 , 0x14000001 } , <nl> - { 0x21000283 , 0x1400ff26 } , <nl> - { 0x21800284 , 0x14000003 } , <nl> - { 0x21000288 , 0x1400ff26 } , <nl> - { 0x21000289 , 0x14000000 } , <nl> - { 0x2100028a , 0x1400ff27 } , <nl> - { 0x2100028b , 0x1400ff27 } , <nl> - { 0x2180028c , 0x14000005 } , <nl> - { 0x21000292 , 0x1400ff25 } , <nl> - { 0x21000293 , 0x14000000 } , <nl> - { 0x21000294 , 0x1400ffad } , <nl> - { 0x21800295 , 0x1400001a } , <nl> - { 0x218002b0 , 0x18000011 } , <nl> - { 0x098002c2 , 0x60000003 } , <nl> - { 0x098002c6 , 0x1800000b } , <nl> - { 0x098002d2 , 0x6000000d } , <nl> - { 0x218002e0 , 0x18000004 } , <nl> - { 0x098002e5 , 0x60000008 } , <nl> - { 0x090002ee , 0x18000000 } , <nl> - { 0x098002ef , 0x60000010 } , <nl> - { 0x1b800300 , 0x30000044 } , <nl> - { 0x1b000345 , 0x30000054 } , <nl> - { 0x1b800346 , 0x30000029 } , <nl> - { 0x13800374 , 0x60000001 } , <nl> - { 0x1300037a , 0x18000000 } , <nl> - { 0x0900037e , 0x54000000 } , <nl> - { 0x13800384 , 0x60000001 } , <nl> - { 0x13000386 , 0x24000026 } , <nl> - { 0x09000387 , 0x54000000 } , <nl> - { 0x13000388 , 0x24000025 } , <nl> - { 0x13000389 , 0x24000025 } , <nl> - { 0x1300038a , 0x24000025 } , <nl> - { 0x1300038c , 0x24000040 } , <nl> - { 0x1300038e , 0x2400003f } , <nl> - { 0x1300038f , 0x2400003f } , <nl> - { 0x13000390 , 0x14000000 } , <nl> - { 0x13000391 , 0x24000020 } , <nl> - { 0x13000392 , 0x24000020 } , <nl> - { 0x13000393 , 0x24000020 } , <nl> - { 0x13000394 , 0x24000020 } , <nl> - { 0x13000395 , 0x24000020 } , <nl> - { 0x13000396 , 0x24000020 } , <nl> - { 0x13000397 , 0x24000020 } , <nl> - { 0x13000398 , 0x24000020 } , <nl> - { 0x13000399 , 0x24000020 } , <nl> - { 0x1300039a , 0x24000020 } , <nl> - { 0x1300039b , 0x24000020 } , <nl> - { 0x1300039c , 0x24000020 } , <nl> - { 0x1300039d , 0x24000020 } , <nl> - { 0x1300039e , 0x24000020 } , <nl> - { 0x1300039f , 0x24000020 } , <nl> - { 0x130003a0 , 0x24000020 } , <nl> - { 0x130003a1 , 0x24000020 } , <nl> - { 0x130003a3 , 0x24000020 } , <nl> - { 0x130003a4 , 0x24000020 } , <nl> - { 0x130003a5 , 0x24000020 } , <nl> - { 0x130003a6 , 0x24000020 } , <nl> - { 0x130003a7 , 0x24000020 } , <nl> - { 0x130003a8 , 0x24000020 } , <nl> - { 0x130003a9 , 0x24000020 } , <nl> - { 0x130003aa , 0x24000020 } , <nl> - { 0x130003ab , 0x24000020 } , <nl> - { 0x130003ac , 0x1400ffda } , 
<nl> - { 0x130003ad , 0x1400ffdb } , <nl> - { 0x130003ae , 0x1400ffdb } , <nl> - { 0x130003af , 0x1400ffdb } , <nl> - { 0x130003b0 , 0x14000000 } , <nl> - { 0x130003b1 , 0x1400ffe0 } , <nl> - { 0x130003b2 , 0x1400ffe0 } , <nl> - { 0x130003b3 , 0x1400ffe0 } , <nl> - { 0x130003b4 , 0x1400ffe0 } , <nl> - { 0x130003b5 , 0x1400ffe0 } , <nl> - { 0x130003b6 , 0x1400ffe0 } , <nl> - { 0x130003b7 , 0x1400ffe0 } , <nl> - { 0x130003b8 , 0x1400ffe0 } , <nl> - { 0x130003b9 , 0x1400ffe0 } , <nl> - { 0x130003ba , 0x1400ffe0 } , <nl> - { 0x130003bb , 0x1400ffe0 } , <nl> - { 0x130003bc , 0x1400ffe0 } , <nl> - { 0x130003bd , 0x1400ffe0 } , <nl> - { 0x130003be , 0x1400ffe0 } , <nl> - { 0x130003bf , 0x1400ffe0 } , <nl> - { 0x130003c0 , 0x1400ffe0 } , <nl> - { 0x130003c1 , 0x1400ffe0 } , <nl> - { 0x130003c2 , 0x1400ffe1 } , <nl> - { 0x130003c3 , 0x1400ffe0 } , <nl> - { 0x130003c4 , 0x1400ffe0 } , <nl> - { 0x130003c5 , 0x1400ffe0 } , <nl> - { 0x130003c6 , 0x1400ffe0 } , <nl> - { 0x130003c7 , 0x1400ffe0 } , <nl> - { 0x130003c8 , 0x1400ffe0 } , <nl> - { 0x130003c9 , 0x1400ffe0 } , <nl> - { 0x130003ca , 0x1400ffe0 } , <nl> - { 0x130003cb , 0x1400ffe0 } , <nl> - { 0x130003cc , 0x1400ffc0 } , <nl> - { 0x130003cd , 0x1400ffc1 } , <nl> - { 0x130003ce , 0x1400ffc1 } , <nl> - { 0x130003d0 , 0x1400ffc2 } , <nl> - { 0x130003d1 , 0x1400ffc7 } , <nl> - { 0x138003d2 , 0x24000002 } , <nl> - { 0x130003d5 , 0x1400ffd1 } , <nl> - { 0x130003d6 , 0x1400ffca } , <nl> - { 0x130003d7 , 0x14000000 } , <nl> - { 0x130003d8 , 0x24000001 } , <nl> - { 0x130003d9 , 0x1400ffff } , <nl> - { 0x130003da , 0x24000001 } , <nl> - { 0x130003db , 0x1400ffff } , <nl> - { 0x130003dc , 0x24000001 } , <nl> - { 0x130003dd , 0x1400ffff } , <nl> - { 0x130003de , 0x24000001 } , <nl> - { 0x130003df , 0x1400ffff } , <nl> - { 0x130003e0 , 0x24000001 } , <nl> - { 0x130003e1 , 0x1400ffff } , <nl> - { 0x0a0003e2 , 0x24000001 } , <nl> - { 0x0a0003e3 , 0x1400ffff } , <nl> - { 0x0a0003e4 , 0x24000001 } , <nl> - { 0x0a0003e5 , 0x1400ffff } , <nl> - { 0x0a0003e6 , 0x24000001 } , <nl> - { 0x0a0003e7 , 0x1400ffff } , <nl> - { 0x0a0003e8 , 0x24000001 } , <nl> - { 0x0a0003e9 , 0x1400ffff } , <nl> - { 0x0a0003ea , 0x24000001 } , <nl> - { 0x0a0003eb , 0x1400ffff } , <nl> - { 0x0a0003ec , 0x24000001 } , <nl> - { 0x0a0003ed , 0x1400ffff } , <nl> - { 0x0a0003ee , 0x24000001 } , <nl> - { 0x0a0003ef , 0x1400ffff } , <nl> - { 0x130003f0 , 0x1400ffaa } , <nl> - { 0x130003f1 , 0x1400ffb0 } , <nl> - { 0x130003f2 , 0x14000007 } , <nl> - { 0x130003f3 , 0x14000000 } , <nl> - { 0x130003f4 , 0x2400ffc4 } , <nl> - { 0x130003f5 , 0x1400ffa0 } , <nl> - { 0x130003f6 , 0x64000000 } , <nl> - { 0x130003f7 , 0x24000001 } , <nl> - { 0x130003f8 , 0x1400ffff } , <nl> - { 0x130003f9 , 0x2400fff9 } , <nl> - { 0x130003fa , 0x24000001 } , <nl> - { 0x130003fb , 0x1400ffff } , <nl> - { 0x130003fc , 0x14000000 } , <nl> - { 0x138003fd , 0x24000002 } , <nl> - { 0x0c000400 , 0x24000050 } , <nl> - { 0x0c000401 , 0x24000050 } , <nl> - { 0x0c000402 , 0x24000050 } , <nl> - { 0x0c000403 , 0x24000050 } , <nl> - { 0x0c000404 , 0x24000050 } , <nl> - { 0x0c000405 , 0x24000050 } , <nl> - { 0x0c000406 , 0x24000050 } , <nl> - { 0x0c000407 , 0x24000050 } , <nl> - { 0x0c000408 , 0x24000050 } , <nl> - { 0x0c000409 , 0x24000050 } , <nl> - { 0x0c00040a , 0x24000050 } , <nl> - { 0x0c00040b , 0x24000050 } , <nl> - { 0x0c00040c , 0x24000050 } , <nl> - { 0x0c00040d , 0x24000050 } , <nl> - { 0x0c00040e , 0x24000050 } , <nl> - { 0x0c00040f , 0x24000050 } , <nl> - { 0x0c000410 , 0x24000020 } , <nl> - { 0x0c000411 , 0x24000020 } , 
<nl> - { 0x0c000412 , 0x24000020 } , <nl> - { 0x0c000413 , 0x24000020 } , <nl> - { 0x0c000414 , 0x24000020 } , <nl> - { 0x0c000415 , 0x24000020 } , <nl> - { 0x0c000416 , 0x24000020 } , <nl> - { 0x0c000417 , 0x24000020 } , <nl> - { 0x0c000418 , 0x24000020 } , <nl> - { 0x0c000419 , 0x24000020 } , <nl> - { 0x0c00041a , 0x24000020 } , <nl> - { 0x0c00041b , 0x24000020 } , <nl> - { 0x0c00041c , 0x24000020 } , <nl> - { 0x0c00041d , 0x24000020 } , <nl> - { 0x0c00041e , 0x24000020 } , <nl> - { 0x0c00041f , 0x24000020 } , <nl> - { 0x0c000420 , 0x24000020 } , <nl> - { 0x0c000421 , 0x24000020 } , <nl> - { 0x0c000422 , 0x24000020 } , <nl> - { 0x0c000423 , 0x24000020 } , <nl> - { 0x0c000424 , 0x24000020 } , <nl> - { 0x0c000425 , 0x24000020 } , <nl> - { 0x0c000426 , 0x24000020 } , <nl> - { 0x0c000427 , 0x24000020 } , <nl> - { 0x0c000428 , 0x24000020 } , <nl> - { 0x0c000429 , 0x24000020 } , <nl> - { 0x0c00042a , 0x24000020 } , <nl> - { 0x0c00042b , 0x24000020 } , <nl> - { 0x0c00042c , 0x24000020 } , <nl> - { 0x0c00042d , 0x24000020 } , <nl> - { 0x0c00042e , 0x24000020 } , <nl> - { 0x0c00042f , 0x24000020 } , <nl> - { 0x0c000430 , 0x1400ffe0 } , <nl> - { 0x0c000431 , 0x1400ffe0 } , <nl> - { 0x0c000432 , 0x1400ffe0 } , <nl> - { 0x0c000433 , 0x1400ffe0 } , <nl> - { 0x0c000434 , 0x1400ffe0 } , <nl> - { 0x0c000435 , 0x1400ffe0 } , <nl> - { 0x0c000436 , 0x1400ffe0 } , <nl> - { 0x0c000437 , 0x1400ffe0 } , <nl> - { 0x0c000438 , 0x1400ffe0 } , <nl> - { 0x0c000439 , 0x1400ffe0 } , <nl> - { 0x0c00043a , 0x1400ffe0 } , <nl> - { 0x0c00043b , 0x1400ffe0 } , <nl> - { 0x0c00043c , 0x1400ffe0 } , <nl> - { 0x0c00043d , 0x1400ffe0 } , <nl> - { 0x0c00043e , 0x1400ffe0 } , <nl> - { 0x0c00043f , 0x1400ffe0 } , <nl> - { 0x0c000440 , 0x1400ffe0 } , <nl> - { 0x0c000441 , 0x1400ffe0 } , <nl> - { 0x0c000442 , 0x1400ffe0 } , <nl> - { 0x0c000443 , 0x1400ffe0 } , <nl> - { 0x0c000444 , 0x1400ffe0 } , <nl> - { 0x0c000445 , 0x1400ffe0 } , <nl> - { 0x0c000446 , 0x1400ffe0 } , <nl> - { 0x0c000447 , 0x1400ffe0 } , <nl> - { 0x0c000448 , 0x1400ffe0 } , <nl> - { 0x0c000449 , 0x1400ffe0 } , <nl> - { 0x0c00044a , 0x1400ffe0 } , <nl> - { 0x0c00044b , 0x1400ffe0 } , <nl> - { 0x0c00044c , 0x1400ffe0 } , <nl> - { 0x0c00044d , 0x1400ffe0 } , <nl> - { 0x0c00044e , 0x1400ffe0 } , <nl> - { 0x0c00044f , 0x1400ffe0 } , <nl> - { 0x0c000450 , 0x1400ffb0 } , <nl> - { 0x0c000451 , 0x1400ffb0 } , <nl> - { 0x0c000452 , 0x1400ffb0 } , <nl> - { 0x0c000453 , 0x1400ffb0 } , <nl> - { 0x0c000454 , 0x1400ffb0 } , <nl> - { 0x0c000455 , 0x1400ffb0 } , <nl> - { 0x0c000456 , 0x1400ffb0 } , <nl> - { 0x0c000457 , 0x1400ffb0 } , <nl> - { 0x0c000458 , 0x1400ffb0 } , <nl> - { 0x0c000459 , 0x1400ffb0 } , <nl> - { 0x0c00045a , 0x1400ffb0 } , <nl> - { 0x0c00045b , 0x1400ffb0 } , <nl> - { 0x0c00045c , 0x1400ffb0 } , <nl> - { 0x0c00045d , 0x1400ffb0 } , <nl> - { 0x0c00045e , 0x1400ffb0 } , <nl> - { 0x0c00045f , 0x1400ffb0 } , <nl> - { 0x0c000460 , 0x24000001 } , <nl> - { 0x0c000461 , 0x1400ffff } , <nl> - { 0x0c000462 , 0x24000001 } , <nl> - { 0x0c000463 , 0x1400ffff } , <nl> - { 0x0c000464 , 0x24000001 } , <nl> - { 0x0c000465 , 0x1400ffff } , <nl> - { 0x0c000466 , 0x24000001 } , <nl> - { 0x0c000467 , 0x1400ffff } , <nl> - { 0x0c000468 , 0x24000001 } , <nl> - { 0x0c000469 , 0x1400ffff } , <nl> - { 0x0c00046a , 0x24000001 } , <nl> - { 0x0c00046b , 0x1400ffff } , <nl> - { 0x0c00046c , 0x24000001 } , <nl> - { 0x0c00046d , 0x1400ffff } , <nl> - { 0x0c00046e , 0x24000001 } , <nl> - { 0x0c00046f , 0x1400ffff } , <nl> - { 0x0c000470 , 0x24000001 } , <nl> - { 0x0c000471 , 0x1400ffff } , 
<nl> - { 0x0c000472 , 0x24000001 } , <nl> - { 0x0c000473 , 0x1400ffff } , <nl> - { 0x0c000474 , 0x24000001 } , <nl> - { 0x0c000475 , 0x1400ffff } , <nl> - { 0x0c000476 , 0x24000001 } , <nl> - { 0x0c000477 , 0x1400ffff } , <nl> - { 0x0c000478 , 0x24000001 } , <nl> - { 0x0c000479 , 0x1400ffff } , <nl> - { 0x0c00047a , 0x24000001 } , <nl> - { 0x0c00047b , 0x1400ffff } , <nl> - { 0x0c00047c , 0x24000001 } , <nl> - { 0x0c00047d , 0x1400ffff } , <nl> - { 0x0c00047e , 0x24000001 } , <nl> - { 0x0c00047f , 0x1400ffff } , <nl> - { 0x0c000480 , 0x24000001 } , <nl> - { 0x0c000481 , 0x1400ffff } , <nl> - { 0x0c000482 , 0x68000000 } , <nl> - { 0x0c800483 , 0x30000003 } , <nl> - { 0x0c800488 , 0x2c000001 } , <nl> - { 0x0c00048a , 0x24000001 } , <nl> - { 0x0c00048b , 0x1400ffff } , <nl> - { 0x0c00048c , 0x24000001 } , <nl> - { 0x0c00048d , 0x1400ffff } , <nl> - { 0x0c00048e , 0x24000001 } , <nl> - { 0x0c00048f , 0x1400ffff } , <nl> - { 0x0c000490 , 0x24000001 } , <nl> - { 0x0c000491 , 0x1400ffff } , <nl> - { 0x0c000492 , 0x24000001 } , <nl> - { 0x0c000493 , 0x1400ffff } , <nl> - { 0x0c000494 , 0x24000001 } , <nl> - { 0x0c000495 , 0x1400ffff } , <nl> - { 0x0c000496 , 0x24000001 } , <nl> - { 0x0c000497 , 0x1400ffff } , <nl> - { 0x0c000498 , 0x24000001 } , <nl> - { 0x0c000499 , 0x1400ffff } , <nl> - { 0x0c00049a , 0x24000001 } , <nl> - { 0x0c00049b , 0x1400ffff } , <nl> - { 0x0c00049c , 0x24000001 } , <nl> - { 0x0c00049d , 0x1400ffff } , <nl> - { 0x0c00049e , 0x24000001 } , <nl> - { 0x0c00049f , 0x1400ffff } , <nl> - { 0x0c0004a0 , 0x24000001 } , <nl> - { 0x0c0004a1 , 0x1400ffff } , <nl> - { 0x0c0004a2 , 0x24000001 } , <nl> - { 0x0c0004a3 , 0x1400ffff } , <nl> - { 0x0c0004a4 , 0x24000001 } , <nl> - { 0x0c0004a5 , 0x1400ffff } , <nl> - { 0x0c0004a6 , 0x24000001 } , <nl> - { 0x0c0004a7 , 0x1400ffff } , <nl> - { 0x0c0004a8 , 0x24000001 } , <nl> - { 0x0c0004a9 , 0x1400ffff } , <nl> - { 0x0c0004aa , 0x24000001 } , <nl> - { 0x0c0004ab , 0x1400ffff } , <nl> - { 0x0c0004ac , 0x24000001 } , <nl> - { 0x0c0004ad , 0x1400ffff } , <nl> - { 0x0c0004ae , 0x24000001 } , <nl> - { 0x0c0004af , 0x1400ffff } , <nl> - { 0x0c0004b0 , 0x24000001 } , <nl> - { 0x0c0004b1 , 0x1400ffff } , <nl> - { 0x0c0004b2 , 0x24000001 } , <nl> - { 0x0c0004b3 , 0x1400ffff } , <nl> - { 0x0c0004b4 , 0x24000001 } , <nl> - { 0x0c0004b5 , 0x1400ffff } , <nl> - { 0x0c0004b6 , 0x24000001 } , <nl> - { 0x0c0004b7 , 0x1400ffff } , <nl> - { 0x0c0004b8 , 0x24000001 } , <nl> - { 0x0c0004b9 , 0x1400ffff } , <nl> - { 0x0c0004ba , 0x24000001 } , <nl> - { 0x0c0004bb , 0x1400ffff } , <nl> - { 0x0c0004bc , 0x24000001 } , <nl> - { 0x0c0004bd , 0x1400ffff } , <nl> - { 0x0c0004be , 0x24000001 } , <nl> - { 0x0c0004bf , 0x1400ffff } , <nl> - { 0x0c0004c0 , 0x24000000 } , <nl> - { 0x0c0004c1 , 0x24000001 } , <nl> - { 0x0c0004c2 , 0x1400ffff } , <nl> - { 0x0c0004c3 , 0x24000001 } , <nl> - { 0x0c0004c4 , 0x1400ffff } , <nl> - { 0x0c0004c5 , 0x24000001 } , <nl> - { 0x0c0004c6 , 0x1400ffff } , <nl> - { 0x0c0004c7 , 0x24000001 } , <nl> - { 0x0c0004c8 , 0x1400ffff } , <nl> - { 0x0c0004c9 , 0x24000001 } , <nl> - { 0x0c0004ca , 0x1400ffff } , <nl> - { 0x0c0004cb , 0x24000001 } , <nl> - { 0x0c0004cc , 0x1400ffff } , <nl> - { 0x0c0004cd , 0x24000001 } , <nl> - { 0x0c0004ce , 0x1400ffff } , <nl> - { 0x0c0004d0 , 0x24000001 } , <nl> - { 0x0c0004d1 , 0x1400ffff } , <nl> - { 0x0c0004d2 , 0x24000001 } , <nl> - { 0x0c0004d3 , 0x1400ffff } , <nl> - { 0x0c0004d4 , 0x24000001 } , <nl> - { 0x0c0004d5 , 0x1400ffff } , <nl> - { 0x0c0004d6 , 0x24000001 } , <nl> - { 0x0c0004d7 , 0x1400ffff } , 
<nl> - { 0x0c0004d8 , 0x24000001 } , <nl> - { 0x0c0004d9 , 0x1400ffff } , <nl> - { 0x0c0004da , 0x24000001 } , <nl> - { 0x0c0004db , 0x1400ffff } , <nl> - { 0x0c0004dc , 0x24000001 } , <nl> - { 0x0c0004dd , 0x1400ffff } , <nl> - { 0x0c0004de , 0x24000001 } , <nl> - { 0x0c0004df , 0x1400ffff } , <nl> - { 0x0c0004e0 , 0x24000001 } , <nl> - { 0x0c0004e1 , 0x1400ffff } , <nl> - { 0x0c0004e2 , 0x24000001 } , <nl> - { 0x0c0004e3 , 0x1400ffff } , <nl> - { 0x0c0004e4 , 0x24000001 } , <nl> - { 0x0c0004e5 , 0x1400ffff } , <nl> - { 0x0c0004e6 , 0x24000001 } , <nl> - { 0x0c0004e7 , 0x1400ffff } , <nl> - { 0x0c0004e8 , 0x24000001 } , <nl> - { 0x0c0004e9 , 0x1400ffff } , <nl> - { 0x0c0004ea , 0x24000001 } , <nl> - { 0x0c0004eb , 0x1400ffff } , <nl> - { 0x0c0004ec , 0x24000001 } , <nl> - { 0x0c0004ed , 0x1400ffff } , <nl> - { 0x0c0004ee , 0x24000001 } , <nl> - { 0x0c0004ef , 0x1400ffff } , <nl> - { 0x0c0004f0 , 0x24000001 } , <nl> - { 0x0c0004f1 , 0x1400ffff } , <nl> - { 0x0c0004f2 , 0x24000001 } , <nl> - { 0x0c0004f3 , 0x1400ffff } , <nl> - { 0x0c0004f4 , 0x24000001 } , <nl> - { 0x0c0004f5 , 0x1400ffff } , <nl> - { 0x0c0004f6 , 0x24000001 } , <nl> - { 0x0c0004f7 , 0x1400ffff } , <nl> - { 0x0c0004f8 , 0x24000001 } , <nl> - { 0x0c0004f9 , 0x1400ffff } , <nl> - { 0x0c000500 , 0x24000001 } , <nl> - { 0x0c000501 , 0x1400ffff } , <nl> - { 0x0c000502 , 0x24000001 } , <nl> - { 0x0c000503 , 0x1400ffff } , <nl> - { 0x0c000504 , 0x24000001 } , <nl> - { 0x0c000505 , 0x1400ffff } , <nl> - { 0x0c000506 , 0x24000001 } , <nl> - { 0x0c000507 , 0x1400ffff } , <nl> - { 0x0c000508 , 0x24000001 } , <nl> - { 0x0c000509 , 0x1400ffff } , <nl> - { 0x0c00050a , 0x24000001 } , <nl> - { 0x0c00050b , 0x1400ffff } , <nl> - { 0x0c00050c , 0x24000001 } , <nl> - { 0x0c00050d , 0x1400ffff } , <nl> - { 0x0c00050e , 0x24000001 } , <nl> - { 0x0c00050f , 0x1400ffff } , <nl> - { 0x01000531 , 0x24000030 } , <nl> - { 0x01000532 , 0x24000030 } , <nl> - { 0x01000533 , 0x24000030 } , <nl> - { 0x01000534 , 0x24000030 } , <nl> - { 0x01000535 , 0x24000030 } , <nl> - { 0x01000536 , 0x24000030 } , <nl> - { 0x01000537 , 0x24000030 } , <nl> - { 0x01000538 , 0x24000030 } , <nl> - { 0x01000539 , 0x24000030 } , <nl> - { 0x0100053a , 0x24000030 } , <nl> - { 0x0100053b , 0x24000030 } , <nl> - { 0x0100053c , 0x24000030 } , <nl> - { 0x0100053d , 0x24000030 } , <nl> - { 0x0100053e , 0x24000030 } , <nl> - { 0x0100053f , 0x24000030 } , <nl> - { 0x01000540 , 0x24000030 } , <nl> - { 0x01000541 , 0x24000030 } , <nl> - { 0x01000542 , 0x24000030 } , <nl> - { 0x01000543 , 0x24000030 } , <nl> - { 0x01000544 , 0x24000030 } , <nl> - { 0x01000545 , 0x24000030 } , <nl> - { 0x01000546 , 0x24000030 } , <nl> - { 0x01000547 , 0x24000030 } , <nl> - { 0x01000548 , 0x24000030 } , <nl> - { 0x01000549 , 0x24000030 } , <nl> - { 0x0100054a , 0x24000030 } , <nl> - { 0x0100054b , 0x24000030 } , <nl> - { 0x0100054c , 0x24000030 } , <nl> - { 0x0100054d , 0x24000030 } , <nl> - { 0x0100054e , 0x24000030 } , <nl> - { 0x0100054f , 0x24000030 } , <nl> - { 0x01000550 , 0x24000030 } , <nl> - { 0x01000551 , 0x24000030 } , <nl> - { 0x01000552 , 0x24000030 } , <nl> - { 0x01000553 , 0x24000030 } , <nl> - { 0x01000554 , 0x24000030 } , <nl> - { 0x01000555 , 0x24000030 } , <nl> - { 0x01000556 , 0x24000030 } , <nl> - { 0x01000559 , 0x18000000 } , <nl> - { 0x0180055a , 0x54000005 } , <nl> - { 0x01000561 , 0x1400ffd0 } , <nl> - { 0x01000562 , 0x1400ffd0 } , <nl> - { 0x01000563 , 0x1400ffd0 } , <nl> - { 0x01000564 , 0x1400ffd0 } , <nl> - { 0x01000565 , 0x1400ffd0 } , <nl> - { 0x01000566 , 0x1400ffd0 } , 
<nl> - { 0x01000567 , 0x1400ffd0 } , <nl> - { 0x01000568 , 0x1400ffd0 } , <nl> - { 0x01000569 , 0x1400ffd0 } , <nl> - { 0x0100056a , 0x1400ffd0 } , <nl> - { 0x0100056b , 0x1400ffd0 } , <nl> - { 0x0100056c , 0x1400ffd0 } , <nl> - { 0x0100056d , 0x1400ffd0 } , <nl> - { 0x0100056e , 0x1400ffd0 } , <nl> - { 0x0100056f , 0x1400ffd0 } , <nl> - { 0x01000570 , 0x1400ffd0 } , <nl> - { 0x01000571 , 0x1400ffd0 } , <nl> - { 0x01000572 , 0x1400ffd0 } , <nl> - { 0x01000573 , 0x1400ffd0 } , <nl> - { 0x01000574 , 0x1400ffd0 } , <nl> - { 0x01000575 , 0x1400ffd0 } , <nl> - { 0x01000576 , 0x1400ffd0 } , <nl> - { 0x01000577 , 0x1400ffd0 } , <nl> - { 0x01000578 , 0x1400ffd0 } , <nl> - { 0x01000579 , 0x1400ffd0 } , <nl> - { 0x0100057a , 0x1400ffd0 } , <nl> - { 0x0100057b , 0x1400ffd0 } , <nl> - { 0x0100057c , 0x1400ffd0 } , <nl> - { 0x0100057d , 0x1400ffd0 } , <nl> - { 0x0100057e , 0x1400ffd0 } , <nl> - { 0x0100057f , 0x1400ffd0 } , <nl> - { 0x01000580 , 0x1400ffd0 } , <nl> - { 0x01000581 , 0x1400ffd0 } , <nl> - { 0x01000582 , 0x1400ffd0 } , <nl> - { 0x01000583 , 0x1400ffd0 } , <nl> - { 0x01000584 , 0x1400ffd0 } , <nl> - { 0x01000585 , 0x1400ffd0 } , <nl> - { 0x01000586 , 0x1400ffd0 } , <nl> - { 0x01000587 , 0x14000000 } , <nl> - { 0x09000589 , 0x54000000 } , <nl> - { 0x0100058a , 0x44000000 } , <nl> - { 0x19800591 , 0x30000028 } , <nl> - { 0x198005bb , 0x30000002 } , <nl> - { 0x190005be , 0x54000000 } , <nl> - { 0x190005bf , 0x30000000 } , <nl> - { 0x190005c0 , 0x54000000 } , <nl> - { 0x198005c1 , 0x30000001 } , <nl> - { 0x190005c3 , 0x54000000 } , <nl> - { 0x198005c4 , 0x30000001 } , <nl> - { 0x190005c6 , 0x54000000 } , <nl> - { 0x190005c7 , 0x30000000 } , <nl> - { 0x198005d0 , 0x1c00001a } , <nl> - { 0x198005f0 , 0x1c000002 } , <nl> - { 0x198005f3 , 0x54000001 } , <nl> - { 0x09800600 , 0x04000003 } , <nl> - { 0x0000060b , 0x5c000000 } , <nl> - { 0x0980060c , 0x54000001 } , <nl> - { 0x0080060e , 0x68000001 } , <nl> - { 0x00800610 , 0x30000005 } , <nl> - { 0x0900061b , 0x54000000 } , <nl> - { 0x0080061e , 0x54000001 } , <nl> - { 0x00800621 , 0x1c000019 } , <nl> - { 0x09000640 , 0x18000000 } , <nl> - { 0x00800641 , 0x1c000009 } , <nl> - { 0x1b80064b , 0x30000013 } , <nl> - { 0x09800660 , 0x34000009 } , <nl> - { 0x0080066a , 0x54000003 } , <nl> - { 0x0080066e , 0x1c000001 } , <nl> - { 0x1b000670 , 0x30000000 } , <nl> - { 0x00800671 , 0x1c000062 } , <nl> - { 0x000006d4 , 0x54000000 } , <nl> - { 0x000006d5 , 0x1c000000 } , <nl> - { 0x008006d6 , 0x30000006 } , <nl> - { 0x090006dd , 0x04000000 } , <nl> - { 0x000006de , 0x2c000000 } , <nl> - { 0x008006df , 0x30000005 } , <nl> - { 0x008006e5 , 0x18000001 } , <nl> - { 0x008006e7 , 0x30000001 } , <nl> - { 0x000006e9 , 0x68000000 } , <nl> - { 0x008006ea , 0x30000003 } , <nl> - { 0x008006ee , 0x1c000001 } , <nl> - { 0x008006f0 , 0x34000009 } , <nl> - { 0x008006fa , 0x1c000002 } , <nl> - { 0x008006fd , 0x68000001 } , <nl> - { 0x000006ff , 0x1c000000 } , <nl> - { 0x31800700 , 0x5400000d } , <nl> - { 0x3100070f , 0x04000000 } , <nl> - { 0x31000710 , 0x1c000000 } , <nl> - { 0x31000711 , 0x30000000 } , <nl> - { 0x31800712 , 0x1c00001d } , <nl> - { 0x31800730 , 0x3000001a } , <nl> - { 0x3180074d , 0x1c000020 } , <nl> - { 0x37800780 , 0x1c000025 } , <nl> - { 0x378007a6 , 0x3000000a } , <nl> - { 0x370007b1 , 0x1c000000 } , <nl> - { 0x0e800901 , 0x30000001 } , <nl> - { 0x0e000903 , 0x28000000 } , <nl> - { 0x0e800904 , 0x1c000035 } , <nl> - { 0x0e00093c , 0x30000000 } , <nl> - { 0x0e00093d , 0x1c000000 } , <nl> - { 0x0e80093e , 0x28000002 } , <nl> - { 0x0e800941 , 0x30000007 } , 
<nl> - { 0x0e800949 , 0x28000003 } , <nl> - { 0x0e00094d , 0x30000000 } , <nl> - { 0x0e000950 , 0x1c000000 } , <nl> - { 0x0e800951 , 0x30000003 } , <nl> - { 0x0e800958 , 0x1c000009 } , <nl> - { 0x0e800962 , 0x30000001 } , <nl> - { 0x09800964 , 0x54000001 } , <nl> - { 0x0e800966 , 0x34000009 } , <nl> - { 0x09000970 , 0x54000000 } , <nl> - { 0x0e00097d , 0x1c000000 } , <nl> - { 0x02000981 , 0x30000000 } , <nl> - { 0x02800982 , 0x28000001 } , <nl> - { 0x02800985 , 0x1c000007 } , <nl> - { 0x0280098f , 0x1c000001 } , <nl> - { 0x02800993 , 0x1c000015 } , <nl> - { 0x028009aa , 0x1c000006 } , <nl> - { 0x020009b2 , 0x1c000000 } , <nl> - { 0x028009b6 , 0x1c000003 } , <nl> - { 0x020009bc , 0x30000000 } , <nl> - { 0x020009bd , 0x1c000000 } , <nl> - { 0x028009be , 0x28000002 } , <nl> - { 0x028009c1 , 0x30000003 } , <nl> - { 0x028009c7 , 0x28000001 } , <nl> - { 0x028009cb , 0x28000001 } , <nl> - { 0x020009cd , 0x30000000 } , <nl> - { 0x020009ce , 0x1c000000 } , <nl> - { 0x020009d7 , 0x28000000 } , <nl> - { 0x028009dc , 0x1c000001 } , <nl> - { 0x028009df , 0x1c000002 } , <nl> - { 0x028009e2 , 0x30000001 } , <nl> - { 0x028009e6 , 0x34000009 } , <nl> - { 0x028009f0 , 0x1c000001 } , <nl> - { 0x028009f2 , 0x5c000001 } , <nl> - { 0x028009f4 , 0x3c000005 } , <nl> - { 0x020009fa , 0x68000000 } , <nl> - { 0x15800a01 , 0x30000001 } , <nl> - { 0x15000a03 , 0x28000000 } , <nl> - { 0x15800a05 , 0x1c000005 } , <nl> - { 0x15800a0f , 0x1c000001 } , <nl> - { 0x15800a13 , 0x1c000015 } , <nl> - { 0x15800a2a , 0x1c000006 } , <nl> - { 0x15800a32 , 0x1c000001 } , <nl> - { 0x15800a35 , 0x1c000001 } , <nl> - { 0x15800a38 , 0x1c000001 } , <nl> - { 0x15000a3c , 0x30000000 } , <nl> - { 0x15800a3e , 0x28000002 } , <nl> - { 0x15800a41 , 0x30000001 } , <nl> - { 0x15800a47 , 0x30000001 } , <nl> - { 0x15800a4b , 0x30000002 } , <nl> - { 0x15800a59 , 0x1c000003 } , <nl> - { 0x15000a5e , 0x1c000000 } , <nl> - { 0x15800a66 , 0x34000009 } , <nl> - { 0x15800a70 , 0x30000001 } , <nl> - { 0x15800a72 , 0x1c000002 } , <nl> - { 0x14800a81 , 0x30000001 } , <nl> - { 0x14000a83 , 0x28000000 } , <nl> - { 0x14800a85 , 0x1c000008 } , <nl> - { 0x14800a8f , 0x1c000002 } , <nl> - { 0x14800a93 , 0x1c000015 } , <nl> - { 0x14800aaa , 0x1c000006 } , <nl> - { 0x14800ab2 , 0x1c000001 } , <nl> - { 0x14800ab5 , 0x1c000004 } , <nl> - { 0x14000abc , 0x30000000 } , <nl> - { 0x14000abd , 0x1c000000 } , <nl> - { 0x14800abe , 0x28000002 } , <nl> - { 0x14800ac1 , 0x30000004 } , <nl> - { 0x14800ac7 , 0x30000001 } , <nl> - { 0x14000ac9 , 0x28000000 } , <nl> - { 0x14800acb , 0x28000001 } , <nl> - { 0x14000acd , 0x30000000 } , <nl> - { 0x14000ad0 , 0x1c000000 } , <nl> - { 0x14800ae0 , 0x1c000001 } , <nl> - { 0x14800ae2 , 0x30000001 } , <nl> - { 0x14800ae6 , 0x34000009 } , <nl> - { 0x14000af1 , 0x5c000000 } , <nl> - { 0x2b000b01 , 0x30000000 } , <nl> - { 0x2b800b02 , 0x28000001 } , <nl> - { 0x2b800b05 , 0x1c000007 } , <nl> - { 0x2b800b0f , 0x1c000001 } , <nl> - { 0x2b800b13 , 0x1c000015 } , <nl> - { 0x2b800b2a , 0x1c000006 } , <nl> - { 0x2b800b32 , 0x1c000001 } , <nl> - { 0x2b800b35 , 0x1c000004 } , <nl> - { 0x2b000b3c , 0x30000000 } , <nl> - { 0x2b000b3d , 0x1c000000 } , <nl> - { 0x2b000b3e , 0x28000000 } , <nl> - { 0x2b000b3f , 0x30000000 } , <nl> - { 0x2b000b40 , 0x28000000 } , <nl> - { 0x2b800b41 , 0x30000002 } , <nl> - { 0x2b800b47 , 0x28000001 } , <nl> - { 0x2b800b4b , 0x28000001 } , <nl> - { 0x2b000b4d , 0x30000000 } , <nl> - { 0x2b000b56 , 0x30000000 } , <nl> - { 0x2b000b57 , 0x28000000 } , <nl> - { 0x2b800b5c , 0x1c000001 } , <nl> - { 0x2b800b5f , 0x1c000002 } , 
<nl> - { 0x2b800b66 , 0x34000009 } , <nl> - { 0x2b000b70 , 0x68000000 } , <nl> - { 0x2b000b71 , 0x1c000000 } , <nl> - { 0x35000b82 , 0x30000000 } , <nl> - { 0x35000b83 , 0x1c000000 } , <nl> - { 0x35800b85 , 0x1c000005 } , <nl> - { 0x35800b8e , 0x1c000002 } , <nl> - { 0x35800b92 , 0x1c000003 } , <nl> - { 0x35800b99 , 0x1c000001 } , <nl> - { 0x35000b9c , 0x1c000000 } , <nl> - { 0x35800b9e , 0x1c000001 } , <nl> - { 0x35800ba3 , 0x1c000001 } , <nl> - { 0x35800ba8 , 0x1c000002 } , <nl> - { 0x35800bae , 0x1c00000b } , <nl> - { 0x35800bbe , 0x28000001 } , <nl> - { 0x35000bc0 , 0x30000000 } , <nl> - { 0x35800bc1 , 0x28000001 } , <nl> - { 0x35800bc6 , 0x28000002 } , <nl> - { 0x35800bca , 0x28000002 } , <nl> - { 0x35000bcd , 0x30000000 } , <nl> - { 0x35000bd7 , 0x28000000 } , <nl> - { 0x35800be6 , 0x34000009 } , <nl> - { 0x35800bf0 , 0x3c000002 } , <nl> - { 0x35800bf3 , 0x68000005 } , <nl> - { 0x35000bf9 , 0x5c000000 } , <nl> - { 0x35000bfa , 0x68000000 } , <nl> - { 0x36800c01 , 0x28000002 } , <nl> - { 0x36800c05 , 0x1c000007 } , <nl> - { 0x36800c0e , 0x1c000002 } , <nl> - { 0x36800c12 , 0x1c000016 } , <nl> - { 0x36800c2a , 0x1c000009 } , <nl> - { 0x36800c35 , 0x1c000004 } , <nl> - { 0x36800c3e , 0x30000002 } , <nl> - { 0x36800c41 , 0x28000003 } , <nl> - { 0x36800c46 , 0x30000002 } , <nl> - { 0x36800c4a , 0x30000003 } , <nl> - { 0x36800c55 , 0x30000001 } , <nl> - { 0x36800c60 , 0x1c000001 } , <nl> - { 0x36800c66 , 0x34000009 } , <nl> - { 0x1c800c82 , 0x28000001 } , <nl> - { 0x1c800c85 , 0x1c000007 } , <nl> - { 0x1c800c8e , 0x1c000002 } , <nl> - { 0x1c800c92 , 0x1c000016 } , <nl> - { 0x1c800caa , 0x1c000009 } , <nl> - { 0x1c800cb5 , 0x1c000004 } , <nl> - { 0x1c000cbc , 0x30000000 } , <nl> - { 0x1c000cbd , 0x1c000000 } , <nl> - { 0x1c000cbe , 0x28000000 } , <nl> - { 0x1c000cbf , 0x30000000 } , <nl> - { 0x1c800cc0 , 0x28000004 } , <nl> - { 0x1c000cc6 , 0x30000000 } , <nl> - { 0x1c800cc7 , 0x28000001 } , <nl> - { 0x1c800cca , 0x28000001 } , <nl> - { 0x1c800ccc , 0x30000001 } , <nl> - { 0x1c800cd5 , 0x28000001 } , <nl> - { 0x1c000cde , 0x1c000000 } , <nl> - { 0x1c800ce0 , 0x1c000001 } , <nl> - { 0x1c800ce6 , 0x34000009 } , <nl> - { 0x24800d02 , 0x28000001 } , <nl> - { 0x24800d05 , 0x1c000007 } , <nl> - { 0x24800d0e , 0x1c000002 } , <nl> - { 0x24800d12 , 0x1c000016 } , <nl> - { 0x24800d2a , 0x1c00000f } , <nl> - { 0x24800d3e , 0x28000002 } , <nl> - { 0x24800d41 , 0x30000002 } , <nl> - { 0x24800d46 , 0x28000002 } , <nl> - { 0x24800d4a , 0x28000002 } , <nl> - { 0x24000d4d , 0x30000000 } , <nl> - { 0x24000d57 , 0x28000000 } , <nl> - { 0x24800d60 , 0x1c000001 } , <nl> - { 0x24800d66 , 0x34000009 } , <nl> - { 0x2f800d82 , 0x28000001 } , <nl> - { 0x2f800d85 , 0x1c000011 } , <nl> - { 0x2f800d9a , 0x1c000017 } , <nl> - { 0x2f800db3 , 0x1c000008 } , <nl> - { 0x2f000dbd , 0x1c000000 } , <nl> - { 0x2f800dc0 , 0x1c000006 } , <nl> - { 0x2f000dca , 0x30000000 } , <nl> - { 0x2f800dcf , 0x28000002 } , <nl> - { 0x2f800dd2 , 0x30000002 } , <nl> - { 0x2f000dd6 , 0x30000000 } , <nl> - { 0x2f800dd8 , 0x28000007 } , <nl> - { 0x2f800df2 , 0x28000001 } , <nl> - { 0x2f000df4 , 0x54000000 } , <nl> - { 0x38800e01 , 0x1c00002f } , <nl> - { 0x38000e31 , 0x30000000 } , <nl> - { 0x38800e32 , 0x1c000001 } , <nl> - { 0x38800e34 , 0x30000006 } , <nl> - { 0x09000e3f , 0x5c000000 } , <nl> - { 0x38800e40 , 0x1c000005 } , <nl> - { 0x38000e46 , 0x18000000 } , <nl> - { 0x38800e47 , 0x30000007 } , <nl> - { 0x38000e4f , 0x54000000 } , <nl> - { 0x38800e50 , 0x34000009 } , <nl> - { 0x38800e5a , 0x54000001 } , <nl> - { 0x20800e81 , 0x1c000001 } , 
<nl> - { 0x20000e84 , 0x1c000000 } , <nl> - { 0x20800e87 , 0x1c000001 } , <nl> - { 0x20000e8a , 0x1c000000 } , <nl> - { 0x20000e8d , 0x1c000000 } , <nl> - { 0x20800e94 , 0x1c000003 } , <nl> - { 0x20800e99 , 0x1c000006 } , <nl> - { 0x20800ea1 , 0x1c000002 } , <nl> - { 0x20000ea5 , 0x1c000000 } , <nl> - { 0x20000ea7 , 0x1c000000 } , <nl> - { 0x20800eaa , 0x1c000001 } , <nl> - { 0x20800ead , 0x1c000003 } , <nl> - { 0x20000eb1 , 0x30000000 } , <nl> - { 0x20800eb2 , 0x1c000001 } , <nl> - { 0x20800eb4 , 0x30000005 } , <nl> - { 0x20800ebb , 0x30000001 } , <nl> - { 0x20000ebd , 0x1c000000 } , <nl> - { 0x20800ec0 , 0x1c000004 } , <nl> - { 0x20000ec6 , 0x18000000 } , <nl> - { 0x20800ec8 , 0x30000005 } , <nl> - { 0x20800ed0 , 0x34000009 } , <nl> - { 0x20800edc , 0x1c000001 } , <nl> - { 0x39000f00 , 0x1c000000 } , <nl> - { 0x39800f01 , 0x68000002 } , <nl> - { 0x39800f04 , 0x5400000e } , <nl> - { 0x39800f13 , 0x68000004 } , <nl> - { 0x39800f18 , 0x30000001 } , <nl> - { 0x39800f1a , 0x68000005 } , <nl> - { 0x39800f20 , 0x34000009 } , <nl> - { 0x39800f2a , 0x3c000009 } , <nl> - { 0x39000f34 , 0x68000000 } , <nl> - { 0x39000f35 , 0x30000000 } , <nl> - { 0x39000f36 , 0x68000000 } , <nl> - { 0x39000f37 , 0x30000000 } , <nl> - { 0x39000f38 , 0x68000000 } , <nl> - { 0x39000f39 , 0x30000000 } , <nl> - { 0x39000f3a , 0x58000000 } , <nl> - { 0x39000f3b , 0x48000000 } , <nl> - { 0x39000f3c , 0x58000000 } , <nl> - { 0x39000f3d , 0x48000000 } , <nl> - { 0x39800f3e , 0x28000001 } , <nl> - { 0x39800f40 , 0x1c000007 } , <nl> - { 0x39800f49 , 0x1c000021 } , <nl> - { 0x39800f71 , 0x3000000d } , <nl> - { 0x39000f7f , 0x28000000 } , <nl> - { 0x39800f80 , 0x30000004 } , <nl> - { 0x39000f85 , 0x54000000 } , <nl> - { 0x39800f86 , 0x30000001 } , <nl> - { 0x39800f88 , 0x1c000003 } , <nl> - { 0x39800f90 , 0x30000007 } , <nl> - { 0x39800f99 , 0x30000023 } , <nl> - { 0x39800fbe , 0x68000007 } , <nl> - { 0x39000fc6 , 0x30000000 } , <nl> - { 0x39800fc7 , 0x68000005 } , <nl> - { 0x39000fcf , 0x68000000 } , <nl> - { 0x39800fd0 , 0x54000001 } , <nl> - { 0x26801000 , 0x1c000021 } , <nl> - { 0x26801023 , 0x1c000004 } , <nl> - { 0x26801029 , 0x1c000001 } , <nl> - { 0x2600102c , 0x28000000 } , <nl> - { 0x2680102d , 0x30000003 } , <nl> - { 0x26001031 , 0x28000000 } , <nl> - { 0x26001032 , 0x30000000 } , <nl> - { 0x26801036 , 0x30000001 } , <nl> - { 0x26001038 , 0x28000000 } , <nl> - { 0x26001039 , 0x30000000 } , <nl> - { 0x26801040 , 0x34000009 } , <nl> - { 0x2680104a , 0x54000005 } , <nl> - { 0x26801050 , 0x1c000005 } , <nl> - { 0x26801056 , 0x28000001 } , <nl> - { 0x26801058 , 0x30000001 } , <nl> - { 0x100010a0 , 0x24001c60 } , <nl> - { 0x100010a1 , 0x24001c60 } , <nl> - { 0x100010a2 , 0x24001c60 } , <nl> - { 0x100010a3 , 0x24001c60 } , <nl> - { 0x100010a4 , 0x24001c60 } , <nl> - { 0x100010a5 , 0x24001c60 } , <nl> - { 0x100010a6 , 0x24001c60 } , <nl> - { 0x100010a7 , 0x24001c60 } , <nl> - { 0x100010a8 , 0x24001c60 } , <nl> - { 0x100010a9 , 0x24001c60 } , <nl> - { 0x100010aa , 0x24001c60 } , <nl> - { 0x100010ab , 0x24001c60 } , <nl> - { 0x100010ac , 0x24001c60 } , <nl> - { 0x100010ad , 0x24001c60 } , <nl> - { 0x100010ae , 0x24001c60 } , <nl> - { 0x100010af , 0x24001c60 } , <nl> - { 0x100010b0 , 0x24001c60 } , <nl> - { 0x100010b1 , 0x24001c60 } , <nl> - { 0x100010b2 , 0x24001c60 } , <nl> - { 0x100010b3 , 0x24001c60 } , <nl> - { 0x100010b4 , 0x24001c60 } , <nl> - { 0x100010b5 , 0x24001c60 } , <nl> - { 0x100010b6 , 0x24001c60 } , <nl> - { 0x100010b7 , 0x24001c60 } , <nl> - { 0x100010b8 , 0x24001c60 } , <nl> - { 0x100010b9 , 0x24001c60 } , 
<nl> - { 0x100010ba , 0x24001c60 } , <nl> - { 0x100010bb , 0x24001c60 } , <nl> - { 0x100010bc , 0x24001c60 } , <nl> - { 0x100010bd , 0x24001c60 } , <nl> - { 0x100010be , 0x24001c60 } , <nl> - { 0x100010bf , 0x24001c60 } , <nl> - { 0x100010c0 , 0x24001c60 } , <nl> - { 0x100010c1 , 0x24001c60 } , <nl> - { 0x100010c2 , 0x24001c60 } , <nl> - { 0x100010c3 , 0x24001c60 } , <nl> - { 0x100010c4 , 0x24001c60 } , <nl> - { 0x100010c5 , 0x24001c60 } , <nl> - { 0x108010d0 , 0x1c00002a } , <nl> - { 0x090010fb , 0x54000000 } , <nl> - { 0x100010fc , 0x18000000 } , <nl> - { 0x17801100 , 0x1c000059 } , <nl> - { 0x1780115f , 0x1c000043 } , <nl> - { 0x178011a8 , 0x1c000051 } , <nl> - { 0x0f801200 , 0x1c000048 } , <nl> - { 0x0f80124a , 0x1c000003 } , <nl> - { 0x0f801250 , 0x1c000006 } , <nl> - { 0x0f001258 , 0x1c000000 } , <nl> - { 0x0f80125a , 0x1c000003 } , <nl> - { 0x0f801260 , 0x1c000028 } , <nl> - { 0x0f80128a , 0x1c000003 } , <nl> - { 0x0f801290 , 0x1c000020 } , <nl> - { 0x0f8012b2 , 0x1c000003 } , <nl> - { 0x0f8012b8 , 0x1c000006 } , <nl> - { 0x0f0012c0 , 0x1c000000 } , <nl> - { 0x0f8012c2 , 0x1c000003 } , <nl> - { 0x0f8012c8 , 0x1c00000e } , <nl> - { 0x0f8012d8 , 0x1c000038 } , <nl> - { 0x0f801312 , 0x1c000003 } , <nl> - { 0x0f801318 , 0x1c000042 } , <nl> - { 0x0f00135f , 0x30000000 } , <nl> - { 0x0f001360 , 0x68000000 } , <nl> - { 0x0f801361 , 0x54000007 } , <nl> - { 0x0f801369 , 0x3c000013 } , <nl> - { 0x0f801380 , 0x1c00000f } , <nl> - { 0x0f801390 , 0x68000009 } , <nl> - { 0x088013a0 , 0x1c000054 } , <nl> - { 0x07801401 , 0x1c00026b } , <nl> - { 0x0780166d , 0x54000001 } , <nl> - { 0x0780166f , 0x1c000007 } , <nl> - { 0x28001680 , 0x74000000 } , <nl> - { 0x28801681 , 0x1c000019 } , <nl> - { 0x2800169b , 0x58000000 } , <nl> - { 0x2800169c , 0x48000000 } , <nl> - { 0x2d8016a0 , 0x1c00004a } , <nl> - { 0x098016eb , 0x54000002 } , <nl> - { 0x2d8016ee , 0x38000002 } , <nl> - { 0x32801700 , 0x1c00000c } , <nl> - { 0x3280170e , 0x1c000003 } , <nl> - { 0x32801712 , 0x30000002 } , <nl> - { 0x18801720 , 0x1c000011 } , <nl> - { 0x18801732 , 0x30000002 } , <nl> - { 0x09801735 , 0x54000001 } , <nl> - { 0x06801740 , 0x1c000011 } , <nl> - { 0x06801752 , 0x30000001 } , <nl> - { 0x33801760 , 0x1c00000c } , <nl> - { 0x3380176e , 0x1c000002 } , <nl> - { 0x33801772 , 0x30000001 } , <nl> - { 0x1f801780 , 0x1c000033 } , <nl> - { 0x1f8017b4 , 0x04000001 } , <nl> - { 0x1f0017b6 , 0x28000000 } , <nl> - { 0x1f8017b7 , 0x30000006 } , <nl> - { 0x1f8017be , 0x28000007 } , <nl> - { 0x1f0017c6 , 0x30000000 } , <nl> - { 0x1f8017c7 , 0x28000001 } , <nl> - { 0x1f8017c9 , 0x3000000a } , <nl> - { 0x1f8017d4 , 0x54000002 } , <nl> - { 0x1f0017d7 , 0x18000000 } , <nl> - { 0x1f8017d8 , 0x54000002 } , <nl> - { 0x1f0017db , 0x5c000000 } , <nl> - { 0x1f0017dc , 0x1c000000 } , <nl> - { 0x1f0017dd , 0x30000000 } , <nl> - { 0x1f8017e0 , 0x34000009 } , <nl> - { 0x1f8017f0 , 0x3c000009 } , <nl> - { 0x25801800 , 0x54000005 } , <nl> - { 0x25001806 , 0x44000000 } , <nl> - { 0x25801807 , 0x54000003 } , <nl> - { 0x2580180b , 0x30000002 } , <nl> - { 0x2500180e , 0x74000000 } , <nl> - { 0x25801810 , 0x34000009 } , <nl> - { 0x25801820 , 0x1c000022 } , <nl> - { 0x25001843 , 0x18000000 } , <nl> - { 0x25801844 , 0x1c000033 } , <nl> - { 0x25801880 , 0x1c000028 } , <nl> - { 0x250018a9 , 0x30000000 } , <nl> - { 0x22801900 , 0x1c00001c } , <nl> - { 0x22801920 , 0x30000002 } , <nl> - { 0x22801923 , 0x28000003 } , <nl> - { 0x22801927 , 0x30000001 } , <nl> - { 0x22801929 , 0x28000002 } , <nl> - { 0x22801930 , 0x28000001 } , <nl> - { 0x22001932 , 0x30000000 } , 
<nl> - { 0x22801933 , 0x28000005 } , <nl> - { 0x22801939 , 0x30000002 } , <nl> - { 0x22001940 , 0x68000000 } , <nl> - { 0x22801944 , 0x54000001 } , <nl> - { 0x22801946 , 0x34000009 } , <nl> - { 0x34801950 , 0x1c00001d } , <nl> - { 0x34801970 , 0x1c000004 } , <nl> - { 0x27801980 , 0x1c000029 } , <nl> - { 0x278019b0 , 0x28000010 } , <nl> - { 0x278019c1 , 0x1c000006 } , <nl> - { 0x278019c8 , 0x28000001 } , <nl> - { 0x278019d0 , 0x34000009 } , <nl> - { 0x278019de , 0x54000001 } , <nl> - { 0x1f8019e0 , 0x6800001f } , <nl> - { 0x05801a00 , 0x1c000016 } , <nl> - { 0x05801a17 , 0x30000001 } , <nl> - { 0x05801a19 , 0x28000002 } , <nl> - { 0x05801a1e , 0x54000001 } , <nl> - { 0x21801d00 , 0x1400002b } , <nl> - { 0x21801d2c , 0x18000035 } , <nl> - { 0x21801d62 , 0x14000015 } , <nl> - { 0x0c001d78 , 0x18000000 } , <nl> - { 0x21801d79 , 0x14000021 } , <nl> - { 0x21801d9b , 0x18000024 } , <nl> - { 0x1b801dc0 , 0x30000003 } , <nl> - { 0x21001e00 , 0x24000001 } , <nl> - { 0x21001e01 , 0x1400ffff } , <nl> - { 0x21001e02 , 0x24000001 } , <nl> - { 0x21001e03 , 0x1400ffff } , <nl> - { 0x21001e04 , 0x24000001 } , <nl> - { 0x21001e05 , 0x1400ffff } , <nl> - { 0x21001e06 , 0x24000001 } , <nl> - { 0x21001e07 , 0x1400ffff } , <nl> - { 0x21001e08 , 0x24000001 } , <nl> - { 0x21001e09 , 0x1400ffff } , <nl> - { 0x21001e0a , 0x24000001 } , <nl> - { 0x21001e0b , 0x1400ffff } , <nl> - { 0x21001e0c , 0x24000001 } , <nl> - { 0x21001e0d , 0x1400ffff } , <nl> - { 0x21001e0e , 0x24000001 } , <nl> - { 0x21001e0f , 0x1400ffff } , <nl> - { 0x21001e10 , 0x24000001 } , <nl> - { 0x21001e11 , 0x1400ffff } , <nl> - { 0x21001e12 , 0x24000001 } , <nl> - { 0x21001e13 , 0x1400ffff } , <nl> - { 0x21001e14 , 0x24000001 } , <nl> - { 0x21001e15 , 0x1400ffff } , <nl> - { 0x21001e16 , 0x24000001 } , <nl> - { 0x21001e17 , 0x1400ffff } , <nl> - { 0x21001e18 , 0x24000001 } , <nl> - { 0x21001e19 , 0x1400ffff } , <nl> - { 0x21001e1a , 0x24000001 } , <nl> - { 0x21001e1b , 0x1400ffff } , <nl> - { 0x21001e1c , 0x24000001 } , <nl> - { 0x21001e1d , 0x1400ffff } , <nl> - { 0x21001e1e , 0x24000001 } , <nl> - { 0x21001e1f , 0x1400ffff } , <nl> - { 0x21001e20 , 0x24000001 } , <nl> - { 0x21001e21 , 0x1400ffff } , <nl> - { 0x21001e22 , 0x24000001 } , <nl> - { 0x21001e23 , 0x1400ffff } , <nl> - { 0x21001e24 , 0x24000001 } , <nl> - { 0x21001e25 , 0x1400ffff } , <nl> - { 0x21001e26 , 0x24000001 } , <nl> - { 0x21001e27 , 0x1400ffff } , <nl> - { 0x21001e28 , 0x24000001 } , <nl> - { 0x21001e29 , 0x1400ffff } , <nl> - { 0x21001e2a , 0x24000001 } , <nl> - { 0x21001e2b , 0x1400ffff } , <nl> - { 0x21001e2c , 0x24000001 } , <nl> - { 0x21001e2d , 0x1400ffff } , <nl> - { 0x21001e2e , 0x24000001 } , <nl> - { 0x21001e2f , 0x1400ffff } , <nl> - { 0x21001e30 , 0x24000001 } , <nl> - { 0x21001e31 , 0x1400ffff } , <nl> - { 0x21001e32 , 0x24000001 } , <nl> - { 0x21001e33 , 0x1400ffff } , <nl> - { 0x21001e34 , 0x24000001 } , <nl> - { 0x21001e35 , 0x1400ffff } , <nl> - { 0x21001e36 , 0x24000001 } , <nl> - { 0x21001e37 , 0x1400ffff } , <nl> - { 0x21001e38 , 0x24000001 } , <nl> - { 0x21001e39 , 0x1400ffff } , <nl> - { 0x21001e3a , 0x24000001 } , <nl> - { 0x21001e3b , 0x1400ffff } , <nl> - { 0x21001e3c , 0x24000001 } , <nl> - { 0x21001e3d , 0x1400ffff } , <nl> - { 0x21001e3e , 0x24000001 } , <nl> - { 0x21001e3f , 0x1400ffff } , <nl> - { 0x21001e40 , 0x24000001 } , <nl> - { 0x21001e41 , 0x1400ffff } , <nl> - { 0x21001e42 , 0x24000001 } , <nl> - { 0x21001e43 , 0x1400ffff } , <nl> - { 0x21001e44 , 0x24000001 } , <nl> - { 0x21001e45 , 0x1400ffff } , <nl> - { 0x21001e46 , 0x24000001 } , 
<nl> - { 0x21001e47 , 0x1400ffff } , <nl> - { 0x21001e48 , 0x24000001 } , <nl> - { 0x21001e49 , 0x1400ffff } , <nl> - { 0x21001e4a , 0x24000001 } , <nl> - { 0x21001e4b , 0x1400ffff } , <nl> - { 0x21001e4c , 0x24000001 } , <nl> - { 0x21001e4d , 0x1400ffff } , <nl> - { 0x21001e4e , 0x24000001 } , <nl> - { 0x21001e4f , 0x1400ffff } , <nl> - { 0x21001e50 , 0x24000001 } , <nl> - { 0x21001e51 , 0x1400ffff } , <nl> - { 0x21001e52 , 0x24000001 } , <nl> - { 0x21001e53 , 0x1400ffff } , <nl> - { 0x21001e54 , 0x24000001 } , <nl> - { 0x21001e55 , 0x1400ffff } , <nl> - { 0x21001e56 , 0x24000001 } , <nl> - { 0x21001e57 , 0x1400ffff } , <nl> - { 0x21001e58 , 0x24000001 } , <nl> - { 0x21001e59 , 0x1400ffff } , <nl> - { 0x21001e5a , 0x24000001 } , <nl> - { 0x21001e5b , 0x1400ffff } , <nl> - { 0x21001e5c , 0x24000001 } , <nl> - { 0x21001e5d , 0x1400ffff } , <nl> - { 0x21001e5e , 0x24000001 } , <nl> - { 0x21001e5f , 0x1400ffff } , <nl> - { 0x21001e60 , 0x24000001 } , <nl> - { 0x21001e61 , 0x1400ffff } , <nl> - { 0x21001e62 , 0x24000001 } , <nl> - { 0x21001e63 , 0x1400ffff } , <nl> - { 0x21001e64 , 0x24000001 } , <nl> - { 0x21001e65 , 0x1400ffff } , <nl> - { 0x21001e66 , 0x24000001 } , <nl> - { 0x21001e67 , 0x1400ffff } , <nl> - { 0x21001e68 , 0x24000001 } , <nl> - { 0x21001e69 , 0x1400ffff } , <nl> - { 0x21001e6a , 0x24000001 } , <nl> - { 0x21001e6b , 0x1400ffff } , <nl> - { 0x21001e6c , 0x24000001 } , <nl> - { 0x21001e6d , 0x1400ffff } , <nl> - { 0x21001e6e , 0x24000001 } , <nl> - { 0x21001e6f , 0x1400ffff } , <nl> - { 0x21001e70 , 0x24000001 } , <nl> - { 0x21001e71 , 0x1400ffff } , <nl> - { 0x21001e72 , 0x24000001 } , <nl> - { 0x21001e73 , 0x1400ffff } , <nl> - { 0x21001e74 , 0x24000001 } , <nl> - { 0x21001e75 , 0x1400ffff } , <nl> - { 0x21001e76 , 0x24000001 } , <nl> - { 0x21001e77 , 0x1400ffff } , <nl> - { 0x21001e78 , 0x24000001 } , <nl> - { 0x21001e79 , 0x1400ffff } , <nl> - { 0x21001e7a , 0x24000001 } , <nl> - { 0x21001e7b , 0x1400ffff } , <nl> - { 0x21001e7c , 0x24000001 } , <nl> - { 0x21001e7d , 0x1400ffff } , <nl> - { 0x21001e7e , 0x24000001 } , <nl> - { 0x21001e7f , 0x1400ffff } , <nl> - { 0x21001e80 , 0x24000001 } , <nl> - { 0x21001e81 , 0x1400ffff } , <nl> - { 0x21001e82 , 0x24000001 } , <nl> - { 0x21001e83 , 0x1400ffff } , <nl> - { 0x21001e84 , 0x24000001 } , <nl> - { 0x21001e85 , 0x1400ffff } , <nl> - { 0x21001e86 , 0x24000001 } , <nl> - { 0x21001e87 , 0x1400ffff } , <nl> - { 0x21001e88 , 0x24000001 } , <nl> - { 0x21001e89 , 0x1400ffff } , <nl> - { 0x21001e8a , 0x24000001 } , <nl> - { 0x21001e8b , 0x1400ffff } , <nl> - { 0x21001e8c , 0x24000001 } , <nl> - { 0x21001e8d , 0x1400ffff } , <nl> - { 0x21001e8e , 0x24000001 } , <nl> - { 0x21001e8f , 0x1400ffff } , <nl> - { 0x21001e90 , 0x24000001 } , <nl> - { 0x21001e91 , 0x1400ffff } , <nl> - { 0x21001e92 , 0x24000001 } , <nl> - { 0x21001e93 , 0x1400ffff } , <nl> - { 0x21001e94 , 0x24000001 } , <nl> - { 0x21001e95 , 0x1400ffff } , <nl> - { 0x21801e96 , 0x14000004 } , <nl> - { 0x21001e9b , 0x1400ffc5 } , <nl> - { 0x21001ea0 , 0x24000001 } , <nl> - { 0x21001ea1 , 0x1400ffff } , <nl> - { 0x21001ea2 , 0x24000001 } , <nl> - { 0x21001ea3 , 0x1400ffff } , <nl> - { 0x21001ea4 , 0x24000001 } , <nl> - { 0x21001ea5 , 0x1400ffff } , <nl> - { 0x21001ea6 , 0x24000001 } , <nl> - { 0x21001ea7 , 0x1400ffff } , <nl> - { 0x21001ea8 , 0x24000001 } , <nl> - { 0x21001ea9 , 0x1400ffff } , <nl> - { 0x21001eaa , 0x24000001 } , <nl> - { 0x21001eab , 0x1400ffff } , <nl> - { 0x21001eac , 0x24000001 } , <nl> - { 0x21001ead , 0x1400ffff } , <nl> - { 0x21001eae , 0x24000001 } , 
<nl> - { 0x21001eaf , 0x1400ffff } , <nl> - { 0x21001eb0 , 0x24000001 } , <nl> - { 0x21001eb1 , 0x1400ffff } , <nl> - { 0x21001eb2 , 0x24000001 } , <nl> - { 0x21001eb3 , 0x1400ffff } , <nl> - { 0x21001eb4 , 0x24000001 } , <nl> - { 0x21001eb5 , 0x1400ffff } , <nl> - { 0x21001eb6 , 0x24000001 } , <nl> - { 0x21001eb7 , 0x1400ffff } , <nl> - { 0x21001eb8 , 0x24000001 } , <nl> - { 0x21001eb9 , 0x1400ffff } , <nl> - { 0x21001eba , 0x24000001 } , <nl> - { 0x21001ebb , 0x1400ffff } , <nl> - { 0x21001ebc , 0x24000001 } , <nl> - { 0x21001ebd , 0x1400ffff } , <nl> - { 0x21001ebe , 0x24000001 } , <nl> - { 0x21001ebf , 0x1400ffff } , <nl> - { 0x21001ec0 , 0x24000001 } , <nl> - { 0x21001ec1 , 0x1400ffff } , <nl> - { 0x21001ec2 , 0x24000001 } , <nl> - { 0x21001ec3 , 0x1400ffff } , <nl> - { 0x21001ec4 , 0x24000001 } , <nl> - { 0x21001ec5 , 0x1400ffff } , <nl> - { 0x21001ec6 , 0x24000001 } , <nl> - { 0x21001ec7 , 0x1400ffff } , <nl> - { 0x21001ec8 , 0x24000001 } , <nl> - { 0x21001ec9 , 0x1400ffff } , <nl> - { 0x21001eca , 0x24000001 } , <nl> - { 0x21001ecb , 0x1400ffff } , <nl> - { 0x21001ecc , 0x24000001 } , <nl> - { 0x21001ecd , 0x1400ffff } , <nl> - { 0x21001ece , 0x24000001 } , <nl> - { 0x21001ecf , 0x1400ffff } , <nl> - { 0x21001ed0 , 0x24000001 } , <nl> - { 0x21001ed1 , 0x1400ffff } , <nl> - { 0x21001ed2 , 0x24000001 } , <nl> - { 0x21001ed3 , 0x1400ffff } , <nl> - { 0x21001ed4 , 0x24000001 } , <nl> - { 0x21001ed5 , 0x1400ffff } , <nl> - { 0x21001ed6 , 0x24000001 } , <nl> - { 0x21001ed7 , 0x1400ffff } , <nl> - { 0x21001ed8 , 0x24000001 } , <nl> - { 0x21001ed9 , 0x1400ffff } , <nl> - { 0x21001eda , 0x24000001 } , <nl> - { 0x21001edb , 0x1400ffff } , <nl> - { 0x21001edc , 0x24000001 } , <nl> - { 0x21001edd , 0x1400ffff } , <nl> - { 0x21001ede , 0x24000001 } , <nl> - { 0x21001edf , 0x1400ffff } , <nl> - { 0x21001ee0 , 0x24000001 } , <nl> - { 0x21001ee1 , 0x1400ffff } , <nl> - { 0x21001ee2 , 0x24000001 } , <nl> - { 0x21001ee3 , 0x1400ffff } , <nl> - { 0x21001ee4 , 0x24000001 } , <nl> - { 0x21001ee5 , 0x1400ffff } , <nl> - { 0x21001ee6 , 0x24000001 } , <nl> - { 0x21001ee7 , 0x1400ffff } , <nl> - { 0x21001ee8 , 0x24000001 } , <nl> - { 0x21001ee9 , 0x1400ffff } , <nl> - { 0x21001eea , 0x24000001 } , <nl> - { 0x21001eeb , 0x1400ffff } , <nl> - { 0x21001eec , 0x24000001 } , <nl> - { 0x21001eed , 0x1400ffff } , <nl> - { 0x21001eee , 0x24000001 } , <nl> - { 0x21001eef , 0x1400ffff } , <nl> - { 0x21001ef0 , 0x24000001 } , <nl> - { 0x21001ef1 , 0x1400ffff } , <nl> - { 0x21001ef2 , 0x24000001 } , <nl> - { 0x21001ef3 , 0x1400ffff } , <nl> - { 0x21001ef4 , 0x24000001 } , <nl> - { 0x21001ef5 , 0x1400ffff } , <nl> - { 0x21001ef6 , 0x24000001 } , <nl> - { 0x21001ef7 , 0x1400ffff } , <nl> - { 0x21001ef8 , 0x24000001 } , <nl> - { 0x21001ef9 , 0x1400ffff } , <nl> - { 0x13001f00 , 0x14000008 } , <nl> - { 0x13001f01 , 0x14000008 } , <nl> - { 0x13001f02 , 0x14000008 } , <nl> - { 0x13001f03 , 0x14000008 } , <nl> - { 0x13001f04 , 0x14000008 } , <nl> - { 0x13001f05 , 0x14000008 } , <nl> - { 0x13001f06 , 0x14000008 } , <nl> - { 0x13001f07 , 0x14000008 } , <nl> - { 0x13001f08 , 0x2400fff8 } , <nl> - { 0x13001f09 , 0x2400fff8 } , <nl> - { 0x13001f0a , 0x2400fff8 } , <nl> - { 0x13001f0b , 0x2400fff8 } , <nl> - { 0x13001f0c , 0x2400fff8 } , <nl> - { 0x13001f0d , 0x2400fff8 } , <nl> - { 0x13001f0e , 0x2400fff8 } , <nl> - { 0x13001f0f , 0x2400fff8 } , <nl> - { 0x13001f10 , 0x14000008 } , <nl> - { 0x13001f11 , 0x14000008 } , <nl> - { 0x13001f12 , 0x14000008 } , <nl> - { 0x13001f13 , 0x14000008 } , <nl> - { 0x13001f14 , 0x14000008 } , 
<nl> - { 0x13001f15 , 0x14000008 } , <nl> - { 0x13001f18 , 0x2400fff8 } , <nl> - { 0x13001f19 , 0x2400fff8 } , <nl> - { 0x13001f1a , 0x2400fff8 } , <nl> - { 0x13001f1b , 0x2400fff8 } , <nl> - { 0x13001f1c , 0x2400fff8 } , <nl> - { 0x13001f1d , 0x2400fff8 } , <nl> - { 0x13001f20 , 0x14000008 } , <nl> - { 0x13001f21 , 0x14000008 } , <nl> - { 0x13001f22 , 0x14000008 } , <nl> - { 0x13001f23 , 0x14000008 } , <nl> - { 0x13001f24 , 0x14000008 } , <nl> - { 0x13001f25 , 0x14000008 } , <nl> - { 0x13001f26 , 0x14000008 } , <nl> - { 0x13001f27 , 0x14000008 } , <nl> - { 0x13001f28 , 0x2400fff8 } , <nl> - { 0x13001f29 , 0x2400fff8 } , <nl> - { 0x13001f2a , 0x2400fff8 } , <nl> - { 0x13001f2b , 0x2400fff8 } , <nl> - { 0x13001f2c , 0x2400fff8 } , <nl> - { 0x13001f2d , 0x2400fff8 } , <nl> - { 0x13001f2e , 0x2400fff8 } , <nl> - { 0x13001f2f , 0x2400fff8 } , <nl> - { 0x13001f30 , 0x14000008 } , <nl> - { 0x13001f31 , 0x14000008 } , <nl> - { 0x13001f32 , 0x14000008 } , <nl> - { 0x13001f33 , 0x14000008 } , <nl> - { 0x13001f34 , 0x14000008 } , <nl> - { 0x13001f35 , 0x14000008 } , <nl> - { 0x13001f36 , 0x14000008 } , <nl> - { 0x13001f37 , 0x14000008 } , <nl> - { 0x13001f38 , 0x2400fff8 } , <nl> - { 0x13001f39 , 0x2400fff8 } , <nl> - { 0x13001f3a , 0x2400fff8 } , <nl> - { 0x13001f3b , 0x2400fff8 } , <nl> - { 0x13001f3c , 0x2400fff8 } , <nl> - { 0x13001f3d , 0x2400fff8 } , <nl> - { 0x13001f3e , 0x2400fff8 } , <nl> - { 0x13001f3f , 0x2400fff8 } , <nl> - { 0x13001f40 , 0x14000008 } , <nl> - { 0x13001f41 , 0x14000008 } , <nl> - { 0x13001f42 , 0x14000008 } , <nl> - { 0x13001f43 , 0x14000008 } , <nl> - { 0x13001f44 , 0x14000008 } , <nl> - { 0x13001f45 , 0x14000008 } , <nl> - { 0x13001f48 , 0x2400fff8 } , <nl> - { 0x13001f49 , 0x2400fff8 } , <nl> - { 0x13001f4a , 0x2400fff8 } , <nl> - { 0x13001f4b , 0x2400fff8 } , <nl> - { 0x13001f4c , 0x2400fff8 } , <nl> - { 0x13001f4d , 0x2400fff8 } , <nl> - { 0x13001f50 , 0x14000000 } , <nl> - { 0x13001f51 , 0x14000008 } , <nl> - { 0x13001f52 , 0x14000000 } , <nl> - { 0x13001f53 , 0x14000008 } , <nl> - { 0x13001f54 , 0x14000000 } , <nl> - { 0x13001f55 , 0x14000008 } , <nl> - { 0x13001f56 , 0x14000000 } , <nl> - { 0x13001f57 , 0x14000008 } , <nl> - { 0x13001f59 , 0x2400fff8 } , <nl> - { 0x13001f5b , 0x2400fff8 } , <nl> - { 0x13001f5d , 0x2400fff8 } , <nl> - { 0x13001f5f , 0x2400fff8 } , <nl> - { 0x13001f60 , 0x14000008 } , <nl> - { 0x13001f61 , 0x14000008 } , <nl> - { 0x13001f62 , 0x14000008 } , <nl> - { 0x13001f63 , 0x14000008 } , <nl> - { 0x13001f64 , 0x14000008 } , <nl> - { 0x13001f65 , 0x14000008 } , <nl> - { 0x13001f66 , 0x14000008 } , <nl> - { 0x13001f67 , 0x14000008 } , <nl> - { 0x13001f68 , 0x2400fff8 } , <nl> - { 0x13001f69 , 0x2400fff8 } , <nl> - { 0x13001f6a , 0x2400fff8 } , <nl> - { 0x13001f6b , 0x2400fff8 } , <nl> - { 0x13001f6c , 0x2400fff8 } , <nl> - { 0x13001f6d , 0x2400fff8 } , <nl> - { 0x13001f6e , 0x2400fff8 } , <nl> - { 0x13001f6f , 0x2400fff8 } , <nl> - { 0x13001f70 , 0x1400004a } , <nl> - { 0x13001f71 , 0x1400004a } , <nl> - { 0x13001f72 , 0x14000056 } , <nl> - { 0x13001f73 , 0x14000056 } , <nl> - { 0x13001f74 , 0x14000056 } , <nl> - { 0x13001f75 , 0x14000056 } , <nl> - { 0x13001f76 , 0x14000064 } , <nl> - { 0x13001f77 , 0x14000064 } , <nl> - { 0x13001f78 , 0x14000080 } , <nl> - { 0x13001f79 , 0x14000080 } , <nl> - { 0x13001f7a , 0x14000070 } , <nl> - { 0x13001f7b , 0x14000070 } , <nl> - { 0x13001f7c , 0x1400007e } , <nl> - { 0x13001f7d , 0x1400007e } , <nl> - { 0x13001f80 , 0x14000008 } , <nl> - { 0x13001f81 , 0x14000008 } , <nl> - { 0x13001f82 , 0x14000008 } , 
<nl> - { 0x13001f83 , 0x14000008 } , <nl> - { 0x13001f84 , 0x14000008 } , <nl> - { 0x13001f85 , 0x14000008 } , <nl> - { 0x13001f86 , 0x14000008 } , <nl> - { 0x13001f87 , 0x14000008 } , <nl> - { 0x13001f88 , 0x2000fff8 } , <nl> - { 0x13001f89 , 0x2000fff8 } , <nl> - { 0x13001f8a , 0x2000fff8 } , <nl> - { 0x13001f8b , 0x2000fff8 } , <nl> - { 0x13001f8c , 0x2000fff8 } , <nl> - { 0x13001f8d , 0x2000fff8 } , <nl> - { 0x13001f8e , 0x2000fff8 } , <nl> - { 0x13001f8f , 0x2000fff8 } , <nl> - { 0x13001f90 , 0x14000008 } , <nl> - { 0x13001f91 , 0x14000008 } , <nl> - { 0x13001f92 , 0x14000008 } , <nl> - { 0x13001f93 , 0x14000008 } , <nl> - { 0x13001f94 , 0x14000008 } , <nl> - { 0x13001f95 , 0x14000008 } , <nl> - { 0x13001f96 , 0x14000008 } , <nl> - { 0x13001f97 , 0x14000008 } , <nl> - { 0x13001f98 , 0x2000fff8 } , <nl> - { 0x13001f99 , 0x2000fff8 } , <nl> - { 0x13001f9a , 0x2000fff8 } , <nl> - { 0x13001f9b , 0x2000fff8 } , <nl> - { 0x13001f9c , 0x2000fff8 } , <nl> - { 0x13001f9d , 0x2000fff8 } , <nl> - { 0x13001f9e , 0x2000fff8 } , <nl> - { 0x13001f9f , 0x2000fff8 } , <nl> - { 0x13001fa0 , 0x14000008 } , <nl> - { 0x13001fa1 , 0x14000008 } , <nl> - { 0x13001fa2 , 0x14000008 } , <nl> - { 0x13001fa3 , 0x14000008 } , <nl> - { 0x13001fa4 , 0x14000008 } , <nl> - { 0x13001fa5 , 0x14000008 } , <nl> - { 0x13001fa6 , 0x14000008 } , <nl> - { 0x13001fa7 , 0x14000008 } , <nl> - { 0x13001fa8 , 0x2000fff8 } , <nl> - { 0x13001fa9 , 0x2000fff8 } , <nl> - { 0x13001faa , 0x2000fff8 } , <nl> - { 0x13001fab , 0x2000fff8 } , <nl> - { 0x13001fac , 0x2000fff8 } , <nl> - { 0x13001fad , 0x2000fff8 } , <nl> - { 0x13001fae , 0x2000fff8 } , <nl> - { 0x13001faf , 0x2000fff8 } , <nl> - { 0x13001fb0 , 0x14000008 } , <nl> - { 0x13001fb1 , 0x14000008 } , <nl> - { 0x13001fb2 , 0x14000000 } , <nl> - { 0x13001fb3 , 0x14000009 } , <nl> - { 0x13001fb4 , 0x14000000 } , <nl> - { 0x13801fb6 , 0x14000001 } , <nl> - { 0x13001fb8 , 0x2400fff8 } , <nl> - { 0x13001fb9 , 0x2400fff8 } , <nl> - { 0x13001fba , 0x2400ffb6 } , <nl> - { 0x13001fbb , 0x2400ffb6 } , <nl> - { 0x13001fbc , 0x2000fff7 } , <nl> - { 0x13001fbd , 0x60000000 } , <nl> - { 0x13001fbe , 0x1400e3db } , <nl> - { 0x13801fbf , 0x60000002 } , <nl> - { 0x13001fc2 , 0x14000000 } , <nl> - { 0x13001fc3 , 0x14000009 } , <nl> - { 0x13001fc4 , 0x14000000 } , <nl> - { 0x13801fc6 , 0x14000001 } , <nl> - { 0x13001fc8 , 0x2400ffaa } , <nl> - { 0x13001fc9 , 0x2400ffaa } , <nl> - { 0x13001fca , 0x2400ffaa } , <nl> - { 0x13001fcb , 0x2400ffaa } , <nl> - { 0x13001fcc , 0x2000fff7 } , <nl> - { 0x13801fcd , 0x60000002 } , <nl> - { 0x13001fd0 , 0x14000008 } , <nl> - { 0x13001fd1 , 0x14000008 } , <nl> - { 0x13801fd2 , 0x14000001 } , <nl> - { 0x13801fd6 , 0x14000001 } , <nl> - { 0x13001fd8 , 0x2400fff8 } , <nl> - { 0x13001fd9 , 0x2400fff8 } , <nl> - { 0x13001fda , 0x2400ff9c } , <nl> - { 0x13001fdb , 0x2400ff9c } , <nl> - { 0x13801fdd , 0x60000002 } , <nl> - { 0x13001fe0 , 0x14000008 } , <nl> - { 0x13001fe1 , 0x14000008 } , <nl> - { 0x13801fe2 , 0x14000002 } , <nl> - { 0x13001fe5 , 0x14000007 } , <nl> - { 0x13801fe6 , 0x14000001 } , <nl> - { 0x13001fe8 , 0x2400fff8 } , <nl> - { 0x13001fe9 , 0x2400fff8 } , <nl> - { 0x13001fea , 0x2400ff90 } , <nl> - { 0x13001feb , 0x2400ff90 } , <nl> - { 0x13001fec , 0x2400fff9 } , <nl> - { 0x13801fed , 0x60000002 } , <nl> - { 0x13001ff2 , 0x14000000 } , <nl> - { 0x13001ff3 , 0x14000009 } , <nl> - { 0x13001ff4 , 0x14000000 } , <nl> - { 0x13801ff6 , 0x14000001 } , <nl> - { 0x13001ff8 , 0x2400ff80 } , <nl> - { 0x13001ff9 , 0x2400ff80 } , <nl> - { 0x13001ffa , 0x2400ff82 } , 
<nl> - { 0x13001ffb , 0x2400ff82 } , <nl> - { 0x13001ffc , 0x2000fff7 } , <nl> - { 0x13801ffd , 0x60000001 } , <nl> - { 0x09802000 , 0x7400000a } , <nl> - { 0x0980200b , 0x04000004 } , <nl> - { 0x09802010 , 0x44000005 } , <nl> - { 0x09802016 , 0x54000001 } , <nl> - { 0x09002018 , 0x50000000 } , <nl> - { 0x09002019 , 0x4c000000 } , <nl> - { 0x0900201a , 0x58000000 } , <nl> - { 0x0980201b , 0x50000001 } , <nl> - { 0x0900201d , 0x4c000000 } , <nl> - { 0x0900201e , 0x58000000 } , <nl> - { 0x0900201f , 0x50000000 } , <nl> - { 0x09802020 , 0x54000007 } , <nl> - { 0x09002028 , 0x6c000000 } , <nl> - { 0x09002029 , 0x70000000 } , <nl> - { 0x0980202a , 0x04000004 } , <nl> - { 0x0900202f , 0x74000000 } , <nl> - { 0x09802030 , 0x54000008 } , <nl> - { 0x09002039 , 0x50000000 } , <nl> - { 0x0900203a , 0x4c000000 } , <nl> - { 0x0980203b , 0x54000003 } , <nl> - { 0x0980203f , 0x40000001 } , <nl> - { 0x09802041 , 0x54000002 } , <nl> - { 0x09002044 , 0x64000000 } , <nl> - { 0x09002045 , 0x58000000 } , <nl> - { 0x09002046 , 0x48000000 } , <nl> - { 0x09802047 , 0x5400000a } , <nl> - { 0x09002052 , 0x64000000 } , <nl> - { 0x09002053 , 0x54000000 } , <nl> - { 0x09002054 , 0x40000000 } , <nl> - { 0x09802055 , 0x54000009 } , <nl> - { 0x0900205f , 0x74000000 } , <nl> - { 0x09802060 , 0x04000003 } , <nl> - { 0x0980206a , 0x04000005 } , <nl> - { 0x09002070 , 0x3c000000 } , <nl> - { 0x21002071 , 0x14000000 } , <nl> - { 0x09802074 , 0x3c000005 } , <nl> - { 0x0980207a , 0x64000002 } , <nl> - { 0x0900207d , 0x58000000 } , <nl> - { 0x0900207e , 0x48000000 } , <nl> - { 0x2100207f , 0x14000000 } , <nl> - { 0x09802080 , 0x3c000009 } , <nl> - { 0x0980208a , 0x64000002 } , <nl> - { 0x0900208d , 0x58000000 } , <nl> - { 0x0900208e , 0x48000000 } , <nl> - { 0x21802090 , 0x18000004 } , <nl> - { 0x098020a0 , 0x5c000015 } , <nl> - { 0x1b8020d0 , 0x3000000c } , <nl> - { 0x1b8020dd , 0x2c000003 } , <nl> - { 0x1b0020e1 , 0x30000000 } , <nl> - { 0x1b8020e2 , 0x2c000002 } , <nl> - { 0x1b8020e5 , 0x30000006 } , <nl> - { 0x09802100 , 0x68000001 } , <nl> - { 0x09002102 , 0x24000000 } , <nl> - { 0x09802103 , 0x68000003 } , <nl> - { 0x09002107 , 0x24000000 } , <nl> - { 0x09802108 , 0x68000001 } , <nl> - { 0x0900210a , 0x14000000 } , <nl> - { 0x0980210b , 0x24000002 } , <nl> - { 0x0980210e , 0x14000001 } , <nl> - { 0x09802110 , 0x24000002 } , <nl> - { 0x09002113 , 0x14000000 } , <nl> - { 0x09002114 , 0x68000000 } , <nl> - { 0x09002115 , 0x24000000 } , <nl> - { 0x09802116 , 0x68000002 } , <nl> - { 0x09802119 , 0x24000004 } , <nl> - { 0x0980211e , 0x68000005 } , <nl> - { 0x09002124 , 0x24000000 } , <nl> - { 0x09002125 , 0x68000000 } , <nl> - { 0x13002126 , 0x2400e2a3 } , <nl> - { 0x09002127 , 0x68000000 } , <nl> - { 0x09002128 , 0x24000000 } , <nl> - { 0x09002129 , 0x68000000 } , <nl> - { 0x2100212a , 0x2400df41 } , <nl> - { 0x2100212b , 0x2400dfba } , <nl> - { 0x0980212c , 0x24000001 } , <nl> - { 0x0900212e , 0x68000000 } , <nl> - { 0x0900212f , 0x14000000 } , <nl> - { 0x09802130 , 0x24000001 } , <nl> - { 0x09002132 , 0x68000000 } , <nl> - { 0x09002133 , 0x24000000 } , <nl> - { 0x09002134 , 0x14000000 } , <nl> - { 0x09802135 , 0x1c000003 } , <nl> - { 0x09002139 , 0x14000000 } , <nl> - { 0x0980213a , 0x68000001 } , <nl> - { 0x0980213c , 0x14000001 } , <nl> - { 0x0980213e , 0x24000001 } , <nl> - { 0x09802140 , 0x64000004 } , <nl> - { 0x09002145 , 0x24000000 } , <nl> - { 0x09802146 , 0x14000003 } , <nl> - { 0x0900214a , 0x68000000 } , <nl> - { 0x0900214b , 0x64000000 } , <nl> - { 0x0900214c , 0x68000000 } , <nl> - { 0x09802153 , 0x3c00000c } , 
<nl> - { 0x09002160 , 0x38000010 } , <nl> - { 0x09002161 , 0x38000010 } , <nl> - { 0x09002162 , 0x38000010 } , <nl> - { 0x09002163 , 0x38000010 } , <nl> - { 0x09002164 , 0x38000010 } , <nl> - { 0x09002165 , 0x38000010 } , <nl> - { 0x09002166 , 0x38000010 } , <nl> - { 0x09002167 , 0x38000010 } , <nl> - { 0x09002168 , 0x38000010 } , <nl> - { 0x09002169 , 0x38000010 } , <nl> - { 0x0900216a , 0x38000010 } , <nl> - { 0x0900216b , 0x38000010 } , <nl> - { 0x0900216c , 0x38000010 } , <nl> - { 0x0900216d , 0x38000010 } , <nl> - { 0x0900216e , 0x38000010 } , <nl> - { 0x0900216f , 0x38000010 } , <nl> - { 0x09002170 , 0x3800fff0 } , <nl> - { 0x09002171 , 0x3800fff0 } , <nl> - { 0x09002172 , 0x3800fff0 } , <nl> - { 0x09002173 , 0x3800fff0 } , <nl> - { 0x09002174 , 0x3800fff0 } , <nl> - { 0x09002175 , 0x3800fff0 } , <nl> - { 0x09002176 , 0x3800fff0 } , <nl> - { 0x09002177 , 0x3800fff0 } , <nl> - { 0x09002178 , 0x3800fff0 } , <nl> - { 0x09002179 , 0x3800fff0 } , <nl> - { 0x0900217a , 0x3800fff0 } , <nl> - { 0x0900217b , 0x3800fff0 } , <nl> - { 0x0900217c , 0x3800fff0 } , <nl> - { 0x0900217d , 0x3800fff0 } , <nl> - { 0x0900217e , 0x3800fff0 } , <nl> - { 0x0900217f , 0x3800fff0 } , <nl> - { 0x09802180 , 0x38000003 } , <nl> - { 0x09802190 , 0x64000004 } , <nl> - { 0x09802195 , 0x68000004 } , <nl> - { 0x0980219a , 0x64000001 } , <nl> - { 0x0980219c , 0x68000003 } , <nl> - { 0x090021a0 , 0x64000000 } , <nl> - { 0x098021a1 , 0x68000001 } , <nl> - { 0x090021a3 , 0x64000000 } , <nl> - { 0x098021a4 , 0x68000001 } , <nl> - { 0x090021a6 , 0x64000000 } , <nl> - { 0x098021a7 , 0x68000006 } , <nl> - { 0x090021ae , 0x64000000 } , <nl> - { 0x098021af , 0x6800001e } , <nl> - { 0x098021ce , 0x64000001 } , <nl> - { 0x098021d0 , 0x68000001 } , <nl> - { 0x090021d2 , 0x64000000 } , <nl> - { 0x090021d3 , 0x68000000 } , <nl> - { 0x090021d4 , 0x64000000 } , <nl> - { 0x098021d5 , 0x6800001e } , <nl> - { 0x098021f4 , 0x6400010b } , <nl> - { 0x09802300 , 0x68000007 } , <nl> - { 0x09802308 , 0x64000003 } , <nl> - { 0x0980230c , 0x68000013 } , <nl> - { 0x09802320 , 0x64000001 } , <nl> - { 0x09802322 , 0x68000006 } , <nl> - { 0x09002329 , 0x58000000 } , <nl> - { 0x0900232a , 0x48000000 } , <nl> - { 0x0980232b , 0x68000050 } , <nl> - { 0x0900237c , 0x64000000 } , <nl> - { 0x0980237d , 0x6800001d } , <nl> - { 0x0980239b , 0x64000018 } , <nl> - { 0x090023b4 , 0x58000000 } , <nl> - { 0x090023b5 , 0x48000000 } , <nl> - { 0x090023b6 , 0x54000000 } , <nl> - { 0x098023b7 , 0x68000024 } , <nl> - { 0x09802400 , 0x68000026 } , <nl> - { 0x09802440 , 0x6800000a } , <nl> - { 0x09802460 , 0x3c00003b } , <nl> - { 0x0980249c , 0x68000019 } , <nl> - { 0x090024b6 , 0x6800001a } , <nl> - { 0x090024b7 , 0x6800001a } , <nl> - { 0x090024b8 , 0x6800001a } , <nl> - { 0x090024b9 , 0x6800001a } , <nl> - { 0x090024ba , 0x6800001a } , <nl> - { 0x090024bb , 0x6800001a } , <nl> - { 0x090024bc , 0x6800001a } , <nl> - { 0x090024bd , 0x6800001a } , <nl> - { 0x090024be , 0x6800001a } , <nl> - { 0x090024bf , 0x6800001a } , <nl> - { 0x090024c0 , 0x6800001a } , <nl> - { 0x090024c1 , 0x6800001a } , <nl> - { 0x090024c2 , 0x6800001a } , <nl> - { 0x090024c3 , 0x6800001a } , <nl> - { 0x090024c4 , 0x6800001a } , <nl> - { 0x090024c5 , 0x6800001a } , <nl> - { 0x090024c6 , 0x6800001a } , <nl> - { 0x090024c7 , 0x6800001a } , <nl> - { 0x090024c8 , 0x6800001a } , <nl> - { 0x090024c9 , 0x6800001a } , <nl> - { 0x090024ca , 0x6800001a } , <nl> - { 0x090024cb , 0x6800001a } , <nl> - { 0x090024cc , 0x6800001a } , <nl> - { 0x090024cd , 0x6800001a } , <nl> - { 0x090024ce , 0x6800001a } , 
<nl> - { 0x090024cf , 0x6800001a } , <nl> - { 0x090024d0 , 0x6800ffe6 } , <nl> - { 0x090024d1 , 0x6800ffe6 } , <nl> - { 0x090024d2 , 0x6800ffe6 } , <nl> - { 0x090024d3 , 0x6800ffe6 } , <nl> - { 0x090024d4 , 0x6800ffe6 } , <nl> - { 0x090024d5 , 0x6800ffe6 } , <nl> - { 0x090024d6 , 0x6800ffe6 } , <nl> - { 0x090024d7 , 0x6800ffe6 } , <nl> - { 0x090024d8 , 0x6800ffe6 } , <nl> - { 0x090024d9 , 0x6800ffe6 } , <nl> - { 0x090024da , 0x6800ffe6 } , <nl> - { 0x090024db , 0x6800ffe6 } , <nl> - { 0x090024dc , 0x6800ffe6 } , <nl> - { 0x090024dd , 0x6800ffe6 } , <nl> - { 0x090024de , 0x6800ffe6 } , <nl> - { 0x090024df , 0x6800ffe6 } , <nl> - { 0x090024e0 , 0x6800ffe6 } , <nl> - { 0x090024e1 , 0x6800ffe6 } , <nl> - { 0x090024e2 , 0x6800ffe6 } , <nl> - { 0x090024e3 , 0x6800ffe6 } , <nl> - { 0x090024e4 , 0x6800ffe6 } , <nl> - { 0x090024e5 , 0x6800ffe6 } , <nl> - { 0x090024e6 , 0x6800ffe6 } , <nl> - { 0x090024e7 , 0x6800ffe6 } , <nl> - { 0x090024e8 , 0x6800ffe6 } , <nl> - { 0x090024e9 , 0x6800ffe6 } , <nl> - { 0x098024ea , 0x3c000015 } , <nl> - { 0x09802500 , 0x680000b6 } , <nl> - { 0x090025b7 , 0x64000000 } , <nl> - { 0x098025b8 , 0x68000008 } , <nl> - { 0x090025c1 , 0x64000000 } , <nl> - { 0x098025c2 , 0x68000035 } , <nl> - { 0x098025f8 , 0x64000007 } , <nl> - { 0x09802600 , 0x6800006e } , <nl> - { 0x0900266f , 0x64000000 } , <nl> - { 0x09802670 , 0x6800002c } , <nl> - { 0x098026a0 , 0x68000011 } , <nl> - { 0x09802701 , 0x68000003 } , <nl> - { 0x09802706 , 0x68000003 } , <nl> - { 0x0980270c , 0x6800001b } , <nl> - { 0x09802729 , 0x68000022 } , <nl> - { 0x0900274d , 0x68000000 } , <nl> - { 0x0980274f , 0x68000003 } , <nl> - { 0x09002756 , 0x68000000 } , <nl> - { 0x09802758 , 0x68000006 } , <nl> - { 0x09802761 , 0x68000006 } , <nl> - { 0x09002768 , 0x58000000 } , <nl> - { 0x09002769 , 0x48000000 } , <nl> - { 0x0900276a , 0x58000000 } , <nl> - { 0x0900276b , 0x48000000 } , <nl> - { 0x0900276c , 0x58000000 } , <nl> - { 0x0900276d , 0x48000000 } , <nl> - { 0x0900276e , 0x58000000 } , <nl> - { 0x0900276f , 0x48000000 } , <nl> - { 0x09002770 , 0x58000000 } , <nl> - { 0x09002771 , 0x48000000 } , <nl> - { 0x09002772 , 0x58000000 } , <nl> - { 0x09002773 , 0x48000000 } , <nl> - { 0x09002774 , 0x58000000 } , <nl> - { 0x09002775 , 0x48000000 } , <nl> - { 0x09802776 , 0x3c00001d } , <nl> - { 0x09002794 , 0x68000000 } , <nl> - { 0x09802798 , 0x68000017 } , <nl> - { 0x098027b1 , 0x6800000d } , <nl> - { 0x098027c0 , 0x64000004 } , <nl> - { 0x090027c5 , 0x58000000 } , <nl> - { 0x090027c6 , 0x48000000 } , <nl> - { 0x098027d0 , 0x64000015 } , <nl> - { 0x090027e6 , 0x58000000 } , <nl> - { 0x090027e7 , 0x48000000 } , <nl> - { 0x090027e8 , 0x58000000 } , <nl> - { 0x090027e9 , 0x48000000 } , <nl> - { 0x090027ea , 0x58000000 } , <nl> - { 0x090027eb , 0x48000000 } , <nl> - { 0x098027f0 , 0x6400000f } , <nl> - { 0x04802800 , 0x680000ff } , <nl> - { 0x09802900 , 0x64000082 } , <nl> - { 0x09002983 , 0x58000000 } , <nl> - { 0x09002984 , 0x48000000 } , <nl> - { 0x09002985 , 0x58000000 } , <nl> - { 0x09002986 , 0x48000000 } , <nl> - { 0x09002987 , 0x58000000 } , <nl> - { 0x09002988 , 0x48000000 } , <nl> - { 0x09002989 , 0x58000000 } , <nl> - { 0x0900298a , 0x48000000 } , <nl> - { 0x0900298b , 0x58000000 } , <nl> - { 0x0900298c , 0x48000000 } , <nl> - { 0x0900298d , 0x58000000 } , <nl> - { 0x0900298e , 0x48000000 } , <nl> - { 0x0900298f , 0x58000000 } , <nl> - { 0x09002990 , 0x48000000 } , <nl> - { 0x09002991 , 0x58000000 } , <nl> - { 0x09002992 , 0x48000000 } , <nl> - { 0x09002993 , 0x58000000 } , <nl> - { 0x09002994 , 0x48000000 } , 
<nl> - { 0x09002995 , 0x58000000 } , <nl> - { 0x09002996 , 0x48000000 } , <nl> - { 0x09002997 , 0x58000000 } , <nl> - { 0x09002998 , 0x48000000 } , <nl> - { 0x09802999 , 0x6400003e } , <nl> - { 0x090029d8 , 0x58000000 } , <nl> - { 0x090029d9 , 0x48000000 } , <nl> - { 0x090029da , 0x58000000 } , <nl> - { 0x090029db , 0x48000000 } , <nl> - { 0x098029dc , 0x6400001f } , <nl> - { 0x090029fc , 0x58000000 } , <nl> - { 0x090029fd , 0x48000000 } , <nl> - { 0x098029fe , 0x64000101 } , <nl> - { 0x09802b00 , 0x68000013 } , <nl> - { 0x11002c00 , 0x24000030 } , <nl> - { 0x11002c01 , 0x24000030 } , <nl> - { 0x11002c02 , 0x24000030 } , <nl> - { 0x11002c03 , 0x24000030 } , <nl> - { 0x11002c04 , 0x24000030 } , <nl> - { 0x11002c05 , 0x24000030 } , <nl> - { 0x11002c06 , 0x24000030 } , <nl> - { 0x11002c07 , 0x24000030 } , <nl> - { 0x11002c08 , 0x24000030 } , <nl> - { 0x11002c09 , 0x24000030 } , <nl> - { 0x11002c0a , 0x24000030 } , <nl> - { 0x11002c0b , 0x24000030 } , <nl> - { 0x11002c0c , 0x24000030 } , <nl> - { 0x11002c0d , 0x24000030 } , <nl> - { 0x11002c0e , 0x24000030 } , <nl> - { 0x11002c0f , 0x24000030 } , <nl> - { 0x11002c10 , 0x24000030 } , <nl> - { 0x11002c11 , 0x24000030 } , <nl> - { 0x11002c12 , 0x24000030 } , <nl> - { 0x11002c13 , 0x24000030 } , <nl> - { 0x11002c14 , 0x24000030 } , <nl> - { 0x11002c15 , 0x24000030 } , <nl> - { 0x11002c16 , 0x24000030 } , <nl> - { 0x11002c17 , 0x24000030 } , <nl> - { 0x11002c18 , 0x24000030 } , <nl> - { 0x11002c19 , 0x24000030 } , <nl> - { 0x11002c1a , 0x24000030 } , <nl> - { 0x11002c1b , 0x24000030 } , <nl> - { 0x11002c1c , 0x24000030 } , <nl> - { 0x11002c1d , 0x24000030 } , <nl> - { 0x11002c1e , 0x24000030 } , <nl> - { 0x11002c1f , 0x24000030 } , <nl> - { 0x11002c20 , 0x24000030 } , <nl> - { 0x11002c21 , 0x24000030 } , <nl> - { 0x11002c22 , 0x24000030 } , <nl> - { 0x11002c23 , 0x24000030 } , <nl> - { 0x11002c24 , 0x24000030 } , <nl> - { 0x11002c25 , 0x24000030 } , <nl> - { 0x11002c26 , 0x24000030 } , <nl> - { 0x11002c27 , 0x24000030 } , <nl> - { 0x11002c28 , 0x24000030 } , <nl> - { 0x11002c29 , 0x24000030 } , <nl> - { 0x11002c2a , 0x24000030 } , <nl> - { 0x11002c2b , 0x24000030 } , <nl> - { 0x11002c2c , 0x24000030 } , <nl> - { 0x11002c2d , 0x24000030 } , <nl> - { 0x11002c2e , 0x24000030 } , <nl> - { 0x11002c30 , 0x1400ffd0 } , <nl> - { 0x11002c31 , 0x1400ffd0 } , <nl> - { 0x11002c32 , 0x1400ffd0 } , <nl> - { 0x11002c33 , 0x1400ffd0 } , <nl> - { 0x11002c34 , 0x1400ffd0 } , <nl> - { 0x11002c35 , 0x1400ffd0 } , <nl> - { 0x11002c36 , 0x1400ffd0 } , <nl> - { 0x11002c37 , 0x1400ffd0 } , <nl> - { 0x11002c38 , 0x1400ffd0 } , <nl> - { 0x11002c39 , 0x1400ffd0 } , <nl> - { 0x11002c3a , 0x1400ffd0 } , <nl> - { 0x11002c3b , 0x1400ffd0 } , <nl> - { 0x11002c3c , 0x1400ffd0 } , <nl> - { 0x11002c3d , 0x1400ffd0 } , <nl> - { 0x11002c3e , 0x1400ffd0 } , <nl> - { 0x11002c3f , 0x1400ffd0 } , <nl> - { 0x11002c40 , 0x1400ffd0 } , <nl> - { 0x11002c41 , 0x1400ffd0 } , <nl> - { 0x11002c42 , 0x1400ffd0 } , <nl> - { 0x11002c43 , 0x1400ffd0 } , <nl> - { 0x11002c44 , 0x1400ffd0 } , <nl> - { 0x11002c45 , 0x1400ffd0 } , <nl> - { 0x11002c46 , 0x1400ffd0 } , <nl> - { 0x11002c47 , 0x1400ffd0 } , <nl> - { 0x11002c48 , 0x1400ffd0 } , <nl> - { 0x11002c49 , 0x1400ffd0 } , <nl> - { 0x11002c4a , 0x1400ffd0 } , <nl> - { 0x11002c4b , 0x1400ffd0 } , <nl> - { 0x11002c4c , 0x1400ffd0 } , <nl> - { 0x11002c4d , 0x1400ffd0 } , <nl> - { 0x11002c4e , 0x1400ffd0 } , <nl> - { 0x11002c4f , 0x1400ffd0 } , <nl> - { 0x11002c50 , 0x1400ffd0 } , <nl> - { 0x11002c51 , 0x1400ffd0 } , <nl> - { 0x11002c52 , 0x1400ffd0 } , 
<nl> - { 0x11002c53 , 0x1400ffd0 } , <nl> - { 0x11002c54 , 0x1400ffd0 } , <nl> - { 0x11002c55 , 0x1400ffd0 } , <nl> - { 0x11002c56 , 0x1400ffd0 } , <nl> - { 0x11002c57 , 0x1400ffd0 } , <nl> - { 0x11002c58 , 0x1400ffd0 } , <nl> - { 0x11002c59 , 0x1400ffd0 } , <nl> - { 0x11002c5a , 0x1400ffd0 } , <nl> - { 0x11002c5b , 0x1400ffd0 } , <nl> - { 0x11002c5c , 0x1400ffd0 } , <nl> - { 0x11002c5d , 0x1400ffd0 } , <nl> - { 0x11002c5e , 0x1400ffd0 } , <nl> - { 0x0a002c80 , 0x24000001 } , <nl> - { 0x0a002c81 , 0x1400ffff } , <nl> - { 0x0a002c82 , 0x24000001 } , <nl> - { 0x0a002c83 , 0x1400ffff } , <nl> - { 0x0a002c84 , 0x24000001 } , <nl> - { 0x0a002c85 , 0x1400ffff } , <nl> - { 0x0a002c86 , 0x24000001 } , <nl> - { 0x0a002c87 , 0x1400ffff } , <nl> - { 0x0a002c88 , 0x24000001 } , <nl> - { 0x0a002c89 , 0x1400ffff } , <nl> - { 0x0a002c8a , 0x24000001 } , <nl> - { 0x0a002c8b , 0x1400ffff } , <nl> - { 0x0a002c8c , 0x24000001 } , <nl> - { 0x0a002c8d , 0x1400ffff } , <nl> - { 0x0a002c8e , 0x24000001 } , <nl> - { 0x0a002c8f , 0x1400ffff } , <nl> - { 0x0a002c90 , 0x24000001 } , <nl> - { 0x0a002c91 , 0x1400ffff } , <nl> - { 0x0a002c92 , 0x24000001 } , <nl> - { 0x0a002c93 , 0x1400ffff } , <nl> - { 0x0a002c94 , 0x24000001 } , <nl> - { 0x0a002c95 , 0x1400ffff } , <nl> - { 0x0a002c96 , 0x24000001 } , <nl> - { 0x0a002c97 , 0x1400ffff } , <nl> - { 0x0a002c98 , 0x24000001 } , <nl> - { 0x0a002c99 , 0x1400ffff } , <nl> - { 0x0a002c9a , 0x24000001 } , <nl> - { 0x0a002c9b , 0x1400ffff } , <nl> - { 0x0a002c9c , 0x24000001 } , <nl> - { 0x0a002c9d , 0x1400ffff } , <nl> - { 0x0a002c9e , 0x24000001 } , <nl> - { 0x0a002c9f , 0x1400ffff } , <nl> - { 0x0a002ca0 , 0x24000001 } , <nl> - { 0x0a002ca1 , 0x1400ffff } , <nl> - { 0x0a002ca2 , 0x24000001 } , <nl> - { 0x0a002ca3 , 0x1400ffff } , <nl> - { 0x0a002ca4 , 0x24000001 } , <nl> - { 0x0a002ca5 , 0x1400ffff } , <nl> - { 0x0a002ca6 , 0x24000001 } , <nl> - { 0x0a002ca7 , 0x1400ffff } , <nl> - { 0x0a002ca8 , 0x24000001 } , <nl> - { 0x0a002ca9 , 0x1400ffff } , <nl> - { 0x0a002caa , 0x24000001 } , <nl> - { 0x0a002cab , 0x1400ffff } , <nl> - { 0x0a002cac , 0x24000001 } , <nl> - { 0x0a002cad , 0x1400ffff } , <nl> - { 0x0a002cae , 0x24000001 } , <nl> - { 0x0a002caf , 0x1400ffff } , <nl> - { 0x0a002cb0 , 0x24000001 } , <nl> - { 0x0a002cb1 , 0x1400ffff } , <nl> - { 0x0a002cb2 , 0x24000001 } , <nl> - { 0x0a002cb3 , 0x1400ffff } , <nl> - { 0x0a002cb4 , 0x24000001 } , <nl> - { 0x0a002cb5 , 0x1400ffff } , <nl> - { 0x0a002cb6 , 0x24000001 } , <nl> - { 0x0a002cb7 , 0x1400ffff } , <nl> - { 0x0a002cb8 , 0x24000001 } , <nl> - { 0x0a002cb9 , 0x1400ffff } , <nl> - { 0x0a002cba , 0x24000001 } , <nl> - { 0x0a002cbb , 0x1400ffff } , <nl> - { 0x0a002cbc , 0x24000001 } , <nl> - { 0x0a002cbd , 0x1400ffff } , <nl> - { 0x0a002cbe , 0x24000001 } , <nl> - { 0x0a002cbf , 0x1400ffff } , <nl> - { 0x0a002cc0 , 0x24000001 } , <nl> - { 0x0a002cc1 , 0x1400ffff } , <nl> - { 0x0a002cc2 , 0x24000001 } , <nl> - { 0x0a002cc3 , 0x1400ffff } , <nl> - { 0x0a002cc4 , 0x24000001 } , <nl> - { 0x0a002cc5 , 0x1400ffff } , <nl> - { 0x0a002cc6 , 0x24000001 } , <nl> - { 0x0a002cc7 , 0x1400ffff } , <nl> - { 0x0a002cc8 , 0x24000001 } , <nl> - { 0x0a002cc9 , 0x1400ffff } , <nl> - { 0x0a002cca , 0x24000001 } , <nl> - { 0x0a002ccb , 0x1400ffff } , <nl> - { 0x0a002ccc , 0x24000001 } , <nl> - { 0x0a002ccd , 0x1400ffff } , <nl> - { 0x0a002cce , 0x24000001 } , <nl> - { 0x0a002ccf , 0x1400ffff } , <nl> - { 0x0a002cd0 , 0x24000001 } , <nl> - { 0x0a002cd1 , 0x1400ffff } , <nl> - { 0x0a002cd2 , 0x24000001 } , <nl> - { 0x0a002cd3 , 0x1400ffff } , 
<nl> - { 0x0a002cd4 , 0x24000001 } , <nl> - { 0x0a002cd5 , 0x1400ffff } , <nl> - { 0x0a002cd6 , 0x24000001 } , <nl> - { 0x0a002cd7 , 0x1400ffff } , <nl> - { 0x0a002cd8 , 0x24000001 } , <nl> - { 0x0a002cd9 , 0x1400ffff } , <nl> - { 0x0a002cda , 0x24000001 } , <nl> - { 0x0a002cdb , 0x1400ffff } , <nl> - { 0x0a002cdc , 0x24000001 } , <nl> - { 0x0a002cdd , 0x1400ffff } , <nl> - { 0x0a002cde , 0x24000001 } , <nl> - { 0x0a002cdf , 0x1400ffff } , <nl> - { 0x0a002ce0 , 0x24000001 } , <nl> - { 0x0a002ce1 , 0x1400ffff } , <nl> - { 0x0a002ce2 , 0x24000001 } , <nl> - { 0x0a002ce3 , 0x1400ffff } , <nl> - { 0x0a002ce4 , 0x14000000 } , <nl> - { 0x0a802ce5 , 0x68000005 } , <nl> - { 0x0a802cf9 , 0x54000003 } , <nl> - { 0x0a002cfd , 0x3c000000 } , <nl> - { 0x0a802cfe , 0x54000001 } , <nl> - { 0x10002d00 , 0x1400e3a0 } , <nl> - { 0x10002d01 , 0x1400e3a0 } , <nl> - { 0x10002d02 , 0x1400e3a0 } , <nl> - { 0x10002d03 , 0x1400e3a0 } , <nl> - { 0x10002d04 , 0x1400e3a0 } , <nl> - { 0x10002d05 , 0x1400e3a0 } , <nl> - { 0x10002d06 , 0x1400e3a0 } , <nl> - { 0x10002d07 , 0x1400e3a0 } , <nl> - { 0x10002d08 , 0x1400e3a0 } , <nl> - { 0x10002d09 , 0x1400e3a0 } , <nl> - { 0x10002d0a , 0x1400e3a0 } , <nl> - { 0x10002d0b , 0x1400e3a0 } , <nl> - { 0x10002d0c , 0x1400e3a0 } , <nl> - { 0x10002d0d , 0x1400e3a0 } , <nl> - { 0x10002d0e , 0x1400e3a0 } , <nl> - { 0x10002d0f , 0x1400e3a0 } , <nl> - { 0x10002d10 , 0x1400e3a0 } , <nl> - { 0x10002d11 , 0x1400e3a0 } , <nl> - { 0x10002d12 , 0x1400e3a0 } , <nl> - { 0x10002d13 , 0x1400e3a0 } , <nl> - { 0x10002d14 , 0x1400e3a0 } , <nl> - { 0x10002d15 , 0x1400e3a0 } , <nl> - { 0x10002d16 , 0x1400e3a0 } , <nl> - { 0x10002d17 , 0x1400e3a0 } , <nl> - { 0x10002d18 , 0x1400e3a0 } , <nl> - { 0x10002d19 , 0x1400e3a0 } , <nl> - { 0x10002d1a , 0x1400e3a0 } , <nl> - { 0x10002d1b , 0x1400e3a0 } , <nl> - { 0x10002d1c , 0x1400e3a0 } , <nl> - { 0x10002d1d , 0x1400e3a0 } , <nl> - { 0x10002d1e , 0x1400e3a0 } , <nl> - { 0x10002d1f , 0x1400e3a0 } , <nl> - { 0x10002d20 , 0x1400e3a0 } , <nl> - { 0x10002d21 , 0x1400e3a0 } , <nl> - { 0x10002d22 , 0x1400e3a0 } , <nl> - { 0x10002d23 , 0x1400e3a0 } , <nl> - { 0x10002d24 , 0x1400e3a0 } , <nl> - { 0x10002d25 , 0x1400e3a0 } , <nl> - { 0x3a802d30 , 0x1c000035 } , <nl> - { 0x3a002d6f , 0x18000000 } , <nl> - { 0x0f802d80 , 0x1c000016 } , <nl> - { 0x0f802da0 , 0x1c000006 } , <nl> - { 0x0f802da8 , 0x1c000006 } , <nl> - { 0x0f802db0 , 0x1c000006 } , <nl> - { 0x0f802db8 , 0x1c000006 } , <nl> - { 0x0f802dc0 , 0x1c000006 } , <nl> - { 0x0f802dc8 , 0x1c000006 } , <nl> - { 0x0f802dd0 , 0x1c000006 } , <nl> - { 0x0f802dd8 , 0x1c000006 } , <nl> - { 0x09802e00 , 0x54000001 } , <nl> - { 0x09002e02 , 0x50000000 } , <nl> - { 0x09002e03 , 0x4c000000 } , <nl> - { 0x09002e04 , 0x50000000 } , <nl> - { 0x09002e05 , 0x4c000000 } , <nl> - { 0x09802e06 , 0x54000002 } , <nl> - { 0x09002e09 , 0x50000000 } , <nl> - { 0x09002e0a , 0x4c000000 } , <nl> - { 0x09002e0b , 0x54000000 } , <nl> - { 0x09002e0c , 0x50000000 } , <nl> - { 0x09002e0d , 0x4c000000 } , <nl> - { 0x09802e0e , 0x54000008 } , <nl> - { 0x09002e17 , 0x44000000 } , <nl> - { 0x09002e1c , 0x50000000 } , <nl> - { 0x09002e1d , 0x4c000000 } , <nl> - { 0x16802e80 , 0x68000019 } , <nl> - { 0x16802e9b , 0x68000058 } , <nl> - { 0x16802f00 , 0x680000d5 } , <nl> - { 0x09802ff0 , 0x6800000b } , <nl> - { 0x09003000 , 0x74000000 } , <nl> - { 0x09803001 , 0x54000002 } , <nl> - { 0x09003004 , 0x68000000 } , <nl> - { 0x16003005 , 0x18000000 } , <nl> - { 0x09003006 , 0x1c000000 } , <nl> - { 0x16003007 , 0x38000000 } , <nl> - { 0x09003008 , 0x58000000 } , 
<nl> - { 0x09003009 , 0x48000000 } , <nl> - { 0x0900300a , 0x58000000 } , <nl> - { 0x0900300b , 0x48000000 } , <nl> - { 0x0900300c , 0x58000000 } , <nl> - { 0x0900300d , 0x48000000 } , <nl> - { 0x0900300e , 0x58000000 } , <nl> - { 0x0900300f , 0x48000000 } , <nl> - { 0x09003010 , 0x58000000 } , <nl> - { 0x09003011 , 0x48000000 } , <nl> - { 0x09803012 , 0x68000001 } , <nl> - { 0x09003014 , 0x58000000 } , <nl> - { 0x09003015 , 0x48000000 } , <nl> - { 0x09003016 , 0x58000000 } , <nl> - { 0x09003017 , 0x48000000 } , <nl> - { 0x09003018 , 0x58000000 } , <nl> - { 0x09003019 , 0x48000000 } , <nl> - { 0x0900301a , 0x58000000 } , <nl> - { 0x0900301b , 0x48000000 } , <nl> - { 0x0900301c , 0x44000000 } , <nl> - { 0x0900301d , 0x58000000 } , <nl> - { 0x0980301e , 0x48000001 } , <nl> - { 0x09003020 , 0x68000000 } , <nl> - { 0x16803021 , 0x38000008 } , <nl> - { 0x1b80302a , 0x30000005 } , <nl> - { 0x09003030 , 0x44000000 } , <nl> - { 0x09803031 , 0x18000004 } , <nl> - { 0x09803036 , 0x68000001 } , <nl> - { 0x16803038 , 0x38000002 } , <nl> - { 0x1600303b , 0x18000000 } , <nl> - { 0x0900303c , 0x1c000000 } , <nl> - { 0x0900303d , 0x54000000 } , <nl> - { 0x0980303e , 0x68000001 } , <nl> - { 0x1a803041 , 0x1c000055 } , <nl> - { 0x1b803099 , 0x30000001 } , <nl> - { 0x0980309b , 0x60000001 } , <nl> - { 0x1a80309d , 0x18000001 } , <nl> - { 0x1a00309f , 0x1c000000 } , <nl> - { 0x090030a0 , 0x44000000 } , <nl> - { 0x1d8030a1 , 0x1c000059 } , <nl> - { 0x090030fb , 0x54000000 } , <nl> - { 0x098030fc , 0x18000002 } , <nl> - { 0x1d0030ff , 0x1c000000 } , <nl> - { 0x03803105 , 0x1c000027 } , <nl> - { 0x17803131 , 0x1c00005d } , <nl> - { 0x09803190 , 0x68000001 } , <nl> - { 0x09803192 , 0x3c000003 } , <nl> - { 0x09803196 , 0x68000009 } , <nl> - { 0x038031a0 , 0x1c000017 } , <nl> - { 0x098031c0 , 0x6800000f } , <nl> - { 0x1d8031f0 , 0x1c00000f } , <nl> - { 0x17803200 , 0x6800001e } , <nl> - { 0x09803220 , 0x3c000009 } , <nl> - { 0x0980322a , 0x68000019 } , <nl> - { 0x09003250 , 0x68000000 } , <nl> - { 0x09803251 , 0x3c00000e } , <nl> - { 0x17803260 , 0x6800001f } , <nl> - { 0x09803280 , 0x3c000009 } , <nl> - { 0x0980328a , 0x68000026 } , <nl> - { 0x098032b1 , 0x3c00000e } , <nl> - { 0x098032c0 , 0x6800003e } , <nl> - { 0x09803300 , 0x680000ff } , <nl> - { 0x16803400 , 0x1c0019b5 } , <nl> - { 0x09804dc0 , 0x6800003f } , <nl> - { 0x16804e00 , 0x1c0051bb } , <nl> - { 0x3c80a000 , 0x1c000014 } , <nl> - { 0x3c00a015 , 0x18000000 } , <nl> - { 0x3c80a016 , 0x1c000476 } , <nl> - { 0x3c80a490 , 0x68000036 } , <nl> - { 0x0980a700 , 0x60000016 } , <nl> - { 0x3080a800 , 0x1c000001 } , <nl> - { 0x3000a802 , 0x28000000 } , <nl> - { 0x3080a803 , 0x1c000002 } , <nl> - { 0x3000a806 , 0x30000000 } , <nl> - { 0x3080a807 , 0x1c000003 } , <nl> - { 0x3000a80b , 0x30000000 } , <nl> - { 0x3080a80c , 0x1c000016 } , <nl> - { 0x3080a823 , 0x28000001 } , <nl> - { 0x3080a825 , 0x30000001 } , <nl> - { 0x3000a827 , 0x28000000 } , <nl> - { 0x3080a828 , 0x68000003 } , <nl> - { 0x1780ac00 , 0x1c002ba3 } , <nl> - { 0x0980d800 , 0x1000037f } , <nl> - { 0x0980db80 , 0x1000007f } , <nl> - { 0x0980dc00 , 0x100003ff } , <nl> - { 0x0980e000 , 0x0c0018ff } , <nl> - { 0x1680f900 , 0x1c00012d } , <nl> - { 0x1680fa30 , 0x1c00003a } , <nl> - { 0x1680fa70 , 0x1c000069 } , <nl> - { 0x2180fb00 , 0x14000006 } , <nl> - { 0x0180fb13 , 0x14000004 } , <nl> - { 0x1900fb1d , 0x1c000000 } , <nl> - { 0x1900fb1e , 0x30000000 } , <nl> - { 0x1980fb1f , 0x1c000009 } , <nl> - { 0x1900fb29 , 0x64000000 } , <nl> - { 0x1980fb2a , 0x1c00000c } , <nl> - { 0x1980fb38 , 0x1c000004 } , 
<nl> - { 0x1900fb3e , 0x1c000000 } , <nl> - { 0x1980fb40 , 0x1c000001 } , <nl> - { 0x1980fb43 , 0x1c000001 } , <nl> - { 0x1980fb46 , 0x1c00006b } , <nl> - { 0x0080fbd3 , 0x1c00016a } , <nl> - { 0x0900fd3e , 0x58000000 } , <nl> - { 0x0900fd3f , 0x48000000 } , <nl> - { 0x0080fd50 , 0x1c00003f } , <nl> - { 0x0080fd92 , 0x1c000035 } , <nl> - { 0x0080fdf0 , 0x1c00000b } , <nl> - { 0x0000fdfc , 0x5c000000 } , <nl> - { 0x0900fdfd , 0x68000000 } , <nl> - { 0x1b80fe00 , 0x3000000f } , <nl> - { 0x0980fe10 , 0x54000006 } , <nl> - { 0x0900fe17 , 0x58000000 } , <nl> - { 0x0900fe18 , 0x48000000 } , <nl> - { 0x0900fe19 , 0x54000000 } , <nl> - { 0x1b80fe20 , 0x30000003 } , <nl> - { 0x0900fe30 , 0x54000000 } , <nl> - { 0x0980fe31 , 0x44000001 } , <nl> - { 0x0980fe33 , 0x40000001 } , <nl> - { 0x0900fe35 , 0x58000000 } , <nl> - { 0x0900fe36 , 0x48000000 } , <nl> - { 0x0900fe37 , 0x58000000 } , <nl> - { 0x0900fe38 , 0x48000000 } , <nl> - { 0x0900fe39 , 0x58000000 } , <nl> - { 0x0900fe3a , 0x48000000 } , <nl> - { 0x0900fe3b , 0x58000000 } , <nl> - { 0x0900fe3c , 0x48000000 } , <nl> - { 0x0900fe3d , 0x58000000 } , <nl> - { 0x0900fe3e , 0x48000000 } , <nl> - { 0x0900fe3f , 0x58000000 } , <nl> - { 0x0900fe40 , 0x48000000 } , <nl> - { 0x0900fe41 , 0x58000000 } , <nl> - { 0x0900fe42 , 0x48000000 } , <nl> - { 0x0900fe43 , 0x58000000 } , <nl> - { 0x0900fe44 , 0x48000000 } , <nl> - { 0x0980fe45 , 0x54000001 } , <nl> - { 0x0900fe47 , 0x58000000 } , <nl> - { 0x0900fe48 , 0x48000000 } , <nl> - { 0x0980fe49 , 0x54000003 } , <nl> - { 0x0980fe4d , 0x40000002 } , <nl> - { 0x0980fe50 , 0x54000002 } , <nl> - { 0x0980fe54 , 0x54000003 } , <nl> - { 0x0900fe58 , 0x44000000 } , <nl> - { 0x0900fe59 , 0x58000000 } , <nl> - { 0x0900fe5a , 0x48000000 } , <nl> - { 0x0900fe5b , 0x58000000 } , <nl> - { 0x0900fe5c , 0x48000000 } , <nl> - { 0x0900fe5d , 0x58000000 } , <nl> - { 0x0900fe5e , 0x48000000 } , <nl> - { 0x0980fe5f , 0x54000002 } , <nl> - { 0x0900fe62 , 0x64000000 } , <nl> - { 0x0900fe63 , 0x44000000 } , <nl> - { 0x0980fe64 , 0x64000002 } , <nl> - { 0x0900fe68 , 0x54000000 } , <nl> - { 0x0900fe69 , 0x5c000000 } , <nl> - { 0x0980fe6a , 0x54000001 } , <nl> - { 0x0080fe70 , 0x1c000004 } , <nl> - { 0x0080fe76 , 0x1c000086 } , <nl> - { 0x0900feff , 0x04000000 } , <nl> - { 0x0980ff01 , 0x54000002 } , <nl> - { 0x0900ff04 , 0x5c000000 } , <nl> - { 0x0980ff05 , 0x54000002 } , <nl> - { 0x0900ff08 , 0x58000000 } , <nl> - { 0x0900ff09 , 0x48000000 } , <nl> - { 0x0900ff0a , 0x54000000 } , <nl> - { 0x0900ff0b , 0x64000000 } , <nl> - { 0x0900ff0c , 0x54000000 } , <nl> - { 0x0900ff0d , 0x44000000 } , <nl> - { 0x0980ff0e , 0x54000001 } , <nl> - { 0x0980ff10 , 0x34000009 } , <nl> - { 0x0980ff1a , 0x54000001 } , <nl> - { 0x0980ff1c , 0x64000002 } , <nl> - { 0x0980ff1f , 0x54000001 } , <nl> - { 0x2100ff21 , 0x24000020 } , <nl> - { 0x2100ff22 , 0x24000020 } , <nl> - { 0x2100ff23 , 0x24000020 } , <nl> - { 0x2100ff24 , 0x24000020 } , <nl> - { 0x2100ff25 , 0x24000020 } , <nl> - { 0x2100ff26 , 0x24000020 } , <nl> - { 0x2100ff27 , 0x24000020 } , <nl> - { 0x2100ff28 , 0x24000020 } , <nl> - { 0x2100ff29 , 0x24000020 } , <nl> - { 0x2100ff2a , 0x24000020 } , <nl> - { 0x2100ff2b , 0x24000020 } , <nl> - { 0x2100ff2c , 0x24000020 } , <nl> - { 0x2100ff2d , 0x24000020 } , <nl> - { 0x2100ff2e , 0x24000020 } , <nl> - { 0x2100ff2f , 0x24000020 } , <nl> - { 0x2100ff30 , 0x24000020 } , <nl> - { 0x2100ff31 , 0x24000020 } , <nl> - { 0x2100ff32 , 0x24000020 } , <nl> - { 0x2100ff33 , 0x24000020 } , <nl> - { 0x2100ff34 , 0x24000020 } , <nl> - { 0x2100ff35 , 0x24000020 } , 
<nl> - { 0x2100ff36 , 0x24000020 } , <nl> - { 0x2100ff37 , 0x24000020 } , <nl> - { 0x2100ff38 , 0x24000020 } , <nl> - { 0x2100ff39 , 0x24000020 } , <nl> - { 0x2100ff3a , 0x24000020 } , <nl> - { 0x0900ff3b , 0x58000000 } , <nl> - { 0x0900ff3c , 0x54000000 } , <nl> - { 0x0900ff3d , 0x48000000 } , <nl> - { 0x0900ff3e , 0x60000000 } , <nl> - { 0x0900ff3f , 0x40000000 } , <nl> - { 0x0900ff40 , 0x60000000 } , <nl> - { 0x2100ff41 , 0x1400ffe0 } , <nl> - { 0x2100ff42 , 0x1400ffe0 } , <nl> - { 0x2100ff43 , 0x1400ffe0 } , <nl> - { 0x2100ff44 , 0x1400ffe0 } , <nl> - { 0x2100ff45 , 0x1400ffe0 } , <nl> - { 0x2100ff46 , 0x1400ffe0 } , <nl> - { 0x2100ff47 , 0x1400ffe0 } , <nl> - { 0x2100ff48 , 0x1400ffe0 } , <nl> - { 0x2100ff49 , 0x1400ffe0 } , <nl> - { 0x2100ff4a , 0x1400ffe0 } , <nl> - { 0x2100ff4b , 0x1400ffe0 } , <nl> - { 0x2100ff4c , 0x1400ffe0 } , <nl> - { 0x2100ff4d , 0x1400ffe0 } , <nl> - { 0x2100ff4e , 0x1400ffe0 } , <nl> - { 0x2100ff4f , 0x1400ffe0 } , <nl> - { 0x2100ff50 , 0x1400ffe0 } , <nl> - { 0x2100ff51 , 0x1400ffe0 } , <nl> - { 0x2100ff52 , 0x1400ffe0 } , <nl> - { 0x2100ff53 , 0x1400ffe0 } , <nl> - { 0x2100ff54 , 0x1400ffe0 } , <nl> - { 0x2100ff55 , 0x1400ffe0 } , <nl> - { 0x2100ff56 , 0x1400ffe0 } , <nl> - { 0x2100ff57 , 0x1400ffe0 } , <nl> - { 0x2100ff58 , 0x1400ffe0 } , <nl> - { 0x2100ff59 , 0x1400ffe0 } , <nl> - { 0x2100ff5a , 0x1400ffe0 } , <nl> - { 0x0900ff5b , 0x58000000 } , <nl> - { 0x0900ff5c , 0x64000000 } , <nl> - { 0x0900ff5d , 0x48000000 } , <nl> - { 0x0900ff5e , 0x64000000 } , <nl> - { 0x0900ff5f , 0x58000000 } , <nl> - { 0x0900ff60 , 0x48000000 } , <nl> - { 0x0900ff61 , 0x54000000 } , <nl> - { 0x0900ff62 , 0x58000000 } , <nl> - { 0x0900ff63 , 0x48000000 } , <nl> - { 0x0980ff64 , 0x54000001 } , <nl> - { 0x1d80ff66 , 0x1c000009 } , <nl> - { 0x0900ff70 , 0x18000000 } , <nl> - { 0x1d80ff71 , 0x1c00002c } , <nl> - { 0x0980ff9e , 0x18000001 } , <nl> - { 0x1780ffa0 , 0x1c00001e } , <nl> - { 0x1780ffc2 , 0x1c000005 } , <nl> - { 0x1780ffca , 0x1c000005 } , <nl> - { 0x1780ffd2 , 0x1c000005 } , <nl> - { 0x1780ffda , 0x1c000002 } , <nl> - { 0x0980ffe0 , 0x5c000001 } , <nl> - { 0x0900ffe2 , 0x64000000 } , <nl> - { 0x0900ffe3 , 0x60000000 } , <nl> - { 0x0900ffe4 , 0x68000000 } , <nl> - { 0x0980ffe5 , 0x5c000001 } , <nl> - { 0x0900ffe8 , 0x68000000 } , <nl> - { 0x0980ffe9 , 0x64000003 } , <nl> - { 0x0980ffed , 0x68000001 } , <nl> - { 0x0980fff9 , 0x04000002 } , <nl> - { 0x0980fffc , 0x68000001 } , <nl> - { 0x23810000 , 0x1c00000b } , <nl> - { 0x2381000d , 0x1c000019 } , <nl> - { 0x23810028 , 0x1c000012 } , <nl> - { 0x2381003c , 0x1c000001 } , <nl> - { 0x2381003f , 0x1c00000e } , <nl> - { 0x23810050 , 0x1c00000d } , <nl> - { 0x23810080 , 0x1c00007a } , <nl> - { 0x09810100 , 0x54000001 } , <nl> - { 0x09010102 , 0x68000000 } , <nl> - { 0x09810107 , 0x3c00002c } , <nl> - { 0x09810137 , 0x68000008 } , <nl> - { 0x13810140 , 0x38000034 } , <nl> - { 0x13810175 , 0x3c000003 } , <nl> - { 0x13810179 , 0x68000010 } , <nl> - { 0x1301018a , 0x3c000000 } , <nl> - { 0x29810300 , 0x1c00001e } , <nl> - { 0x29810320 , 0x3c000003 } , <nl> - { 0x12810330 , 0x1c000019 } , <nl> - { 0x1201034a , 0x38000000 } , <nl> - { 0x3b810380 , 0x1c00001d } , <nl> - { 0x3b01039f , 0x54000000 } , <nl> - { 0x2a8103a0 , 0x1c000023 } , <nl> - { 0x2a8103c8 , 0x1c000007 } , <nl> - { 0x2a0103d0 , 0x68000000 } , <nl> - { 0x2a8103d1 , 0x38000004 } , <nl> - { 0x0d010400 , 0x24000028 } , <nl> - { 0x0d010401 , 0x24000028 } , <nl> - { 0x0d010402 , 0x24000028 } , <nl> - { 0x0d010403 , 0x24000028 } , <nl> - { 0x0d010404 , 0x24000028 } , 
<nl> - { 0x0d010405 , 0x24000028 } , <nl> - { 0x0d010406 , 0x24000028 } , <nl> - { 0x0d010407 , 0x24000028 } , <nl> - { 0x0d010408 , 0x24000028 } , <nl> - { 0x0d010409 , 0x24000028 } , <nl> - { 0x0d01040a , 0x24000028 } , <nl> - { 0x0d01040b , 0x24000028 } , <nl> - { 0x0d01040c , 0x24000028 } , <nl> - { 0x0d01040d , 0x24000028 } , <nl> - { 0x0d01040e , 0x24000028 } , <nl> - { 0x0d01040f , 0x24000028 } , <nl> - { 0x0d010410 , 0x24000028 } , <nl> - { 0x0d010411 , 0x24000028 } , <nl> - { 0x0d010412 , 0x24000028 } , <nl> - { 0x0d010413 , 0x24000028 } , <nl> - { 0x0d010414 , 0x24000028 } , <nl> - { 0x0d010415 , 0x24000028 } , <nl> - { 0x0d010416 , 0x24000028 } , <nl> - { 0x0d010417 , 0x24000028 } , <nl> - { 0x0d010418 , 0x24000028 } , <nl> - { 0x0d010419 , 0x24000028 } , <nl> - { 0x0d01041a , 0x24000028 } , <nl> - { 0x0d01041b , 0x24000028 } , <nl> - { 0x0d01041c , 0x24000028 } , <nl> - { 0x0d01041d , 0x24000028 } , <nl> - { 0x0d01041e , 0x24000028 } , <nl> - { 0x0d01041f , 0x24000028 } , <nl> - { 0x0d010420 , 0x24000028 } , <nl> - { 0x0d010421 , 0x24000028 } , <nl> - { 0x0d010422 , 0x24000028 } , <nl> - { 0x0d010423 , 0x24000028 } , <nl> - { 0x0d010424 , 0x24000028 } , <nl> - { 0x0d010425 , 0x24000028 } , <nl> - { 0x0d010426 , 0x24000028 } , <nl> - { 0x0d010427 , 0x24000028 } , <nl> - { 0x0d010428 , 0x1400ffd8 } , <nl> - { 0x0d010429 , 0x1400ffd8 } , <nl> - { 0x0d01042a , 0x1400ffd8 } , <nl> - { 0x0d01042b , 0x1400ffd8 } , <nl> - { 0x0d01042c , 0x1400ffd8 } , <nl> - { 0x0d01042d , 0x1400ffd8 } , <nl> - { 0x0d01042e , 0x1400ffd8 } , <nl> - { 0x0d01042f , 0x1400ffd8 } , <nl> - { 0x0d010430 , 0x1400ffd8 } , <nl> - { 0x0d010431 , 0x1400ffd8 } , <nl> - { 0x0d010432 , 0x1400ffd8 } , <nl> - { 0x0d010433 , 0x1400ffd8 } , <nl> - { 0x0d010434 , 0x1400ffd8 } , <nl> - { 0x0d010435 , 0x1400ffd8 } , <nl> - { 0x0d010436 , 0x1400ffd8 } , <nl> - { 0x0d010437 , 0x1400ffd8 } , <nl> - { 0x0d010438 , 0x1400ffd8 } , <nl> - { 0x0d010439 , 0x1400ffd8 } , <nl> - { 0x0d01043a , 0x1400ffd8 } , <nl> - { 0x0d01043b , 0x1400ffd8 } , <nl> - { 0x0d01043c , 0x1400ffd8 } , <nl> - { 0x0d01043d , 0x1400ffd8 } , <nl> - { 0x0d01043e , 0x1400ffd8 } , <nl> - { 0x0d01043f , 0x1400ffd8 } , <nl> - { 0x0d010440 , 0x1400ffd8 } , <nl> - { 0x0d010441 , 0x1400ffd8 } , <nl> - { 0x0d010442 , 0x1400ffd8 } , <nl> - { 0x0d010443 , 0x1400ffd8 } , <nl> - { 0x0d010444 , 0x1400ffd8 } , <nl> - { 0x0d010445 , 0x1400ffd8 } , <nl> - { 0x0d010446 , 0x1400ffd8 } , <nl> - { 0x0d010447 , 0x1400ffd8 } , <nl> - { 0x0d010448 , 0x1400ffd8 } , <nl> - { 0x0d010449 , 0x1400ffd8 } , <nl> - { 0x0d01044a , 0x1400ffd8 } , <nl> - { 0x0d01044b , 0x1400ffd8 } , <nl> - { 0x0d01044c , 0x1400ffd8 } , <nl> - { 0x0d01044d , 0x1400ffd8 } , <nl> - { 0x0d01044e , 0x1400ffd8 } , <nl> - { 0x0d01044f , 0x1400ffd8 } , <nl> - { 0x2e810450 , 0x1c00004d } , <nl> - { 0x2c8104a0 , 0x34000009 } , <nl> - { 0x0b810800 , 0x1c000005 } , <nl> - { 0x0b010808 , 0x1c000000 } , <nl> - { 0x0b81080a , 0x1c00002b } , <nl> - { 0x0b810837 , 0x1c000001 } , <nl> - { 0x0b01083c , 0x1c000000 } , <nl> - { 0x0b01083f , 0x1c000000 } , <nl> - { 0x1e010a00 , 0x1c000000 } , <nl> - { 0x1e810a01 , 0x30000002 } , <nl> - { 0x1e810a05 , 0x30000001 } , <nl> - { 0x1e810a0c , 0x30000003 } , <nl> - { 0x1e810a10 , 0x1c000003 } , <nl> - { 0x1e810a15 , 0x1c000002 } , <nl> - { 0x1e810a19 , 0x1c00001a } , <nl> - { 0x1e810a38 , 0x30000002 } , <nl> - { 0x1e010a3f , 0x30000000 } , <nl> - { 0x1e810a40 , 0x3c000007 } , <nl> - { 0x1e810a50 , 0x54000008 } , <nl> - { 0x0981d000 , 0x680000f5 } , <nl> - { 0x0981d100 , 0x68000026 } , 
<nl> - { 0x0981d12a , 0x6800003a } , <nl> - { 0x0981d165 , 0x28000001 } , <nl> - { 0x1b81d167 , 0x30000002 } , <nl> - { 0x0981d16a , 0x68000002 } , <nl> - { 0x0981d16d , 0x28000005 } , <nl> - { 0x0981d173 , 0x04000007 } , <nl> - { 0x1b81d17b , 0x30000007 } , <nl> - { 0x0981d183 , 0x68000001 } , <nl> - { 0x1b81d185 , 0x30000006 } , <nl> - { 0x0981d18c , 0x6800001d } , <nl> - { 0x1b81d1aa , 0x30000003 } , <nl> - { 0x0981d1ae , 0x6800002f } , <nl> - { 0x1381d200 , 0x68000041 } , <nl> - { 0x1381d242 , 0x30000002 } , <nl> - { 0x1301d245 , 0x68000000 } , <nl> - { 0x0981d300 , 0x68000056 } , <nl> - { 0x0981d400 , 0x24000019 } , <nl> - { 0x0981d41a , 0x14000019 } , <nl> - { 0x0981d434 , 0x24000019 } , <nl> - { 0x0981d44e , 0x14000006 } , <nl> - { 0x0981d456 , 0x14000011 } , <nl> - { 0x0981d468 , 0x24000019 } , <nl> - { 0x0981d482 , 0x14000019 } , <nl> - { 0x0901d49c , 0x24000000 } , <nl> - { 0x0981d49e , 0x24000001 } , <nl> - { 0x0901d4a2 , 0x24000000 } , <nl> - { 0x0981d4a5 , 0x24000001 } , <nl> - { 0x0981d4a9 , 0x24000003 } , <nl> - { 0x0981d4ae , 0x24000007 } , <nl> - { 0x0981d4b6 , 0x14000003 } , <nl> - { 0x0901d4bb , 0x14000000 } , <nl> - { 0x0981d4bd , 0x14000006 } , <nl> - { 0x0981d4c5 , 0x1400000a } , <nl> - { 0x0981d4d0 , 0x24000019 } , <nl> - { 0x0981d4ea , 0x14000019 } , <nl> - { 0x0981d504 , 0x24000001 } , <nl> - { 0x0981d507 , 0x24000003 } , <nl> - { 0x0981d50d , 0x24000007 } , <nl> - { 0x0981d516 , 0x24000006 } , <nl> - { 0x0981d51e , 0x14000019 } , <nl> - { 0x0981d538 , 0x24000001 } , <nl> - { 0x0981d53b , 0x24000003 } , <nl> - { 0x0981d540 , 0x24000004 } , <nl> - { 0x0901d546 , 0x24000000 } , <nl> - { 0x0981d54a , 0x24000006 } , <nl> - { 0x0981d552 , 0x14000019 } , <nl> - { 0x0981d56c , 0x24000019 } , <nl> - { 0x0981d586 , 0x14000019 } , <nl> - { 0x0981d5a0 , 0x24000019 } , <nl> - { 0x0981d5ba , 0x14000019 } , <nl> - { 0x0981d5d4 , 0x24000019 } , <nl> - { 0x0981d5ee , 0x14000019 } , <nl> - { 0x0981d608 , 0x24000019 } , <nl> - { 0x0981d622 , 0x14000019 } , <nl> - { 0x0981d63c , 0x24000019 } , <nl> - { 0x0981d656 , 0x14000019 } , <nl> - { 0x0981d670 , 0x24000019 } , <nl> - { 0x0981d68a , 0x1400001b } , <nl> - { 0x0981d6a8 , 0x24000018 } , <nl> - { 0x0901d6c1 , 0x64000000 } , <nl> - { 0x0981d6c2 , 0x14000018 } , <nl> - { 0x0901d6db , 0x64000000 } , <nl> - { 0x0981d6dc , 0x14000005 } , <nl> - { 0x0981d6e2 , 0x24000018 } , <nl> - { 0x0901d6fb , 0x64000000 } , <nl> - { 0x0981d6fc , 0x14000018 } , <nl> - { 0x0901d715 , 0x64000000 } , <nl> - { 0x0981d716 , 0x14000005 } , <nl> - { 0x0981d71c , 0x24000018 } , <nl> - { 0x0901d735 , 0x64000000 } , <nl> - { 0x0981d736 , 0x14000018 } , <nl> - { 0x0901d74f , 0x64000000 } , <nl> - { 0x0981d750 , 0x14000005 } , <nl> - { 0x0981d756 , 0x24000018 } , <nl> - { 0x0901d76f , 0x64000000 } , <nl> - { 0x0981d770 , 0x14000018 } , <nl> - { 0x0901d789 , 0x64000000 } , <nl> - { 0x0981d78a , 0x14000005 } , <nl> - { 0x0981d790 , 0x24000018 } , <nl> - { 0x0901d7a9 , 0x64000000 } , <nl> - { 0x0981d7aa , 0x14000018 } , <nl> - { 0x0901d7c3 , 0x64000000 } , <nl> - { 0x0981d7c4 , 0x14000005 } , <nl> - { 0x0981d7ce , 0x34000031 } , <nl> - { 0x16820000 , 0x1c00a6d6 } , <nl> - { 0x1682f800 , 0x1c00021d } , <nl> - { 0x090e0001 , 0x04000000 } , <nl> - { 0x098e0020 , 0x0400005f } , <nl> - { 0x1b8e0100 , 0x300000ef } , <nl> - { 0x098f0000 , 0x0c00fffd } , <nl> - { 0x09900000 , 0x0c00fffd } , <nl> - } ; <nl> mmm a / tools / v8 . xcodeproj / project . pbxproj <nl> ppp b / tools / v8 . xcodeproj / project . 
pbxproj <nl> <nl> 890A14020EE9C4B400E49346 / * regexp - macro - assembler - irregexp . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 89A15C750EE466D000B48DEB / * regexp - macro - assembler - irregexp . cc * / ; } ; <nl> 890A14030EE9C4B500E49346 / * regexp - macro - assembler - tracer . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 89A15C770EE466D000B48DEB / * regexp - macro - assembler - tracer . cc * / ; } ; <nl> 890A14040EE9C4B700E49346 / * regexp - macro - assembler . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 89A15C790EE466D000B48DEB / * regexp - macro - assembler . cc * / ; } ; <nl> - 893988060F2A35FA007D5254 / * libjscre . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 897FF1BF0E719CB600D62E90 / * libjscre . a * / ; } ; <nl> 893988070F2A35FA007D5254 / * libv8 . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 8970F2F00E719FB2006AE7B5 / * libv8 . a * / ; } ; <nl> 8939880D0F2A362A007D5254 / * d8 . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 89A15C920EE46A1700B48DEB / * d8 . cc * / ; } ; <nl> 893988160F2A3688007D5254 / * d8 - debug . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 893988150F2A3686007D5254 / * d8 - debug . cc * / ; } ; <nl> <nl> 89495E490E79FC23001F68C3 / * compilation - cache . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 89495E460E79FC23001F68C3 / * compilation - cache . cc * / ; } ; <nl> 896FD03A0E78D717003DFB6A / * libv8 - arm . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 89F23C870E78D5B2006B2466 / * libv8 - arm . a * / ; } ; <nl> 897F767F0E71B690007ACF34 / * shell . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF1B50E719C0900D62E90 / * shell . cc * / ; } ; <nl> - 897F76840E71B6B1007ACF34 / * libjscre . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 897FF1BF0E719CB600D62E90 / * libjscre . a * / ; } ; <nl> 897F76850E71B6B1007ACF34 / * libv8 . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 8970F2F00E719FB2006AE7B5 / * libv8 . a * / ; } ; <nl> 897FF1C40E719D6B00D62E90 / * pcre_compile . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF0EA0E719B3500D62E90 / * pcre_compile . cpp * / ; } ; <nl> 897FF1C50E719D6E00D62E90 / * pcre_exec . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF0EB0E719B3500D62E90 / * pcre_exec . cpp * / ; } ; <nl> <nl> 89F23C810E78D5B2006B2466 / * variables . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF19F0E719B8F00D62E90 / * variables . cc * / ; } ; <nl> 89F23C820E78D5B2006B2466 / * zone . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF1A20E719B8F00D62E90 / * zone . cc * / ; } ; <nl> 89F23C8E0E78D5B6006B2466 / * shell . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF1B50E719C0900D62E90 / * shell . cc * / ; } ; <nl> - 89F23C900E78D5B6006B2466 / * libjscre . a in Frameworks * / = { isa = PBXBuildFile ; fileRef = 897FF1BF0E719CB600D62E90 / * libjscre . a * / ; } ; <nl> 89F23C970E78D5E3006B2466 / * assembler - arm . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF0FE0E719B8F00D62E90 / * assembler - arm . cc * / ; } ; <nl> 89F23C980E78D5E7006B2466 / * builtins - arm . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF1090E719B8F00D62E90 / * builtins - arm . cc * / ; } ; <nl> 89F23C990E78D5E9006B2466 / * codegen - arm . cc in Sources * / = { isa = PBXBuildFile ; fileRef = 897FF1140E719B8F00D62E90 / * codegen - arm . 
cc * / ; } ; <nl> <nl> remoteGlobalIDString = 897F76790E71B4CC007ACF34 ; <nl> remoteInfo = v8_shell ; <nl> } ; <nl> - 7BF8919A0E7309AD000BAF8A / * PBXContainerItemProxy * / = { <nl> - isa = PBXContainerItemProxy ; <nl> - containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> - proxyType = 1 ; <nl> - remoteGlobalIDString = 897FF1BE0E719CB600D62E90 ; <nl> - remoteInfo = jscre ; <nl> - } ; <nl> - 893988000F2A35FA007D5254 / * PBXContainerItemProxy * / = { <nl> - isa = PBXContainerItemProxy ; <nl> - containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> - proxyType = 1 ; <nl> - remoteGlobalIDString = 897FF1BE0E719CB600D62E90 ; <nl> - remoteInfo = jscre ; <nl> - } ; <nl> 893988020F2A35FA007D5254 / * PBXContainerItemProxy * / = { <nl> isa = PBXContainerItemProxy ; <nl> containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> <nl> remoteGlobalIDString = 89F23C880E78D5B6006B2466 ; <nl> remoteInfo = " v8_shell - arm " ; <nl> } ; <nl> - 897F76800E71B6AC007ACF34 / * PBXContainerItemProxy * / = { <nl> - isa = PBXContainerItemProxy ; <nl> - containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> - proxyType = 1 ; <nl> - remoteGlobalIDString = 897FF1BE0E719CB600D62E90 ; <nl> - remoteInfo = jscre ; <nl> - } ; <nl> 897F76820E71B6AC007ACF34 / * PBXContainerItemProxy * / = { <nl> isa = PBXContainerItemProxy ; <nl> containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> <nl> remoteGlobalIDString = 8970F2EF0E719FB2006AE7B5 ; <nl> remoteInfo = v8 ; <nl> } ; <nl> - 89F23C8A0E78D5B6006B2466 / * PBXContainerItemProxy * / = { <nl> - isa = PBXContainerItemProxy ; <nl> - containerPortal = 8915B8680E719336009C4E19 / * Project object * / ; <nl> - proxyType = 1 ; <nl> - remoteGlobalIDString = 897FF1BE0E719CB600D62E90 ; <nl> - remoteInfo = jscre ; <nl> - } ; <nl> / * End PBXContainerItemProxy section * / <nl> <nl> / * Begin PBXFileReference section * / <nl> <nl> 897FF1B50E719C0900D62E90 / * shell . cc * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = shell . cc ; sourceTree = " < group > " ; } ; <nl> 897FF1B60E719C2300D62E90 / * js2c . py * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = text . script . python ; path = js2c . py ; sourceTree = " < group > " ; } ; <nl> 897FF1B70E719C2E00D62E90 / * macros . py * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = text . script . python ; name = macros . py ; path = . . / src / macros . py ; sourceTree = " < group > " ; } ; <nl> - 897FF1BF0E719CB600D62E90 / * libjscre . a * / = { isa = PBXFileReference ; explicitFileType = archive . ar ; includeInIndex = 0 ; path = libjscre . a ; sourceTree = BUILT_PRODUCTS_DIR ; } ; <nl> 898BD20C0EF6CC850068B00A / * debug - arm . cc * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = " debug - arm . cc " ; sourceTree = " < group > " ; } ; <nl> 898BD20D0EF6CC850068B00A / * debug - ia32 . cc * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = " debug - ia32 . cc " ; sourceTree = " < group > " ; } ; <nl> 89A15C630EE4661A00B48DEB / * bytecodes - irregexp . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = " bytecodes - irregexp . h " ; sourceTree = " < group > " ; } ; <nl> <nl> isa = PBXFrameworksBuildPhase ; <nl> buildActionMask = 2147483647 ; <nl> files = ( <nl> - 893988060F2A35FA007D5254 / * libjscre . 
a in Frameworks * / , <nl> 893988070F2A35FA007D5254 / * libv8 . a in Frameworks * / , <nl> ) ; <nl> runOnlyForDeploymentPostprocessing = 0 ; <nl> <nl> isa = PBXFrameworksBuildPhase ; <nl> buildActionMask = 2147483647 ; <nl> files = ( <nl> - 897F76840E71B6B1007ACF34 / * libjscre . a in Frameworks * / , <nl> 897F76850E71B6B1007ACF34 / * libv8 . a in Frameworks * / , <nl> ) ; <nl> runOnlyForDeploymentPostprocessing = 0 ; <nl> <nl> isa = PBXFrameworksBuildPhase ; <nl> buildActionMask = 2147483647 ; <nl> files = ( <nl> - 89F23C900E78D5B6006B2466 / * libjscre . a in Frameworks * / , <nl> 896FD03A0E78D717003DFB6A / * libv8 - arm . a in Frameworks * / , <nl> ) ; <nl> runOnlyForDeploymentPostprocessing = 0 ; <nl> <nl> isa = PBXGroup ; <nl> children = ( <nl> 897FF0DF0E719B3400D62E90 / * dtoa * / , <nl> - 897FF0E20E719B3500D62E90 / * jscre * / , <nl> ) ; <nl> path = third_party ; <nl> sourceTree = " < group > " ; <nl> <nl> path = dtoa ; <nl> sourceTree = " < group > " ; <nl> } ; <nl> - 897FF0E20E719B3500D62E90 / * jscre * / = { <nl> - isa = PBXGroup ; <nl> - children = ( <nl> - 897FF0E30E719B3500D62E90 / * ASCIICType . h * / , <nl> - 897FF0E40E719B3500D62E90 / * AUTHORS * / , <nl> - 897FF0E50E719B3500D62E90 / * config . h * / , <nl> - 897FF0E60E719B3500D62E90 / * COPYING * / , <nl> - 897FF0E70E719B3500D62E90 / * LICENSE * / , <nl> - 897FF0E80E719B3500D62E90 / * pcre . h * / , <nl> - 897FF0E90E719B3500D62E90 / * pcre_chartables . c * / , <nl> - 897FF0EA0E719B3500D62E90 / * pcre_compile . cpp * / , <nl> - 897FF0EB0E719B3500D62E90 / * pcre_exec . cpp * / , <nl> - 897FF0EC0E719B3500D62E90 / * pcre_internal . h * / , <nl> - 897FF0ED0E719B3500D62E90 / * pcre_tables . cpp * / , <nl> - 897FF0EE0E719B3500D62E90 / * pcre_ucp_searchfuncs . cpp * / , <nl> - 897FF0EF0E719B3500D62E90 / * pcre_xclass . cpp * / , <nl> - 897FF0F00E719B3500D62E90 / * ucpinternal . h * / , <nl> - 897FF0F10E719B3500D62E90 / * ucptable . cpp * / , <nl> - ) ; <nl> - path = jscre ; <nl> - sourceTree = " < group > " ; <nl> - } ; <nl> 897FF1B30E719BCE00D62E90 / * samples * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> <nl> 897FF1C00E719CB600D62E90 / * Products * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> - 897FF1BF0E719CB600D62E90 / * libjscre . a * / , <nl> 8970F2F00E719FB2006AE7B5 / * libv8 . a * / , <nl> 897F767A0E71B4CC007ACF34 / * v8_shell * / , <nl> 89F23C870E78D5B2006B2466 / * libv8 - arm . a * / , <nl> <nl> productReference = 897F767A0E71B4CC007ACF34 / * v8_shell * / ; <nl> productType = " com . apple . product - type . tool " ; <nl> } ; <nl> - 897FF1BE0E719CB600D62E90 / * jscre * / = { <nl> - isa = PBXNativeTarget ; <nl> - buildConfigurationList = 897FF1C30E719CB600D62E90 / * Build configuration list for PBXNativeTarget " jscre " * / ; <nl> - buildPhases = ( <nl> - 897FF1BC0E719CB600D62E90 / * Sources * / , <nl> - 897FF1BD0E719CB600D62E90 / * Frameworks * / , <nl> - ) ; <nl> - buildRules = ( <nl> - ) ; <nl> - dependencies = ( <nl> - ) ; <nl> - name = jscre ; <nl> - productName = jscre ; <nl> - productReference = 897FF1BF0E719CB600D62E90 / * libjscre . a * / ; <nl> - productType = " com . apple . product - type . library . static " ; <nl> - } ; <nl> 89F23C3C0E78D5B2006B2466 / * v8 - arm * / = { <nl> isa = PBXNativeTarget ; <nl> buildConfigurationList = 89F23C840E78D5B2006B2466 / * Build configuration list for PBXNativeTarget " v8 - arm " * / ; <nl> <nl> projectRoot = . . 
; <nl> targets = ( <nl> 7BF891930E73098D000BAF8A / * All * / , <nl> - 897FF1BE0E719CB600D62E90 / * jscre * / , <nl> 8970F2EF0E719FB2006AE7B5 / * v8 * / , <nl> 897F76790E71B4CC007ACF34 / * v8_shell * / , <nl> 893987FE0F2A35FA007D5254 / * d8_shell * / , <nl> <nl> target = 897F76790E71B4CC007ACF34 / * v8_shell * / ; <nl> targetProxy = 7BF891980E73099F000BAF8A / * PBXContainerItemProxy * / ; <nl> } ; <nl> - 7BF8919B0E7309AD000BAF8A / * PBXTargetDependency * / = { <nl> - isa = PBXTargetDependency ; <nl> - target = 897FF1BE0E719CB600D62E90 / * jscre * / ; <nl> - targetProxy = 7BF8919A0E7309AD000BAF8A / * PBXContainerItemProxy * / ; <nl> - } ; <nl> - 893987FF0F2A35FA007D5254 / * PBXTargetDependency * / = { <nl> - isa = PBXTargetDependency ; <nl> - target = 897FF1BE0E719CB600D62E90 / * jscre * / ; <nl> - targetProxy = 893988000F2A35FA007D5254 / * PBXContainerItemProxy * / ; <nl> - } ; <nl> 893988010F2A35FA007D5254 / * PBXTargetDependency * / = { <nl> isa = PBXTargetDependency ; <nl> target = 8970F2EF0E719FB2006AE7B5 / * v8 * / ; <nl> <nl> target = 89F23C880E78D5B6006B2466 / * v8_shell - arm * / ; <nl> targetProxy = 896FD03F0E78D735003DFB6A / * PBXContainerItemProxy * / ; <nl> } ; <nl> - 897F76810E71B6AC007ACF34 / * PBXTargetDependency * / = { <nl> - isa = PBXTargetDependency ; <nl> - target = 897FF1BE0E719CB600D62E90 / * jscre * / ; <nl> - targetProxy = 897F76800E71B6AC007ACF34 / * PBXContainerItemProxy * / ; <nl> - } ; <nl> 897F76830E71B6AC007ACF34 / * PBXTargetDependency * / = { <nl> isa = PBXTargetDependency ; <nl> target = 8970F2EF0E719FB2006AE7B5 / * v8 * / ; <nl> targetProxy = 897F76820E71B6AC007ACF34 / * PBXContainerItemProxy * / ; <nl> } ; <nl> - 89F23C890E78D5B6006B2466 / * PBXTargetDependency * / = { <nl> - isa = PBXTargetDependency ; <nl> - target = 897FF1BE0E719CB600D62E90 / * jscre * / ; <nl> - targetProxy = 89F23C8A0E78D5B6006B2466 / * PBXContainerItemProxy * / ; <nl> - } ; <nl> / * End PBXTargetDependency section * / <nl> <nl> / * Begin XCBuildConfiguration section * / <nl> <nl> } ; <nl> name = Release ; <nl> } ; <nl> - 897FF1C10E719CB600D62E90 / * Debug * / = { <nl> - isa = XCBuildConfiguration ; <nl> - buildSettings = { <nl> - DEPLOYMENT_POSTPROCESSING = NO ; <nl> - GCC_PREPROCESSOR_DEFINITIONS = ( <nl> - " $ ( GCC_PREPROCESSOR_DEFINITIONS ) " , <nl> - SUPPORT_UTF8 , <nl> - SUPPORT_UCP , <nl> - NO_RECURSE , <nl> - ) ; <nl> - PRODUCT_NAME = jscre ; <nl> - STRIP_STYLE = debugging ; <nl> - } ; <nl> - name = Debug ; <nl> - } ; <nl> - 897FF1C20E719CB600D62E90 / * Release * / = { <nl> - isa = XCBuildConfiguration ; <nl> - buildSettings = { <nl> - DEPLOYMENT_POSTPROCESSING = NO ; <nl> - GCC_PREPROCESSOR_DEFINITIONS = ( <nl> - " $ ( GCC_PREPROCESSOR_DEFINITIONS ) " , <nl> - SUPPORT_UTF8 , <nl> - SUPPORT_UCP , <nl> - NO_RECURSE , <nl> - ) ; <nl> - PRODUCT_NAME = jscre ; <nl> - STRIP_STYLE = debugging ; <nl> - } ; <nl> - name = Release ; <nl> - } ; <nl> 89F23C850E78D5B2006B2466 / * Debug * / = { <nl> isa = XCBuildConfiguration ; <nl> buildSettings = { <nl> <nl> defaultConfigurationIsVisible = 0 ; <nl> defaultConfigurationName = Release ; <nl> } ; <nl> - 897FF1C30E719CB600D62E90 / * Build configuration list for PBXNativeTarget " jscre " * / = { <nl> - isa = XCConfigurationList ; <nl> - buildConfigurations = ( <nl> - 897FF1C10E719CB600D62E90 / * Debug * / , <nl> - 897FF1C20E719CB600D62E90 / * Release * / , <nl> - ) ; <nl> - defaultConfigurationIsVisible = 0 ; <nl> - defaultConfigurationName = Release ; <nl> - } ; <nl> 89F23C840E78D5B2006B2466 / * Build configuration list for PBXNativeTarget " 
v8 - arm " * / = { <nl> isa = XCConfigurationList ; <nl> buildConfigurations = ( <nl> mmm a / tools / visual_studio / README . txt <nl> ppp b / tools / visual_studio / README . txt <nl> be performed by Visual Studio . <nl> v8_base . vcproj <nl> mmmmmmmmmmmm - - <nl> Base V8 library containing all the V8 code but no JavaScript library code . This <nl> - includes third party code for regular expression handling ( jscre ) and <nl> - string / number convertions ( dtoa ) . <nl> + includes third party code for string / number convertions ( dtoa ) . <nl> <nl> v8 . vcproj <nl> mmmmmmmmm <nl> mmm a / tools / visual_studio / v8_base . vcproj <nl> ppp b / tools / visual_studio / v8_base . vcproj <nl> <nl> < References > <nl> < / References > <nl> < Files > <nl> - < Filter <nl> - Name = " jscre " <nl> - > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_compile . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_exec . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_internal . h " <nl> - > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_tables . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_ucp_searchfuncs . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_xclass . 
cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < / Filter > <nl> < Filter <nl> Name = " dtoa " <nl> > <nl> mmm a / tools / visual_studio / v8_base_arm . vcproj <nl> ppp b / tools / visual_studio / v8_base_arm . vcproj <nl> <nl> < References > <nl> < / References > <nl> < Files > <nl> - < Filter <nl> - Name = " jscre " <nl> - > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_compile . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_exec . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_internal . h " <nl> - > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_tables . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_ucp_searchfuncs . cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < File <nl> - RelativePath = " . . \ . . \ src \ third_party \ jscre \ pcre_xclass . 
cpp " <nl> - > <nl> - < FileConfiguration <nl> - Name = " Debug | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < FileConfiguration <nl> - Name = " Release | Win32 " <nl> - > <nl> - < Tool <nl> - Name = " VCCLCompilerTool " <nl> - UndefinePreprocessorDefinitions = " DEBUG " <nl> - / > <nl> - < / FileConfiguration > <nl> - < / File > <nl> - < / Filter > <nl> < Filter <nl> Name = " dtoa " <nl> > <nl>
Remove JSCRE
v8/v8
bbc2a73f31629eae149eda07812e19cb7650959e
2009-02-25T08:08:01Z
mmm a / src / x64 / builtins - x64 . cc <nl> ppp b / src / x64 / builtins - x64 . cc <nl> static void Generate_JSEntryTrampolineHelper ( MacroAssembler * masm , <nl> __ movq ( rbx , r8 ) ; <nl> # endif / / _WIN64 <nl> <nl> - / / Set up the roots register . <nl> - ExternalReference roots_address = ExternalReference : : roots_address ( ) ; <nl> - __ movq ( kRootRegister , roots_address ) ; <nl> - <nl> / / Current stack contents : <nl> / / [ rsp + 2 * kPointerSize . . . ] : Internal frame <nl> / / [ rsp + kPointerSize ] : function <nl> mmm a / src / x64 / codegen - x64 . cc <nl> ppp b / src / x64 / codegen - x64 . cc <nl> bool CodeGenerator : : HasValidEntryRegisters ( ) { <nl> & & ( allocator ( ) - > count ( r9 ) = = ( frame ( ) - > is_used ( r9 ) ? 1 : 0 ) ) <nl> & & ( allocator ( ) - > count ( r11 ) = = ( frame ( ) - > is_used ( r11 ) ? 1 : 0 ) ) <nl> & & ( allocator ( ) - > count ( r14 ) = = ( frame ( ) - > is_used ( r14 ) ? 1 : 0 ) ) <nl> - & & ( allocator ( ) - > count ( r15 ) = = ( frame ( ) - > is_used ( r15 ) ? 1 : 0 ) ) <nl> & & ( allocator ( ) - > count ( r12 ) = = ( frame ( ) - > is_used ( r12 ) ? 1 : 0 ) ) ; <nl> } <nl> # endif <nl> void CodeGenerator : : VisitCountOperation ( CountOperation * node ) { <nl> __ JumpIfNotSmi ( new_value . reg ( ) , deferred - > entry_label ( ) ) ; <nl> } <nl> if ( is_increment ) { <nl> - __ SmiAddConstant ( kScratchRegister , <nl> + __ SmiAddConstant ( new_value . reg ( ) , <nl> new_value . reg ( ) , <nl> Smi : : FromInt ( 1 ) , <nl> deferred - > entry_label ( ) ) ; <nl> } else { <nl> - __ SmiSubConstant ( kScratchRegister , <nl> + __ SmiSubConstant ( new_value . reg ( ) , <nl> new_value . reg ( ) , <nl> Smi : : FromInt ( 1 ) , <nl> deferred - > entry_label ( ) ) ; <nl> } <nl> - __ movq ( new_value . reg ( ) , kScratchRegister ) ; <nl> deferred - > BindExit ( ) ; <nl> <nl> / / Postfix count operations return their input converted to <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> __ bind ( & seq_ascii_string ) ; <nl> / / rax : subject string ( sequential ascii ) <nl> / / rcx : RegExp data ( FixedArray ) <nl> - __ movq ( r12 , FieldOperand ( rcx , JSRegExp : : kDataAsciiCodeOffset ) ) ; <nl> + __ movq ( r11 , FieldOperand ( rcx , JSRegExp : : kDataAsciiCodeOffset ) ) ; <nl> __ Set ( rdi , 1 ) ; / / Type is ascii . <nl> __ jmp ( & check_code ) ; <nl> <nl> __ bind ( & seq_two_byte_string ) ; <nl> / / rax : subject string ( flat two - byte ) <nl> / / rcx : RegExp data ( FixedArray ) <nl> - __ movq ( r12 , FieldOperand ( rcx , JSRegExp : : kDataUC16CodeOffset ) ) ; <nl> + __ movq ( r11 , FieldOperand ( rcx , JSRegExp : : kDataUC16CodeOffset ) ) ; <nl> __ Set ( rdi , 0 ) ; / / Type is two byte . <nl> <nl> __ bind ( & check_code ) ; <nl> / / Check that the irregexp code has been generated for the actual string <nl> / / encoding . If it has , the field contains a code object otherwise it contains <nl> / / the hole . <nl> - __ CmpObjectType ( r12 , CODE_TYPE , kScratchRegister ) ; <nl> + __ CmpObjectType ( r11 , CODE_TYPE , kScratchRegister ) ; <nl> __ j ( not_equal , & runtime ) ; <nl> <nl> / / rax : subject string <nl> / / rdi : encoding of subject string ( 1 if ascii , 0 if two_byte ) ; <nl> - / / r12 : code <nl> + / / r11 : code <nl> / / Load used arguments before starting to push arguments for call to native <nl> / / RegExp code to avoid handling changing stack height . 
<nl> __ SmiToInteger64 ( rbx , Operand ( rsp , kPreviousIndexOffset ) ) ; <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> / / rax : subject string <nl> / / rbx : previous index <nl> / / rdi : encoding of subject string ( 1 if ascii 0 if two_byte ) ; <nl> - / / r12 : code <nl> + / / r11 : code <nl> / / All checks done . Now push arguments for native regexp code . <nl> __ IncrementCounter ( & Counters : : regexp_entry_native , 1 ) ; <nl> <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> / / rax : subject string <nl> / / rbx : previous index <nl> / / rdi : encoding of subject string ( 1 if ascii 0 if two_byte ) ; <nl> - / / r12 : code <nl> + / / r11 : code <nl> <nl> / / Argument 4 : End of string data <nl> / / Argument 3 : Start of string data <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> __ movq ( arg1 , rax ) ; <nl> <nl> / / Locate the code entry and call it . <nl> - __ addq ( r12 , Immediate ( Code : : kHeaderSize - kHeapObjectTag ) ) ; <nl> - __ CallCFunction ( r12 , kRegExpExecuteArguments ) ; <nl> + __ addq ( r11 , Immediate ( Code : : kHeaderSize - kHeapObjectTag ) ) ; <nl> + __ CallCFunction ( r11 , kRegExpExecuteArguments ) ; <nl> <nl> / / rsi is caller save , as it is used to pass parameter . <nl> __ pop ( rsi ) ; <nl> void CEntryStub : : GenerateCore ( MacroAssembler * masm , <nl> / / rbp : frame pointer ( restored after C call ) . <nl> / / rsp : stack pointer ( restored after C call ) . <nl> / / r14 : number of arguments including receiver ( C callee - saved ) . <nl> - / / r15 : pointer to the first argument ( C callee - saved ) . <nl> + / / r12 : pointer to the first argument ( C callee - saved ) . <nl> / / This pointer is reused in LeaveExitFrame ( ) , so it is stored in a <nl> / / callee - saved register . <nl> <nl> void CEntryStub : : GenerateCore ( MacroAssembler * masm , <nl> / / Windows 64 - bit ABI passes arguments in rcx , rdx , r8 , r9 <nl> / / Store Arguments object on stack , below the 4 WIN64 ABI parameter slots . <nl> __ movq ( Operand ( rsp , 4 * kPointerSize ) , r14 ) ; / / argc . <nl> - __ movq ( Operand ( rsp , 5 * kPointerSize ) , r15 ) ; / / argv . <nl> + __ movq ( Operand ( rsp , 5 * kPointerSize ) , r12 ) ; / / argv . <nl> if ( result_size_ < 2 ) { <nl> / / Pass a pointer to the Arguments object as the first argument . <nl> / / Return result in single register ( rax ) . <nl> void CEntryStub : : GenerateCore ( MacroAssembler * masm , <nl> # else / / _WIN64 <nl> / / GCC passes arguments in rdi , rsi , rdx , rcx , r8 , r9 . <nl> __ movq ( rdi , r14 ) ; / / argc . <nl> - __ movq ( rsi , r15 ) ; / / argv . <nl> + __ movq ( rsi , r12 ) ; / / argv . <nl> # endif <nl> __ call ( rbx ) ; <nl> / / Result is in rax - do not destroy this register ! <nl> void CEntryStub : : Generate ( MacroAssembler * masm ) { <nl> / / rbp : frame pointer of exit frame ( restored after C call ) . <nl> / / rsp : stack pointer ( restored after C call ) . <nl> / / r14 : number of arguments including receiver ( C callee - saved ) . <nl> - / / r15 : argv pointer ( C callee - saved ) . <nl> + / / r12 : argv pointer ( C callee - saved ) . <nl> <nl> Label throw_normal_exception ; <nl> Label throw_termination_exception ; <nl> void JSEntryStub : : GenerateBody ( MacroAssembler * masm , bool is_construct ) { <nl> <nl> / / Push the stack frame type marker twice . <nl> int marker = is_construct ? 
StackFrame : : ENTRY_CONSTRUCT : StackFrame : : ENTRY ; <nl> - __ Push ( Smi : : FromInt ( marker ) ) ; / / context slot <nl> - __ Push ( Smi : : FromInt ( marker ) ) ; / / function slot <nl> - / / Save callee - saved registers ( X64 calling conventions ) . <nl> + / / Scratch register is neither callee - save , nor an argument register on any <nl> + / / platform . It ' s free to use at this point . <nl> + / / Cannot use smi - register for loading yet . <nl> + __ movq ( kScratchRegister , <nl> + reinterpret_cast < uint64_t > ( Smi : : FromInt ( marker ) ) , <nl> + RelocInfo : : NONE ) ; <nl> + __ push ( kScratchRegister ) ; / / context slot <nl> + __ push ( kScratchRegister ) ; / / function slot <nl> + / / Save callee - saved registers ( X64 / Win64 calling conventions ) . <nl> __ push ( r12 ) ; <nl> __ push ( r13 ) ; <nl> __ push ( r14 ) ; <nl> __ push ( r15 ) ; <nl> - __ push ( rdi ) ; <nl> - __ push ( rsi ) ; <nl> + # ifdef _WIN64 <nl> + __ push ( rdi ) ; / / Only callee save in Win64 ABI , argument in AMD64 ABI . <nl> + __ push ( rsi ) ; / / Only callee save in Win64 ABI , argument in AMD64 ABI . <nl> + # endif <nl> __ push ( rbx ) ; <nl> - / / TODO ( X64 ) : Push XMM6 - XMM15 ( low 64 bits ) as well , or make them <nl> - / / callee - save in JS code as well . <nl> + / / TODO ( X64 ) : On Win64 , if we ever use XMM6 - XMM15 , the low low 64 bits are <nl> + / / callee save as well . <nl> <nl> / / Save copies of the top frame descriptor on the stack . <nl> ExternalReference c_entry_fp ( Top : : k_c_entry_fp_address ) ; <nl> __ load_rax ( c_entry_fp ) ; <nl> __ push ( rax ) ; <nl> <nl> + / / Set up the roots and smi constant registers . <nl> + / / Needs to be done before any further smi loads . <nl> + ExternalReference roots_address = ExternalReference : : roots_address ( ) ; <nl> + __ movq ( kRootRegister , roots_address ) ; <nl> + __ InitializeSmiConstantRegister ( ) ; <nl> + <nl> # ifdef ENABLE_LOGGING_AND_PROFILING <nl> / / If this is the outermost JS call , set js_entry_sp value . <nl> ExternalReference js_entry_sp ( Top : : k_js_entry_sp_address ) ; <nl> void JSEntryStub : : GenerateBody ( MacroAssembler * masm , bool is_construct ) { <nl> <nl> / / Restore callee - saved registers ( X64 conventions ) . <nl> __ pop ( rbx ) ; <nl> + # ifdef _WIN64 <nl> + / / Callee save on in Win64 ABI , arguments / volatile in AMD64 ABI . <nl> __ pop ( rsi ) ; <nl> __ pop ( rdi ) ; <nl> + # endif <nl> __ pop ( r15 ) ; <nl> __ pop ( r14 ) ; <nl> __ pop ( r13 ) ; <nl> void StringAddStub : : Generate ( MacroAssembler * masm ) { <nl> <nl> / / Check that both strings are non - external ascii strings . <nl> __ JumpIfBothInstanceTypesAreNotSequentialAscii ( r8 , r9 , rbx , rcx , <nl> - & string_add_runtime ) ; <nl> + & string_add_runtime ) ; <nl> <nl> / / Get the two characters forming the sub string . <nl> __ movzxbq ( rbx , FieldOperand ( rax , SeqAsciiString : : kHeaderSize ) ) ; <nl> void StringAddStub : : Generate ( MacroAssembler * masm ) { <nl> / / just allocate a new one . 
<nl> Label make_two_character_string , make_flat_ascii_string ; <nl> StringHelper : : GenerateTwoCharacterSymbolTableProbe ( <nl> - masm , rbx , rcx , r14 , r12 , rdi , r15 , & make_two_character_string ) ; <nl> + masm , rbx , rcx , r14 , r11 , rdi , r12 , & make_two_character_string ) ; <nl> __ IncrementCounter ( & Counters : : string_add_native , 1 ) ; <nl> __ ret ( 2 * kPointerSize ) ; <nl> <nl> void StringAddStub : : Generate ( MacroAssembler * masm ) { <nl> <nl> __ bind ( & make_flat_ascii_string ) ; <nl> / / Both strings are ascii strings . As they are short they are both flat . <nl> - __ AllocateAsciiString ( rcx , rbx , rdi , r14 , r15 , & string_add_runtime ) ; <nl> + __ AllocateAsciiString ( rcx , rbx , rdi , r14 , r11 , & string_add_runtime ) ; <nl> / / rcx : result string <nl> __ movq ( rbx , rcx ) ; <nl> / / Locate first character of result . <nl> void StringAddStub : : Generate ( MacroAssembler * masm ) { <nl> __ j ( not_zero , & string_add_runtime ) ; <nl> / / Both strings are two byte strings . As they are short they are both <nl> / / flat . <nl> - __ AllocateTwoByteString ( rcx , rbx , rdi , r14 , r15 , & string_add_runtime ) ; <nl> + __ AllocateTwoByteString ( rcx , rbx , rdi , r14 , r11 , & string_add_runtime ) ; <nl> / / rcx : result string <nl> __ movq ( rbx , rcx ) ; <nl> / / Locate first character of result . <nl> mmm a / src / x64 / frames - x64 . h <nl> ppp b / src / x64 / frames - x64 . h <nl> class StackHandlerConstants : public AllStatic { <nl> <nl> class EntryFrameConstants : public AllStatic { <nl> public : <nl> + # ifdef _WIN64 <nl> static const int kCallerFPOffset = - 10 * kPointerSize ; <nl> + # else <nl> + static const int kCallerFPOffset = - 8 * kPointerSize ; <nl> + # endif <nl> static const int kArgvOffset = 6 * kPointerSize ; <nl> } ; <nl> <nl> mmm a / src / x64 / macro - assembler - x64 . cc <nl> ppp b / src / x64 / macro - assembler - x64 . 
cc <nl> void MacroAssembler : : GetBuiltinEntry ( Register target , Builtins : : JavaScript id ) { <nl> <nl> void MacroAssembler : : Set ( Register dst , int64_t x ) { <nl> if ( x = = 0 ) { <nl> - xor_ ( dst , dst ) ; <nl> + xorl ( dst , dst ) ; <nl> } else if ( is_int32 ( x ) ) { <nl> movq ( dst , Immediate ( static_cast < int32_t > ( x ) ) ) ; <nl> } else if ( is_uint32 ( x ) ) { <nl> void MacroAssembler : : Set ( Register dst , int64_t x ) { <nl> } <nl> } <nl> <nl> - <nl> void MacroAssembler : : Set ( const Operand & dst , int64_t x ) { <nl> if ( is_int32 ( x ) ) { <nl> movq ( dst , Immediate ( static_cast < int32_t > ( x ) ) ) ; <nl> void MacroAssembler : : Set ( const Operand & dst , int64_t x ) { <nl> <nl> static int kSmiShift = kSmiTagSize + kSmiShiftSize ; <nl> <nl> + Register MacroAssembler : : GetSmiConstant ( Smi * source ) { <nl> + int value = source - > value ( ) ; <nl> + if ( value = = 0 ) { <nl> + xorl ( kScratchRegister , kScratchRegister ) ; <nl> + return kScratchRegister ; <nl> + } <nl> + if ( value = = 1 ) { <nl> + return kSmiConstantRegister ; <nl> + } <nl> + LoadSmiConstant ( kScratchRegister , source ) ; <nl> + return kScratchRegister ; <nl> + } <nl> + <nl> + void MacroAssembler : : LoadSmiConstant ( Register dst , Smi * source ) { <nl> + if ( FLAG_debug_code ) { <nl> + movq ( dst , <nl> + reinterpret_cast < uint64_t > ( Smi : : FromInt ( kSmiConstantRegisterValue ) ) , <nl> + RelocInfo : : NONE ) ; <nl> + cmpq ( dst , kSmiConstantRegister ) ; <nl> + if ( allow_stub_calls ( ) ) { <nl> + Assert ( equal , " Uninitialized kSmiConstantRegister " ) ; <nl> + } else { <nl> + Label ok ; <nl> + j ( equal , & ok ) ; <nl> + int3 ( ) ; <nl> + bind ( & ok ) ; <nl> + } <nl> + } <nl> + if ( source - > value ( ) = = 0 ) { <nl> + xorl ( dst , dst ) ; <nl> + return ; <nl> + } <nl> + int value = source - > value ( ) ; <nl> + bool negative = value < 0 ; <nl> + unsigned int uvalue = negative ? - value : value ; <nl> + <nl> + switch ( uvalue ) { <nl> + case 9 : <nl> + lea ( dst , Operand ( kSmiConstantRegister , kSmiConstantRegister , times_8 , 0 ) ) ; <nl> + break ; <nl> + case 8 : <nl> + xorl ( dst , dst ) ; <nl> + lea ( dst , Operand ( dst , kSmiConstantRegister , times_8 , 0 ) ) ; <nl> + break ; <nl> + case 4 : <nl> + xorl ( dst , dst ) ; <nl> + lea ( dst , Operand ( dst , kSmiConstantRegister , times_4 , 0 ) ) ; <nl> + break ; <nl> + case 5 : <nl> + lea ( dst , Operand ( kSmiConstantRegister , kSmiConstantRegister , times_4 , 0 ) ) ; <nl> + break ; <nl> + case 3 : <nl> + lea ( dst , Operand ( kSmiConstantRegister , kSmiConstantRegister , times_2 , 0 ) ) ; <nl> + break ; <nl> + case 2 : <nl> + lea ( dst , Operand ( kSmiConstantRegister , kSmiConstantRegister , times_1 , 0 ) ) ; <nl> + break ; <nl> + case 1 : <nl> + movq ( dst , kSmiConstantRegister ) ; <nl> + break ; <nl> + case 0 : <nl> + UNREACHABLE ( ) ; <nl> + return ; <nl> + default : <nl> + movq ( dst , reinterpret_cast < uint64_t > ( source ) , RelocInfo : : NONE ) ; <nl> + return ; <nl> + } <nl> + if ( negative ) { <nl> + neg ( dst ) ; <nl> + } <nl> + } <nl> + <nl> void MacroAssembler : : Integer32ToSmi ( Register dst , Register src ) { <nl> ASSERT_EQ ( 0 , kSmiTag ) ; <nl> if ( ! dst . is ( src ) ) { <nl> Condition MacroAssembler : : CheckSmi ( Register src ) { <nl> <nl> Condition MacroAssembler : : CheckPositiveSmi ( Register src ) { <nl> ASSERT_EQ ( 0 , kSmiTag ) ; <nl> + / / Make mask 0x8000000000000001 and test that both bits are zero . 
<nl> movq ( kScratchRegister , src ) ; <nl> rol ( kScratchRegister , Immediate ( 1 ) ) ; <nl> - testl ( kScratchRegister , Immediate ( 0x03 ) ) ; <nl> + testb ( kScratchRegister , Immediate ( 3 ) ) ; <nl> return zero ; <nl> } <nl> <nl> Condition MacroAssembler : : CheckBothPositiveSmi ( Register first , <nl> } <nl> <nl> <nl> - <nl> Condition MacroAssembler : : CheckEitherSmi ( Register first , Register second ) { <nl> if ( first . is ( second ) ) { <nl> return CheckSmi ( first ) ; <nl> Condition MacroAssembler : : CheckEitherSmi ( Register first , Register second ) { <nl> <nl> <nl> Condition MacroAssembler : : CheckIsMinSmi ( Register src ) { <nl> - ASSERT ( kSmiTag = = 0 & & kSmiTagSize = = 1 ) ; <nl> - movq ( kScratchRegister , src ) ; <nl> - rol ( kScratchRegister , Immediate ( 1 ) ) ; <nl> - cmpq ( kScratchRegister , Immediate ( 1 ) ) ; <nl> - return equal ; <nl> + ASSERT ( ! src . is ( kScratchRegister ) ) ; <nl> + / / If we overflow by subtracting one , it ' s the minimal smi value . <nl> + cmpq ( src , kSmiConstantRegister ) ; <nl> + return overflow ; <nl> } <nl> <nl> <nl> Condition MacroAssembler : : CheckInteger32ValidSmiValue ( Register src ) { <nl> Condition MacroAssembler : : CheckUInteger32ValidSmiValue ( Register src ) { <nl> / / An unsigned 32 - bit integer value is valid as long as the high bit <nl> / / is not set . <nl> - testq ( src , Immediate ( 0x80000000 ) ) ; <nl> - return zero ; <nl> + testl ( src , src ) ; <nl> + return positive ; <nl> } <nl> <nl> <nl> void MacroAssembler : : SmiSub ( Register dst , <nl> } <nl> Assert ( no_overflow , " Smi subtraction overflow " ) ; <nl> } else if ( dst . is ( src1 ) ) { <nl> - movq ( kScratchRegister , src1 ) ; <nl> - subq ( kScratchRegister , src2 ) ; <nl> + movq ( kScratchRegister , src2 ) ; <nl> + cmpq ( src1 , kScratchRegister ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> - movq ( src1 , kScratchRegister ) ; <nl> + subq ( src1 , kScratchRegister ) ; <nl> } else { <nl> movq ( dst , src1 ) ; <nl> subq ( dst , src2 ) ; <nl> void MacroAssembler : : SmiTryAddConstant ( Register dst , <nl> <nl> JumpIfNotSmi ( src , on_not_smi_result ) ; <nl> Register tmp = ( dst . is ( src ) ? kScratchRegister : dst ) ; <nl> - Move ( tmp , constant ) ; <nl> + LoadSmiConstant ( tmp , constant ) ; <nl> addq ( tmp , src ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> if ( dst . is ( src ) ) { <nl> void MacroAssembler : : SmiAddConstant ( Register dst , Register src , Smi * constant ) { <nl> if ( ! dst . is ( src ) ) { <nl> movq ( dst , src ) ; <nl> } <nl> + return ; <nl> } else if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . 
is ( kScratchRegister ) ) ; <nl> - <nl> - Move ( kScratchRegister , constant ) ; <nl> - addq ( dst , kScratchRegister ) ; <nl> + switch ( constant - > value ( ) ) { <nl> + case 1 : <nl> + addq ( dst , kSmiConstantRegister ) ; <nl> + return ; <nl> + case 2 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_2 , 0 ) ) ; <nl> + return ; <nl> + case 4 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_4 , 0 ) ) ; <nl> + return ; <nl> + case 8 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_8 , 0 ) ) ; <nl> + return ; <nl> + default : <nl> + Register constant_reg = GetSmiConstant ( constant ) ; <nl> + addq ( dst , constant_reg ) ; <nl> + return ; <nl> + } <nl> } else { <nl> - Move ( dst , constant ) ; <nl> - addq ( dst , src ) ; <nl> + switch ( constant - > value ( ) ) { <nl> + case 1 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_1 , 0 ) ) ; <nl> + return ; <nl> + case 2 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_2 , 0 ) ) ; <nl> + return ; <nl> + case 4 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_4 , 0 ) ) ; <nl> + return ; <nl> + case 8 : <nl> + lea ( dst , Operand ( src , kSmiConstantRegister , times_8 , 0 ) ) ; <nl> + return ; <nl> + default : <nl> + LoadSmiConstant ( dst , constant ) ; <nl> + addq ( dst , src ) ; <nl> + return ; <nl> + } <nl> } <nl> } <nl> <nl> void MacroAssembler : : SmiAddConstant ( Register dst , <nl> } else if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . is ( kScratchRegister ) ) ; <nl> <nl> - Move ( kScratchRegister , constant ) ; <nl> - addq ( kScratchRegister , dst ) ; <nl> + LoadSmiConstant ( kScratchRegister , constant ) ; <nl> + addq ( kScratchRegister , src ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> movq ( dst , kScratchRegister ) ; <nl> } else { <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> addq ( dst , src ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> } <nl> void MacroAssembler : : SmiSubConstant ( Register dst , Register src , Smi * constant ) { <nl> } <nl> } else if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . is ( kScratchRegister ) ) ; <nl> - <nl> - Move ( kScratchRegister , constant ) ; <nl> - subq ( dst , kScratchRegister ) ; <nl> + Register constant_reg = GetSmiConstant ( constant ) ; <nl> + subq ( dst , constant_reg ) ; <nl> } else { <nl> - / / Subtract by adding the negative , to do it in two operations . <nl> if ( constant - > value ( ) = = Smi : : kMinValue ) { <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> / / Adding and subtracting the min - value gives the same result , it only <nl> / / differs on the overflow bit , which we don ' t check here . <nl> addq ( dst , src ) ; <nl> } else { <nl> / / Subtract by adding the negation . <nl> - Move ( dst , Smi : : FromInt ( - constant - > value ( ) ) ) ; <nl> + LoadSmiConstant ( dst , Smi : : FromInt ( - constant - > value ( ) ) ) ; <nl> addq ( dst , src ) ; <nl> } <nl> } <nl> void MacroAssembler : : SmiSubConstant ( Register dst , <nl> / / We test the non - negativeness before doing the subtraction . <nl> testq ( src , src ) ; <nl> j ( not_sign , on_not_smi_result ) ; <nl> - Move ( kScratchRegister , constant ) ; <nl> + LoadSmiConstant ( kScratchRegister , constant ) ; <nl> subq ( dst , kScratchRegister ) ; <nl> } else { <nl> / / Subtract by adding the negation . 
<nl> - Move ( kScratchRegister , Smi : : FromInt ( - constant - > value ( ) ) ) ; <nl> + LoadSmiConstant ( kScratchRegister , Smi : : FromInt ( - constant - > value ( ) ) ) ; <nl> addq ( kScratchRegister , dst ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> movq ( dst , kScratchRegister ) ; <nl> void MacroAssembler : : SmiSubConstant ( Register dst , <nl> / / We test the non - negativeness before doing the subtraction . <nl> testq ( src , src ) ; <nl> j ( not_sign , on_not_smi_result ) ; <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> / / Adding and subtracting the min - value gives the same result , it only <nl> / / differs on the overflow bit , which we don ' t check here . <nl> addq ( dst , src ) ; <nl> } else { <nl> / / Subtract by adding the negation . <nl> - Move ( dst , Smi : : FromInt ( - ( constant - > value ( ) ) ) ) ; <nl> + LoadSmiConstant ( dst , Smi : : FromInt ( - ( constant - > value ( ) ) ) ) ; <nl> addq ( dst , src ) ; <nl> j ( overflow , on_not_smi_result ) ; <nl> } <nl> void MacroAssembler : : SmiAndConstant ( Register dst , Register src , Smi * constant ) { <nl> xor_ ( dst , dst ) ; <nl> } else if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . is ( kScratchRegister ) ) ; <nl> - Move ( kScratchRegister , constant ) ; <nl> - and_ ( dst , kScratchRegister ) ; <nl> + Register constant_reg = GetSmiConstant ( constant ) ; <nl> + and_ ( dst , constant_reg ) ; <nl> } else { <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> and_ ( dst , src ) ; <nl> } <nl> } <nl> void MacroAssembler : : SmiOr ( Register dst , Register src1 , Register src2 ) { <nl> void MacroAssembler : : SmiOrConstant ( Register dst , Register src , Smi * constant ) { <nl> if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . is ( kScratchRegister ) ) ; <nl> - Move ( kScratchRegister , constant ) ; <nl> - or_ ( dst , kScratchRegister ) ; <nl> + Register constant_reg = GetSmiConstant ( constant ) ; <nl> + or_ ( dst , constant_reg ) ; <nl> } else { <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> or_ ( dst , src ) ; <nl> } <nl> } <nl> void MacroAssembler : : SmiXor ( Register dst , Register src1 , Register src2 ) { <nl> void MacroAssembler : : SmiXorConstant ( Register dst , Register src , Smi * constant ) { <nl> if ( dst . is ( src ) ) { <nl> ASSERT ( ! dst . is ( kScratchRegister ) ) ; <nl> - Move ( kScratchRegister , constant ) ; <nl> - xor_ ( dst , kScratchRegister ) ; <nl> + Register constant_reg = GetSmiConstant ( constant ) ; <nl> + xor_ ( dst , constant_reg ) ; <nl> } else { <nl> - Move ( dst , constant ) ; <nl> + LoadSmiConstant ( dst , constant ) ; <nl> xor_ ( dst , src ) ; <nl> } <nl> } <nl> void MacroAssembler : : SelectNonSmi ( Register dst , <nl> / / If src1 is a smi , dst is src2 , else it is src1 , i . e . , the non - smi . <nl> } <nl> <nl> + <nl> SmiIndex MacroAssembler : : SmiToIndex ( Register dst , <nl> Register src , <nl> int shift ) { <nl> void MacroAssembler : : Push ( Smi * source ) { <nl> if ( is_int32 ( smi ) ) { <nl> push ( Immediate ( static_cast < int32_t > ( smi ) ) ) ; <nl> } else { <nl> - Set ( kScratchRegister , smi ) ; <nl> - push ( kScratchRegister ) ; <nl> + Register constant = GetSmiConstant ( source ) ; <nl> + push ( constant ) ; <nl> } <nl> } <nl> <nl> void MacroAssembler : : EnterExitFrame ( ExitFrame : : Mode mode , int result_size ) { <nl> movq ( rax , rsi ) ; <nl> store_rax ( context_address ) ; <nl> <nl> - / / Setup argv in callee - saved register r15 . 
It is reused in LeaveExitFrame , <nl> + / / Setup argv in callee - saved register r12 . It is reused in LeaveExitFrame , <nl> / / so it must be retained across the C - call . <nl> int offset = StandardFrameConstants : : kCallerSPOffset - kPointerSize ; <nl> - lea ( r15 , Operand ( rbp , r14 , times_pointer_size , offset ) ) ; <nl> + lea ( r12 , Operand ( rbp , r14 , times_pointer_size , offset ) ) ; <nl> <nl> # ifdef ENABLE_DEBUGGER_SUPPORT <nl> / / Save the state of all registers to the stack from the memory <nl> void MacroAssembler : : EnterExitFrame ( ExitFrame : : Mode mode , int result_size ) { <nl> <nl> void MacroAssembler : : LeaveExitFrame ( ExitFrame : : Mode mode , int result_size ) { <nl> / / Registers : <nl> - / / r15 : argv <nl> + / / r12 : argv <nl> # ifdef ENABLE_DEBUGGER_SUPPORT <nl> / / Restore the memory copy of the registers by digging them out from <nl> / / the stack . This is needed to allow nested break points . <nl> void MacroAssembler : : LeaveExitFrame ( ExitFrame : : Mode mode , int result_size ) { <nl> <nl> / / Pop everything up to and including the arguments and the receiver <nl> / / from the caller stack . <nl> - lea ( rsp , Operand ( r15 , 1 * kPointerSize ) ) ; <nl> + lea ( rsp , Operand ( r12 , 1 * kPointerSize ) ) ; <nl> <nl> / / Restore current context from top and clear it in debug mode . <nl> ExternalReference context_address ( Top : : k_context_address ) ; <nl> mmm a / src / x64 / macro - assembler - x64 . h <nl> ppp b / src / x64 / macro - assembler - x64 . h <nl> enum AllocationFlags { <nl> / / Default scratch register used by MacroAssembler ( and other code that needs <nl> / / a spare register ) . The register isn ' t callee save , and not used by the <nl> / / function calling convention . <nl> - static const Register kScratchRegister = { 10 } ; / / r10 . <nl> - static const Register kRootRegister = { 13 } ; / / r13 <nl> + static const Register kScratchRegister = { 10 } ; / / r10 . <nl> + static const Register kSmiConstantRegister = { 15 } ; / / r15 ( callee save ) . <nl> + static const Register kRootRegister = { 13 } ; / / r13 ( callee save ) . <nl> + / / Value of smi in kSmiConstantRegister . <nl> + static const int kSmiConstantRegisterValue = 1 ; <nl> <nl> / / Convenience for platform - independent signatures . <nl> typedef Operand MemOperand ; <nl> class MacroAssembler : public Assembler { <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> / / Smi tagging , untagging and operations on tagged smis . <nl> <nl> + void InitializeSmiConstantRegister ( ) { <nl> + movq ( kSmiConstantRegister , <nl> + reinterpret_cast < uint64_t > ( Smi : : FromInt ( kSmiConstantRegisterValue ) ) , <nl> + RelocInfo : : NONE ) ; <nl> + } <nl> + <nl> / / Conversions between tagged smi values and non - tagged integer values . <nl> <nl> / / Tag an integer value . The result must be known to be a valid smi value . <nl> class MacroAssembler : public Assembler { <nl> <nl> / / Basic Smi operations . 
<nl> void Move ( Register dst , Smi * source ) { <nl> - Set ( dst , reinterpret_cast < int64_t > ( source ) ) ; <nl> + LoadSmiConstant ( dst , source ) ; <nl> } <nl> <nl> void Move ( const Operand & dst , Smi * source ) { <nl> - Set ( dst , reinterpret_cast < int64_t > ( source ) ) ; <nl> + Register constant = GetSmiConstant ( source ) ; <nl> + movq ( dst , constant ) ; <nl> } <nl> <nl> void Push ( Smi * smi ) ; <nl> class MacroAssembler : public Assembler { <nl> private : <nl> bool generating_stub_ ; <nl> bool allow_stub_calls_ ; <nl> + <nl> + / / Returns a register holding the smi value . The register MUST NOT be <nl> + / / modified . It may be the " smi 1 constant " register . <nl> + Register GetSmiConstant ( Smi * value ) ; <nl> + <nl> + / / Moves the smi value to the destination register . <nl> + void LoadSmiConstant ( Register dst , Smi * value ) ; <nl> + <nl> / / This handle will be patched with the code object on installation . <nl> Handle < Object > code_object_ ; <nl> <nl> mmm a / src / x64 / register - allocator - x64 - inl . h <nl> ppp b / src / x64 / register - allocator - x64 - inl . h <nl> namespace internal { <nl> <nl> bool RegisterAllocator : : IsReserved ( Register reg ) { <nl> return reg . is ( rsp ) | | reg . is ( rbp ) | | reg . is ( rsi ) | | <nl> - reg . is ( kScratchRegister ) | | reg . is ( kRootRegister ) ; <nl> + reg . is ( kScratchRegister ) | | reg . is ( kRootRegister ) | | <nl> + reg . is ( kSmiConstantRegister ) ; <nl> } <nl> <nl> <nl> int RegisterAllocator : : ToNumber ( Register reg ) { <nl> 5 , / / r8 <nl> 6 , / / r9 <nl> - 1 , / / r10 Scratch register . <nl> - 9 , / / r11 <nl> - 10 , / / r12 <nl> + 8 , / / r11 <nl> + 9 , / / r12 <nl> - 1 , / / r13 Roots array . This is callee saved . <nl> 7 , / / r14 <nl> - 8 / / r15 <nl> + - 1 / / r15 Smi constant register . <nl> } ; <nl> return kNumbers [ reg . code ( ) ] ; <nl> } <nl> int RegisterAllocator : : ToNumber ( Register reg ) { <nl> Register RegisterAllocator : : ToRegister ( int num ) { <nl> ASSERT ( num > = 0 & & num < kNumRegisters ) ; <nl> const Register kRegisters [ ] = <nl> - { rax , rbx , rcx , rdx , rdi , r8 , r9 , r14 , r15 , r11 , r12 } ; <nl> + { rax , rbx , rcx , rdx , rdi , r8 , r9 , r14 , r11 , r12 } ; <nl> return kRegisters [ num ] ; <nl> } <nl> <nl> mmm a / src / x64 / register - allocator - x64 . h <nl> ppp b / src / x64 / register - allocator - x64 . h <nl> namespace internal { <nl> <nl> class RegisterAllocatorConstants : public AllStatic { <nl> public : <nl> - static const int kNumRegisters = 11 ; <nl> + static const int kNumRegisters = 10 ; <nl> static const int kInvalidRegister = - 1 ; <nl> } ; <nl> <nl> mmm a / test / cctest / test - macro - assembler - x64 . cc <nl> ppp b / test / cctest / test - macro - assembler - x64 . cc <nl> using v8 : : internal : : rsp ; <nl> using v8 : : internal : : r8 ; <nl> using v8 : : internal : : r9 ; <nl> using v8 : : internal : : r11 ; <nl> - using v8 : : internal : : r12 ; / / Remember : r12 . . r15 are callee save ! <nl> + using v8 : : internal : : r12 ; <nl> using v8 : : internal : : r13 ; <nl> using v8 : : internal : : r14 ; <nl> - using v8 : : internal : : r15 ; <nl> using v8 : : internal : : times_pointer_size ; <nl> using v8 : : internal : : FUNCTION_CAST ; <nl> using v8 : : internal : : CodeDesc ; <nl> typedef int ( * F0 ) ( ) ; <nl> <nl> # define __ masm - > <nl> <nl> + <nl> + static void EntryCode ( MacroAssembler * masm ) { <nl> + / / Smi constant register is callee save . 
<nl> + __ push ( v8 : : internal : : kSmiConstantRegister ) ; <nl> + __ InitializeSmiConstantRegister ( ) ; <nl> + } <nl> + <nl> + <nl> + static void ExitCode ( MacroAssembler * masm ) { <nl> + / / Return - 1 if kSmiConstantRegister was clobbered during the test . <nl> + __ Move ( rdx , Smi : : FromInt ( 1 ) ) ; <nl> + __ cmpq ( rdx , v8 : : internal : : kSmiConstantRegister ) ; <nl> + __ movq ( rdx , Immediate ( - 1 ) ) ; <nl> + __ cmovq ( not_equal , rax , rdx ) ; <nl> + __ pop ( v8 : : internal : : kSmiConstantRegister ) ; <nl> + } <nl> + <nl> + <nl> TEST ( Smi ) { <nl> / / Check that C + + Smi operations work as expected . <nl> int64_t test_numbers [ ] = { <nl> TEST ( SmiMove ) { <nl> MacroAssembler assembler ( buffer , static_cast < int > ( actual_size ) ) ; <nl> MacroAssembler * masm = & assembler ; / / Create a pointer for the __ macro . <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestMoveSmi ( masm , & exit , 1 , Smi : : FromInt ( 0 ) ) ; <nl> TEST ( SmiMove ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiCompare ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiCompare ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiCompare ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( Integer32ToSmi ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> __ movq ( rax , Immediate ( 1 ) ) ; / / Test number . <nl> TEST ( Integer32ToSmi ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( Integer64PlusConstantToSmi ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> int64_t twice_max = static_cast < int64_t > ( Smi : : kMaxValue ) * 2 ; <nl> TEST ( Integer64PlusConstantToSmi ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiCheck ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> Condition cond ; <nl> <nl> TEST ( SmiCheck ) { <nl> __ xor_ ( rax , rax ) ; <nl> <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiNeg ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiNeg ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiNeg ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiAdd ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> / / No - overflow tests . <nl> TEST ( SmiAdd ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . 
<nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiSub ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> SmiSubTest ( masm , & exit , 0x10 , 1 , 2 ) ; <nl> TEST ( SmiSub ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiMul ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiMul ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiMul ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> void TestSmiDiv ( MacroAssembler * masm , Label * exit , int id , int x , int y ) { <nl> # endif <nl> bool fraction = ! division_by_zero & & ! overflow & & ( x % y ! = 0 ) ; <nl> __ Move ( r11 , Smi : : FromInt ( x ) ) ; <nl> - __ Move ( r12 , Smi : : FromInt ( y ) ) ; <nl> + __ Move ( r14 , Smi : : FromInt ( y ) ) ; <nl> if ( ! fraction & & ! overflow & & ! negative_zero & & ! division_by_zero ) { <nl> / / Division succeeds <nl> __ movq ( rcx , r11 ) ; <nl> - __ movq ( r15 , Immediate ( id ) ) ; <nl> + __ movq ( r12 , Immediate ( id ) ) ; <nl> int result = x / y ; <nl> __ Move ( r8 , Smi : : FromInt ( result ) ) ; <nl> - __ SmiDiv ( r9 , rcx , r12 , exit ) ; <nl> - / / Might have destroyed rcx and r12 . <nl> - __ incq ( r15 ) ; <nl> + __ SmiDiv ( r9 , rcx , r14 , exit ) ; <nl> + / / Might have destroyed rcx and r14 . <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( r9 , r8 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ movq ( rcx , r11 ) ; <nl> - __ Move ( r12 , Smi : : FromInt ( y ) ) ; <nl> + __ Move ( r14 , Smi : : FromInt ( y ) ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> - __ SmiDiv ( rcx , rcx , r12 , exit ) ; <nl> + __ incq ( r12 ) ; <nl> + __ SmiDiv ( rcx , rcx , r14 , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r8 ) ; <nl> __ j ( not_equal , exit ) ; <nl> } else { <nl> / / Division fails . 
<nl> - __ movq ( r15 , Immediate ( id + 8 ) ) ; <nl> + __ movq ( r12 , Immediate ( id + 8 ) ) ; <nl> <nl> Label fail_ok , fail_ok2 ; <nl> __ movq ( rcx , r11 ) ; <nl> - __ SmiDiv ( r9 , rcx , r12 , & fail_ok ) ; <nl> + __ SmiDiv ( r9 , rcx , r14 , & fail_ok ) ; <nl> __ jmp ( exit ) ; <nl> __ bind ( & fail_ok ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> - __ SmiDiv ( rcx , rcx , r12 , & fail_ok2 ) ; <nl> + __ incq ( r12 ) ; <nl> + __ SmiDiv ( rcx , rcx , r14 , & fail_ok2 ) ; <nl> __ jmp ( exit ) ; <nl> __ bind ( & fail_ok2 ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> } <nl> TEST ( SmiDiv ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> + __ push ( r14 ) ; <nl> __ push ( r12 ) ; <nl> - __ push ( r15 ) ; <nl> TestSmiDiv ( masm , & exit , 0x10 , 1 , 1 ) ; <nl> TestSmiDiv ( masm , & exit , 0x20 , 1 , 0 ) ; <nl> TestSmiDiv ( masm , & exit , 0x30 , - 1 , 0 ) ; <nl> TEST ( SmiDiv ) { <nl> TestSmiDiv ( masm , & exit , 0x130 , Smi : : kMinValue , Smi : : kMinValue ) ; <nl> TestSmiDiv ( masm , & exit , 0x140 , Smi : : kMinValue , - 1 ) ; <nl> <nl> - __ xor_ ( r15 , r15 ) ; / / Success . <nl> + __ xor_ ( r12 , r12 ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> - __ movq ( rax , r15 ) ; <nl> - __ pop ( r15 ) ; <nl> + __ movq ( rax , r12 ) ; <nl> __ pop ( r12 ) ; <nl> + __ pop ( r14 ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> void TestSmiMod ( MacroAssembler * masm , Label * exit , int id , int x , int y ) { <nl> bool negative_zero = ( ! fraction & & x < 0 ) ; <nl> __ Move ( rcx , Smi : : FromInt ( x ) ) ; <nl> __ movq ( r11 , rcx ) ; <nl> - __ Move ( r12 , Smi : : FromInt ( y ) ) ; <nl> + __ Move ( r14 , Smi : : FromInt ( y ) ) ; <nl> if ( ! division_overflow & & ! negative_zero & & ! division_by_zero ) { <nl> / / Modulo succeeds <nl> - __ movq ( r15 , Immediate ( id ) ) ; <nl> + __ movq ( r12 , Immediate ( id ) ) ; <nl> int result = x % y ; <nl> __ Move ( r8 , Smi : : FromInt ( result ) ) ; <nl> - __ SmiMod ( r9 , rcx , r12 , exit ) ; <nl> + __ SmiMod ( r9 , rcx , r14 , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( r9 , r8 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> - __ SmiMod ( rcx , rcx , r12 , exit ) ; <nl> + __ incq ( r12 ) ; <nl> + __ SmiMod ( rcx , rcx , r14 , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r8 ) ; <nl> __ j ( not_equal , exit ) ; <nl> } else { <nl> / / Modulo fails . 
<nl> - __ movq ( r15 , Immediate ( id + 8 ) ) ; <nl> + __ movq ( r12 , Immediate ( id + 8 ) ) ; <nl> <nl> Label fail_ok , fail_ok2 ; <nl> - __ SmiMod ( r9 , rcx , r12 , & fail_ok ) ; <nl> + __ SmiMod ( r9 , rcx , r14 , & fail_ok ) ; <nl> __ jmp ( exit ) ; <nl> __ bind ( & fail_ok ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> - __ SmiMod ( rcx , rcx , r12 , & fail_ok2 ) ; <nl> + __ incq ( r12 ) ; <nl> + __ SmiMod ( rcx , rcx , r14 , & fail_ok2 ) ; <nl> __ jmp ( exit ) ; <nl> __ bind ( & fail_ok2 ) ; <nl> <nl> - __ incq ( r15 ) ; <nl> + __ incq ( r12 ) ; <nl> __ SmiCompare ( rcx , r11 ) ; <nl> __ j ( not_equal , exit ) ; <nl> } <nl> TEST ( SmiMod ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> + __ push ( r14 ) ; <nl> __ push ( r12 ) ; <nl> - __ push ( r15 ) ; <nl> TestSmiMod ( masm , & exit , 0x10 , 1 , 1 ) ; <nl> TestSmiMod ( masm , & exit , 0x20 , 1 , 0 ) ; <nl> TestSmiMod ( masm , & exit , 0x30 , - 1 , 0 ) ; <nl> TEST ( SmiMod ) { <nl> TestSmiMod ( masm , & exit , 0x130 , Smi : : kMinValue , Smi : : kMinValue ) ; <nl> TestSmiMod ( masm , & exit , 0x140 , Smi : : kMinValue , - 1 ) ; <nl> <nl> - __ xor_ ( r15 , r15 ) ; / / Success . <nl> + __ xor_ ( r12 , r12 ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> - __ movq ( rax , r15 ) ; <nl> - __ pop ( r15 ) ; <nl> + __ movq ( rax , r12 ) ; <nl> __ pop ( r12 ) ; <nl> + __ pop ( r14 ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiIndex ) { <nl> / / Allocate an executable page of memory . <nl> size_t actual_size ; <nl> byte * buffer = <nl> - static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 2 , <nl> + static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 3 , <nl> & actual_size , <nl> true ) ) ; <nl> CHECK ( buffer ) ; <nl> TEST ( SmiIndex ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiIndex ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiIndex ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiSelectNonSmi ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; / / Avoid inline checks . <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSelectNonSmi ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiSelectNonSmi ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiAnd ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiAnd ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiAnd ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiOr ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiOr ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiOr ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . 
<nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiXor ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiXor ( masm , & exit , 0x10 , 0 , 0 ) ; <nl> TEST ( SmiXor ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiNot ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiNot ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiNot ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiShiftLeft ) { <nl> / / Allocate an executable page of memory . <nl> size_t actual_size ; <nl> byte * buffer = <nl> - static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 3 , <nl> + static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 4 , <nl> & actual_size , <nl> true ) ) ; <nl> CHECK ( buffer ) ; <nl> TEST ( SmiShiftLeft ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiShiftLeft ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiShiftLeft ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiShiftLogicalRight ) { <nl> / / Allocate an executable page of memory . <nl> size_t actual_size ; <nl> byte * buffer = <nl> - static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 2 , <nl> + static_cast < byte * > ( OS : : Allocate ( Assembler : : kMinimalBufferSize * 3 , <nl> & actual_size , <nl> true ) ) ; <nl> CHECK ( buffer ) ; <nl> TEST ( SmiShiftLogicalRight ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiShiftLogicalRight ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiShiftLogicalRight ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( SmiShiftArithmeticRight ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestSmiShiftArithmeticRight ( masm , & exit , 0x10 , 0 ) ; <nl> TEST ( SmiShiftArithmeticRight ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . <nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( PositiveSmiTimesPowerOfTwoToInteger64 ) { <nl> <nl> MacroAssembler * masm = & assembler ; <nl> masm - > set_allow_stub_calls ( false ) ; <nl> + EntryCode ( masm ) ; <nl> Label exit ; <nl> <nl> TestPositiveSmiPowerUp ( masm , & exit , 0x20 , 0 ) ; <nl> TEST ( PositiveSmiTimesPowerOfTwoToInteger64 ) { <nl> <nl> __ xor_ ( rax , rax ) ; / / Success . 
<nl> __ bind ( & exit ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> CodeDesc desc ; <nl> TEST ( OperandOffset ) { <nl> masm - > set_allow_stub_calls ( false ) ; <nl> Label exit ; <nl> <nl> - __ push ( r12 ) ; <nl> + EntryCode ( masm ) ; <nl> __ push ( r13 ) ; <nl> + __ push ( r14 ) ; <nl> __ push ( rbx ) ; <nl> __ push ( rbp ) ; <nl> __ push ( Immediate ( 0x100 ) ) ; / / < - - rbp <nl> TEST ( OperandOffset ) { <nl> / / r12 = rsp [ 3 ] <nl> / / rbx = rsp [ 5 ] <nl> / / r13 = rsp [ 7 ] <nl> - __ lea ( r12 , Operand ( rsp , 3 * kPointerSize ) ) ; <nl> + __ lea ( r14 , Operand ( rsp , 3 * kPointerSize ) ) ; <nl> __ lea ( r13 , Operand ( rbp , - 3 * kPointerSize ) ) ; <nl> __ lea ( rbx , Operand ( rbp , - 5 * kPointerSize ) ) ; <nl> __ movl ( rcx , Immediate ( 2 ) ) ; <nl> TEST ( OperandOffset ) { <nl> __ lea ( rsp , Operand ( rbp , kPointerSize ) ) ; <nl> __ pop ( rbp ) ; <nl> __ pop ( rbx ) ; <nl> + __ pop ( r14 ) ; <nl> __ pop ( r13 ) ; <nl> - __ pop ( r12 ) ; <nl> + ExitCode ( masm ) ; <nl> __ ret ( 0 ) ; <nl> <nl> <nl>
X64 : Added register holding Smi : : FromInt ( 1 ) .
v8/v8
04da7b90c9e8d29f20a72c1fc5029709095b6d69
2010-07-02T14:09:35Z
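A standalone illustration, not part of the v8/v8 record above and not V8 code: the LoadSmiConstant switch in that diff special-cases the values 1, 2, 3, 4, 5, 8 and 9 because those are exactly the multiples of the constant held in kSmiConstantRegister that a single x64 lea can form as base + index * scale, with scale limited to 1, 2, 4 or 8 and the base being either the constant register or a zeroed register. The C++ sketch below only checks that arithmetic; ReachableWithSingleLea is a hypothetical helper name, not a V8 function.

#include <cassert>
#include <cstdio>
#include <initializer_list>

// True if value * C can be formed by one lea of the form
// base + index * scale, where the index register holds C, the base
// holds either C or 0, and scale is 1, 2, 4 or 8. These are the cases
// the LoadSmiConstant switch handles without a full 64-bit constant load.
static bool ReachableWithSingleLea(int value) {
  for (int scale : {1, 2, 4, 8}) {
    if (value == scale) return true;      // base = 0:  0 + C * scale
    if (value == scale + 1) return true;  // base = C:  C + C * scale
  }
  return false;
}

int main() {
  for (int v : {1, 2, 3, 4, 5, 8, 9}) assert(ReachableWithSingleLea(v));
  for (int v : {6, 7, 10, 12}) assert(!ReachableWithSingleLea(v));
  std::printf("single-lea smi multiples: 1 2 3 4 5 8 9\n");
  return 0;
}

In the real code above, negative constants reuse these cases and then negate the result, and any other value falls back to a movq of the full tagged constant.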
mmm a / test / cpp / end2end / client_lb_end2end_test . cc <nl> ppp b / test / cpp / end2end / client_lb_end2end_test . cc <nl> TEST_F ( ClientLbEnd2endTest , PickFirstResetConnectionBackoff ) { <nl> channel - > WaitForConnected ( grpc_timeout_milliseconds_to_deadline ( 10 ) ) ) ; <nl> / / Reset connection backoff . <nl> experimental : : ChannelResetConnectionBackoff ( channel . get ( ) ) ; <nl> - / / Wait for connect . Should happen ~ immediately . <nl> + / / Wait for connect . Should happen as soon as the client connects to <nl> + / / the newly started server , which should be before the initial <nl> + / / backoff timeout elapses . <nl> EXPECT_TRUE ( <nl> - channel - > WaitForConnected ( grpc_timeout_milliseconds_to_deadline ( 10 ) ) ) ; <nl> + channel - > WaitForConnected ( grpc_timeout_milliseconds_to_deadline ( 20 ) ) ) ; <nl> const gpr_timespec t1 = gpr_now ( GPR_CLOCK_MONOTONIC ) ; <nl> const grpc_millis waited_ms = gpr_time_to_millis ( gpr_time_sub ( t1 , t0 ) ) ; <nl> gpr_log ( GPR_DEBUG , " Waited % " PRId64 " milliseconds " , waited_ms ) ; <nl>
Increase test timeout to avoid flakiness .
grpc/grpc
118fe7405f6d9a039afa445ac3c525a5621d7150
2020-01-13T20:47:06Z
mmm a / lib / SILOptimizer / IPO / GlobalOpt . cpp <nl> ppp b / lib / SILOptimizer / IPO / GlobalOpt . cpp <nl> bool SILGlobalOpt : : tryRemoveGlobalAddr ( SILGlobalVariable * global ) { <nl> if ( ! isa < StoreInst > ( use - > getUser ( ) ) ) <nl> return false ; <nl> } <nl> - InstToRemove . addUsersOfAllResultsToWorklist ( addr ) ; <nl> + <nl> InstToRemove . add ( addr ) ; <nl> } <nl> <nl> bool SILGlobalOpt : : tryRemoveUnusedGlobal ( SILGlobalVariable * global ) { <nl> if ( ! isSafeToRemove ( global ) ) <nl> return false ; <nl> <nl> + if ( GlobalVarSkipProcessing . count ( global ) ) <nl> + return false ; <nl> + <nl> if ( GlobalVarSkipProcessing . count ( global ) | | GlobalAddrMap [ global ] . size ( ) | | <nl> GlobalAccessMap [ global ] . size ( ) | | GlobalLoadMap [ global ] . size ( ) | | <nl> - AllocGlobalStore . count ( global ) | | GlobalVarStore . count ( global ) ) <nl> - return false ; <nl> + AllocGlobalStore . count ( global ) | | GlobalVarStore . count ( global ) ) { <nl> + SmallVector < SILInstruction * , 4 > deadInsts ; <nl> + while ( ! InstToRemove . isEmpty ( ) ) { <nl> + auto * inst = InstToRemove . pop_back_val ( ) ; <nl> + deadInsts . push_back ( inst ) ; <nl> + } <nl> + InstToRemove . addInitialGroup ( deadInsts ) ; <nl> + <nl> + for ( auto * inst : deadInsts ) { <nl> + if ( GlobalAddrMap [ global ] . size ( ) & & <nl> + ! std : : any_of ( GlobalAddrMap [ global ] . begin ( ) , <nl> + GlobalAddrMap [ global ] . end ( ) , <nl> + [ & inst ] ( SILInstruction * addr ) { return inst = = addr ; } ) ) <nl> + return false ; <nl> + if ( GlobalAccessMap [ global ] . size ( ) & & <nl> + ! std : : any_of ( <nl> + GlobalAccessMap [ global ] . begin ( ) , GlobalAccessMap [ global ] . end ( ) , <nl> + [ & inst ] ( SILInstruction * access ) { return inst = = access ; } ) ) <nl> + return false ; <nl> + if ( GlobalLoadMap [ global ] . size ( ) & & <nl> + ! std : : any_of ( GlobalLoadMap [ global ] . begin ( ) , <nl> + GlobalLoadMap [ global ] . end ( ) , <nl> + [ & inst ] ( SILInstruction * load ) { return inst = = load ; } ) ) <nl> + return false ; <nl> + <nl> + if ( AllocGlobalStore . count ( global ) & & AllocGlobalStore [ global ] ! = inst ) <nl> + return false ; <nl> + <nl> + if ( GlobalVarStore . count ( global ) & & GlobalVarStore [ global ] ! = inst ) <nl> + return false ; <nl> + } <nl> + } <nl> <nl> GlobalsToRemove . push_back ( global ) ; <nl> return true ; <nl> bool SILGlobalOpt : : run ( ) { <nl> <nl> / / Erase the instructions that we have marked for deletion . <nl> while ( ! InstToRemove . isEmpty ( ) ) { <nl> - InstToRemove . pop_back_val ( ) - > eraseFromParent ( ) ; <nl> + eraseUsesOfInstruction ( InstToRemove . pop_back_val ( ) ) ; <nl> } <nl> <nl> - / / After we erase some instructions , re - collect . <nl> - reset ( ) ; <nl> - collect ( ) ; <nl> - <nl> for ( auto & global : Module - > getSILGlobals ( ) ) { <nl> HasChanged | = tryRemoveUnusedGlobal ( & global ) ; <nl> } <nl>
Remove double - collect
apple/swift
b81e47b00bbf615c7f56c14239c2c0762975ced6
2019-12-14T01:56:26Z
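A standalone sketch, not taken from the Swift sources: the change in the apple/swift record above only lets tryRemoveUnusedGlobal drop a global with recorded uses when every instruction already queued for deletion is among the uses recorded for that global (its addresses, accesses, loads and stores), and bails out otherwise. The simplified C++ below models that kind of membership check in generic terms; the Inst type and the SafeToDropGlobal helper are hypothetical, and the real pass consults several separate per-category maps rather than one set.

#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Minimal model of the bail-out: a global may only be removed if each
// pending-deletion instruction that touches it is one of its recorded uses.
struct Inst {
  std::string touched_global;  // empty if the instruction touches no global
};

using UseSet = std::unordered_set<const Inst*>;

static bool SafeToDropGlobal(
    const std::string& global,
    const std::vector<const Inst*>& pending_deletion,
    const std::unordered_map<std::string, UseSet>& recorded_uses) {
  auto it = recorded_uses.find(global);
  for (const Inst* inst : pending_deletion) {
    if (inst->touched_global != global) continue;
    // An unrecorded use of this global means it is not safe to remove.
    if (it == recorded_uses.end() || it->second.count(inst) == 0) return false;
  }
  return true;
}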
mmm a / arangod / Utils / CollectionNameResolver . cpp <nl> ppp b / arangod / Utils / CollectionNameResolver . cpp <nl> std : : string CollectionNameResolver : : getCollectionNameCluster ( <nl> } <nl> } <nl> <nl> - LOG_TOPIC ( ERR , arangodb : : Logger : : FIXME ) < < " CollectionNameResolver : was not able to resolve id " < < cid ; <nl> + LOG_TOPIC ( DEBUG , arangodb : : Logger : : FIXME ) < < " CollectionNameResolver : was not able to resolve id " < < cid ; <nl> return " _unknown " ; <nl> } <nl> <nl>
turn off spam message : demote CollectionNameResolver log from ERR to DEBUG
arangodb/arangodb
7c6bd3b0e2869ba1f292517dd48efd40caa0862c
2018-07-20T17:50:24Z
mmm a / src / library_browser . js <nl> ppp b / src / library_browser . js <nl> mergeInto ( LibraryManager . library , { <nl> $ Browser__postset : ' Module [ " requestFullScreen " ] = function ( ) { Browser . requestFullScreen ( ) } ; \ n ' + / / exports <nl> ' Module [ " requestAnimationFrame " ] = function ( func ) { Browser . requestAnimationFrame ( func ) } ; \ n ' + <nl> ' Module [ " pauseMainLoop " ] = function ( ) { Browser . mainLoop . pause ( ) } ; \ n ' + <nl> - ' Module [ " resumeMainLoop " ] = function ( ) { Browser . mainLoop . resume ( ) } ; \ n ' + <nl> - ' Module [ " preloadPlugins " ] = Browser . preloadPlugins ; \ n ' , <nl> + ' Module [ " resumeMainLoop " ] = function ( ) { Browser . mainLoop . resume ( ) } ; \ n ' , <nl> $ Browser : { <nl> mainLoop : { <nl> scheduler : null , <nl> mergeInto ( LibraryManager . library , { <nl> Browser . BlobBuilder = typeof MozBlobBuilder ! = " undefined " ? MozBlobBuilder : ( typeof WebKitBlobBuilder ! = " undefined " ? WebKitBlobBuilder : console . log ( " warning : cannot build blobs " ) ) ; <nl> Browser . URLObject = typeof window ! = " undefined " ? ( window . URL ? window . URL : window . webkitURL ) : console . log ( " warning : cannot create object URLs " ) ; <nl> <nl> - / / preload plugins <nl> + / / Support for plugins that can process preloaded files . You can add more of these to <nl> + / / your app by creating and appending to Module . preloadPlugins . <nl> + / / <nl> + / / Each plugin is asked if it can handle a file based on the file ' s name . If it can , <nl> + / / it is given the file ' s raw data . When it is done , it calls a callback with the file ' s <nl> + / / ( possibly modified ) data . For example , a plugin might decompress a file , or it <nl> + / / might create some side data structure for use later ( like an Image element , etc . ) . <nl> + <nl> + if ( ! Module [ " preloadPlugins " ] ) Module [ " preloadPlugins " ] = [ ] ; <nl> + <nl> var imagePlugin = { } ; <nl> imagePlugin [ ' canHandle ' ] = function ( name ) { <nl> return name . substr ( - 4 ) in { ' . jpg ' : 1 , ' . png ' : 1 , ' . bmp ' : 1 } ; <nl> mergeInto ( LibraryManager . library , { <nl> } ) ; <nl> addRunDependency ( ' al ' + url ) ; <nl> } , <nl> - <nl> - / / A list of plugins that can process preloaded files . You can add more of these to <nl> - / / your app by appending to Module . preloadPlugins . <nl> - / / <nl> - / / Each plugin is asked if it can handle a file based on the file ' s name . If it can , <nl> - / / it is given the file ' s raw data . When it is done , it calls a callback with the file ' s <nl> - / / ( possibly modified ) data . For example , a plugin might decompress a file , or it <nl> - / / might create some side data structure for use later ( like an Image element , etc . ) . <nl> - preloadPlugins : [ ] , <nl> } , <nl> <nl> emscripten_async_wget : function ( url , file , onload , onerror ) { <nl>
allow defining preload plugins from outside
emscripten-core/emscripten
bfc0bd5a11bbf0a1c2bdf484ff2bfa26c87fdf74
2012-07-23T03:52:43Z
mmm a / hphp / runtime / version . h <nl> ppp b / hphp / runtime / version . h <nl> <nl> * / <nl> # ifndef HHVM_VERSION_OVERRIDE <nl> # define HHVM_VERSION_MAJOR 4 <nl> - # define HHVM_VERSION_MINOR 73 <nl> + # define HHVM_VERSION_MINOR 74 <nl> # define HHVM_VERSION_PATCH 0 <nl> # define HHVM_VERSION_SUFFIX " - dev " <nl> # endif <nl>
update version . h
facebook/hhvm
439178498d188cdbd63ae8639ab11f3d683dc3d7
2020-09-02T17:00:47Z
mmm a / Makefile <nl> ppp b / Makefile <nl> grpc_jwt_verifier_test : $ ( BINDIR ) / $ ( CONFIG ) / grpc_jwt_verifier_test <nl> grpc_print_google_default_creds_token : $ ( BINDIR ) / $ ( CONFIG ) / grpc_print_google_default_creds_token <nl> grpc_security_connector_test : $ ( BINDIR ) / $ ( CONFIG ) / grpc_security_connector_test <nl> grpc_verify_jwt : $ ( BINDIR ) / $ ( CONFIG ) / grpc_verify_jwt <nl> + hpack_parser_fuzzer_test : $ ( BINDIR ) / $ ( CONFIG ) / hpack_parser_fuzzer_test <nl> hpack_parser_test : $ ( BINDIR ) / $ ( CONFIG ) / hpack_parser_test <nl> hpack_table_test : $ ( BINDIR ) / $ ( CONFIG ) / hpack_table_test <nl> httpcli_format_request_test : $ ( BINDIR ) / $ ( CONFIG ) / httpcli_format_request_test <nl> + httpcli_fuzzer_test : $ ( BINDIR ) / $ ( CONFIG ) / httpcli_fuzzer_test <nl> httpcli_parser_test : $ ( BINDIR ) / $ ( CONFIG ) / httpcli_parser_test <nl> httpcli_test : $ ( BINDIR ) / $ ( CONFIG ) / httpcli_test <nl> httpscli_test : $ ( BINDIR ) / $ ( CONFIG ) / httpscli_test <nl> transport_connectivity_state_test : $ ( BINDIR ) / $ ( CONFIG ) / transport_connectivity_st <nl> transport_metadata_test : $ ( BINDIR ) / $ ( CONFIG ) / transport_metadata_test <nl> transport_security_test : $ ( BINDIR ) / $ ( CONFIG ) / transport_security_test <nl> udp_server_test : $ ( BINDIR ) / $ ( CONFIG ) / udp_server_test <nl> + uri_fuzzer_test : $ ( BINDIR ) / $ ( CONFIG ) / uri_fuzzer_test <nl> uri_parser_test : $ ( BINDIR ) / $ ( CONFIG ) / uri_parser_test <nl> workqueue_test : $ ( BINDIR ) / $ ( CONFIG ) / workqueue_test <nl> alarm_cpp_test : $ ( BINDIR ) / $ ( CONFIG ) / alarm_cpp_test <nl> endif <nl> endif <nl> <nl> <nl> + HPACK_PARSER_FUZZER_TEST_SRC = \ <nl> + test / core / transport / chttp2 / hpack_parser_fuzzer_test . c \ <nl> + <nl> + HPACK_PARSER_FUZZER_TEST_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( basename $ ( HPACK_PARSER_FUZZER_TEST_SRC ) ) ) ) <nl> + ifeq ( $ ( NO_SECURE ) , true ) <nl> + <nl> + # You can ' t build secure targets if you don ' t have OpenSSL . <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / hpack_parser_fuzzer_test : openssl_dep_error <nl> + <nl> + else <nl> + <nl> + <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / hpack_parser_fuzzer_test : $ ( HPACK_PARSER_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a <nl> + $ ( E ) " [ LD ] Linking $ @ " <nl> + $ ( Q ) mkdir - p ` dirname $ @ ` <nl> + $ ( Q ) $ ( LDXX ) $ ( LDFLAGS ) $ ( HPACK_PARSER_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a $ ( LDLIBS ) $ ( LDLIBS_SECURE ) - lFuzzer - o $ ( BINDIR ) / $ ( CONFIG ) / hpack_parser_fuzzer_test <nl> + <nl> + endif <nl> + <nl> + $ ( OBJDIR ) / $ ( CONFIG ) / test / core / transport / chttp2 / hpack_parser_fuzzer_test . o : $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a <nl> + <nl> + deps_hpack_parser_fuzzer_test : $ ( HPACK_PARSER_FUZZER_TEST_OBJS : . o = . dep ) <nl> + <nl> + ifneq ( $ ( NO_SECURE ) , true ) <nl> + ifneq ( $ ( NO_DEPS ) , true ) <nl> + - include $ ( HPACK_PARSER_FUZZER_TEST_OBJS : . o = . 
dep ) <nl> + endif <nl> + endif <nl> + <nl> + <nl> HPACK_PARSER_TEST_SRC = \ <nl> test / core / transport / chttp2 / hpack_parser_test . c \ <nl> <nl> endif <nl> endif <nl> <nl> <nl> + HTTPCLI_FUZZER_TEST_SRC = \ <nl> + test / core / httpcli / fuzzer . c \ <nl> + <nl> + HTTPCLI_FUZZER_TEST_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( basename $ ( HTTPCLI_FUZZER_TEST_SRC ) ) ) ) <nl> + ifeq ( $ ( NO_SECURE ) , true ) <nl> + <nl> + # You can ' t build secure targets if you don ' t have OpenSSL . <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / httpcli_fuzzer_test : openssl_dep_error <nl> + <nl> + else <nl> + <nl> + <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / httpcli_fuzzer_test : $ ( HTTPCLI_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a <nl> + $ ( E ) " [ LD ] Linking $ @ " <nl> + $ ( Q ) mkdir - p ` dirname $ @ ` <nl> + $ ( Q ) $ ( LDXX ) $ ( LDFLAGS ) $ ( HTTPCLI_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a $ ( LDLIBS ) $ ( LDLIBS_SECURE ) - lFuzzer - o $ ( BINDIR ) / $ ( CONFIG ) / httpcli_fuzzer_test <nl> + <nl> + endif <nl> + <nl> + $ ( OBJDIR ) / $ ( CONFIG ) / test / core / httpcli / fuzzer . o : $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a <nl> + <nl> + deps_httpcli_fuzzer_test : $ ( HTTPCLI_FUZZER_TEST_OBJS : . o = . dep ) <nl> + <nl> + ifneq ( $ ( NO_SECURE ) , true ) <nl> + ifneq ( $ ( NO_DEPS ) , true ) <nl> + - include $ ( HTTPCLI_FUZZER_TEST_OBJS : . o = . dep ) <nl> + endif <nl> + endif <nl> + <nl> + <nl> HTTPCLI_PARSER_TEST_SRC = \ <nl> test / core / httpcli / parser_test . c \ <nl> <nl> endif <nl> endif <nl> <nl> <nl> + URI_FUZZER_TEST_SRC = \ <nl> + test / core / client_config / uri_fuzzer_test . c \ <nl> + <nl> + URI_FUZZER_TEST_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( basename $ ( URI_FUZZER_TEST_SRC ) ) ) ) <nl> + ifeq ( $ ( NO_SECURE ) , true ) <nl> + <nl> + # You can ' t build secure targets if you don ' t have OpenSSL . <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / uri_fuzzer_test : openssl_dep_error <nl> + <nl> + else <nl> + <nl> + <nl> + <nl> + $ ( BINDIR ) / $ ( CONFIG ) / uri_fuzzer_test : $ ( URI_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a <nl> + $ ( E ) " [ LD ] Linking $ @ " <nl> + $ ( Q ) mkdir - p ` dirname $ @ ` <nl> + $ ( Q ) $ ( LDXX ) $ ( LDFLAGS ) $ ( URI_FUZZER_TEST_OBJS ) $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . a $ ( LDLIBS ) $ ( LDLIBS_SECURE ) - lFuzzer - o $ ( BINDIR ) / $ ( CONFIG ) / uri_fuzzer_test <nl> + <nl> + endif <nl> + <nl> + $ ( OBJDIR ) / $ ( CONFIG ) / test / core / client_config / uri_fuzzer_test . o : $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgrpc . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr_test_util . a $ ( LIBDIR ) / $ ( CONFIG ) / libgpr . 
a <nl> + <nl> + deps_uri_fuzzer_test : $ ( URI_FUZZER_TEST_OBJS : . o = . dep ) <nl> + <nl> + ifneq ( $ ( NO_SECURE ) , true ) <nl> + ifneq ( $ ( NO_DEPS ) , true ) <nl> + - include $ ( URI_FUZZER_TEST_OBJS : . o = . dep ) <nl> + endif <nl> + endif <nl> + <nl> + <nl> URI_PARSER_TEST_SRC = \ <nl> test / core / client_config / uri_parser_test . c \ <nl> <nl> mmm a / build . yaml <nl> ppp b / build . yaml <nl> targets : <nl> - grpc <nl> - gpr_test_util <nl> - gpr <nl> + - name : hpack_parser_fuzzer_test <nl> + build : fuzzer <nl> + language : c <nl> + src : <nl> + - test / core / transport / chttp2 / hpack_parser_fuzzer_test . c <nl> + deps : <nl> + - grpc_test_util <nl> + - grpc <nl> + - gpr_test_util <nl> + - gpr <nl> + corpus_dirs : <nl> + - test / core / transport / chttp2 / hpack_parser_corpus <nl> - name : hpack_parser_test <nl> build : test <nl> language : c <nl> targets : <nl> - grpc <nl> - gpr_test_util <nl> - gpr <nl> + - name : httpcli_fuzzer_test <nl> + build : fuzzer <nl> + language : c <nl> + src : <nl> + - test / core / httpcli / fuzzer . c <nl> + deps : <nl> + - grpc_test_util <nl> + - grpc <nl> + - gpr_test_util <nl> + - gpr <nl> + corpus_dirs : <nl> + - test / core / httpcli / corpus <nl> - name : httpcli_parser_test <nl> build : test <nl> language : c <nl> targets : <nl> - mac <nl> - linux <nl> - posix <nl> + - name : uri_fuzzer_test <nl> + build : fuzzer <nl> + language : c <nl> + src : <nl> + - test / core / client_config / uri_fuzzer_test . c <nl> + deps : <nl> + - grpc_test_util <nl> + - grpc <nl> + - gpr_test_util <nl> + - gpr <nl> + corpus_dirs : <nl> + - test / core / client_config / uri_corpus <nl> - name : uri_parser_test <nl> build : test <nl> language : c <nl> mmm a / src / core / httpcli / parser . c <nl> ppp b / src / core / httpcli / parser . c <nl> <nl> # include < grpc / support / log . h > <nl> # include < grpc / support / useful . h > <nl> <nl> + extern int grpc_http_trace ; <nl> + <nl> static int handle_response_line ( grpc_httpcli_parser * parser ) { <nl> uint8_t * beg = parser - > cur_line ; <nl> uint8_t * cur = beg ; <nl> static int handle_response_line ( grpc_httpcli_parser * parser ) { <nl> return 1 ; <nl> <nl> error : <nl> - gpr_log ( GPR_ERROR , " Failed parsing response line " ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , " Failed parsing response line " ) ; <nl> + } <nl> return 0 ; <nl> } <nl> <nl> static int add_header ( grpc_httpcli_parser * parser ) { <nl> GPR_ASSERT ( cur ! = end ) ; <nl> <nl> if ( * cur = = ' ' | | * cur = = ' \ t ' ) { <nl> - gpr_log ( GPR_ERROR , " Continued header lines not supported yet " ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , " Continued header lines not supported yet " ) ; <nl> + } <nl> goto error ; <nl> } <nl> <nl> static int add_header ( grpc_httpcli_parser * parser ) { <nl> cur + + ; <nl> } <nl> if ( cur = = end ) { <nl> - gpr_log ( GPR_ERROR , " Didn ' t find ' : ' in header string " ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , " Didn ' t find ' : ' in header string " ) ; <nl> + } <nl> goto error ; <nl> } <nl> GPR_ASSERT ( cur > = beg ) ; <nl> mmm a / src / core / transport / chttp2 / hpack_parser . c <nl> ppp b / src / core / transport / chttp2 / hpack_parser . c <nl> <nl> # include " src / core / transport / chttp2 / hpack_parser . h " <nl> # include " src / core / transport / chttp2 / internal . h " <nl> <nl> + # include < assert . h > <nl> # include < stddef . h > <nl> # include < string . h > <nl> - # include < assert . 
h > <nl> <nl> / * This is here for grpc_is_binary_header <nl> * TODO ( murgatroid99 ) : Remove this <nl> <nl> # include " src / core / support / string . h " <nl> # include " src / core / transport / chttp2 / bin_encoder . h " <nl> <nl> + extern int grpc_http_trace ; <nl> + <nl> typedef enum { <nl> NOT_BINARY , <nl> B64_BYTE0 , <nl> static int finish_indexed_field ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> const uint8_t * end ) { <nl> grpc_mdelem * md = grpc_chttp2_hptbl_lookup ( & p - > table , p - > index ) ; <nl> if ( md = = NULL ) { <nl> - gpr_log ( GPR_ERROR , " Invalid HPACK index received : % d " , p - > index ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , " Invalid HPACK index received : % d " , p - > index ) ; <nl> + } <nl> return 0 ; <nl> } <nl> GRPC_MDELEM_REF ( md ) ; <nl> static int parse_lithdr_nvridx_v ( grpc_chttp2_hpack_parser * p , <nl> / * finish parsing a max table size change * / <nl> static int finish_max_tbl_size ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> const uint8_t * end ) { <nl> - gpr_log ( GPR_INFO , " MAX TABLE SIZE : % d " , p - > index ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_INFO , " MAX TABLE SIZE : % d " , p - > index ) ; <nl> + } <nl> return grpc_chttp2_hptbl_set_current_table_size ( & p - > table , p - > index ) & & <nl> parse_begin ( p , cur , end ) ; <nl> } <nl> static int parse_error ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> static int parse_illegal_op ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> const uint8_t * end ) { <nl> GPR_ASSERT ( cur ! = end ) ; <nl> - gpr_log ( GPR_DEBUG , " Illegal hpack op code % d " , * cur ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_DEBUG , " Illegal hpack op code % d " , * cur ) ; <nl> + } <nl> return parse_error ( p , cur , end ) ; <nl> } <nl> <nl> static int parse_value4 ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> } <nl> <nl> error : <nl> - gpr_log ( GPR_ERROR , <nl> - " integer overflow in hpack integer decoding : have 0x % 08x , " <nl> - " got byte 0x % 02x on byte 5 " , <nl> - * p - > parsing . value , * cur ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , <nl> + " integer overflow in hpack integer decoding : have 0x % 08x , " <nl> + " got byte 0x % 02x on byte 5 " , <nl> + * p - > parsing . value , * cur ) ; <nl> + } <nl> return parse_error ( p , cur , end ) ; <nl> } <nl> <nl> static int parse_value5up ( grpc_chttp2_hpack_parser * p , const uint8_t * cur , <nl> return parse_next ( p , cur + 1 , end ) ; <nl> } <nl> <nl> - gpr_log ( GPR_ERROR , <nl> - " integer overflow in hpack integer decoding : have 0x % 08x , " <nl> - " got byte 0x % 02x sometime after byte 5 " , <nl> - * p - > parsing . value , * cur ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , <nl> + " integer overflow in hpack integer decoding : have 0x % 08x , " <nl> + " got byte 0x % 02x sometime after byte 5 " , <nl> + * p - > parsing . value , * cur ) ; <nl> + } <nl> return parse_error ( p , cur , end ) ; <nl> } <nl> <nl> static is_binary_header is_binary_literal_header ( grpc_chttp2_hpack_parser * p ) { <nl> static is_binary_header is_binary_indexed_header ( grpc_chttp2_hpack_parser * p ) { <nl> grpc_mdelem * elem = grpc_chttp2_hptbl_lookup ( & p - > table , p - > index ) ; <nl> if ( ! 
elem ) { <nl> - gpr_log ( GPR_ERROR , " Invalid HPACK index received : % d " , p - > index ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , " Invalid HPACK index received : % d " , p - > index ) ; <nl> + } <nl> return ERROR_HEADER ; <nl> } <nl> return grpc_is_binary_header ( <nl> mmm a / src / core / transport / chttp2 / hpack_table . c <nl> ppp b / src / core / transport / chttp2 / hpack_table . c <nl> <nl> <nl> # include " src / core / support / murmur_hash . h " <nl> <nl> + extern int grpc_http_trace ; <nl> + <nl> static struct { <nl> const char * key ; <nl> const char * value ; <nl> int grpc_chttp2_hptbl_set_current_table_size ( grpc_chttp2_hptbl * tbl , <nl> return 1 ; <nl> } <nl> if ( bytes > tbl - > max_bytes ) { <nl> - gpr_log ( GPR_ERROR , <nl> - " Attempt to make hpack table % d bytes when max is % d bytes " , bytes , <nl> - tbl - > max_bytes ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , <nl> + " Attempt to make hpack table % d bytes when max is % d bytes " , <nl> + bytes , tbl - > max_bytes ) ; <nl> + } <nl> return 0 ; <nl> } <nl> - gpr_log ( GPR_DEBUG , " Update hpack parser table size to % d " , bytes ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_DEBUG , " Update hpack parser table size to % d " , bytes ) ; <nl> + } <nl> while ( tbl - > mem_used > bytes ) { <nl> evict1 ( tbl ) ; <nl> } <nl> int grpc_chttp2_hptbl_add ( grpc_chttp2_hptbl * tbl , grpc_mdelem * md ) { <nl> GRPC_CHTTP2_HPACK_ENTRY_OVERHEAD ; <nl> <nl> if ( tbl - > current_table_bytes > tbl - > max_bytes ) { <nl> - gpr_log ( GPR_ERROR , <nl> - " HPACK max table size reduced to % d but not reflected by hpack " <nl> - " stream ( still at % d ) " , <nl> - tbl - > max_bytes , tbl - > current_table_bytes ) ; <nl> + if ( grpc_http_trace ) { <nl> + gpr_log ( GPR_ERROR , <nl> + " HPACK max table size reduced to % d but not reflected by hpack " <nl> + " stream ( still at % d ) " , <nl> + tbl - > max_bytes , tbl - > current_table_bytes ) ; <nl> + } <nl> return 0 ; <nl> } <nl> <nl> new file mode 100644 <nl> index 00000000000 . . 597a6db294c <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 042dc4512fa3d391c5170cf3aa61e6a638f84342 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + i <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . d56b8fc6c88 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 0e9bbe975f2027e8c39c89f85f667530368e7d11 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : iiiÐ ? + n ! ij <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 3936e8964c5 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 14b57bcbf1e17b1db1de491ef2ba3768f704b7dc <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ‡ i ? = niI_ ! ' ; ñ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . a94c4cf58dd <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 1794310671a060eead6e5ee66ac978a18ec7e84f <nl> <nl> + ~ ipip ~ 6 : : 1 <nl> + v : Ð : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 875ac2a4f91 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 1d30b2a79afbaf2828ff42b9a9647e942ba1ab80 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : il0P / 8 ? n ! $ i : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 
59469af5285 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 1fcf5d9c333b70596cf5ba04d1f7affdf445b971 <nl> <nl> + iiP * v : : : pip ~ 6 : : : 0 <nl> + v : : : 11 <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 2f902351bd8 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 23162c8a8936e20b195404c21337ee734d02a6bc <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ii / i ? n ! % i * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 4bdc3f60c02 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 23f3198b815ca60bdadcaae682b9f965dda387f1 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + uni : : . i ? n ( ! ipR6 / <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . fb7665d0ccb <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 2ef3893b43f1f60b77b59ce06a6bce9815d78eaf <nl> <nl> + : / i ? n ! ipv6 : . / : : abc . * <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 6c1e22fa32a <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 356c3c129e203b5c74550b4209764d74b9caefce <nl> @ @ - 0 , 0 + 1 @ @ <nl> + unix : / / ii : # v6i ? n ! <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 725b2086c5b <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 3b58860f3451d3e7aad99690a8d39782ca5116fc <nl> <nl> + i : i ? nip ~ & 2 . / : : : abipip ~ 6c . * <nl> + <nl> + : : 1 <nl> + v : Ð : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 23d52e19a8a <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 47b5228404451fc9d4071fa69192514bb4ce33c1 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : iiP / i ? n ! ' i * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 1dc4931ac6e <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 636c5606fc23713a1bae88c8899c0541cfad4fd8 <nl> <nl> + : i ? n ! ip ~ f2 : . / : : abipip ~ 6c . * <nl> + <nl> + : : 1 <nl> + v : Ð : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 7b9532914e6 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 63fe493b270b17426d77a27cbf3abac5b2c2794a <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ‡ i ? = niI ! ' ; ñ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 4eaca392656 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 655300a902b62662296a8e46bfb04fbcb07182cb <nl> @ @ - 0 , 0 + 1 @ @ <nl> + unix : / / ii : pv6i ? n ! <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 57cbd72dbcd <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 6b70979a70a038ff6607d6cf85485ee95baf58e6 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + uni : : / i ? n ! ipR6 / <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . e13cf5a8e63 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 7314ab3545a7535a26e0e8aad67caea5534d68b1 <nl> <nl> + ipip ~ 6 : : : 1 <nl> + v : : : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 58ecc7e2afa <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 884dcaee2908ffe5f12b65b8eba81016099c4266 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ip * v : : : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 
efb392b7d99 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 96c8d266b7dc037288ef305c996608270f72e7fb <nl> <nl> + : / i / n ! ipv6 : : : / a . b . c1 <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . 71552225470 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 975536c71ade4800415a7e9c2f1b45c35a6d5ea8 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ilP . i ; ? n ! # i ! ; <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 4061e02189e <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / 99750aa67d30beaea8af565c829d4999aa8cb91b <nl> @ @ - 0 , 0 + 1 @ @ <nl> + unix : : / i ? n ! ipv6 / <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 736e63e7e7e <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / a1f0f9b75bb354eb063d7cba4fcfa2d0b88d63de <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ¢ ilP / i ; n ! # i : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . dff2f8920de <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / a296eb3d1d436ed7df7195b10aa3c4de3896f98d <nl> @ @ - 0 , 0 + 1 @ @ <nl> + u + ni : : / i ? n ! ipR3 / <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 13a115481d2 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / a8b8e66050b424f1b8c07d46f868199fb7f60e38 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + uni : : pi : miP / ? ni . ! ( Ri ? ) 8 / n ! ' i * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . fe019fc4091 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / af55baf8c8855e563befdf1eefbcbd46c5ddb8d2 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + uni : : . i ! in : / i / n ! ipv6 ž : ? ( pR ; : : / a . 2b <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 6e12167b521 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / b3c0bf66c2bf5d24ef1daf4cc5a9d6d5bd0e8bfd <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ii / iilP . i ; ? n ? n ! # i ! ; ! % * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b5655220d74 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ceb4e2264ba7a8d5be47d276b37ec09489e00245 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ‡ i ? P - niI ! ' iñ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . a7656724f51 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / cf4395958f5bfb46fd6f535a39657d016c75114c <nl> @ @ - 0 , 0 + 1 @ @ <nl> + unix : / / ipv6 : : : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . d658fb8ee09 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / d46668372b7e20154a89409a7430a28e642afdca <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ilP / i ? n ! # i : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 6d37b5fd450 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / d6fe7412a0a1d1c733160246f3fa425f4f97682a <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ilP / i ? n , ! # i : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 577e1054e4b <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / dns . txt <nl> @ @ - 0 , 0 + 1 @ @ <nl> + dns : 10 . 2 . 1 . 1 <nl> new file mode 100644 <nl> index 00000000000 . . 
52f5a2382ac <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ea02d9fea9bad5b89cf353a0169238f584177e71 <nl> <nl> + i : i ? n ! ip ~ f2 . : / : : abipip ~ 6c . * <nl> + <nl> + : : 1 <nl> + v : Ð : 1 <nl> new file mode 100644 <nl> index 00000000000 . . c3c93fed6af <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ec4731dddf94ed3ea92ae4d5a71f145ab6e3f6ee <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ii - i ? n ! % * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 45065e2f000 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ed2f78646f19fc47dd85ff0877c232b71913ece2 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ii / i ? n ! % * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 02151c8b6af <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / f6889f4a6350fea1596a3adea5cdac02bd5d1ff3 <nl> <nl> + : ipip ~ 6 : : : 1 <nl> + vii / : : : iunix : ? n / 1 / ipv6 ! % <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8034e133d7b <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / f6f3bd030f0d321efe7c51ca3f057de23509af67 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : iiP / i ? n ! i * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 240946dbaac <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / f97598cff03306af3c70400608fec47268b5075d <nl> <nl> + unix : / / ipv6 : : : / a . b . c1 <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . d089a593462 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / f9e1ec1fc642b575bc9955618b7065747f56b101 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : ilP . i ; ? n ! # i ; <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 66eefbcc14d <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / fe0630a3aeed2ec6f474f362e4c839478290d5c4 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + : miP / i ? ) n ! ' i * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . fe29486b6e7 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ipv4 . txt <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ipv4 : 10 . 2 . 1 . 1 <nl> new file mode 100644 <nl> index 00000000000 . . 7b6932be00b <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / ipv6 . txt <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ipv6 : : : 1 <nl> new file mode 100644 <nl> index 00000000000 . . 7a0997ec9a6 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_corpus / unix . txt <nl> @ @ - 0 , 0 + 1 @ @ <nl> + unix : / / / a . b . c <nl> new file mode 100644 <nl> index 00000000000 . . f39288f1407 <nl> mmm / dev / null <nl> ppp b / test / core / client_config / uri_fuzzer_test . c <nl> <nl> + / * <nl> + * <nl> + * Copyright 2015 - 2016 , Google Inc . <nl> + * All rights reserved . <nl> + * <nl> + * Redistribution and use in source and binary forms , with or without <nl> + * modification , are permitted provided that the following conditions are <nl> + * met : <nl> + * <nl> + * * Redistributions of source code must retain the above copyright <nl> + * notice , this list of conditions and the following disclaimer . 
<nl> + * * Redistributions in binary form must reproduce the above <nl> + * copyright notice , this list of conditions and the following disclaimer <nl> + * in the documentation and / or other materials provided with the <nl> + * distribution . <nl> + * * Neither the name of Google Inc . nor the names of its <nl> + * contributors may be used to endorse or promote products derived from <nl> + * this software without specific prior written permission . <nl> + * <nl> + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + * " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + * LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + * A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + * SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + * LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + * DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + * THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + * ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + * OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + * <nl> + * / <nl> + <nl> + # include < stdint . h > <nl> + # include < string . h > <nl> + <nl> + # include < grpc / support / alloc . h > <nl> + <nl> + # include " src / core / client_config / uri_parser . h " <nl> + <nl> + int LLVMFuzzerTestOneInput ( const uint8_t * data , size_t size ) { <nl> + char * s = gpr_malloc ( size + 1 ) ; <nl> + memcpy ( s , data , size ) ; <nl> + s [ size ] = 0 ; <nl> + <nl> + grpc_uri * x ; <nl> + if ( ( x = grpc_uri_parse ( s , 1 ) ) ) { <nl> + grpc_uri_destroy ( x ) ; <nl> + } <nl> + gpr_free ( s ) ; <nl> + return 0 ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 3d6face56a0 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 0299ca2580e4398d170c4a336e0c33eb2cd9d427 <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 5cbaf2e460f <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 05e613853d64a9669ea3cf41b0de777dc24931ba <nl> <nl> + HTTP / 1 . 1 8 ) p  MKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8831f0786bb <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 069352518a1d1baa05f317c677d275cefda2ac97 <nl> <nl> + HTTP / 1 . 1 80 ) OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . c79e456904b <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 0c5b7c2569410b526605e308309a7f36574e530d <nl> <nl> + H TTP / 16 . 1 200 OK <nl> + test : h ! ello <nl> + <nl> + abcd <nl> new file mode 100644 <nl> index 00000000000 . . 7b979b5e10f <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 0ef3d0a84360bb5ad66274f1226f5cb273ecdbcf <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + tHTTP / 01 . 021 Oes , H <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 67382b4f3af <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 1e1273f90187fdf5df3625764245610f86af6aa4 <nl> <nl> + HTTP / 1 . 1 200 OKHHTTP ‰ / 1 . 200 OKH <nl> + <nl> + tHTHTTP / 0 20T : tes / 01 . 
<nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . cce8ded71af <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 33f4ea0c7ea27c37d8f95cfa64d282370efdafd2 <nl> <nl> + HTTP / 1 * 9y 200 OKm <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8df43e4dcee <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 35f0c561297cfc840ddaeebb9fc61091f4eadece <nl> <nl> + HTTP / 1 . 9y 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . fefa4512a87 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 39b19c41ba537f37511eff7727733715db432e76 <nl> <nl> + HTTP / 1 . 1 000 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b967b57614d <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 3e3c4756d5e40b5aa250954cbac86b826e70a7ac <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + tHTTP / 01 . 021 : Oes , H <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 7d20266703c <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 3fb034e66ee5494a67acae1b4e6ff64ba92a2046 <nl> <nl> + HTTP / 1 . 1y 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . c59c4d22466 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 487725eb38511c79a9340bf4560a1411061fa6fa <nl> <nl> + HTTP / 01 . 021 O , H <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 49d1c8f1d2e <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 4b1f1f79a0bfa3f942479dd5f8edb59a7c257c55 <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 5f2c4dfef05 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 5028c56a5116a186b7343ff59567b47347a0796d <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + HTDP / 01 . 021 : Oes , H <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 9a15ab025fe <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 5b6292bdf009b0daecbc90b85cca30a88c36eec5 <nl> <nl> + HTTP / 1 . 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 1f14f69103f <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 657368df512ca6294b9df16adf935a3f374a8be2 <nl> <nl> + HTT <nl> + / 1 . 1 201 OKH <nl> + des <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . d4223ccf818 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 81f59a12b458ec3604035cb962165c604d1355e6 <nl> <nl> + HTTP / 1 . 1 8p ) ) MKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 99e2c48bbd4 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 8f41c50e88ee8c17ecad3d41d63d38fb12aca0b9 <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + tHTHTTP / 1 . 20TP / 01 . 020 ( : Oes , H0 OKH <nl> + <nl> + tteses <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b1927fbf63d <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / 97e4499d450c95660de86747f527e670f2012548 <nl> <nl> + HTHHTT ` TT <nl> + / 1 . 1 201 P * / OKH <nl> + des1 . 1 2T <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 
837449dda35 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / aa3bbb876eafa8ad8ca4ff2eabc6dd94341d2441 <nl> <nl> + HTTP / 1 . 1 80î OH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 10905bed391 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / b04fea5c041c707db0ad9c09a81672557b52cc47 <nl> <nl> + JHTTP / 1 . 1 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 2704e4fb392 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / c55ce9995b002e88a102ae2891a71e8bacb346c8 <nl> <nl> + HTTP / 1 . 1 767 ) OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . f5cbbc69e78 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / ca5a0c00b8969310acb73d15ad0d0c602f1bd0c2 <nl> <nl> + HJHTHHTT ` TT <nl> + / 1 . 1 201 P * HHTT  / T1 / OKH <nl> + des1 . 1 2 . 1T 20T1 <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . be33d81102f <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / d4c3e4cf5d035596433c30eaabbd2b2925f4b453 <nl> <nl> + HTTP / 1 . 1 200 OKH <nl> + HTTP / 01 . 021 : Oes , H <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . ccf918751dc <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / d936dad71c129cf659097dc3db64550c4dd467f4 <nl> <nl> + HTTP ‰ / 1 . 200 OKH <nl> + tes <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 06f1a3b8002 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / fc5d4b9117ba9e87388174aee4f4970bdfe8d066 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + HH <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . a17139982e7 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / corpus / response1 . txt <nl> <nl> + HTTP / 1 . 1 200 OK <nl> + test : hello <nl> + <nl> + abcd <nl> new file mode 100644 <nl> index 00000000000 . . ff960484db5 <nl> mmm / dev / null <nl> ppp b / test / core / httpcli / fuzzer . c <nl> <nl> + / * <nl> + * <nl> + * Copyright 2015 - 2016 , Google Inc . <nl> + * All rights reserved . <nl> + * <nl> + * Redistribution and use in source and binary forms , with or without <nl> + * modification , are permitted provided that the following conditions are <nl> + * met : <nl> + * <nl> + * * Redistributions of source code must retain the above copyright <nl> + * notice , this list of conditions and the following disclaimer . <nl> + * * Redistributions in binary form must reproduce the above <nl> + * copyright notice , this list of conditions and the following disclaimer <nl> + * in the documentation and / or other materials provided with the <nl> + * distribution . <nl> + * * Neither the name of Google Inc . nor the names of its <nl> + * contributors may be used to endorse or promote products derived from <nl> + * this software without specific prior written permission . <nl> + * <nl> + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + * " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + * LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + * A PARTICULAR PURPOSE ARE DISCLAIMED . 
IN NO EVENT SHALL THE COPYRIGHT <nl> + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + * SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + * LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + * DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + * THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + * ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + * OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + * <nl> + * / <nl> + <nl> + # include < stdint . h > <nl> + # include < string . h > <nl> + <nl> + # include < grpc / support / alloc . h > <nl> + <nl> + # include " src / core / httpcli / parser . h " <nl> + <nl> + int LLVMFuzzerTestOneInput ( const uint8_t * data , size_t size ) { <nl> + grpc_httpcli_parser parser ; <nl> + grpc_httpcli_parser_init ( & parser ) ; <nl> + gpr_slice slice = gpr_slice_from_copied_buffer ( ( const char * ) data , size ) ; <nl> + grpc_httpcli_parser_parse ( & parser , slice ) ; <nl> + grpc_httpcli_parser_eof ( & parser ) ; <nl> + gpr_slice_unref ( slice ) ; <nl> + grpc_httpcli_parser_destroy ( & parser ) ; <nl> + return 0 ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 76b12506256 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 0141fcddc9807ee093313b2256f1306fbbdc6cda <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( ? ¤ ¤ ¤ Û ¤ Ûð <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . be20eb55def <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 06995c2f3f01c7ec50547415dc324c64030b7a3e differ <nl> new file mode 100644 <nl> index 00000000000 . . 0edc9f996f6 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 06f7ce769fe07804fc842462d4be8c1aa2ba82c2 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ® € ¤ ˆ ( cc <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . af778fb8af4 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 0828169ba82152a8907f1001e3d98804397d4610 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ¤ Ûð ! ð cc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 94a69970223 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 0a10bd140c6c5fb109a0816ca061739688a6db9a differ <nl> new file mode 100644 <nl> index 00000000000 . . 229a89045a0 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 0c9996d4fef87bacd7a001e99a515b3ba3d5788f differ <nl> new file mode 100644 <nl> index 00000000000 . . 001fd0bc880 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 0d6210208831fe55951af56cdeee3d54a91a5361 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + f  ! ( ! ! i [ Ñ ! å <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 398077e307c <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 0d784965b2262df7ed7a1eb57b92a718cc76bde8 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ Ê ! ð c ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8a3ef9ea824 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 0e9196f951874edbb5ed098739ea5c8b6c0751c2 differ <nl> new file mode 100644 <nl> index 00000000000 . 
. 6ae0cd1d857 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 1e8befb98cbaba059d6771abd1680e19484e7723 differ <nl> new file mode 100644 <nl> index 00000000000 . . 6c900ea761b <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 311dac5092e36134d3490f98aa4207425e0ee941 <nl> @ @ - 0 , 0 + 1 @ @ <nl> +   ð [ ( ! ð [ ( ! \ ! åGý : [ ( ! !  [ ( ! åGýA ) ( ! ) í ! ¸ * ! ! ) <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . d6921932bb4 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 342ff1db70a7616b4ef76c03a42802c6702c18cb <nl> @ @ - 0 , 0 + 1 @ @ <nl> + )  : ; ! œ Ê ' ÒØ ) * ; } v ) 7IÏ ! ¤ ) ; – - M * ± äâ ! ' d * Cu   « ‘ X $ 0  ) : ó * ; : äÝ ; ; ( ) ; : ] ïæ @ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 6fdb6622b21 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 38228bf98cdb50fd3fa830ba5a9d4c7399063dff <nl> @ @ - 0 , 0 + 1 @ @ <nl> + * ¤ ® @    : ð [ ( øc ( ; þ ! ! \ ! cåGý : <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 54400a32f75 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 3a4bb427a85bdc5bf66ac71db073c99e0dc9f881 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤   ð [ ( ' ! ð ( ! \ ! åGý : ( ! ' ð [ ( ! ! ¤ [ ð ! ð cð [ ( ! ! ! åGý ! åGý ' A ) (  [ ( ' ! ! å <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 345b8b2025e <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 3aec8d9311130dfbb6584fe6e619579c21992b5f <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤  ¤ Ûð ¤ ƒ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 6d2446ab3a0 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 3c5af4d73e94d0e8ad5666b6acb340f929031e95 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 0350f5adbd0 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 3f8983e457033cc85997c356935ba9c21460e86b <nl> @ @ - 0 , 0 + 1 @ @ <nl> + . : ¤ c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 6eebe104ae1 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 4256437fc5897c0cd5d755816e4e68c7be326849 differ <nl> new file mode 100644 <nl> index 00000000000 . . da07fc4a7a9 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 471a307b81dc37459087d41532741c5c9d7ba836 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤   <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 49a5a112dd6 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 48bcce2c6487b18706ef0c609ca39c456215bac8 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 778ecf79e5d <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 4c7a034d3a3b4f29d99caf021a0e9bbb89706c2e differ <nl> new file mode 100644 <nl> index 00000000000 . . e2e8f9b231b <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 50b3f4b6aed97f442496d27f3b4315a18ba76d5f <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ; ; ? 
<nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b26118fd841 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 51eff6fcbfe1a51ceb3f5f2140c01eea89b4313d <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ¤ Ûðð ƒ cc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 463f1a40fb0 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 5653c44a5b520bdf2bdc599b7966f1d7c44950b3 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤  <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 198c062d33a <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 5a99df42fb7bbafa2d55714ee235b1c46776b2ad <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ [ ð ! ð c ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 0d758c9c7bc <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 5bab61eb53176449e25c2c82f172b82cb13ffb9d <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ? <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . a273e997885 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 65566df65e8f55428b6672cc351df414fa8f936c <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ' ð [ ( ! ! ð [ ( ! ! ! åGý ! åGýA ) ( ! ) í ! ¼ ) Ù : ; ‡ Š * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8ac429215fe <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 65bb703af35d5afb824cd68c41d7a1aeb3848d35 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + [ ð ! ð ' ( ! [ ( ! ! ! åGý ! åGýA ) ( ! ) í ! ¼ ) Ù : ; ‡ Š * <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . e9dc85e59cc <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 78176d80c1d74c4b1b820d386ae483ac4d1d92b7 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( ? ¤ › ð ! c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . ad5695bc9ad <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 7a28fc2e9c72d51d29e87eed63ed405c9779b5e1 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤  ¤ Ûðð ƒ € cc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8985a0765aa <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 7ba7239a29d6183960e3986abc8f19cfb548b905 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ! ƒ Ûððcc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . a38435872b0 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 8057c32b8bd28a5ec2105d62f2abe8cf69c9f5fc <nl> @ @ - 0 , 0 + 1 @ @ <nl> + f  ! ( ! ! c ' i [ Ñ ! ð [ ( ! ! ð [ ( ! ! ! åHý ! [ ( ! ! ! åGýåA ) ( ! ) í ! ¼ * ) åGýA ) ( Ù ! ) Š <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . fc8f2aea186 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 86bae059b18af8ae263e5ae0022b67da0cfc0fbe <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ÿ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . e92f115945c <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 8762a523cdb78d2344d553fa52a229bd63c44e51 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . 
. 532ee696019 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 8fbbf3c0eaa25b64d0a97a8ee08006539e649199 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b8c71bd5cdb <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 96903512b1f1dec08206123f024b62d0e31cd4dc <nl> @ @ - 0 , 0 + 1 @ @ <nl> +  c  ð [ ( ! ð [ ( ! \ ! åGý ! [ ( ! ! ! åGýA ) ( ! ) í ! ¸ * ) åGýA ) ( Ù ; ) Š <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . eafc76571c7 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 999821e3750a7f2c9db663d2d100b4404c225040 <nl> @ @ - 0 , 0 + 1 @ @ <nl> +  [ ( ! ! ! [ ð ! å <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 168a2155962 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / 99b2ed83be40cab431d1940e8de2dc3ebfe9352f differ <nl> new file mode 100644 <nl> index 00000000000 . . ac4416657c8 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / 9e56bb3b68d2e2617cb2d2f0f3941f7fc832e462 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + c ' ð [ ( ! ! ð [ ( ! ! ! åGý ! [ ( ! ! ! åGýA ) ( ! ) í ! ¼ * ) åGýA ) ( Ù ! ) Š <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 59295acbf79 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / a871e7ce66afd4f57702cd1299de06cd08995561 differ <nl> new file mode 100644 <nl> index 00000000000 . . 71cc57ac18a <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / ac94b2788f5252f9e2e8502c7c75e04bef4c0b76 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ? <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 32171625ca8 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / af417c83e831a96fda1bdde99a1af6509ef2df3d <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( ? ¤ ¤ ¤ ÛÛð <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 326107127df <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / b0587e6e319f4b56d877e7ed46bc7da9b1e7249c <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ )  : ; ! œ Ê ' ÒØ ) * ; } v ) 7IÏ ! ¤ ) ; – - M * ± äâ ! ' d * Cu   « ‘ X $ 0  ) : ó * ; : äÝ ; ; ( ) ; : ] ïæ @ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . e93670e5ed9 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / b244c690157ff21d073940ef8c77d1898f37cf8e <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ¤ Ûðð cc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 3c7fb6e67ae <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / bcf4684ce097faa7e9d99b6e93cc2de24f57aee3 differ <nl> new file mode 100644 <nl> index 00000000000 . . fa972a4f51c <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / ccd3b8aa26c52f6d9c607c26ebdf621142aff745 differ <nl> new file mode 100644 <nl> index 00000000000 . . 81cc0fcbfb8 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / d76d0c7f24ae3cc3f530d5306b8dcc15290c7ff2 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ä — <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 
a4520fbd222 <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / df01203edfa2dfe9e108ddde786ae48235624fef differ <nl> new file mode 100644 <nl> index 00000000000 . . bc92a9fe320 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / e25adf8de44f5978d00b7e8c52aee89c5cd1fe93 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ? ¤ Ûð ! ð c ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . d00c6b42967 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / e29f05162e3d96d5549f96aa4a54c868535b2847 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤  ¤ Ûðð ƒ cc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 34de7ad3564 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / e4ce52007d001806fc9368b62c124dfc56e8471c <nl> @ @ - 0 , 0 + 1 @ @ <nl> + )  : ; ! œ Ê ' ÒØ ) * ; } v - 7IÏ ! ¤ )  – - M * ± äâ ! ' d * Cu   « X $ 0  ) : ó * ; : äÝ ; ; ( ) ; : ] ïæ @ <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . b92817bb295 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / eb48ebd4d01e5623dd16ae61938b3333fab3ce78 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ ¤ ÛððÜcc ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 9c27b3888f8 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / eef2f30b5e2ecd98ebefb12d57aba8b4ad52d904 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ? ð Û ! ðcm ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 453512785b6 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / ef23911de1a27d03d2d4983ca1527e17d6a7092b <nl> @ @ - 0 , 0 + 1 @ @ <nl> + 0c ' ð [ ( ! ð [ ( ! \ ! åGý ! [ ( ! ! ! åGýA ) ( ! ) í ! ¼ * ) åGýA ) ( Ù ; ) Š <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . c3c6d7380ef <nl> Binary files / dev / null and b / test / core / transport / chttp2 / hpack_parser_corpus / ef5b7fc62a2daecf1e8f928b1fa3ebd028413a41 differ <nl> new file mode 100644 <nl> index 00000000000 . . 0926c631be9 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / efdd6824bd2456e3e408e0e84369c4fa3aa14f41 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + 0c  ð [ ( ! ð [ ( ! \ ! åGý ! [ ( ! ! ! åGýA ) ( ! ) í ! ¼ * ) åGýA ) ( Ù ; ) Š <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 9e21c0e98c9 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / efec040a5de1969df5e37e4bc50a0a8f0de341d8 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( ? ¤ : › ð ! c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 8926de37070 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / f4628084cf46f139babb886a782b4ab5977d5d2e <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( ? ¤  ; [ ( ' ¤ ð ! ( <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 669e4a82f0c <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / f7cf30724ab740918eee6e4a6b6658ae3d7706e8 <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤  c <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 
0fee6875eff <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / f823828ffd2a60efee36f1de52cb0f024ac5b4bb <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ¤ Ûð ! ð c ' <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 67f84c5fbc9 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_corpus / fb15042c268625089ef6c8aa3d8a6f12d1d02c74 <nl> @ @ - 0 , 0 + 1 @ @ <nl> +   ð [ ( ! ð ( ! \ ! åGý : [ ( ! ' ð [ ( ! ! ð [ ( ! ! ! åGý ! åGýA ) (  [ ( ! ! å <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 00000000000 . . 5ebcd320f49 <nl> mmm / dev / null <nl> ppp b / test / core / transport / chttp2 / hpack_parser_fuzzer_test . c <nl> <nl> + / * <nl> + * <nl> + * Copyright 2015 - 2016 , Google Inc . <nl> + * All rights reserved . <nl> + * <nl> + * Redistribution and use in source and binary forms , with or without <nl> + * modification , are permitted provided that the following conditions are <nl> + * met : <nl> + * <nl> + * * Redistributions of source code must retain the above copyright <nl> + * notice , this list of conditions and the following disclaimer . <nl> + * * Redistributions in binary form must reproduce the above <nl> + * copyright notice , this list of conditions and the following disclaimer <nl> + * in the documentation and / or other materials provided with the <nl> + * distribution . <nl> + * * Neither the name of Google Inc . nor the names of its <nl> + * contributors may be used to endorse or promote products derived from <nl> + * this software without specific prior written permission . <nl> + * <nl> + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + * " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + * LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + * A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + * SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + * LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + * DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + * THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + * ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + * OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + * <nl> + * / <nl> + <nl> + # include < stdint . h > <nl> + # include < string . h > <nl> + <nl> + # include < grpc / grpc . h > <nl> + # include < grpc / support / alloc . h > <nl> + <nl> + # include " src / core / transport / chttp2 / hpack_parser . h " <nl> + <nl> + static void onhdr ( void * ud , grpc_mdelem * md ) { GRPC_MDELEM_UNREF ( md ) ; } <nl> + <nl> + int LLVMFuzzerTestOneInput ( const uint8_t * data , size_t size ) { <nl> + grpc_init ( ) ; <nl> + grpc_chttp2_hpack_parser parser ; <nl> + grpc_chttp2_hpack_parser_init ( & parser ) ; <nl> + parser . on_header = onhdr ; <nl> + grpc_chttp2_hpack_parser_parse ( & parser , data , data + size ) ; <nl> + grpc_chttp2_hpack_parser_destroy ( & parser ) ; <nl> + grpc_shutdown ( ) ; <nl> + return 0 ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 932b69e8ce7 <nl> mmm / dev / null <nl> ppp b / tools / fuzzer / runners / hpack_parser_fuzzer_test . sh <nl> <nl> + # ! / bin / bash <nl> + # Copyright 2016 , Google Inc . <nl> + # All rights reserved . 
<nl> + # <nl> + # Redistribution and use in source and binary forms , with or without <nl> + # modification , are permitted provided that the following conditions are <nl> + # met : <nl> + # <nl> + # * Redistributions of source code must retain the above copyright <nl> + # notice , this list of conditions and the following disclaimer . <nl> + # * Redistributions in binary form must reproduce the above <nl> + # copyright notice , this list of conditions and the following disclaimer <nl> + # in the documentation and / or other materials provided with the <nl> + # distribution . <nl> + # * Neither the name of Google Inc . nor the names of its <nl> + # contributors may be used to endorse or promote products derived from <nl> + # this software without specific prior written permission . <nl> + # <nl> + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + # " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + # LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + # A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + # SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + # LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + # DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + # THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + # ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + # OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + # <nl> + <nl> + bins / $ config / hpack_parser_fuzzer_test - max_total_time = 60 fuzzer_output test / core / transport / chttp2 / hpack_parser_corpus <nl> new file mode 100644 <nl> index 00000000000 . . c4b577879f1 <nl> mmm / dev / null <nl> ppp b / tools / fuzzer / runners / httpcli_fuzzer_test . sh <nl> <nl> + # ! / bin / bash <nl> + # Copyright 2016 , Google Inc . <nl> + # All rights reserved . <nl> + # <nl> + # Redistribution and use in source and binary forms , with or without <nl> + # modification , are permitted provided that the following conditions are <nl> + # met : <nl> + # <nl> + # * Redistributions of source code must retain the above copyright <nl> + # notice , this list of conditions and the following disclaimer . <nl> + # * Redistributions in binary form must reproduce the above <nl> + # copyright notice , this list of conditions and the following disclaimer <nl> + # in the documentation and / or other materials provided with the <nl> + # distribution . <nl> + # * Neither the name of Google Inc . nor the names of its <nl> + # contributors may be used to endorse or promote products derived from <nl> + # this software without specific prior written permission . <nl> + # <nl> + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + # " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + # LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + # A PARTICULAR PURPOSE ARE DISCLAIMED . 
IN NO EVENT SHALL THE COPYRIGHT <nl> + # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + # SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + # LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + # DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + # THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + # ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + # OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + # <nl> + <nl> + bins / $ config / httpcli_fuzzer_test - max_total_time = 60 fuzzer_output test / core / httpcli / corpus <nl> new file mode 100644 <nl> index 00000000000 . . 2da8d29d892 <nl> mmm / dev / null <nl> ppp b / tools / fuzzer / runners / uri_fuzzer_test . sh <nl> <nl> + # ! / bin / bash <nl> + # Copyright 2016 , Google Inc . <nl> + # All rights reserved . <nl> + # <nl> + # Redistribution and use in source and binary forms , with or without <nl> + # modification , are permitted provided that the following conditions are <nl> + # met : <nl> + # <nl> + # * Redistributions of source code must retain the above copyright <nl> + # notice , this list of conditions and the following disclaimer . <nl> + # * Redistributions in binary form must reproduce the above <nl> + # copyright notice , this list of conditions and the following disclaimer <nl> + # in the documentation and / or other materials provided with the <nl> + # distribution . <nl> + # * Neither the name of Google Inc . nor the names of its <nl> + # contributors may be used to endorse or promote products derived from <nl> + # this software without specific prior written permission . <nl> + # <nl> + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS <nl> + # " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT <nl> + # LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR <nl> + # A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT <nl> + # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , <nl> + # SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT <nl> + # LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , <nl> + # DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY <nl> + # THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT <nl> + # ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE <nl> + # OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . <nl> + # <nl> + <nl> + bins / $ config / uri_fuzzer_test - max_total_time = 60 fuzzer_output test / core / client_config / uri_corpus <nl> mmm a / tools / run_tests / sources_and_headers . json <nl> ppp b / tools / run_tests / sources_and_headers . json <nl> <nl> " third_party " : false , <nl> " type " : " target " <nl> } , <nl> + { <nl> + " deps " : [ <nl> + " gpr " , <nl> + " gpr_test_util " , <nl> + " grpc " , <nl> + " grpc_test_util " <nl> + ] , <nl> + " headers " : [ ] , <nl> + " language " : " c " , <nl> + " name " : " hpack_parser_fuzzer_test " , <nl> + " src " : [ <nl> + " test / core / transport / chttp2 / hpack_parser_fuzzer_test . 
c " <nl> + ] , <nl> + " third_party " : false , <nl> + " type " : " target " <nl> + } , <nl> { <nl> " deps " : [ <nl> " gpr " , <nl> <nl> " third_party " : false , <nl> " type " : " target " <nl> } , <nl> + { <nl> + " deps " : [ <nl> + " gpr " , <nl> + " gpr_test_util " , <nl> + " grpc " , <nl> + " grpc_test_util " <nl> + ] , <nl> + " headers " : [ ] , <nl> + " language " : " c " , <nl> + " name " : " httpcli_fuzzer_test " , <nl> + " src " : [ <nl> + " test / core / httpcli / fuzzer . c " <nl> + ] , <nl> + " third_party " : false , <nl> + " type " : " target " <nl> + } , <nl> { <nl> " deps " : [ <nl> " gpr " , <nl> <nl> " third_party " : false , <nl> " type " : " target " <nl> } , <nl> + { <nl> + " deps " : [ <nl> + " gpr " , <nl> + " gpr_test_util " , <nl> + " grpc " , <nl> + " grpc_test_util " <nl> + ] , <nl> + " headers " : [ ] , <nl> + " language " : " c " , <nl> + " name " : " uri_fuzzer_test " , <nl> + " src " : [ <nl> + " test / core / client_config / uri_fuzzer_test . c " <nl> + ] , <nl> + " third_party " : false , <nl> + " type " : " target " <nl> + } , <nl> { <nl> " deps " : [ <nl> " gpr " , <nl>
Expand fuzzers
grpc/grpc
71c610d512fa349cb1d58d4500d4dd761d80b578
2016-03-18T22:57:08Z
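All three fuzzer targets added in this commit share the same libFuzzer harness shape: a C entry point named LLVMFuzzerTestOneInput receives an arbitrary byte buffer, feeds it to exactly one parser (NUL-terminating it first where the parser expects a C string), and tears everything down again, while the Makefile links each `build: fuzzer` target with -lFuzzer and the tools/fuzzer/runners scripts drive each binary against its seed corpus with -max_total_time=60. The sketch below shows that shape on its own, assuming a libFuzzer-style driver supplies main(); parse_input is a hypothetical stand-in for the real entry points (grpc_uri_parse, grpc_httpcli_parser_parse, grpc_chttp2_hpack_parser_parse).

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for the parser under test (grpc_uri_parse etc.). */
static void parse_input(const char *text) { (void)text; }

/* libFuzzer invokes this once per generated input; it must tolerate any
   malformed bytes without crashing or leaking. */
int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
  char *s = malloc(size + 1); /* NUL-terminate, as the URI harness in this commit does */
  if (s == NULL) return 0;
  memcpy(s, data, size);
  s[size] = 0;
  parse_input(s);
  free(s);
  return 0; /* libFuzzer expects 0 from the callback */
}

Keeping the harness this thin is deliberate: any crash or leak the fuzzer finds then points at the parser itself rather than at the test scaffolding, which is also why the commit downgrades the parsers' error logging to be gated on grpc_http_trace instead of removing it.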
mmm a / tensorflow / java / src / main / java / org / tensorflow / Operation . java <nl> ppp b / tensorflow / java / src / main / java / org / tensorflow / Operation . java <nl> public int numOutputs ( ) { <nl> * <nl> * @ param name identifier of the list of tensors ( of which there may <nl> * be many ) produced by this operation . <nl> - * @ returns the size of the list of Tensors produced by this named output . <nl> + * @ return the size of the list of Tensors produced by this named output . <nl> * @ throws IllegalArgumentException if this operation has no output <nl> * with the provided name . <nl> * / <nl> mmm a / tensorflow / java / src / main / java / org / tensorflow / Session . java <nl> ppp b / tensorflow / java / src / main / java / org / tensorflow / Session . java <nl> public void close ( ) { <nl> * < tt > operation_name : output_index < / tt > , in which case this method acts like { @ code <nl> * feed ( operation_name , output_index ) } . These colon - separated names are commonly used in the <nl> * { @ code SignatureDef } protocol buffer messages that are included in { @ link <nl> - * SavedModelBundle . metaGraphDef ( ) } . <nl> + * SavedModelBundle # metaGraphDef ( ) } . <nl> * / <nl> public Runner feed ( String operation , Tensor t ) { <nl> return feed ( parseOutput ( operation ) , t ) ; <nl> public Runner feed ( Output o , Tensor t ) { <nl> * < tt > operation_name : output_index < / tt > , in which case this method acts like { @ code <nl> * fetch ( operation_name , output_index ) } . These colon - separated names are commonly used in <nl> * the { @ code SignatureDef } protocol buffer messages that are included in { @ link <nl> - * SavedModelBundle . metaGraphDef ( ) } . <nl> + * SavedModelBundle # metaGraphDef ( ) } . <nl> * / <nl> public Runner fetch ( String operation ) { <nl> return fetch ( parseOutput ( operation ) ) ; <nl>
fix javadoc issues
tensorflow/tensorflow
b8138a3b3ed8d576c411fada7a2a0d37c153f4b5
2017-06-11T08:56:46Z
mmm a / lib / BasicsC / associative . c <nl> ppp b / lib / BasicsC / associative . c <nl> static void AddNewElement ( TRI_associative_array_t * array , void * element ) { <nl> <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesR + + ; <nl> + # endif <nl> } <nl> <nl> / / add a new element to the associative array <nl> static void ResizeAssociativeArray ( TRI_associative_array_t * array ) { <nl> oldAlloc = array - > _nrAlloc ; <nl> <nl> array - > _nrAlloc = 2 * array - > _nrAlloc + 1 ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrResizes + + ; <nl> + # endif <nl> <nl> array - > _table = TRI_Allocate ( array - > _memoryZone , array - > _nrAlloc * array - > _elementSize , true ) ; <nl> <nl> void TRI_InitAssociativeArray ( TRI_associative_array_t * array , <nl> } <nl> <nl> array - > _nrUsed = 0 ; <nl> + <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrFinds = 0 ; <nl> array - > _nrAdds = 0 ; <nl> array - > _nrRems = 0 ; <nl> void TRI_InitAssociativeArray ( TRI_associative_array_t * array , <nl> array - > _nrProbesA = 0 ; <nl> array - > _nrProbesD = 0 ; <nl> array - > _nrProbesR = 0 ; <nl> + # endif <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void * TRI_LookupByKeyAssociativeArray ( TRI_associative_array_t * array , void * key <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! array - > isEqualKeyElement ( array , key , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> / / return whatever we found <nl> void * TRI_LookupByElementAssociativeArray ( TRI_associative_array_t * array , void * <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! array - > isEqualElementElement ( array , element , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> / / return whatever we found <nl> bool TRI_InsertElementAssociativeArray ( TRI_associative_array_t * array , void * el <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! 
array - > isEqualElementElement ( array , element , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> / / if we found an element , return <nl> bool TRI_InsertKeyAssociativeArray ( TRI_associative_array_t * array , void * key , v <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! array - > isEqualKeyElement ( array , key , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> / / if we found an element , return <nl> bool TRI_RemoveElementAssociativeArray ( TRI_associative_array_t * array , void * el <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! array - > isEqualElementElement ( array , element , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return false <nl> bool TRI_RemoveKeyAssociativeArray ( TRI_associative_array_t * array , void * key , v <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( ! array - > isEmptyElement ( array , array - > _table + i * array - > _elementSize ) <nl> & & ! array - > isEqualKeyElement ( array , key , array - > _table + i * array - > _elementSize ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return false <nl> static void AddNewElementPointer ( TRI_associative_pointer_t * array , void * elemen <nl> <nl> while ( array - > _table [ i ] ! 
= NULL ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesR + + ; <nl> + # endif <nl> } <nl> <nl> / / add a new element to the associative array <nl> static void ResizeAssociativePointer ( TRI_associative_pointer_t * array ) { <nl> oldAlloc = array - > _nrAlloc ; <nl> <nl> array - > _nrAlloc = 2 * array - > _nrAlloc + 1 ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrResizes + + ; <nl> + # endif <nl> <nl> array - > _table = TRI_Allocate ( array - > _memoryZone , array - > _nrAlloc * sizeof ( void * ) , true ) ; <nl> <nl> void TRI_InitAssociativePointer ( TRI_associative_pointer_t * array , <nl> } <nl> <nl> array - > _nrUsed = 0 ; <nl> + <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrFinds = 0 ; <nl> array - > _nrAdds = 0 ; <nl> array - > _nrRems = 0 ; <nl> void TRI_InitAssociativePointer ( TRI_associative_pointer_t * array , <nl> array - > _nrProbesA = 0 ; <nl> array - > _nrProbesD = 0 ; <nl> array - > _nrProbesR = 0 ; <nl> + # endif <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void * TRI_LookupByKeyAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> / / return whatever we found <nl> void * TRI_LookupByElementAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> / / return whatever we found <nl> void * TRI_InsertElementAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> old = array - > _table [ i ] ; <nl> void * TRI_InsertKeyAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! 
array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> old = array - > _table [ i ] ; <nl> void * TRI_RemoveElementAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return 0 <nl> void * TRI_RemoveKeyAssociativePointer ( TRI_associative_pointer_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return false <nl> static void AddNewElementSynced ( TRI_associative_synced_t * array , void * element ) <nl> <nl> while ( array - > _table [ i ] ! = NULL ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesR + + ; <nl> + # endif <nl> } <nl> <nl> / / add a new element to the associative array <nl> static void ResizeAssociativeSynced ( TRI_associative_synced_t * array ) { <nl> oldAlloc = array - > _nrAlloc ; <nl> <nl> array - > _nrAlloc = 2 * array - > _nrAlloc + 1 ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrResizes + + ; <nl> + # endif <nl> <nl> array - > _table = TRI_Allocate ( array - > _memoryZone , array - > _nrAlloc * sizeof ( void * ) , true ) ; <nl> <nl> void TRI_InitAssociativeSynced ( TRI_associative_synced_t * array , <nl> } <nl> <nl> array - > _nrUsed = 0 ; <nl> + <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrFinds = 0 ; <nl> array - > _nrAdds = 0 ; <nl> array - > _nrRems = 0 ; <nl> void TRI_InitAssociativeSynced ( TRI_associative_synced_t * array , <nl> array - > _nrProbesA = 0 ; <nl> array - > _nrProbesD = 0 ; <nl> array - > _nrProbesR = 0 ; <nl> + # endif <nl> <nl> TRI_InitReadWriteLock ( & array - > _lock ) ; <nl> } <nl> void const * TRI_LookupByKeyAssociativeSynced ( TRI_associative_synced_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> TRI_ReadLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! 
array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> result = array - > _table [ i ] ; <nl> void const * TRI_LookupByElementAssociativeSynced ( TRI_associative_synced_t * arra <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrFinds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> TRI_ReadLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesF + + ; <nl> + # endif <nl> } <nl> <nl> result = array - > _table [ i ] ; <nl> void * TRI_InsertElementAssociativeSynced ( TRI_associative_synced_t * array , <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table , TODO optimise the locks <nl> TRI_WriteLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> old = array - > _table [ i ] ; <nl> void * TRI_InsertKeyAssociativeSynced ( TRI_associative_synced_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrAdds + + ; <nl> + # endif <nl> <nl> / / search the table <nl> TRI_WriteLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesA + + ; <nl> + # endif <nl> } <nl> <nl> old = array - > _table [ i ] ; <nl> void * TRI_RemoveElementAssociativeSynced ( TRI_associative_synced_t * array , <nl> hash = array - > hashElement ( array , element ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> TRI_WriteLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! array - > isEqualElementElement ( array , element , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return 0 <nl> void * TRI_RemoveKeyAssociativeSynced ( TRI_associative_synced_t * array , <nl> hash = array - > hashKey ( array , key ) ; <nl> i = hash % array - > _nrAlloc ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> / / update statistics <nl> array - > _nrRems + + ; <nl> + # endif <nl> <nl> / / search the table <nl> TRI_WriteLockReadWriteLock ( & array - > _lock ) ; <nl> <nl> while ( array - > _table [ i ] ! = NULL & & ! 
array - > isEqualKeyElement ( array , key , array - > _table [ i ] ) ) { <nl> i = ( i + 1 ) % array - > _nrAlloc ; <nl> + # ifdef TRI_INTERNAL_STATS <nl> array - > _nrProbesD + + ; <nl> + # endif <nl> } <nl> <nl> / / if we did not find such an item return false <nl> mmm a / lib / BasicsC / associative . h <nl> ppp b / lib / BasicsC / associative . h <nl> typedef struct TRI_associative_array_s { <nl> <nl> char * _table ; / / the table itself <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> uint64_t _nrFinds ; / / statistics : number of lookup calls <nl> uint64_t _nrAdds ; / / statistics : number of insert calls <nl> uint64_t _nrRems ; / / statistics : number of remove calls <nl> typedef struct TRI_associative_array_s { <nl> uint64_t _nrProbesA ; / / statistics : number of misses while inserting <nl> uint64_t _nrProbesD ; / / statistics : number of misses while removing <nl> uint64_t _nrProbesR ; / / statistics : number of misses while adding <nl> + # endif <nl> <nl> TRI_memory_zone_t * _memoryZone ; <nl> } <nl> typedef struct TRI_associative_pointer_s { <nl> <nl> void * * _table ; / / the table itself <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> uint64_t _nrFinds ; / / statistics : number of lookup calls <nl> uint64_t _nrAdds ; / / statistics : number of insert calls <nl> uint64_t _nrRems ; / / statistics : number of remove calls <nl> typedef struct TRI_associative_pointer_s { <nl> uint64_t _nrProbesA ; / / statistics : number of misses while inserting <nl> uint64_t _nrProbesD ; / / statistics : number of misses while removing <nl> uint64_t _nrProbesR ; / / statistics : number of misses while adding <nl> + # endif <nl> <nl> TRI_memory_zone_t * _memoryZone ; <nl> } <nl> typedef struct TRI_associative_synced_s { <nl> <nl> TRI_read_write_lock_t _lock ; <nl> <nl> + # ifdef TRI_INTERNAL_STATS <nl> uint64_t _nrFinds ; / / statistics : number of lookup calls <nl> uint64_t _nrAdds ; / / statistics : number of insert calls <nl> uint64_t _nrRems ; / / statistics : number of remove calls <nl> typedef struct TRI_associative_synced_s { <nl> uint64_t _nrProbesA ; / / statistics : number of misses while inserting <nl> uint64_t _nrProbesD ; / / statistics : number of misses while removing <nl> uint64_t _nrProbesR ; / / statistics : number of misses while adding <nl> + # endif <nl> <nl> TRI_memory_zone_t * _memoryZone ; <nl> } <nl> mmm a / lib / BasicsC / vector . c <nl> ppp b / lib / BasicsC / vector . c <nl> void TRI_InitVector ( TRI_vector_t * vector , TRI_memory_zone_t * zone , size_t eleme <nl> vector - > _growthFactor = GROW_FACTOR ; <nl> } <nl> <nl> - int TRI_InitVector2 ( TRI_vector_t * vector , TRI_memory_zone_t * zone , size_t elementSize , <nl> - size_t initialCapacity , double growthFactor ) { <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief initialises a vector , with user - definable settings <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + int TRI_InitVector2 ( TRI_vector_t * vector , <nl> + TRI_memory_zone_t * zone , <nl> + size_t elementSize , <nl> + size_t initialCapacity , <nl> + double growthFactor ) { <nl> vector - > _memoryZone = zone ; <nl> vector - > _elementSize = elementSize ; <nl> vector - > _buffer = NULL ; <nl> mmm a / lib / BasicsC / vector . h <nl> ppp b / lib / BasicsC / vector . 
h <nl> TRI_vector_t ; <nl> <nl> void TRI_InitVector ( TRI_vector_t * , TRI_memory_zone_t * , size_t elementSize ) ; <nl> <nl> - int TRI_InitVector2 ( TRI_vector_t * , TRI_memory_zone_t * , size_t elementSize , <nl> - size_t initialCapacity , double growthFactor ) ; <nl> + int TRI_InitVector2 ( TRI_vector_t * , <nl> + TRI_memory_zone_t * , <nl> + size_t elementSize , <nl> + size_t initialCapacity , <nl> + double growthFactor ) ; <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief destroys a vector , but does not free the pointer <nl>
disable internal statistics by default
arangodb/arangodb
f0b615ab9ee8c6093f239416eede9a96b8961b59
2012-10-16T15:43:11Z
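The pattern applied throughout this record — counters that are declared and updated only when `TRI_INTERNAL_STATS` is defined — keeps the hash-table hot paths free of bookkeeping in default builds. The sketch below condenses the idiom; it is illustrative only, and every name in it except `TRI_INTERNAL_STATS` is invented for the example.

```cpp
// Compile-time gated statistics: zero overhead unless TRI_INTERNAL_STATS is defined.
#include <cstdint>

typedef struct example_table_s {
  uint64_t _nrUsed;
#ifdef TRI_INTERNAL_STATS
  uint64_t _nrFinds;   // statistics: number of lookup calls
  uint64_t _nrProbes;  // statistics: number of probe steps during lookups
#endif
} example_table_t;

static void RecordFind(example_table_t* table, uint64_t probes) {
#ifdef TRI_INTERNAL_STATS
  table->_nrFinds++;
  table->_nrProbes += probes;
#else
  (void) table;   // statistics compiled out: nothing to record
  (void) probes;
#endif
}
```

Compiling with `-DTRI_INTERNAL_STATS` restores the counters; without it the struct shrinks and the increments disappear entirely, which is the point of the change.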
mmm a / Telegram / SourceFiles / history / history_item . cpp <nl> ppp b / Telegram / SourceFiles / history / history_item . cpp <nl> bool HistoryItem : : canDeleteForEveryone ( TimeId now ) const { <nl> return false ; <nl> } <nl> } <nl> - if ( ! peer - > isUser ( ) & & ! toHistoryMessage ( ) ) { <nl> - return false ; <nl> - } else if ( const auto media = this - > media ( ) ) { <nl> + if ( const auto media = this - > media ( ) ) { <nl> if ( ! media - > allowsRevoke ( now ) ) { <nl> return false ; <nl> } <nl>
Allow deleting service messages for everyone in groups .
telegramdesktop/tdesktop
0299ba48737dddc99b8d04ed752f8510d85ec2bd
2020-10-01T15:21:39Z
mmm a / caffe2 / core / common_gpu . h <nl> ppp b / caffe2 / core / common_gpu . h <nl> <nl> / / CAFFE_HAS_CUDA_FP16 manually . <nl> <nl> # ifndef CAFFE_HAS_CUDA_FP16 <nl> - # if CUDA_VERSION > = 7050 <nl> + # if CUDA_VERSION > = 7050 | | defined ( __HIP_PLATFORM_HCC__ ) <nl> # define CAFFE_HAS_CUDA_FP16 <nl> # endif / / CUDA_VERSION > = 7050 <nl> # endif / / CAFFE_HAS_CUDA_FP16 <nl> <nl> # include < cuda_fp16 . h > <nl> # endif <nl> <nl> + / / cuda major revision number below which fp16 compute is not supoorted <nl> + # ifndef __HIP_PLATFORM_HCC__ <nl> + constexpr int kFp16CUDADevicePropMajor = 6 ; <nl> + # else <nl> + constexpr int kFp16CUDADevicePropMajor = 3 ; <nl> + # endif <nl> + <nl> / / Re - enable strict aliasing diagnostic if it was disabled . <nl> # if CUDA_VERSION > = 9000 <nl> # ifdef __GNUC__ <nl> mmm a / caffe2 / operators / fully_connected_op_gpu . cc <nl> ppp b / caffe2 / operators / fully_connected_op_gpu . cc <nl> namespace caffe2 { <nl> <nl> namespace { <nl> <nl> - constexpr int kFp16CUDADevicePropMajor = 6 ; <nl> - <nl> template < class FullyConnectedOp > <nl> bool RunFullyConnectedOpOnCUDADevice ( <nl> const bool float16_compute , <nl> mmm a / caffe2 / python / operator_test / fc_operator_test . py <nl> ppp b / caffe2 / python / operator_test / fc_operator_test . py <nl> <nl> class TestFcOperator ( serial . SerializedTestCase ) : <nl> def _run_test ( self , n , m , k , transposed , multi_dim , dtype , engine , gc , dc ) : <nl> if dtype = = np . float16 : <nl> - # fp16 only supported with CUDA <nl> - assume ( gc . device_type = = caffe2_pb2 . CUDA ) <nl> - dc = [ d for d in dc if d . device_type = = caffe2_pb2 . CUDA ] <nl> + # fp16 only supported with CUDA / HIP <nl> + assume ( core . IsGPUDeviceType ( gc . device_type ) ) <nl> + dc = [ d for d in dc if core . IsGPUDeviceType ( d . device_type ) ] <nl> <nl> if engine = = ' TENSORCORE ' : <nl> # TensorCore only makes sense with CUDA <nl> def fc_tranposed_op ( X , W , b ) : <nl> engine = engine , <nl> ) <nl> <nl> - if dtype = = np . float16 and gc . device_type = = caffe2_pb2 . CUDA : <nl> + if dtype = = np . float16 and core . IsGPUDeviceType ( gc . device_type ) : <nl> a = caffe2_pb2 . Argument ( ) <nl> a . i = 1 <nl> a . name = " float16_compute " <nl> op . arg . extend ( [ a ] ) <nl> <nl> # Check against numpy reference <nl> + # ReferenceChecks is flaky on rocm with threshold of 1e - 4 for fp16 . Relaxing to 1e - 3 . <nl> + threshold = 1e - 3 if ( gc . device_type = = caffe2_pb2 . HIP and dtype = = np . float16 ) else 1e - 4 <nl> self . assertReferenceChecks ( <nl> device_option = gc , <nl> op = op , <nl> inputs = [ X , W , b ] , <nl> reference = fc_tranposed_op if transposed else fc_op , <nl> + threshold = threshold <nl> ) <nl> # Check over multiple devices <nl> self . assertDeviceChecks ( dc , op , [ X , W , b ] , [ 0 ] ) <nl> mmm a / caffe2 / python / operator_test / matmul_op_test . py <nl> ppp b / caffe2 / python / operator_test / matmul_op_test . py <nl> class TestBatchMatMul ( serial . SerializedTestCase ) : <nl> ) <nl> def test_batch_matmul ( self , C , M , K , N , trans_a , trans_b , dtype , gc , dc ) : <nl> if dtype = = np . float16 : <nl> - # fp16 is only supported with CUDA <nl> - assume ( gc . device_type = = caffe2_pb2 . CUDA ) <nl> - dc = [ d for d in dc if d . device_type = = caffe2_pb2 . CUDA ] <nl> + # fp16 is only supported with CUDA / HIP <nl> + assume ( core . IsGPUDeviceType ( gc . device_type ) ) <nl> + dc = [ d for d in dc if core . IsGPUDeviceType ( d . 
device_type ) ] <nl> <nl> batch_dims = np . random . randint ( <nl> low = 1 , <nl> mmm a / caffe2 / python / operator_test / momentum_sgd_test . py <nl> ppp b / caffe2 / python / operator_test / momentum_sgd_test . py <nl> <nl> import caffe2 . python . hypothesis_test_util as hu <nl> import caffe2 . python . serialized_test . serialized_test_util as serial <nl> <nl> - import hypothesis <nl> - from hypothesis import given <nl> + from hypothesis import given , assume <nl> import hypothesis . strategies as st <nl> import numpy as np <nl> import unittest <nl> def test_sparse_momentum_sgd ( <nl> ) <nl> <nl> # Verify that the generated indices are unique <nl> - hypothesis . assume ( <nl> + assume ( <nl> np . array_equal ( <nl> np . unique ( indices . flatten ( ) ) , <nl> np . sort ( indices . flatten ( ) ) ) ) <nl> def sparse ( grad , m , lr , param , i ) : <nl> [ grad , m , lr , w , indices ] , <nl> sparse ) <nl> <nl> - @ given ( n = st . integers ( 4 , 8 ) , nesterov = st . booleans ( ) , * * hu . gcs_gpu_only ) <nl> - @ unittest . skipIf ( not workspace . has_gpu_support , " No gpu support . " ) <nl> + @ unittest . skipIf ( not workspace . has_gpu_support and not workspace . has_hip_support , " No gpu support . " ) <nl> + @ given ( n = st . integers ( 4 , 8 ) , nesterov = st . booleans ( ) , * * hu . gcs ) <nl> def test_fp16momentum_sgd ( self , n , nesterov , gc , dc ) : <nl> + assume ( core . IsGPUDeviceType ( gc . device_type ) ) <nl> gpuvers = workspace . GetDeviceProperties ( 0 ) [ " major " ] <nl> if gpuvers < 6 : <nl> print ( " No FP16 support because major version { } < 6 " . format ( gpuvers ) ) <nl> mmm a / caffe2 / sgd / fp16_momentum_sgd_op . cu <nl> ppp b / caffe2 / sgd / fp16_momentum_sgd_op . cu <nl> void fp16_momentum_sgd_update < CUDAContext > ( <nl> at : : Half * param , <nl> CUDAContext * context ) { <nl> const cudaDeviceProp & prop = GetDeviceProperty ( 0 ) ; <nl> - if ( prop . major > = 6 ) { <nl> + if ( prop . major > = kFp16CUDADevicePropMajor ) { <nl> if ( ! fp32_update ) { <nl> FP16MomentumSGDKernel < < < <nl> CAFFE_GET_BLOCKS ( N / 2 ) , <nl> mmm a / caffe2 / utils / math_gpu . cu <nl> ppp b / caffe2 / utils / math_gpu . cu <nl> <nl> # define FIXED_DIVISOR_DIV_MOD ( d , n , q , r ) ( d . DivMod ( n , q , r ) ) <nl> # endif / / __HIP_PLATFORM_HCC__ <nl> <nl> + # ifdef __HIP_PLATFORM_HCC__ <nl> + using CUBLAS_HALF_TYPE = rocblas_half ; <nl> + # else / / __HIP_PLATFORM_HCC <nl> + using CUBLAS_HALF_TYPE = __half ; <nl> + # endif / / __HIP_PLATFORM_HCC <nl> + <nl> # include " caffe2 / utils / math_utils . h " <nl> <nl> # if THRUST_VERSION > = 100800 <nl> CAFFE2_CUDA_EXPORT void Gemm < at : : Half , CUDAContext > ( <nl> at : : Half * C , <nl> CUDAContext * context , <nl> TensorProto : : DataType math_type ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> - # else <nl> / / Note that cublas follows fortran order , so the order is different from <nl> / / the cblas convention . <nl> const int lda = ( trans_A = = CblasNoTrans ) ? K : M ; <nl> CAFFE2_CUDA_EXPORT void Gemm < at : : Half , CUDAContext > ( <nl> if ( math_type = = TensorProto_DataType_FLOAT ) { <nl> CUBLAS_ENFORCE ( cublasSetPointerMode ( <nl> context - > cublas_handle ( ) , CUBLAS_POINTER_MODE_HOST ) ) ; <nl> + # ifdef __HIP_PLATFORM_HCC__ <nl> + / / rocblas doesn ' t support cublasSgemmEx type API yet . 
<nl> + / / It has more general rocblas_gemm_ex API which is more close to cublasGemmEx <nl> + / / rocblas_gemm_ex does D = alpha * op ( A ) * op ( B ) + beta * C , whereas <nl> + / / cublasgemmEx does C = alpha * op ( A ) * op ( B ) + beta * C <nl> + ROCBLAS_ENFORCE ( rocblas_gemm_ex ( <nl> + context - > rocblashandle ( ) , <nl> + cu_trans_B , <nl> + cu_trans_A , <nl> + N , <nl> + M , <nl> + K , <nl> + & alpha , <nl> + B , <nl> + rocblas_datatype_f16_r , <nl> + ldb , <nl> + A , <nl> + rocblas_datatype_f16_r , <nl> + lda , <nl> + & beta , <nl> + C , <nl> + rocblas_datatype_f16_r , <nl> + N , <nl> + C , / / D <nl> + rocblas_datatype_f16_r , / / D type <nl> + N , / / ldd <nl> + rocblas_datatype_f32_r , / / compute type <nl> + rocblas_gemm_algo_standard , / / rocblas_gemm_algo <nl> + 0 , / / solution index , reserved for future use <nl> + 0 , / / flags , reserved for future use <nl> + NULL , / / size of workspace <nl> + NULL ) ) ; / / workspace <nl> + # else <nl> CUBLAS_ENFORCE ( cublasSgemmEx ( <nl> context - > cublas_handle ( ) , <nl> cu_trans_B , <nl> CAFFE2_CUDA_EXPORT void Gemm < at : : Half , CUDAContext > ( <nl> C , <nl> CUDA_R_16F , <nl> N ) ) ; <nl> + # endif / / __HIP_PLATFORM_HCC__ <nl> } else if ( math_type = = TensorProto_DataType_FLOAT16 ) { <nl> / / convert alpha , beta from float - > __half <nl> const __half alpha_fp16 = at : : Half ( alpha ) ; <nl> CAFFE2_CUDA_EXPORT void Gemm < at : : Half , CUDAContext > ( <nl> N , <nl> M , <nl> K , <nl> - & alpha_fp16 , <nl> - ( const __half * ) B , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & alpha_fp16 ) , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( B ) , <nl> ldb , <nl> - ( const __half * ) A , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( A ) , <nl> lda , <nl> - & beta_fp16 , <nl> - ( __half * ) C , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & beta_fp16 ) , <nl> + reinterpret_cast < CUBLAS_HALF_TYPE * > ( C ) , <nl> N ) ) ; <nl> } else { <nl> / / fail <nl> CAFFE_THROW ( " Unsupported math type " ) ; <nl> } <nl> - # endif <nl> } <nl> <nl> template < > <nl> CAFFE2_CUDA_EXPORT void GemmBatched < at : : Half , CUDAContext > ( <nl> at : : Half * * C , <nl> CUDAContext * context , <nl> TensorProto : : DataType math_type ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> - # else <nl> # if __CUDACC_VER_MAJOR__ < 9 <nl> / / loop over matrices in the batch <nl> for ( int i = 0 ; i < batch_size ; + + i ) { <nl> CAFFE2_CUDA_EXPORT void GemmBatched < at : : Half , CUDAContext > ( <nl> CAFFE_THROW ( " Unsupported math type " ) ; <nl> } <nl> # endif <nl> - # endif <nl> } <nl> <nl> template < > <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> const int C_stride , <nl> CUDAContext * context , <nl> TensorProto : : DataType math_type ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> - # else <nl> - # if __CUDACC_VER_MAJOR__ < 8 <nl> + # if __CUDACC_VER_MAJOR__ < 8 & & ! defined ( __HIP_PLATFORM_HCC__ ) <nl> / / loop over matrices in the batch <nl> for ( int i = 0 ; i < batch_size ; + + i ) { <nl> Gemm < at : : Half , CUDAContext > ( <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> const cublasOperation_t cu_trans_B = <nl> ( trans_B = = CblasNoTrans ) ? 
CUBLAS_OP_N : CUBLAS_OP_T ; <nl> if ( math_type = = TensorProto_DataType_FLOAT ) { <nl> - # if CUDA_VERSION < 9010 <nl> + # if CUDA_VERSION < 9010 & & ! defined ( __HIP_PLATFORM_HCC__ ) <nl> / / loop over matrices in the batch <nl> for ( int i = 0 ; i < batch_size ; + + i ) { <nl> Gemm < at : : Half , CUDAContext > ( <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> # else <nl> CUBLAS_ENFORCE ( cublasSetPointerMode ( <nl> context - > cublas_handle ( ) , CUBLAS_POINTER_MODE_HOST ) ) ; <nl> + # ifdef __HIP_PLATFORM_HCC__ <nl> + / / D [ i * stride_d ] = alpha * op ( A [ i * stride_a ] ) * op ( B [ i * stride_b ] ) + beta * C [ i * stride_c ] , <nl> + / / for i in [ 0 , batch_count - 1 ] <nl> + ROCBLAS_ENFORCE ( rocblas_gemm_strided_batched_ex ( <nl> + context - > rocblashandle ( ) , <nl> + cu_trans_B , <nl> + cu_trans_A , <nl> + N , <nl> + M , <nl> + K , <nl> + & alpha , <nl> + B , <nl> + rocblas_datatype_f16_r , <nl> + ldb , <nl> + B_stride , <nl> + A , <nl> + rocblas_datatype_f16_r , <nl> + lda , <nl> + A_stride , <nl> + & beta , <nl> + C , <nl> + rocblas_datatype_f16_r , <nl> + ldc , <nl> + C_stride , <nl> + C , / / D <nl> + rocblas_datatype_f16_r , / / D type <nl> + ldc , / / ldd <nl> + C_stride , / / D stride <nl> + batch_size , <nl> + rocblas_datatype_f32_r , / / compute type <nl> + rocblas_gemm_algo_standard , / / rocblas_gemm_algo <nl> + 0 , / / solution index , reserved for future use <nl> + 0 , / / flags , reserved for future use <nl> + NULL , / / size of workspace <nl> + NULL ) ) ; / / workspace <nl> + # else <nl> CUBLAS_ENFORCE ( cublasGemmStridedBatchedEx ( <nl> context - > cublas_handle ( ) , <nl> cu_trans_B , <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> batch_size , <nl> CUDA_R_32F , <nl> CUBLAS_GEMM_DEFAULT_TENSOR_OP ) ) ; <nl> + # endif / / __HIP_PLATFORM_HCC__ <nl> # endif <nl> } else if ( math_type = = TensorProto_DataType_FLOAT16 ) { <nl> / / Convert alpha , beta from float - > __half <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> N , <nl> M , <nl> K , <nl> - & alpha_fp16 , <nl> - ( const __half * ) B , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & alpha_fp16 ) , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( B ) , <nl> ldb , <nl> B_stride , <nl> - ( const __half * ) A , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( A ) , <nl> lda , <nl> A_stride , <nl> - & beta_fp16 , <nl> - ( __half * ) C , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & beta_fp16 ) , <nl> + reinterpret_cast < CUBLAS_HALF_TYPE * > ( C ) , <nl> ldc , <nl> C_stride , <nl> batch_size ) ) ; <nl> CAFFE2_CUDA_EXPORT void GemmStridedBatched < at : : Half , CUDAContext > ( <nl> CAFFE_THROW ( " Unsupported math type " ) ; <nl> } <nl> # endif <nl> - # endif <nl> } <nl> <nl> # if CUDA_VERSION > = 9000 <nl> CAFFE2_CUDA_EXPORT void Gemv < at : : Half , CUDAContext > ( <nl> at : : Half * y , <nl> CUDAContext * context , <nl> TensorProto : : DataType math_type ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> - # else <nl> const cublasOperation_t cu_trans_A = <nl> ( trans_A = = CblasNoTrans ) ? 
CUBLAS_OP_T : CUBLAS_OP_N ; <nl> <nl> CAFFE2_CUDA_EXPORT void Gemv < at : : Half , CUDAContext > ( <nl> if ( math_type = = TensorProto_DataType_FLOAT ) { <nl> CUBLAS_ENFORCE ( cublasSetPointerMode ( <nl> context - > cublas_handle ( ) , CUBLAS_POINTER_MODE_HOST ) ) ; <nl> + # ifdef __HIP_PLATFORM_HCC__ <nl> + / / rocblas doesn ' t support cublasSgemmEx type API yet . <nl> + / / It has more general rocblas_gemm_ex API which is more close to cublasGemmEx <nl> + / / rocblas_gemm_ex does D = alpha * op ( A ) * op ( B ) + beta * C , whereas <nl> + / / cublasgemmEx does C = alpha * op ( A ) * op ( B ) + beta * C <nl> + ROCBLAS_ENFORCE ( rocblas_gemm_ex ( <nl> + context - > rocblashandle ( ) , <nl> + cu_trans_A , <nl> + rocblas_operation_none , <nl> + m , <nl> + 1 , <nl> + k , <nl> + & alpha , <nl> + A , <nl> + rocblas_datatype_f16_r , <nl> + lda , <nl> + x , <nl> + rocblas_datatype_f16_r , <nl> + k , <nl> + & beta , <nl> + y , <nl> + rocblas_datatype_f16_r , <nl> + ldc , <nl> + y , / / D <nl> + rocblas_datatype_f16_r , / / D type <nl> + ldc , / / ldd <nl> + rocblas_datatype_f32_r , / / compute type <nl> + rocblas_gemm_algo_standard , / / rocblas_gemm_algo <nl> + 0 , / / solution index , reserved for future use <nl> + 0 , / / flags , reserved for future use <nl> + NULL , / / size of workspace <nl> + NULL ) ) ; / / workspace <nl> + # else <nl> CUBLAS_ENFORCE ( cublasSgemmEx ( <nl> context - > cublas_handle ( ) , <nl> cu_trans_A , <nl> CAFFE2_CUDA_EXPORT void Gemv < at : : Half , CUDAContext > ( <nl> y , <nl> CUDA_R_16F , <nl> ldc ) ) ; <nl> + # endif / / __HIP_PLATFORM_HCC__ <nl> } else if ( math_type = = TensorProto_DataType_FLOAT16 ) { <nl> const __half alpha_fp16 = at : : Half ( alpha ) ; <nl> const __half beta_fp16 = at : : Half ( beta ) ; <nl> CAFFE2_CUDA_EXPORT void Gemv < at : : Half , CUDAContext > ( <nl> m , <nl> 1 , <nl> k , <nl> - & alpha_fp16 , <nl> - ( const __half * ) A , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & alpha_fp16 ) , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( A ) , <nl> lda , <nl> - ( const __half * ) x , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( x ) , <nl> k , <nl> - & beta_fp16 , <nl> - ( __half * ) y , <nl> + reinterpret_cast < const CUBLAS_HALF_TYPE * > ( & beta_fp16 ) , <nl> + reinterpret_cast < CUBLAS_HALF_TYPE * > ( y ) , <nl> ldc ) ) ; <nl> } else { <nl> / / fail <nl> CAFFE_THROW ( " Unsupported math type " ) ; <nl> } <nl> - # endif <nl> } <nl> <nl> namespace { <nl> CAFFE2_CUDA_EXPORT void Dot < at : : Half , CUDAContext > ( <nl> const at : : Half * b , <nl> at : : Half * y , <nl> CUDAContext * context ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> + # if defined ( __HIP_PLATFORM_HCC__ ) <nl> + CAFFE_THROW ( " HIP currently does not support FP16 completely yet . " ) ; <nl> # else <nl> / / execute with 32 - bit math <nl> CUBLAS_ENFORCE ( cublasSetPointerMode ( <nl> CAFFE2_CUDA_EXPORT void Axpy < at : : Half , CUDAContext > ( <nl> const at : : Half * X , <nl> at : : Half * Y , <nl> CUDAContext * context ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> + # if defined ( __HIP_PLATFORM_HCC__ ) <nl> + CAFFE_THROW ( " HIP currently does not support FP16 completely yet . 
" ) ; <nl> # else <nl> CUBLAS_ENFORCE ( <nl> cublasSetPointerMode ( context - > cublas_handle ( ) , CUBLAS_POINTER_MODE_HOST ) ) ; <nl> CAFFE2_CUDA_EXPORT void Axpy < at : : Half , CUDAContext > ( <nl> const at : : Half * X , <nl> at : : Half * Y , <nl> CUDAContext * context ) { <nl> - # if defined ( __HIP_PLATFORM_HCC__ ) & & ! ROCBLAS_FP16 <nl> - CAFFE_THROW ( " HIP currently does not support FP16 yet . " ) ; <nl> + # if defined ( __HIP_PLATFORM_HCC__ ) <nl> + CAFFE_THROW ( " HIP currently does not support FP16 completely yet . " ) ; <nl> # else <nl> CUBLAS_ENFORCE ( cublasSetPointerMode ( <nl> context - > cublas_handle ( ) , CUBLAS_POINTER_MODE_DEVICE ) ) ; <nl> mmm a / cmake / Dependencies . cmake <nl> ppp b / cmake / Dependencies . cmake <nl> if ( USE_ROCM ) <nl> list ( APPEND HIP_CXX_FLAGS - Wno - unused - command - line - argument ) <nl> list ( APPEND HIP_CXX_FLAGS - Wno - duplicate - decl - specifier ) <nl> list ( APPEND HIP_CXX_FLAGS - DCAFFE2_USE_MIOPEN ) <nl> - list ( APPEND HIP_CXX_FLAGS - DROCBLAS_FP16 = 0 ) <nl> <nl> set ( HIP_HCC_FLAGS $ { HIP_CXX_FLAGS } ) <nl> # Ask hcc to generate device code during compilation so we can use <nl> mmm a / tools / amd_build / pyHIPIFY / cuda_to_hip_mappings . py <nl> ppp b / tools / amd_build / pyHIPIFY / cuda_to_hip_mappings . py <nl> <nl> ( " cublasCgemmStridedBatched " , ( " rocblas_cgemm_strided_batched " , CONV_MATH_FUNC , API_BLAS , HIP_UNSUPPORTED ) ) , <nl> ( " cublasCgemm3mStridedBatched " , ( " rocblas_cgemm_3m_strided_batched " , CONV_MATH_FUNC , API_BLAS , HIP_UNSUPPORTED ) ) , <nl> ( " cublasZgemmStridedBatched " , ( " rocblas_zgemm_strided_batched " , CONV_MATH_FUNC , API_BLAS , HIP_UNSUPPORTED ) ) , <nl> - ( " cublasHgemmStridedBatched " , ( " rocblas_hgemm_batched " , CONV_MATH_FUNC , API_BLAS , HIP_UNSUPPORTED ) ) , <nl> + ( " cublasHgemmStridedBatched " , ( " rocblas_hgemm_strided_batched " , CONV_MATH_FUNC , API_BLAS , HIP_UNSUPPORTED ) ) , <nl> ( " cublasSgemm " , ( " rocblas_sgemm " , CONV_MATH_FUNC , API_BLAS ) ) , <nl> ( " cublasDgemm " , ( " rocblas_dgemm " , CONV_MATH_FUNC , API_BLAS ) ) , <nl> ( " cublasCgemm " , ( " rocblas_cgemm " , CONV_MATH_FUNC , API_BLAS ) ) , <nl>
Integrate rocBLAS fp16 API into Caffe2 ( )
pytorch/pytorch
7e2b074219fad6d2b09b379423e83b2295b29df2
2018-12-11T01:54:06Z
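This record hoists the fp16 capability threshold into `common_gpu.h` as `kFp16CUDADevicePropMajor` (6 for CUDA, 3 for HIP) and checks it before taking half-precision paths such as the momentum-SGD kernel. A minimal sketch of that runtime check, using only the CUDA runtime API, is shown below; it is illustrative and does not reproduce the Caffe2 wrappers themselves.

```cpp
// Query the device's major compute capability before choosing an fp16 code path.
#include <cuda_runtime.h>
#include <cstdio>

#ifndef __HIP_PLATFORM_HCC__
constexpr int kFp16CUDADevicePropMajor = 6;
#else
constexpr int kFp16CUDADevicePropMajor = 3;
#endif

bool DeviceSupportsFp16Compute(int device_id) {
  cudaDeviceProp prop;
  if (cudaGetDeviceProperties(&prop, device_id) != cudaSuccess) {
    return false;  // conservatively fall back to the fp32 path
  }
  return prop.major >= kFp16CUDADevicePropMajor;
}

int main() {
  std::printf("fp16 compute supported: %s\n",
              DeviceSupportsFp16Compute(0) ? "yes" : "no");
  return 0;
}
```

The `__HIP_PLATFORM_HCC__` branch mirrors the constant added to `common_gpu.h` above, so the same check carries over after hipification.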
mmm a / android / playground / app / build . gradle <nl> ppp b / android / playground / app / build . gradle <nl> android { <nl> minSdkVersion project . minSdkVersion <nl> targetSdkVersion project . targetSdkVersion <nl> versionCode 17 <nl> - versionName " 0 . 19 . 0 " <nl> + versionName " 0 . 20 . 0 " <nl> testInstrumentationRunner " android . support . test . runner . AndroidJUnitRunner " <nl> multiDexEnabled true <nl> ndk { <nl> mmm a / android / sdk / build . gradle <nl> ppp b / android / sdk / build . gradle <nl> checkstyle { <nl> } <nl> <nl> <nl> - version = " 0 . 19 . 0 . 4 " <nl> + version = " 0 . 20 . 0 " <nl> <nl> android { <nl> <nl> mmm a / ios / sdk / WeexSDK / Sources / Utility / WXDefine . h <nl> ppp b / ios / sdk / WeexSDK / Sources / Utility / WXDefine . h <nl> <nl> * to you under the Apache License , Version 2 . 0 ( the <nl> * " License " ) ; you may not use this file except in compliance <nl> * with the License . You may obtain a copy of the License at <nl> - * <nl> + * <nl> * http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> - * <nl> + * <nl> * Unless required by applicable law or agreed to in writing , <nl> * software distributed under the License is distributed on an <nl> * " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY <nl> <nl> # ifndef __WX_DEFINE_H__ <nl> # define __WX_DEFINE_H__ <nl> <nl> - # define WX_SDK_VERSION @ " 0 . 19 . 0 " <nl> + # define WX_SDK_VERSION @ " 0 . 20 . 0 " <nl> <nl> # if defined ( __cplusplus ) <nl> # define WX_EXTERN extern " C " __attribute__ ( ( visibility ( " default " ) ) ) <nl> parts = [ parts subarrayWithRange : ( NSRange ) { 0 , parts . count - 1 } ] ; \ <nl> # endif <nl> <nl> / * * <nl> - * @ abstract Compared with system version of current device <nl> - * <nl> + * @ abstract Compared with system version of current device <nl> + * <nl> * @ return YES if greater than or equal to the system verison , otherwise , NO . <nl> * <nl> * / <nl>
[ Release ] Roll version number to 0 . 20 . 0
apache/incubator-weex
3b67c87aca09aa76d74881002e52d84877b795cc
2018-11-13T07:26:26Z
mmm a / client / README . md <nl> ppp b / client / README . md <nl> Create a symbolic link in the Tensorflow Serving checkout to the deepspeech clie <nl> <nl> ` ` ` <nl> cd serving <nl> - ln - s . . / DeepSpeech / deepspeech_client . / <nl> + ln - s . . / DeepSpeech / client . / deepspeech_client <nl> ` ` ` <nl> <nl> If you haven ' t already , you ' ll need to build the Tensorflow Server . <nl> new file mode 100644 <nl> index 000000000 . . 800329205 <nl> mmm / dev / null <nl> ppp b / demos / BUILD <nl> <nl> + # Description : Deepspeech Serving Client . <nl> + <nl> + load ( " / / tensorflow_serving : serving . bzl " , " serving_proto_library " ) <nl> + <nl> + py_binary ( <nl> + name = " deepspeech_demos " , <nl> + srcs = [ <nl> + " deepspeech_demos . py " , <nl> + ] , <nl> + deps = [ <nl> + " / / tensorflow_serving / apis : predict_proto_py_pb2 " , <nl> + " / / tensorflow_serving / apis : prediction_service_proto_py_pb2 " , <nl> + " @ org_tensorflow / / tensorflow : tensorflow_py " , <nl> + ] , <nl> + ) <nl> new file mode 100644 <nl> index 000000000 . . f7bf3749b <nl> mmm / dev / null <nl> ppp b / demos / README . md <nl> <nl> + # DeepSpeech demos <nl> + <nl> + A collection of demos for an exported DeepSpeech model . <nl> + <nl> + # # Requirements <nl> + <nl> + * [ Tensorflow Serving ] ( https : / / tensorflow . github . io / serving / setup ) <nl> + * [ python - websocket - server ] ( https : / / github . com / Pithikos / python - websocket - server ) <nl> + * [ pyaudio ] ( https : / / people . csail . mit . edu / hubert / pyaudio / ) <nl> + <nl> + # # Building <nl> + <nl> + Create a symbolic link in the Tensorflow Serving checkout to the deepspeech demos directory . <nl> + <nl> + ` ` ` <nl> + cd serving <nl> + ln - s . . / DeepSpeech / demos . / deepspeech_demos <nl> + ` ` ` <nl> + <nl> + If you haven ' t already , you ' ll need to build the Tensorflow Server . <nl> + <nl> + ` ` ` <nl> + bazel build - c opt / / tensorflow_serving / model_servers : tensorflow_model_server <nl> + ` ` ` <nl> + <nl> + Then you can build the DeepSpeech demos binary . <nl> + <nl> + ` ` ` <nl> + bazel build - c opt / / deepspeech_demos <nl> + ` ` ` <nl> + <nl> + # # Running <nl> + <nl> + Start a server running an exported DeepSpeech model . <nl> + <nl> + ` ` ` <nl> + bazel - bin / tensorflow_serving / model_servers / tensorflow_model_server - - port = 9000 - - model_name = deepspeech - - model_base_path = / path / to / deepspeech / export <nl> + ` ` ` <nl> + <nl> + Run the demos binary , from the demos directory . <nl> + <nl> + ` ` ` <nl> + / path / to / tensorflow / serving / bazel - bin / deepspeech_demos / deepspeech_demos - - server = localhost : 9000 <nl> + ` ` ` <nl> + <nl> + Now navigate to http : / / localhost : 8080 in a web browser . <nl> new file mode 100644 <nl> index 000000000 . . 6a0a180a0 <nl> mmm / dev / null <nl> ppp b / demos / deepspeech_demos . py <nl> <nl> + # ! / usr / bin / env python2 . 7 <nl> + <nl> + " " " A client that talks to tensorflow_model_server loaded with deepspeech model . <nl> + <nl> + The client launches a local web server , by default accessible at localhost : 8080 <nl> + that provides a user interface to demonstrations of the DeepSpeech model . <nl> + <nl> + Typical usage example : <nl> + <nl> + deepspeech_demos . 
py - - server = localhost : 9000 <nl> + " " " <nl> + <nl> + import os <nl> + import sys <nl> + import time <nl> + import wave <nl> + import base64 <nl> + import signal <nl> + import socket <nl> + import hashlib <nl> + import pyaudio <nl> + import StringIO <nl> + import threading <nl> + import webrtcvad <nl> + import numpy as np <nl> + import SocketServer <nl> + import SimpleHTTPServer <nl> + import tensorflow as tf <nl> + from array import array <nl> + from grpc . beta import implementations <nl> + from websocket_server import WebsocketServer <nl> + from tensorflow_serving . apis import predict_pb2 <nl> + from tensorflow_serving . apis import prediction_service_pb2 <nl> + <nl> + sys . path . append ( os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , ' . . ' ) ) <nl> + from util . text import ndarray_to_text <nl> + from util . audio import audiofile_to_input_vector <nl> + <nl> + tf . app . flags . DEFINE_integer ( ' port ' , 8080 , ' PredictionService host : port ' ) <nl> + tf . app . flags . DEFINE_string ( ' server ' , ' ' , ' PredictionService host : port ' ) <nl> + # These need to match the constants used when training the deepspeech model <nl> + tf . app . flags . DEFINE_integer ( ' n_input ' , 26 , ' Number of MFCC features ' ) <nl> + tf . app . flags . DEFINE_integer ( ' n_context ' , 9 , ' Number of frames of context ' ) <nl> + FLAGS = tf . app . flags . FLAGS <nl> + <nl> + FRAME_SIZE = 160 <nl> + FRAME_LENGTH = 10 <nl> + SILENCE_BEFORE_COMPLETE = ( 500 * FRAME_SIZE ) / FRAME_LENGTH <nl> + MAXIMUM_LENGTH = ( 10000 * FRAME_SIZE ) / FRAME_LENGTH <nl> + SEND_INTERVAL = ( 300 * FRAME_SIZE ) / FRAME_LENGTH <nl> + <nl> + def _create_rpc_callback ( event , server ) : <nl> + def _callback ( result_future ) : <nl> + exception = result_future . exception ( ) <nl> + if exception : <nl> + print exception <nl> + else : <nl> + results = tf . contrib . util . make_ndarray ( result_future . result ( ) . outputs [ ' outputs ' ] ) <nl> + for result in results [ 0 ] : <nl> + server . message ( ndarray_to_text ( result ) ) <nl> + event . set ( ) <nl> + return _callback <nl> + <nl> + def do_inference ( hostport , audio_file , server ) : <nl> + audio_waves = audiofile_to_input_vector ( <nl> + audio_file , FLAGS . n_input , FLAGS . n_context ) <nl> + audio = np . array ( [ audio_waves ] ) <nl> + <nl> + host , port = hostport . split ( ' : ' ) <nl> + channel = implementations . insecure_channel ( host , int ( port ) ) <nl> + stub = prediction_service_pb2 . beta_create_PredictionService_stub ( channel ) <nl> + <nl> + request = predict_pb2 . PredictRequest ( ) <nl> + request . model_spec . name = ' deepspeech ' <nl> + request . inputs [ ' input ' ] . CopyFrom ( tf . contrib . util . make_tensor_proto ( audio ) ) <nl> + <nl> + event = threading . Event ( ) <nl> + result_future = stub . Predict . future ( request , 5 . 0 ) # 5 seconds <nl> + result_future . add_done_callback ( _create_rpc_callback ( event , server ) ) <nl> + if event . is_set ( ) ! = True : <nl> + event . wait ( ) <nl> + <nl> + class NoCacheHTTPRequestHAndler ( SimpleHTTPServer . SimpleHTTPRequestHandler ) : <nl> + def end_headers ( self ) : <nl> + self . send_headers ( ) <nl> + SimpleHTTPServer . SimpleHTTPRequestHandler . end_headers ( self ) <nl> + <nl> + def send_headers ( self ) : <nl> + self . send_header ( ' Cache - Control ' , ' no - cache , no - store , must - revalidate ' ) <nl> + self . send_header ( ' Pragma ' , ' no - cache ' ) <nl> + self . 
send_header ( ' Expires ' , ' 0 ' ) <nl> + <nl> + class DSSocketServer ( threading . Thread ) : <nl> + def __init__ ( self , port = 9876 ) : <nl> + threading . Thread . __init__ ( self ) <nl> + self . setDaemon = True <nl> + <nl> + self . server = WebsocketServer ( port ) <nl> + self . server . set_fn_message_received ( self . message_received ) <nl> + <nl> + self . stopped = 0 <nl> + <nl> + def message_received ( self , client , server , message ) : <nl> + print ' Message received : % s ' % ( message ) <nl> + if message = = ' STOP ' : <nl> + self . stopped + = 1 <nl> + elif message = = ' START ' : <nl> + self . stopped - = 1 <nl> + <nl> + def run ( self ) : <nl> + self . server . run_forever ( ) <nl> + <nl> + def message ( self , text ) : <nl> + self . server . send_message_to_all ( text ) <nl> + <nl> + class DSWebServer ( threading . Thread ) : <nl> + def __init__ ( self ) : <nl> + threading . Thread . __init__ ( self ) <nl> + self . setDaemon = True <nl> + <nl> + self . server = SocketServer . ThreadingTCPServer ( ( ' ' , FLAGS . port ) , <nl> + NoCacheHTTPRequestHAndler ) <nl> + self . server . allow_reuse_address = True <nl> + <nl> + self . socket_server = DSSocketServer ( ) <nl> + <nl> + def run ( self ) : <nl> + print ' Starting server , visit http : / / localhost : % d / ' % ( FLAGS . port ) <nl> + self . socket_server . start ( ) <nl> + self . server . serve_forever ( ) <nl> + <nl> + def message ( self , text ) : <nl> + self . socket_server . message ( text ) <nl> + <nl> + @ property <nl> + def stopped ( self ) : <nl> + return self . socket_server . stopped > 0 <nl> + <nl> + def main ( _ ) : <nl> + if not FLAGS . server : <nl> + print ' please specify server host : port ' <nl> + return <nl> + <nl> + vad = webrtcvad . Vad ( ) <nl> + pa = pyaudio . PyAudio ( ) <nl> + <nl> + # Default to using pulse <nl> + device = 0 <nl> + for i in xrange ( 0 , pa . get_device_count ( ) ) : <nl> + info = pa . get_device_info_by_index ( i ) <nl> + if info [ ' name ' ] = = ' pulse ' : <nl> + device = i <nl> + break <nl> + <nl> + # 320 frames = 10ms @ 16 - bit 16kHz <nl> + stream = pa . open ( format = pyaudio . paInt16 , channels = 1 , rate = 16000 , <nl> + input_device_index = device , input = True , <nl> + # output_device_index = device , output = True , # for debugging <nl> + frames_per_buffer = FRAME_SIZE ) <nl> + <nl> + silent_frames = 0 <nl> + recorded = StringIO . StringIO ( ) <nl> + recorded = array ( ' h ' ) <nl> + recording = False <nl> + <nl> + # Start web server <nl> + server = DSWebServer ( ) <nl> + server . start ( ) <nl> + <nl> + # Start recording / transcribing / serving <nl> + print ( ' Listening . . . ' ) <nl> + while True : <nl> + while stream . is_active ( ) and stream . get_read_available ( ) > = FRAME_SIZE : <nl> + audio = array ( ' h ' , stream . read ( FRAME_SIZE ) ) <nl> + if sys . byteorder = = ' big ' : <nl> + audio . byteswap ( ) <nl> + <nl> + if vad . is_speech ( audio . tostring ( ) , 16000 ) : <nl> + if recording ! = True : <nl> + recording = True <nl> + print ' Recording . . . ' <nl> + server . message ( ' RECORD ' ) <nl> + silent_frames = 0 <nl> + else : <nl> + if recording : <nl> + silent_frames + = 1 <nl> + <nl> + if recording : <nl> + recorded . extend ( audio ) <nl> + <nl> + if len ( recorded ) > = MAXIMUM_LENGTH : <nl> + break <nl> + <nl> + if server . stopped = = stream . is_active ( ) : <nl> + if server . stopped : <nl> + stream . 
stop_stream ( ) <nl> + if len ( recorded ) : <nl> + recorded = array ( ' h ' ) <nl> + recording = False <nl> + silent_frames = 0 <nl> + print ' Stopped recording ' <nl> + else : <nl> + stream . start_stream ( ) <nl> + print ' Resume recording ' <nl> + <nl> + if recording and len ( recorded ) % SEND_INTERVAL is 0 : <nl> + audiofile = StringIO . StringIO ( ) <nl> + encoder = wave . open ( audiofile , ' wb ' ) <nl> + encoder . setnchannels ( 1 ) <nl> + encoder . setsampwidth ( pa . get_sample_size ( pyaudio . paInt16 ) ) <nl> + encoder . setframerate ( 16000 ) <nl> + encoder . writeframes ( recorded . tostring ( ) ) <nl> + encoder . close ( ) <nl> + <nl> + # For debugging <nl> + # stream . write ( audiofile . getvalue ( ) ) <nl> + <nl> + audiofile . seek ( 0 ) <nl> + # sys . stdout . write ( ' \ 033 [ 2J \ 033 [ H ' ) # Clear screen , return to home <nl> + do_inference ( FLAGS . server , audiofile , server ) <nl> + audiofile . close ( ) <nl> + <nl> + if silent_frames > = SILENCE_BEFORE_COMPLETE or len ( recorded ) > = MAXIMUM_LENGTH : <nl> + server . message ( ' END ' ) <nl> + print ( ' Listening . . . ' ) <nl> + silent_frames = 0 <nl> + recorded = array ( ' h ' ) <nl> + recording = False <nl> + <nl> + stream . stop_stream ( ) <nl> + stream . close ( ) <nl> + pa . terminate ( ) <nl> + <nl> + if __name__ = = ' __main__ ' : <nl> + try : <nl> + tf . app . run ( ) <nl> + except KeyboardInterrupt : <nl> + os . _exit ( 0 ) <nl> + <nl> new file mode 100644 <nl> index 000000000 . . cc8eefec5 <nl> mmm / dev / null <nl> ppp b / demos / dictation . html <nl> <nl> + < ! DOCTYPE html > <nl> + < html xmlns = " http : / / www . w3 . org / 1999 / xhtml " > <nl> + < head > <nl> + < title > DeepSpeech continuous speech recognition < / title > <nl> + < meta charset = " UTF - 8 " > <nl> + < script > <nl> + function add_word ( text ) { <nl> + var div = document . createElement ( ' div ' ) ; <nl> + div . className = ' word ' ; <nl> + div . innerHTML = text ; <nl> + document . body . appendChild ( div ) ; <nl> + if ( window . scrollY ! = = window . scrollMaxY ) { <nl> + window . scrollTo ( 0 , window . scrollMaxY ) ; <nl> + } <nl> + return div ; <nl> + } <nl> + window . onload = function ( ) { <nl> + var lastWord = null ; <nl> + var s = new WebSocket ( ' ws : / / localhost : 9876 / ' ) ; <nl> + s . onopen = function ( e ) { console . log ( ' Connected : ' , e ) ; } <nl> + s . onclose = function ( e ) { console . log ( ' Connection closed : ' , e ) ; } <nl> + s . onerror = function ( e ) { console . error ( ' Error : ' , e ) ; } <nl> + <nl> + var mic = document . getElementById ( ' mic ' ) ; <nl> + <nl> + s . onmessage = function ( e ) { <nl> + if ( e . data = = ' END ' ) { <nl> + lastWord = null ; <nl> + mic . classList . toggle ( ' recording ' , false ) ; <nl> + return ; <nl> + } else if ( e . data = = ' RECORD ' ) { <nl> + mic . classList . toggle ( ' recording ' , true ) ; <nl> + return ; <nl> + } <nl> + if ( lastWord ) { <nl> + lastWord . textContent = e . data ; <nl> + } else { <nl> + lastWord = add_word ( e . data ) ; <nl> + } <nl> + } <nl> + } ; <nl> + < / script > <nl> + < style > <nl> + html { <nl> + margin : 0 ; <nl> + padding : 0 ; <nl> + border : 0 ; <nl> + width : 100vw ; <nl> + height : 100vh ; <nl> + overflow - x : hidden ; <nl> + scroll - behavior : smooth ; <nl> + } <nl> + <nl> + body { <nl> + margin : 0 ; <nl> + } <nl> + <nl> + div . word { <nl> + display : block ; <nl> + animation : 0 . 
5s ease - in forwards fade - in ; <nl> + font - size : 3vw ; <nl> + margin : 2vw ; <nl> + font - family : sans ; <nl> + font - weight : 600 ; <nl> + } <nl> + <nl> + # mic { <nl> + position : fixed ; <nl> + width : 9vw ; <nl> + bottom : 0 ; <nl> + right : 3vw ; <nl> + opacity : 0 ; <nl> + transition : opacity 0 . 2s ; <nl> + } <nl> + <nl> + # mic . recording { <nl> + opacity : 1 ; <nl> + } <nl> + <nl> + @ keyframes fade - in { <nl> + from { opacity : 0 ; } to { opacity : 1 ; } <nl> + } <nl> + < / style > <nl> + < / head > <nl> + < body > <nl> + < img id = ' mic ' src = ' images / microphone . png ' > < / img > <nl> + < / body > <nl> + < / html > <nl> new file mode 100644 <nl> index 000000000 . . c26ceebb8 <nl> Binary files / dev / null and b / demos / images / microphone . png differ <nl> new file mode 100644 <nl> index 000000000 . . 2a92fbfd2 <nl> Binary files / dev / null and b / demos / images / santa - head . png differ <nl> new file mode 100644 <nl> index 000000000 . . c47cfc1e4 <nl> Binary files / dev / null and b / demos / images / santa - mouth . png differ <nl> new file mode 100644 <nl> index 000000000 . . 2e6cef819 <nl> mmm / dev / null <nl> ppp b / demos / index . html <nl> <nl> + < ! DOCTYPE html > <nl> + < html xmlns = " http : / / www . w3 . org / 1999 / xhtml " > <nl> + < head > <nl> + < title > DeepSpeech demos < / title > <nl> + < meta charset = " UTF - 8 " > <nl> + < style > <nl> + < / style > <nl> + < / head > <nl> + < body > <nl> + < h1 > DeepSpeech demos < / h1 > <nl> + < ul > <nl> + < li > < a href = " dictation . html " > Continuous speech recognition < / a > < / li > <nl> + < li > < a href = " santa . html " > Talking Santa < / a > < / li > <nl> + < / ul > <nl> + < / body > <nl> + < / html > <nl> new file mode 100644 <nl> index 000000000 . . 277224ec7 <nl> mmm / dev / null <nl> ppp b / demos / santa . html <nl> <nl> + < ! DOCTYPE html > <nl> + < html xmlns = " http : / / www . w3 . org / 1999 / xhtml " > <nl> + < head > <nl> + < title > DeepSpeech Talking Santa < / title > <nl> + < meta charset = " UTF - 8 " > <nl> + < script > <nl> + window . onload = function ( ) { <nl> + var talking = false ; <nl> + var synth = window . speechSynthesis ; <nl> + var mic = document . getElementById ( ' mic ' ) ; <nl> + var head = document . getElementById ( ' head ' ) ; <nl> + window . s = new WebSocket ( ' ws : / / localhost : 9876 / ' ) ; <nl> + var lastText = null ; <nl> + <nl> + window . talk = function ( text ) { <nl> + if ( talking ) { <nl> + return ; <nl> + } <nl> + <nl> + console . log ( ' Saying : ' + text ) ; <nl> + <nl> + utt = new SpeechSynthesisUtterance ( text ) ; <nl> + utt . pitch = 2 . 5 ; <nl> + utt . rate = 1 . 5 ; <nl> + <nl> + synth . speak ( utt ) ; <nl> + <nl> + talking = true ; <nl> + s . send ( ' STOP ' ) ; <nl> + head . classList . toggle ( ' talking ' , true ) ; <nl> + utt . onend = function ( ) { <nl> + talking = false ; <nl> + head . classList . toggle ( ' talking ' , false ) ; <nl> + s . send ( ' START ' ) ; <nl> + } <nl> + } ; <nl> + <nl> + s . onopen = function ( e ) { console . log ( ' Connected : ' , e ) ; } <nl> + s . onclose = function ( e ) { console . log ( ' Connection closed : ' , e ) ; } <nl> + s . onerror = function ( e ) { console . error ( ' Error : ' , e ) ; } <nl> + s . onmessage = function ( e ) { <nl> + if ( e . data = = ' END ' ) { <nl> + if ( lastText ) { <nl> + talk ( lastText ) ; <nl> + lastText = null ; <nl> + } <nl> + mic . classList . toggle ( ' recording ' , false ) ; <nl> + return ; <nl> + } else if ( e . 
data = = ' RECORD ' ) { <nl> + mic . classList . toggle ( ' recording ' , true ) ; <nl> + return ; <nl> + } <nl> + lastText = e . data ; <nl> + } <nl> + } ; <nl> + < / script > <nl> + < style > <nl> + html { <nl> + margin : 0 ; <nl> + padding : 0 ; <nl> + border : 0 ; <nl> + width : 100vw ; <nl> + height : 100vh ; <nl> + overflow - x : hidden ; <nl> + scroll - behavior : smooth ; <nl> + } <nl> + <nl> + body { <nl> + margin : 0 ; <nl> + } <nl> + <nl> + div . word { <nl> + display : block ; <nl> + animation : 0 . 5s ease - in forwards fade - in ; <nl> + font - size : 3vw ; <nl> + margin : 2vw ; <nl> + font - family : sans ; <nl> + font - weight : 600 ; <nl> + } <nl> + <nl> + # head , # mouth { <nl> + position : fixed ; <nl> + width : 50vw ; <nl> + top : 50 % ; <nl> + left : 50 % ; <nl> + margin - right : - 50 % ; <nl> + transform : translate ( - 50 % , - 50 % ) ; <nl> + } <nl> + <nl> + # head { <nl> + z - index : 1 ; <nl> + animation : unset ; <nl> + } <nl> + <nl> + # head . talking { <nl> + animation : steps ( 10 ) 1s infinite talking ; <nl> + } <nl> + <nl> + @ keyframes talking { <nl> + 0 % { transform : translate ( - 50 % , - 50 % ) ; } <nl> + 10 % { transform : translate ( - 50 % , - 55 % ) rotate ( - 20deg ) ; } <nl> + 20 % { transform : translate ( - 50 % , - 55 % ) rotate ( 10deg ) ; } <nl> + 30 % { transform : translate ( - 50 % , - 50 % ) rotate ( 20deg ) ; } <nl> + 40 % { transform : translate ( - 50 % , - 55 % ) rotate ( - 10deg ) ; } <nl> + 50 % { transform : translate ( - 50 % , - 50 % ) rotate ( 10deg ) ; } <nl> + 60 % { transform : translate ( - 50 % , - 55 % ) rotate ( 20deg ) ; } <nl> + 70 % { transform : translate ( - 50 % , - 50 % ) rotate ( - 20deg ) ; } <nl> + 80 % { transform : translate ( - 50 % , - 50 % ) rotate ( 10deg ) ; } <nl> + 90 % { transform : translate ( - 50 % , - 55 % ) rotate ( 20deg ) ; } <nl> + 100 % { transform : translate ( - 50 % , - 50 % ) rotate ( - 10deg ) ; } <nl> + } <nl> + <nl> + # mic { <nl> + position : fixed ; <nl> + width : 9vw ; <nl> + bottom : 0 ; <nl> + right : 3vw ; <nl> + opacity : 0 ; <nl> + transition : opacity 0 . 2s ; <nl> + } <nl> + <nl> + # mic . recording { <nl> + opacity : 1 ; <nl> + } <nl> + <nl> + @ keyframes fade - in { <nl> + from { opacity : 0 ; } to { opacity : 1 ; } <nl> + } <nl> + < / style > <nl> + < / head > <nl> + < body > <nl> + < img id = ' head ' src = ' images / santa - head . png ' > < / img > <nl> + < img id = ' mouth ' src = ' images / santa - mouth . png ' > < / img > <nl> + < img id = ' mic ' src = ' images / microphone . png ' > < / img > <nl> + < / body > <nl> + < / html > <nl>
Add a demos directory with two speech recognition demos
mozilla/DeepSpeech
e03c21235f7c743676126fcfb814d4110b3d30ce
2016-11-30T18:15:22Z
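The streaming client added in the mozilla/DeepSpeech commit above buffers 16-bit PCM frames and, every SEND_INTERVAL frames, wraps them in an in-memory WAV before running inference. A minimal sketch of that packaging step, rewritten for Python 3 (io.BytesIO in place of the commit's Python 2 StringIO; the encode_wav helper name and the silence example are illustrative, not from the commit):

    import io
    import wave
    from array import array

    def encode_wav(samples, sample_rate=16000):
        # samples: array('h', ...) of mono 16-bit PCM values
        buf = io.BytesIO()
        with wave.open(buf, 'wb') as encoder:
            encoder.setnchannels(1)            # mono capture
            encoder.setsampwidth(2)            # 2 bytes per int16 sample
            encoder.setframerate(sample_rate)  # the client records at 16 kHz
            encoder.writeframes(samples.tobytes())
        buf.seek(0)                            # rewind so a reader sees the WAV header first
        return buf

    # A tenth of a second of silence, packaged and ready to hand to an inference call.
    wav = encode_wav(array('h', [0] * 1600))
    print(len(wav.getvalue()), 'bytes')
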
mmm a / modules / bridge / test / bridge_receiver_test . cc <nl> ppp b / modules / bridge / test / bridge_receiver_test . cc <nl> void * pthread_handle_message ( void * pfd ) { <nl> int total_recv = 2 * FRAME_SIZE ; <nl> char total_buf [ 2 * FRAME_SIZE ] = { 0 } ; <nl> bytes = <nl> - static_cast < int > ( recvfrom ( * static_cast < int * > ( pfd ) , total_buf , total_recv , 0 , <nl> - ( struct sockaddr * ) & client_addr , & sock_len ) ) ; <nl> + static_cast < int > ( recvfrom ( * static_cast < int * > ( pfd ) , total_buf , <nl> + total_recv , 0 , ( struct sockaddr * ) & client_addr , & sock_len ) ) ; <nl> ADEBUG < < " total recv " < < bytes ; <nl> if ( bytes < = 0 | | bytes > total_recv ) { <nl> pthread_exit ( nullptr ) ; <nl> bool receive ( uint16_t port ) { <nl> } <nl> int opt = SO_REUSEADDR ; <nl> setsockopt ( listener_sock , SOL_SOCKET , SO_REUSEADDR , & opt , sizeof ( opt ) ) ; <nl> - if ( fcntl ( listener_sock , F_SETFL , fcntl ( listener_sock , F_GETFD , 0 ) | O_NONBLOCK ) = = - 1 ) { <nl> + if ( fcntl ( listener_sock , F_SETFL , <nl> + fcntl ( listener_sock , F_GETFD , 0 ) | O_NONBLOCK ) = = - 1 ) { <nl> ADEBUG < < " set nonblocking failed " ; <nl> return false ; <nl> } <nl>
fix lint
ApolloAuto/apollo
95b97e617201bddcf626184b31b5737620f02c52
2019-07-17T17:32:17Z
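The re-wrapped call in the ApolloAuto/apollo lint fix above is the fcntl(F_SETFL, ... | O_NONBLOCK) idiom that keeps the UDP listener from blocking the test. A rough Python sketch of the same idea, assuming a loopback UDP socket (socket name and port choice are illustrative, not from the commit):

    import socket

    listener = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(('127.0.0.1', 0))   # any free loopback port
    listener.setblocking(False)       # same effect as setting O_NONBLOCK via fcntl

    try:
        data, addr = listener.recvfrom(2048)
    except BlockingIOError:
        # Nothing queued yet; a blocking socket would have stalled here instead.
        data = None
    listener.close()
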
mmm a / README . md <nl> ppp b / README . md <nl> Gallery <nl> ! [ screenshot 4 ] ( / web / test_window_04 . png ? raw = true ) <nl> ! [ screenshot 4 ] ( / web / examples_02 . png ? raw = true ) <nl> <nl> - ImGui can load TTF fonts . UTF - 8 is supported for text display and input . Here using M + font to display Japanese : <nl> + ImGui can load TTF fonts . UTF - 8 is supported for text display and input . Here using Arial Unicode font to display Japanese : <nl> <nl> ! [ utf - 8 screenshot ] ( / web / utf8_sample_01 . png ? raw = true ) <nl> <nl> mmm a / examples / directx11_example / directx11_example . vcxproj <nl> ppp b / examples / directx11_example / directx11_example . vcxproj <nl> <nl> < ConfigurationType > Application < / ConfigurationType > <nl> < UseDebugLibraries > false < / UseDebugLibraries > <nl> < WholeProgramOptimization > true < / WholeProgramOptimization > <nl> - < CharacterSet > MultiByte < / CharacterSet > <nl> + < CharacterSet > Unicode < / CharacterSet > <nl> < / PropertyGroup > <nl> < Import Project = " $ ( VCTargetsPath ) \ Microsoft . Cpp . props " / > <nl> < ImportGroup Label = " ExtensionSettings " > <nl> new file mode 100644 <nl> index 0000000000 . . 767c63ad00 <nl> Binary files / dev / null and b / extra_fonts / DroidSans . ttf differ <nl> mmm a / extra_fonts / README . txt <nl> ppp b / extra_fonts / README . txt <nl> <nl> EXTRA FONTS FOR IMGUI <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> <nl> + DroidSans . ttf <nl> + Copyright ( c ) Steve Matteson <nl> + Apache License , version 2 . 0 <nl> + http : / / www . google . com / fonts / specimen / Droid + Sans <nl> + <nl> ProggyClean . ttf <nl> Copyright ( c ) 2004 , 2005 Tristan Grimmer <nl> MIT License <nl> mmm a / imgui . cpp <nl> ppp b / imgui . cpp <nl> <nl> - / / ImGui library v1 . 30 <nl> + / / ImGui library v1 . 31 wip <nl> / / See ImGui : : ShowTestWindow ( ) for sample code . <nl> / / Read ' Programmer guide ' below for notes on how to setup ImGui in your codebase . <nl> / / Get latest version at https : / / github . com / ocornut / imgui <nl> <nl> Occasionally introducing changes that are breaking the API . The breakage are generally minor and easy to fix . <nl> Here is a change - log of API breaking changes , if you are using one of the functions listed , expect to have to fix some code . <nl> <nl> + - 2015 / 02 / 01 ( 1 . 31 ) - removed IO . MemReallocFn ( unused ) <nl> - 2015 / 01 / 19 ( 1 . 30 ) - renamed ImGuiStorage : : GetIntPtr ( ) / GetFloatPtr ( ) to GetIntRef ( ) / GetIntRef ( ) because Ptr was conflicting with actual pointer storage functions . <nl> - 2015 / 01 / 11 ( 1 . 30 ) - big font / image API change ! now loads TTF file . allow for multiple fonts . no need for a PNG loader . <nl> ( 1 . 30 ) - removed GetDefaultFontData ( ) . uses io . Fonts - > GetTextureData * ( ) API to retrieve uncompressed pixels . <nl> struct ImGuiState <nl> <nl> / / Render <nl> ImVector < ImDrawList * > RenderDrawLists ; <nl> + ImVector < ImGuiWindow * > RenderSortedWindows ; <nl> <nl> / / Widget state <nl> ImGuiTextEditState InputTextState ; <nl> void ImGui : : NewFrame ( ) <nl> <nl> / / No window should be open at the beginning of the frame . <nl> / / But in order to allow the user to call NewFrame ( ) multiple times without calling Render ( ) , we are doing an explicit clear . <nl> - g . CurrentWindowStack . clear ( ) ; <nl> + g . CurrentWindowStack . resize ( 0 ) ; <nl> <nl> / / Create implicit window - we will only render it if the user has added something to it . 
<nl> ImGui : : Begin ( " Debug " , NULL , ImVec2 ( 400 , 400 ) ) ; <nl> void ImGui : : Render ( ) <nl> <nl> / / Sort the window list so that all child windows are after their parent <nl> / / We cannot do that on FocusWindow ( ) because childs may not exist yet <nl> - ImVector < ImGuiWindow * > sorted_windows ; <nl> - sorted_windows . reserve ( g . Windows . size ( ) ) ; <nl> + g . RenderSortedWindows . resize ( 0 ) ; <nl> + g . RenderSortedWindows . reserve ( g . Windows . size ( ) ) ; <nl> for ( size_t i = 0 ; i ! = g . Windows . size ( ) ; i + + ) <nl> { <nl> ImGuiWindow * window = g . Windows [ i ] ; <nl> - if ( window - > Flags & ImGuiWindowFlags_ChildWindow ) / / if a child is visible its parent will add it <nl> + if ( window - > Flags & ImGuiWindowFlags_ChildWindow ) / / if a child is visible its parent will add it <nl> if ( window - > Visible ) <nl> continue ; <nl> - AddWindowToSortedBuffer ( window , sorted_windows ) ; <nl> + AddWindowToSortedBuffer ( window , g . RenderSortedWindows ) ; <nl> } <nl> - IM_ASSERT ( g . Windows . size ( ) = = sorted_windows . size ( ) ) ; / / We done something wrong <nl> - g . Windows . swap ( sorted_windows ) ; <nl> + IM_ASSERT ( g . Windows . size ( ) = = g . RenderSortedWindows . size ( ) ) ; / / We done something wrong <nl> + g . Windows . swap ( g . RenderSortedWindows ) ; <nl> <nl> / / Clear data for next frame <nl> g . IO . MouseWheel = 0 . 0f ; <nl> static bool InputTextFilterCharacter ( ImWchar c , ImGuiInputTextFlags flags ) <nl> if ( c < 128 & & c ! = ' ' & & ! isprint ( ( int ) ( c & 0xFF ) ) ) <nl> return true ; <nl> <nl> + if ( c > = 0xE000 & & c < = 0xF8FF ) / / Filter private Unicode range . I don ' t imagine anybody would want to input them . GLFW on OSX seems to send private characters for special keys like arrow keys . <nl> + return true ; <nl> + <nl> if ( flags & ImGuiInputTextFlags_CharsDecimal ) <nl> if ( ! ( c > = ' 0 ' & & c < = ' 9 ' ) & & ( c ! = ' . ' ) & & ( c ! = ' - ' ) & & ( c ! = ' + ' ) & & ( c ! = ' * ' ) & & ( c ! = ' / ' ) ) <nl> return true ; <nl> mmm a / imgui . h <nl> ppp b / imgui . h <nl> <nl> - / / ImGui library v1 . 30 <nl> + / / ImGui library v1 . 31 wip <nl> / / See . cpp file for commentary . <nl> / / See ImGui : : ShowTestWindow ( ) for sample code . <nl> / / Read ' Programmer guide ' in . cpp for notes on how to setup ImGui in your codebase . <nl> struct ImVec4 <nl> <nl> namespace ImGui <nl> { <nl> - / / Proxy functions to access the MemAllocFn / MemFreeFn / MemReallocFn pointers in ImGui : : GetIO ( ) . The only reason they exist here is to allow ImVector < > to compile inline . <nl> + / / Proxy functions to access the MemAllocFn / MemFreeFn pointers in ImGui : : GetIO ( ) . The only reason they exist here is to allow ImVector < > to compile inline . <nl> IMGUI_API void * MemAlloc ( size_t sz ) ; <nl> IMGUI_API void MemFree ( void * ptr ) ; <nl> } <nl> class ImVector <nl> inline value_type & back ( ) { IM_ASSERT ( Size > 0 ) ; return Data [ Size - 1 ] ; } <nl> inline const value_type & back ( ) const { IM_ASSERT ( Size > 0 ) ; return Data [ Size - 1 ] ; } <nl> inline void swap ( ImVector < T > & rhs ) { const size_t rhs_size = rhs . Size ; rhs . Size = Size ; Size = rhs_size ; const size_t rhs_cap = rhs . Capacity ; rhs . Capacity = Capacity ; Capacity = rhs_cap ; value_type * rhs_data = rhs . Data ; rhs . 
Data = Data ; Data = rhs_data ; } <nl> - inline void reserve ( size_t new_capacity ) { if ( new_capacity < = Capacity ) <nl> - return ; <nl> - T * NewData = ( value_type * ) ImGui : : MemAlloc ( new_capacity * sizeof ( value_type ) ) ; <nl> - memcpy ( NewData , Data , Size * sizeof ( value_type ) ) ; <nl> - ImGui : : MemFree ( Data ) ; <nl> - Data = NewData ; <nl> - Capacity = new_capacity ; } <nl> + <nl> inline void resize ( size_t new_size ) { if ( new_size > Capacity ) reserve ( new_size ) ; Size = new_size ; } <nl> + inline void reserve ( size_t new_capacity ) <nl> + { <nl> + if ( new_capacity < = Capacity ) return ; <nl> + T * new_data = ( value_type * ) ImGui : : MemAlloc ( new_capacity * sizeof ( value_type ) ) ; <nl> + memcpy ( new_data , Data , Size * sizeof ( value_type ) ) ; <nl> + ImGui : : MemFree ( Data ) ; <nl> + Data = new_data ; <nl> + Capacity = new_capacity ; <nl> + } <nl> <nl> inline void push_back ( const value_type & v ) { if ( Size = = Capacity ) reserve ( Capacity ? Capacity * 2 : 4 ) ; Data [ Size + + ] = v ; } <nl> inline void pop_back ( ) { IM_ASSERT ( Size > 0 ) ; Size - - ; } <nl> struct ImGuiIO <nl> const char * ( * GetClipboardTextFn ) ( ) ; <nl> void ( * SetClipboardTextFn ) ( const char * text ) ; <nl> <nl> - / / Optional : override memory allocations ( default to posix malloc / free ) <nl> + / / Optional : override memory allocations ( default to posix malloc / free ) . MemFreeFn ( ) may be called with a NULL pointer . <nl> void * ( * MemAllocFn ) ( size_t sz ) ; <nl> void ( * MemFreeFn ) ( void * ptr ) ; <nl> <nl> struct ImColor <nl> { <nl> ImVec4 Value ; <nl> <nl> - ImColor ( int r , int g , int b , int a = 255 ) { Value . x = r / 255 . 0f ; Value . y = g / 255 . 0f ; Value . z = b / 255 . 0f ; Value . w = a / 255 . 0f ; } <nl> + ImColor ( int r , int g , int b , int a = 255 ) { Value . x = ( float ) r / 255 . 0f ; Value . y = ( float ) g / 255 . 0f ; Value . z = ( float ) b / 255 . 0f ; Value . w = ( float ) a / 255 . 0f ; } <nl> ImColor ( float r , float g , float b , float a = 1 . 0f ) { Value . x = r ; Value . y = g ; Value . z = b ; Value . w = a ; } <nl> ImColor ( const ImVec4 & col ) { Value = col ; } <nl> <nl>
Merged changes from original master
ocornut/imgui
e6318abcee12ff8e1a426bd4c07548f0f2ea8ff8
2015-02-01T17:09:03Z
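Among the imgui changes above, InputTextFilterCharacter now rejects the Unicode Private Use Area (U+E000-U+F8FF) because GLFW on OSX reports special keys such as arrows as private characters. A small Python sketch of the same range check (function names are illustrative):

    def is_private_use(ch):
        # BMP Private Use Area, the range filtered by the commit above
        return 0xE000 <= ord(ch) <= 0xF8FF

    def filter_text_input(text):
        return ''.join(ch for ch in text if not is_private_use(ch))

    # 0xF700 is what macOS reports for the up-arrow key; it gets dropped.
    assert filter_text_input('abc' + chr(0xF700) + 'def') == 'abcdef'
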
mmm a / scripts / process_headers . pl <nl> ppp b / scripts / process_headers . pl <nl> ( $ $ ) <nl> for my $ pair ( @ pairs ) { <nl> my $ header = $ pair - > [ 0 ] ; <nl> my $ value = $ pair - > [ 1 ] ; <nl> - print " $ value : $ header " ; <nl> + print " $ value : $ header " ; <nl> + my $ spacing = " " ; <nl> for my $ boost ( keys % { $ boosts { $ header } } ) { <nl> - print " < $ boost > " ; <nl> + print " $ spacing < $ boost > " ; <nl> + $ spacing = " " ; <nl> } <nl> + <nl> print " \ n " ; <nl> } <nl>
Made process_headers . pl not print with trailing whitespace .
rethinkdb/rethinkdb
db5b04b2038c988f22f769f7530d124f7a85574c
2012-08-10T03:08:46Z
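The rethinkdb/rethinkdb change above stops the Perl script from emitting trailing whitespace by printing a spacing string before each boost entry instead of a space after it. The same effect in Python comes for free from str.join (the format string here is illustrative, not the script's exact output):

    def format_line(value, header, boosts):
        parts = ['%s: %s' % (value, header)]
        parts.extend('<%s>' % b for b in boosts)
        return ' '.join(parts)   # never ends in a space, however many boosts there are

    print(format_line('1', 'foo.hpp', ['bar', 'baz']))  # -> 1: foo.hpp <bar> <baz>
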
mmm a / Makefile <nl> ppp b / Makefile <nl> ifeq ( $ ( HAS_PKG_CONFIG ) , true ) <nl> OPENSSL_ALPN_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 0 . 2 openssl <nl> OPENSSL_NPN_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 0 . 1 openssl <nl> ZLIB_CHECK_CMD = $ ( PKG_CONFIG ) - - exists zlib <nl> - PROTOBUF_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 3 . 0 . 0 protobuf <nl> + PROTOBUF_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 3 . 5 . 0 protobuf <nl> CARES_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 11 . 0 libcares <nl> else # HAS_PKG_CONFIG <nl> <nl> protobuf_dep_message : <nl> @ echo <nl> @ echo " DEPENDENCY ERROR " <nl> @ echo <nl> - @ echo " The target you are trying to run requires protobuf 3 . 0 . 0 + " <nl> + @ echo " The target you are trying to run requires protobuf 3 . 5 . 0 + " <nl> @ echo " Your system doesn ' t have it , and neither does the third_party directory . " <nl> @ echo <nl> @ echo " Please consult INSTALL to get more information . " <nl> protoc_dep_message : <nl> @ echo <nl> @ echo " DEPENDENCY ERROR " <nl> @ echo <nl> - @ echo " The target you are trying to run requires protobuf - compiler 3 . 0 . 0 + " <nl> + @ echo " The target you are trying to run requires protobuf - compiler 3 . 5 . 0 + " <nl> @ echo " Your system doesn ' t have it , and neither does the third_party directory . " <nl> @ echo <nl> @ echo " Please consult INSTALL to get more information . " <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alarm_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_counter_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_crypt_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_crypter_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_frame_handler_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . 
<nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_frame_protector_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_grpc_record_protocol_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_handshaker_client_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_handshaker_service_api_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_iovec_record_protocol_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_security_connector_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_tsi_handshaker_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_tsi_utils_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / alts_zero_copy_grpc_protector_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . 
<nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / async_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / auth_property_iterator_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / backoff_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bdp_estimator_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_arena : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_call_create : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_chttp2_hpack : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_chttp2_transport : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_closure : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_cq : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_cq_multiple_threads : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_error : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_fullstack_streaming_ping_pong : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_fullstack_streaming_pump : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_fullstack_trickle : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_fullstack_unary_ping_pong : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_metadata : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / bm_pollset : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / byte_stream_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / channel_arguments_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / channel_filter_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / channel_trace_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / check_gcp_environment_linux_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / check_gcp_environment_windows_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / chttp2_settings_timeout_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / cli_call_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / client_channel_stress_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / client_crash_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / client_crash_test_server : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / client_lb_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / codegen_test_full : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / codegen_test_minimal : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / credentials_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / cxx_byte_buffer_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / cxx_slice_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / cxx_string_ref_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / cxx_time_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / error_details_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / exception_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / filter_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / generic_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / golden_file_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_alts_credentials_options_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_cli : protobuf_dep_error <nl> <nl> GRPC_CPP_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( basen <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_cpp_plugin : protobuf_dep_error <nl> <nl> GRPC_CSHARP_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( ba <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_csharp_plugin : protobuf_dep_error <nl> <nl> GRPC_NODE_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( base <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_node_plugin : protobuf_dep_error <nl> <nl> GRPC_OBJECTIVE_C_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_objective_c_plugin : protobuf_dep_error <nl> <nl> GRPC_PHP_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( basen <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_php_plugin : protobuf_dep_error <nl> <nl> GRPC_PYTHON_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( ba <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_python_plugin : protobuf_dep_error <nl> <nl> GRPC_RUBY_PLUGIN_OBJS = $ ( addprefix $ ( OBJDIR ) / $ ( CONFIG ) / , $ ( addsuffix . o , $ ( base <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_ruby_plugin : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpc_tool_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . 
<nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpclb_api_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / grpclb_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / h2_ssl_cert_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / h2_ssl_session_reuse_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / health_service_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / http2_client : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / hybrid_end2end_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / inlined_vector_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / inproc_sync_unary_ping_pong_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . 
<nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / interop_client : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / interop_server : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / interop_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / json_run_localhost : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / memory_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / metrics_client : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / mock_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / nonblocking_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / noop - benchmark : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / orphanable_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / proto_server_reflection_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / proto_utils_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / qps_interarrival_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / qps_json_driver : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / qps_openloop_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / qps_worker : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / reconnect_interop_client : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / reconnect_interop_server : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / ref_counted_ptr_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / ref_counted_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / secure_auth_context_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / secure_sync_unary_ping_pong_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_builder_plugin_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_builder_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_context_test_spouse_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_crash_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_crash_test_client : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_early_return_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / server_request_call_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / shutdown_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / slice_hash_table_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / slice_weak_hash_table_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / stats_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / status_metadata_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / status_util_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / streaming_throughput_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / stress_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / thread_manager_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / thread_stress_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / transport_pid_controller_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / transport_security_common_api_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / writes_per_rpc_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CRYPTO_TEST_DATA_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_crypto_test_data : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_ASN1_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_asn1_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_BASE64_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_base64_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_BIO_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_bio_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_BUF_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_buf_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_BYTESTRING_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_bytestring_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CHACHA_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_chacha_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_AEAD_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_aead_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CIPHER_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_cipher_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CMAC_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_cmac_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_COMPILER_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 
0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_compiler_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CONSTANT_TIME_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_constant_time_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_ED25519_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ed25519_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_SPAKE25519_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_spake25519_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_X25519_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_x25519_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_DH_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_dh_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_DIGEST_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_digest_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_DSA_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_dsa_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_ECDH_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ecdh_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_ERR_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_err_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_EVP_EXTRA_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_evp_extra_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_EVP_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_evp_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_PBKDF_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_pbkdf_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_SCRYPT_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_scrypt_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_AES_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_aes_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_BN_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . 
<nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_bn_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_EC_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ec_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_P256 - X86_64_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_p256 - x86_64_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_ECDSA_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ecdsa_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_GCM_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_gcm_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_CTRDRBG_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ctrdrbg_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_HKDF_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_hkdf_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_HMAC_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_hmac_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_LHASH_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_lhash_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_OBJ_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_obj_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_PKCS7_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_pkcs7_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_PKCS12_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_pkcs12_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_PKCS8_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_pkcs8_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_POLY1305_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_poly1305_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_POOL_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_pool_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_REFCOUNT_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 
0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_refcount_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_RSA_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_rsa_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_FILE_TEST_GTEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_file_test_gtest : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_GTEST_MAIN_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_gtest_main : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_THREAD_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_thread_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_X509_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_x509_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_TAB_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_tab_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_V3NAME_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . 
<nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_v3name_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_SPAN_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_span_test : protobuf_dep_error <nl> <nl> $ ( BORINGSSL_SSL_TEST_OBJS ) : CPPFLAGS + = - DOPENSSL_NO_ASM - D_GNU_SOURCE <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / boringssl_ssl_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / resolver_component_test_unsecure : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / resolver_component_test : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / resolver_component_tests_runner_invoker_unsecure : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / resolver_component_tests_runner_invoker : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / address_sorting_test_unsecure : protobuf_dep_error <nl> <nl> else <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / address_sorting_test : protobuf_dep_error <nl> <nl> mmm a / templates / Makefile . template <nl> ppp b / templates / Makefile . template <nl> <nl> OPENSSL_ALPN_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 0 . 
2 openssl <nl> OPENSSL_NPN_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 0 . 1 openssl <nl> ZLIB_CHECK_CMD = $ ( PKG_CONFIG ) - - exists zlib <nl> - PROTOBUF_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 3 . 0 . 0 protobuf <nl> + PROTOBUF_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 3 . 5 . 0 protobuf <nl> CARES_CHECK_CMD = $ ( PKG_CONFIG ) - - atleast - version = 1 . 11 . 0 libcares <nl> else # HAS_PKG_CONFIG <nl> <nl> <nl> @ echo <nl> @ echo " DEPENDENCY ERROR " <nl> @ echo <nl> - @ echo " The target you are trying to run requires protobuf 3 . 0 . 0 + " <nl> + @ echo " The target you are trying to run requires protobuf 3 . 5 . 0 + " <nl> @ echo " Your system doesn ' t have it , and neither does the third_party directory . " <nl> @ echo <nl> @ echo " Please consult INSTALL to get more information . " <nl> <nl> @ echo <nl> @ echo " DEPENDENCY ERROR " <nl> @ echo <nl> - @ echo " The target you are trying to run requires protobuf - compiler 3 . 0 . 0 + " <nl> + @ echo " The target you are trying to run requires protobuf - compiler 3 . 5 . 0 + " <nl> @ echo " Your system doesn ' t have it , and neither does the third_party directory . " <nl> @ echo <nl> @ echo " Please consult INSTALL to get more information . " <nl> <nl> % endif <nl> % if lib . language = = ' c + + ' : <nl> # # If the lib was C + + , we have to close the Makefile ' s if that tested <nl> - # # the presence of protobuf 3 . 0 . 0 + <nl> + # # the presence of protobuf 3 . 5 . 0 + <nl> <nl> endif <nl> % endif <nl> <nl> <nl> ifeq ( $ ( NO_PROTOBUF ) , true ) <nl> <nl> - # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 0 . 0 + . <nl> + # You can ' t build the protoc plugins or protobuf - enabled targets if you don ' t have protobuf 3 . 5 . 0 + . <nl> <nl> $ ( BINDIR ) / $ ( CONFIG ) / $ { tgt . name } : protobuf_dep_error <nl> <nl>
Bump protobuf version check from 3 . 0 . 0 to 3 . 5 . 0
grpc/grpc
cdc0e28fe56fafc5134cbf137a5894644ff9a22d
2018-03-27T01:16:15Z
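The commit above only raises the build-time pkg-config gate (`--atleast-version=3.5.0 protobuf`) in the generated Makefile and its template. For illustration, a minimal compile-time guard with the same intent is sketched below; it assumes the `GOOGLE_PROTOBUF_VERSION` macro from `google/protobuf/stubs/common.h` (encoded as major*1000000 + minor*1000 + patch in protobuf 3.x) and is not part of the grpc build itself.

```cpp
// Sketch: fail the build early if the protobuf headers are older than 3.5.0.
// Assumes GOOGLE_PROTOBUF_VERSION (major * 1000000 + minor * 1000 + patch)
// from google/protobuf/stubs/common.h; 3.5.0 encodes to 3005000.
#include <google/protobuf/stubs/common.h>

#if GOOGLE_PROTOBUF_VERSION < 3005000
#error "protobuf 3.5.0+ is required for the protoc plugins and protobuf-enabled targets"
#endif

int main() {
  // Optional runtime check: verify the linked library matches the headers.
  GOOGLE_PROTOBUF_VERIFY_VERSION;
  return 0;
}
```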
mmm a / xbmc / windowing / CMakeLists . txt <nl> ppp b / xbmc / windowing / CMakeLists . txt <nl> set ( HEADERS OSScreenSaver . h <nl> XBMC_events . h <nl> VideoSync . h ) <nl> <nl> - if ( CORE_PLATFORM_NAME_LC STREQUAL rbpi OR CORE_PLATFORM_NAME_LC STREQUAL gbm ) <nl> + if ( CORE_PLATFORM_NAME_LC STREQUAL rbpi OR CORE_PLATFORM_NAME_LC STREQUAL gbm OR CORE_PLATFORM_NAME_LC STREQUAL aml ) <nl> list ( APPEND SOURCES WinEventsLinux . cpp ) <nl> list ( APPEND HEADERS WinEventsLinux . h ) <nl> endif ( ) <nl> mmm a / xbmc / windowing / amlogic / WinSystemAmlogic . cpp <nl> ppp b / xbmc / windowing / amlogic / WinSystemAmlogic . cpp <nl> <nl> # include " utils / log . h " <nl> # include " utils / SysfsUtils . h " <nl> # include " threads / SingleLock . h " <nl> + # include " . . / WinEventsLinux . h " <nl> <nl> # include < linux / fb . h > <nl> <nl> CWinSystemAmlogic : : CWinSystemAmlogic ( ) <nl> <nl> aml_permissions ( ) ; <nl> aml_disable_freeScale ( ) ; <nl> + <nl> + m_winEvents . reset ( new CWinEventsLinux ( ) ) ; <nl> } <nl> <nl> CWinSystemAmlogic : : ~ CWinSystemAmlogic ( ) <nl>
Merge pull request from stefansaraev / aml - renderloop
xbmc/xbmc
b165e995b5491ed1581c4b8c264d8bd4b230a306
2017-11-17T14:49:35Z
mmm a / src / core / grabber / include / manipulator / event_manipulator . hpp <nl> ppp b / src / core / grabber / include / manipulator / event_manipulator . hpp <nl> <nl> # include " logger . hpp " <nl> # include " manipulator . hpp " <nl> # include " modifier_flag_manager . hpp " <nl> + # include " pointing_button_manager . hpp " <nl> # include " system_preferences . hpp " <nl> # include " types . hpp " <nl> # include " virtual_hid_manager_client . hpp " <nl> class event_manipulator final { <nl> event_manipulator ( void ) : event_dispatcher_manager_ ( ) , <nl> event_source_ ( CGEventSourceCreate ( kCGEventSourceStateHIDSystemState ) ) , <nl> virtual_hid_manager_client_ ( logger : : get_logger ( ) ) , <nl> - modifier_flag_manager_ ( ) , <nl> key_repeat_manager_ ( * this ) { <nl> } <nl> <nl> class event_manipulator final { <nl> modifier_flag_manager_ . unlock ( ) ; <nl> <nl> event_dispatcher_manager_ . set_caps_lock_state ( false ) ; <nl> + <nl> + hid_report : : pointing_input report ; <nl> + virtual_hid_manager_client_ . post_pointing_input_report ( report ) ; <nl> } <nl> <nl> void reset_modifier_flag_state ( void ) { <nl> class event_manipulator final { <nl> / / Do not call modifier_flag_manager_ . unlock ( ) here . <nl> } <nl> <nl> + void reset_pointing_button_state ( void ) { <nl> + pointing_button_manager_ . reset ( ) ; <nl> + } <nl> + <nl> void relaunch_event_dispatcher ( void ) { <nl> event_dispatcher_manager_ . relaunch ( ) ; <nl> } <nl> class event_manipulator final { <nl> initial_key_repeat_milliseconds , key_repeat_milliseconds ) ; <nl> } <nl> <nl> + void handle_pointing_event ( device_registry_entry_id device_registry_entry_id , <nl> + krbn : : pointing_event pointing_event , <nl> + boost : : optional < krbn : : pointing_button > pointing_button , <nl> + CFIndex integer_value ) { <nl> + hid_report : : pointing_input report ; <nl> + <nl> + switch ( pointing_event ) { <nl> + case krbn : : pointing_event : : button : <nl> + if ( pointing_button & & * pointing_button ! = krbn : : pointing_button : : zero ) { <nl> + logger : : get_logger ( ) . info ( " button : { 0 } " , static_cast < uint32_t > ( * pointing_button ) ) ; <nl> + pointing_button_manager_ . manipulate ( * pointing_button , <nl> + integer_value ? pointing_button_manager : : operation : : increase : pointing_button_manager : : operation : : decrease ) ; <nl> + } <nl> + break ; <nl> + <nl> + case krbn : : pointing_event : : x : <nl> + report . x = integer_value ; <nl> + break ; <nl> + <nl> + case krbn : : pointing_event : : y : <nl> + report . y = integer_value ; <nl> + break ; <nl> + <nl> + case krbn : : pointing_event : : vertical_wheel : <nl> + report . vertical_wheel = integer_value ; <nl> + break ; <nl> + <nl> + case krbn : : pointing_event : : horizontal_wheel : <nl> + report . horizontal_wheel = integer_value ; <nl> + break ; <nl> + <nl> + default : <nl> + break ; <nl> + } <nl> + <nl> + auto bits = pointing_button_manager_ . get_hid_report_bits ( ) ; <nl> + report . buttons [ 0 ] = ( bits > > 0 ) & 0xff ; <nl> + report . buttons [ 1 ] = ( bits > > 8 ) & 0xff ; <nl> + report . buttons [ 2 ] = ( bits > > 16 ) & 0xff ; <nl> + report . buttons [ 3 ] = ( bits > > 24 ) & 0xff ; <nl> + <nl> + virtual_hid_manager_client_ . post_pointing_input_report ( report ) ; <nl> + } <nl> + <nl> void stop_key_repeat ( void ) { <nl> key_repeat_manager_ . 
stop ( ) ; <nl> } <nl> class event_manipulator final { <nl> <nl> event_dispatcher_manager event_dispatcher_manager_ ; <nl> modifier_flag_manager modifier_flag_manager_ ; <nl> + pointing_button_manager pointing_button_manager_ ; <nl> CGEventSourceRef event_source_ ; <nl> virtual_hid_manager_client virtual_hid_manager_client_ ; <nl> key_repeat_manager key_repeat_manager_ ; <nl>
use pointing_button_manager
pqrs-org/Karabiner-Elements
ea0661aa655b2ce12a5384189b03b8d9a32b9b04
2016-09-29T03:13:19Z
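The event_manipulator change above routes button state through the new pointing_button_manager and packs the aggregated bitmask into the four `buttons` bytes of the HID pointing report. A standalone sketch of that bit-packing step follows; the 32-bit bitmask and 4-byte report field are assumptions taken from the diff, not from the full Karabiner sources.

```cpp
#include <array>
#include <cstdint>
#include <cstdio>

// Sketch: split a 32-bit pointing-button bitmask into the four bytes of a
// HID pointing input report, mirroring the report.buttons[0..3] assignments
// in the diff above. The report layout is assumed for illustration only.
std::array<uint8_t, 4> pack_buttons(uint32_t bits) {
  return {
      static_cast<uint8_t>((bits >> 0) & 0xff),
      static_cast<uint8_t>((bits >> 8) & 0xff),
      static_cast<uint8_t>((bits >> 16) & 0xff),
      static_cast<uint8_t>((bits >> 24) & 0xff),
  };
}

int main() {
  // Buttons 1 and 10 pressed (bits 0 and 9 set).
  auto buttons = pack_buttons((1u << 0) | (1u << 9));
  std::printf("%02x %02x %02x %02x\n", buttons[0], buttons[1], buttons[2], buttons[3]);
  return 0;
}
```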
mmm a / tensorflow / core / kernels / gpu_utils . h <nl> ppp b / tensorflow / core / kernels / gpu_utils . h <nl> class AutoTuneMap { <nl> } <nl> if ( new_score > = min_score_threshold_ ) { <nl> VLOG ( 1 ) < < GetActionSummary ( " accepts " , params , config ) ; <nl> + } else if ( autotune_global_count_ > = max_autotune_global_count_ ) { <nl> + / / The autotuning exceeds the max iteration threshold and we accept the <nl> + / / the winner if it exists in the map , otherwise we accept the current <nl> + / / winner . <nl> + auto winner = params_config_map_ . find ( params ) ; <nl> + if ( winner = = params_config_map_ . end ( ) ) { <nl> + VLOG ( 1 ) < < GetActionSummary ( " creates " , params , config ) ; <nl> + for ( int i = 0 ; i < min_score_threshold_ ; + + i ) { <nl> + VLOG ( 1 ) < < GetActionSummary ( " promotes " , params , config ) ; <nl> + } <nl> + params_config_map_ . insert ( <nl> + std : : make_pair ( params , ValueType { config , min_score_threshold_ , 1 } ) ) ; <nl> + } else { <nl> + int promotes_times = min_score_threshold_ - winner - > second . score ; <nl> + for ( int i = 0 ; i < promotes_times ; + + i ) { <nl> + VLOG ( 1 ) < < GetActionSummary ( " promotes " , params , config ) ; <nl> + } <nl> + winner - > second . score = min_score_threshold_ ; <nl> + } <nl> + VLOG ( 1 ) < < GetActionSummary ( " accepts " , params , config ) ; <nl> } <nl> + autotune_global_count_ + + ; <nl> } <nl> <nl> private : <nl> class AutoTuneMap { <nl> min_score_threshold_ = std : : max ( min_score_threshold_ , 1 ) ; <nl> max_autotune_count_ = std : : max ( <nl> 5 * min_score_threshold_ * min_score_threshold_ , min_warmup_iterations ) ; <nl> + max_autotune_global_count_ = 2 * max_autotune_count_ ; <nl> + autotune_global_count_ = 0 ; <nl> } <nl> <nl> template < class Group , class Params , class Cfg > <nl> class AutoTuneMap { <nl> string name_ ; <nl> int32 min_score_threshold_ ; <nl> int32 max_autotune_count_ ; <nl> + int32 max_autotune_global_count_ ; <nl> + int32 autotune_global_count_ ; <nl> <nl> TF_DISALLOW_COPY_AND_ASSIGN ( AutoTuneMap ) ; <nl> } ; <nl>
Merge pull request from houtoms : pr_autotune_2
tensorflow/tensorflow
f3363ccc4e4d033f9a7041102926e9d54c831af7
2019-05-01T23:17:08Z
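The AutoTuneMap change above caps global autotuning at twice the per-key budget and then force-accepts a winner so the map always converges. A condensed sketch of that acceptance rule is given below; the function and type names are illustrative stand-ins, not the TensorFlow internals, and logging is elided.

```cpp
#include <map>
#include <string>

// Condensed sketch of the forced-acceptance rule from the diff: once the
// global autotune budget is spent, lock in some config for the given params.
struct ValueType {
  std::string config;
  int score = 0;
  int count = 1;
};

void ForceAcceptIfBudgetSpent(std::map<std::string, ValueType>& params_config_map,
                              const std::string& params,
                              const std::string& config,
                              int min_score_threshold,
                              int max_autotune_global_count,
                              int& autotune_global_count) {
  if (autotune_global_count >= max_autotune_global_count) {
    auto winner = params_config_map.find(params);
    if (winner == params_config_map.end()) {
      // No winner recorded yet: accept the current candidate at the threshold.
      params_config_map.insert({params, ValueType{config, min_score_threshold, 1}});
    } else {
      // A winner exists: promote its score to the threshold so it is accepted.
      winner->second.score = min_score_threshold;
    }
  }
  ++autotune_global_count;
}
```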
mmm a / lib / Sema / CSGen . cpp <nl> ppp b / lib / Sema / CSGen . cpp <nl> namespace { <nl> diag : : super_with_no_base_class ) ; <nl> } <nl> <nl> - Type resolveTypeReferenceInExpression ( TypeRepr * rep ) { <nl> + Type resolveTypeReferenceInExpression ( TypeRepr * repr ) { <nl> + TypeLoc loc ( repr ) ; <nl> + return resolveTypeReferenceInExpression ( loc ) ; <nl> + } <nl> + <nl> + Type resolveTypeReferenceInExpression ( TypeLoc & loc ) { <nl> TypeResolutionOptions options ( TypeResolverContext : : InExpression ) ; <nl> options | = TypeResolutionFlags : : AllowUnboundGenerics ; <nl> - return TypeResolution : : forContextual ( CS . DC ) . resolveType ( rep , <nl> - options ) ; <nl> + bool hadError = CS . TC . validateType ( <nl> + loc , TypeResolution : : forContextual ( CS . DC ) , options ) ; <nl> + return hadError ? Type ( ) : loc . getType ( ) ; <nl> } <nl> <nl> Type visitTypeExpr ( TypeExpr * E ) { <nl> Type type ; <nl> / / If this is an implicit TypeExpr , don ' t validate its contents . <nl> - if ( E - > getTypeLoc ( ) . wasValidated ( ) ) { <nl> - type = E - > getTypeLoc ( ) . getType ( ) ; <nl> - } else if ( auto * rep = E - > getTypeRepr ( ) ) { <nl> - type = resolveTypeReferenceInExpression ( rep ) ; <nl> + auto & typeLoc = E - > getTypeLoc ( ) ; <nl> + if ( typeLoc . wasValidated ( ) ) { <nl> + type = typeLoc . getType ( ) ; <nl> + } else if ( typeLoc . hasLocation ( ) ) { <nl> + type = resolveTypeReferenceInExpression ( typeLoc ) ; <nl> } <nl> <nl> if ( ! type | | type - > hasError ( ) ) return Type ( ) ; <nl> mmm a / lib / Sema / TypeCheckPattern . cpp <nl> ppp b / lib / Sema / TypeCheckPattern . cpp <nl> static bool validateParameterType ( ParamDecl * decl , TypeResolution resolution , <nl> } <nl> } <nl> <nl> - / / If this parameter declaration is marked as ` @ autoclosure ` <nl> - / / let ' s make sure that its parameter type is indeed a function , <nl> - / / this decision couldn ' t be made based on type representative <nl> - / / alone because it may be later resolved into an invalid type . <nl> - if ( decl - > isAutoClosure ( ) ) <nl> - hadError | = ! ( Ty & & Ty - > is < FunctionType > ( ) ) ; <nl> - <nl> if ( hadError ) <nl> TL . setInvalidType ( TC . Context ) ; <nl> <nl> mmm a / lib / Sema / TypeCheckType . cpp <nl> ppp b / lib / Sema / TypeCheckType . cpp <nl> Type TypeChecker : : resolveIdentifierType ( <nl> return result ; <nl> } <nl> <nl> + / / / Validate whether type associated with @ autoclosure attribute is correct , <nl> + / / / it supposed to be a function type with no parameters . <nl> + / / / \ returns true if there was an error , false otherwise . <nl> + static bool validateAutoClosureAttr ( TypeChecker & TC , const SourceLoc & loc , <nl> + Type paramType ) { <nl> + if ( auto * fnType = paramType - > getAs < FunctionType > ( ) ) { <nl> + if ( fnType - > getNumParams ( ) ! = 0 ) { <nl> + TC . diagnose ( loc , diag : : autoclosure_function_input_nonunit ) ; <nl> + return true ; <nl> + } <nl> + / / A function type with no parameters . <nl> + return false ; <nl> + } <nl> + <nl> + TC . diagnose ( loc , diag : : autoclosure_function_type ) ; <nl> + return true ; <nl> + } <nl> + <nl> + / / / Check whether the type associated with particular source location <nl> + / / / has ` @ autoclosure ` attribute , and if so , validate that such use is correct . <nl> + / / / \ returns true if there was an error , false otherwise . 
<nl> + static bool validateAutoClosureAttributeUse ( TypeChecker & TC , const TypeLoc & loc , <nl> + Type type , <nl> + TypeResolutionOptions options ) { <nl> + auto * TR = loc . getTypeRepr ( ) ; <nl> + if ( ! TR | | TR - > isInvalid ( ) ) <nl> + return false ; <nl> + <nl> + / / If is a parameter declaration marked as @ autoclosure . <nl> + if ( options . is ( TypeResolverContext : : FunctionInput ) ) { <nl> + if ( auto * ATR = dyn_cast < AttributedTypeRepr > ( TR ) ) { <nl> + const auto attrLoc = ATR - > getAttrs ( ) . getLoc ( TAK_autoclosure ) ; <nl> + if ( attrLoc . isValid ( ) ) <nl> + return validateAutoClosureAttr ( TC , attrLoc , type ) ; <nl> + } <nl> + } <nl> + <nl> + / / Otherwise , let ' s dig into the type and see if there are any <nl> + / / functions with parameters marked as @ autoclosure , <nl> + / / such would be a part of expressions like : <nl> + / / ` let _ : ( @ autoclosure ( ) - > Int ) - > Void = . . . ` . <nl> + bool isValid = true ; <nl> + type . visit ( [ & ] ( Type subType ) { <nl> + if ( auto * fnType = subType - > getAs < FunctionType > ( ) ) { <nl> + isValid & = llvm : : none_of ( <nl> + fnType - > getParams ( ) , [ & ] ( const FunctionType : : Param & param ) { <nl> + return param . isAutoClosure ( ) & & <nl> + validateAutoClosureAttr ( TC , loc . getLoc ( ) , <nl> + param . getPlainType ( ) ) ; <nl> + } ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + return ! isValid ; <nl> + } <nl> + <nl> bool TypeChecker : : validateType ( TypeLoc & Loc , TypeResolution resolution , <nl> TypeResolutionOptions options ) { <nl> / / If we ' ve already validated this type , don ' t do so again . <nl> bool TypeChecker : : validateType ( TypeLoc & Loc , TypeResolution resolution , <nl> type = resolution . resolveType ( Loc . getTypeRepr ( ) , options ) ; <nl> if ( ! type ) { <nl> type = ErrorType : : get ( Context ) ; <nl> - <nl> / / Diagnose types that are illegal in SIL . <nl> } else if ( options . contains ( TypeResolutionFlags : : SILType ) <nl> & & ! type - > isLegalSILType ( ) ) { <nl> diagnose ( Loc . getLoc ( ) , diag : : illegal_sil_type , type ) ; <nl> Loc . setInvalidType ( Context ) ; <nl> return true ; <nl> + } else if ( validateAutoClosureAttributeUse ( * this , Loc , type , options ) ) { <nl> + type = ErrorType : : get ( Context ) ; <nl> } <nl> } <nl> <nl> Type TypeResolver : : resolveAttributedType ( TypeAttributes & attrs , <nl> / / Remember whether this is a function parameter . <nl> bool isParam = options . is ( TypeResolverContext : : FunctionInput ) ; <nl> <nl> - bool isVariadicFunctionParam = <nl> - options . is ( TypeResolverContext : : VariadicFunctionInput ) & & <nl> - ! options . hasBase ( TypeResolverContext : : EnumElementDecl ) ; <nl> - <nl> / / The type we ' re working with , in case we want to build it differently <nl> / / based on the attributes we see . <nl> Type ty ; <nl> Type TypeResolver : : resolveAttributedType ( TypeAttributes & attrs , <nl> checkUnsupportedAttr ( silOnlyAttr ) ; <nl> } <nl> } <nl> - <nl> - bool hasFunctionAttr = false ; <nl> - for ( auto i : FunctionAttrs ) <nl> - if ( attrs . has ( i ) ) { <nl> - hasFunctionAttr = true ; <nl> - break ; <nl> - } <nl> - <nl> - / / If we have an @ autoclosure then try resolving the top level type repr <nl> - / / first as it may be pointing to a typealias <nl> - if ( attrs . 
has ( TAK_autoclosure ) ) { <nl> - if ( auto CITR = dyn_cast < ComponentIdentTypeRepr > ( repr ) ) { <nl> - auto typeAliasResolver = TypeResolverContext : : TypeAliasDecl ; <nl> - if ( auto type = resolveTopLevelIdentTypeComponent ( resolution , CITR , <nl> - typeAliasResolver ) ) { <nl> - if ( auto TAT = dyn_cast < TypeAliasType > ( type . getPointer ( ) ) ) { <nl> - repr = TAT - > getDecl ( ) - > getUnderlyingTypeLoc ( ) . getTypeRepr ( ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> + <nl> + bool hasFunctionAttr = <nl> + llvm : : any_of ( FunctionAttrs , [ & attrs ] ( const TypeAttrKind & attr ) { <nl> + return attrs . has ( attr ) ; <nl> + } ) ; <nl> + <nl> / / Function attributes require a syntactic function type . <nl> auto * fnRepr = dyn_cast < FunctionTypeRepr > ( repr ) ; <nl> <nl> - if ( ! fnRepr ) { <nl> - if ( attrs . has ( TAK_autoclosure ) ) { <nl> - diagnose ( attrs . getLoc ( TAK_autoclosure ) , diag : : autoclosure_function_type ) ; <nl> - attrs . clearAttribute ( TAK_autoclosure ) ; <nl> - } <nl> - / / Fall through to diagnose below . <nl> - } else if ( hasFunctionAttr & & ( options & TypeResolutionFlags : : SILType ) ) { <nl> - SILFunctionType : : Representation rep ; <nl> - TypeRepr * witnessMethodProtocol = nullptr ; <nl> + if ( fnRepr & & hasFunctionAttr ) { <nl> + if ( options & TypeResolutionFlags : : SILType ) { <nl> + SILFunctionType : : Representation rep ; <nl> + TypeRepr * witnessMethodProtocol = nullptr ; <nl> <nl> - auto coroutineKind = SILCoroutineKind : : None ; <nl> - if ( attrs . has ( TAK_yield_once ) ) { <nl> - coroutineKind = SILCoroutineKind : : YieldOnce ; <nl> - } else if ( attrs . has ( TAK_yield_many ) ) { <nl> - coroutineKind = SILCoroutineKind : : YieldMany ; <nl> - } <nl> + auto coroutineKind = SILCoroutineKind : : None ; <nl> + if ( attrs . has ( TAK_yield_once ) ) { <nl> + coroutineKind = SILCoroutineKind : : YieldOnce ; <nl> + } else if ( attrs . has ( TAK_yield_many ) ) { <nl> + coroutineKind = SILCoroutineKind : : YieldMany ; <nl> + } <nl> <nl> - auto calleeConvention = ParameterConvention : : Direct_Unowned ; <nl> - if ( attrs . has ( TAK_callee_owned ) ) { <nl> - if ( attrs . has ( TAK_callee_guaranteed ) ) { <nl> - diagnose ( attrs . getLoc ( TAK_callee_owned ) , <nl> - diag : : sil_function_repeat_convention , / * callee * / 2 ) ; <nl> + auto calleeConvention = ParameterConvention : : Direct_Unowned ; <nl> + if ( attrs . has ( TAK_callee_owned ) ) { <nl> + if ( attrs . has ( TAK_callee_guaranteed ) ) { <nl> + diagnose ( attrs . getLoc ( TAK_callee_owned ) , <nl> + diag : : sil_function_repeat_convention , / * callee * / 2 ) ; <nl> + } <nl> + calleeConvention = ParameterConvention : : Direct_Owned ; <nl> + } else if ( attrs . has ( TAK_callee_guaranteed ) ) { <nl> + calleeConvention = ParameterConvention : : Direct_Guaranteed ; <nl> } <nl> - calleeConvention = ParameterConvention : : Direct_Owned ; <nl> - } else if ( attrs . has ( TAK_callee_guaranteed ) ) { <nl> - calleeConvention = ParameterConvention : : Direct_Guaranteed ; <nl> - } <nl> <nl> - if ( ! attrs . hasConvention ( ) ) { <nl> - rep = SILFunctionType : : Representation : : Thick ; <nl> - } else { <nl> - auto convention = attrs . getConvention ( ) ; <nl> - / / SIL exposes a greater number of conventions than Swift source . <nl> - auto parsedRep = <nl> - llvm : : StringSwitch < Optional < SILFunctionType : : Representation > > ( <nl> - convention ) <nl> - . Case ( " thick " , SILFunctionType : : Representation : : Thick ) <nl> - . 
Case ( " block " , SILFunctionType : : Representation : : Block ) <nl> - . Case ( " thin " , SILFunctionType : : Representation : : Thin ) <nl> - . Case ( " c " , SILFunctionType : : Representation : : CFunctionPointer ) <nl> - . Case ( " method " , SILFunctionType : : Representation : : Method ) <nl> - . Case ( " objc_method " , SILFunctionType : : Representation : : ObjCMethod ) <nl> - . Case ( " witness_method " , <nl> - SILFunctionType : : Representation : : WitnessMethod ) <nl> - . Default ( None ) ; <nl> - if ( ! parsedRep ) { <nl> - diagnose ( attrs . getLoc ( TAK_convention ) , <nl> - diag : : unsupported_sil_convention , attrs . getConvention ( ) ) ; <nl> - rep = SILFunctionType : : Representation : : Thin ; <nl> + if ( ! attrs . hasConvention ( ) ) { <nl> + rep = SILFunctionType : : Representation : : Thick ; <nl> } else { <nl> - rep = * parsedRep ; <nl> - } <nl> + auto convention = attrs . getConvention ( ) ; <nl> + / / SIL exposes a greater number of conventions than Swift source . <nl> + auto parsedRep = <nl> + llvm : : StringSwitch < Optional < SILFunctionType : : Representation > > ( <nl> + convention ) <nl> + . Case ( " thick " , SILFunctionType : : Representation : : Thick ) <nl> + . Case ( " block " , SILFunctionType : : Representation : : Block ) <nl> + . Case ( " thin " , SILFunctionType : : Representation : : Thin ) <nl> + . Case ( " c " , SILFunctionType : : Representation : : CFunctionPointer ) <nl> + . Case ( " method " , SILFunctionType : : Representation : : Method ) <nl> + . Case ( " objc_method " , <nl> + SILFunctionType : : Representation : : ObjCMethod ) <nl> + . Case ( " witness_method " , <nl> + SILFunctionType : : Representation : : WitnessMethod ) <nl> + . Default ( None ) ; <nl> + if ( ! parsedRep ) { <nl> + diagnose ( attrs . getLoc ( TAK_convention ) , <nl> + diag : : unsupported_sil_convention , attrs . getConvention ( ) ) ; <nl> + rep = SILFunctionType : : Representation : : Thin ; <nl> + } else { <nl> + rep = * parsedRep ; <nl> + } <nl> <nl> - if ( rep = = SILFunctionType : : Representation : : WitnessMethod ) { <nl> - auto protocolName = * attrs . conventionWitnessMethodProtocol ; <nl> - witnessMethodProtocol = new ( Context ) SimpleIdentTypeRepr ( <nl> - SourceLoc ( ) , Context . getIdentifier ( protocolName ) ) ; <nl> + if ( rep = = SILFunctionType : : Representation : : WitnessMethod ) { <nl> + auto protocolName = * attrs . conventionWitnessMethodProtocol ; <nl> + witnessMethodProtocol = new ( Context ) SimpleIdentTypeRepr ( <nl> + SourceLoc ( ) , Context . getIdentifier ( protocolName ) ) ; <nl> + } <nl> } <nl> - } <nl> <nl> - / / Resolve the function type directly with these attributes . <nl> - SILFunctionType : : ExtInfo extInfo ( rep , attrs . has ( TAK_pseudogeneric ) , <nl> - attrs . has ( TAK_noescape ) ) ; <nl> + / / Resolve the function type directly with these attributes . <nl> + SILFunctionType : : ExtInfo extInfo ( rep , attrs . has ( TAK_pseudogeneric ) , <nl> + attrs . has ( TAK_noescape ) ) ; <nl> <nl> - ty = resolveSILFunctionType ( fnRepr , options , coroutineKind , <nl> - extInfo , calleeConvention , <nl> - witnessMethodProtocol ) ; <nl> - if ( ! ty | | ty - > hasError ( ) ) return ty ; <nl> - } else if ( hasFunctionAttr ) { <nl> - FunctionType : : Representation rep = FunctionType : : Representation : : Swift ; <nl> - if ( attrs . hasConvention ( ) ) { <nl> - auto parsedRep = <nl> - llvm : : StringSwitch < Optional < FunctionType : : Representation > > <nl> - ( attrs . getConvention ( ) ) <nl> - . 
Case ( " swift " , FunctionType : : Representation : : Swift ) <nl> - . Case ( " block " , FunctionType : : Representation : : Block ) <nl> - . Case ( " thin " , FunctionType : : Representation : : Thin ) <nl> - . Case ( " c " , FunctionType : : Representation : : CFunctionPointer ) <nl> - . Default ( None ) ; <nl> - if ( ! parsedRep ) { <nl> - diagnose ( attrs . getLoc ( TAK_convention ) , <nl> - diag : : unsupported_convention , attrs . getConvention ( ) ) ; <nl> - rep = FunctionType : : Representation : : Swift ; <nl> - } else { <nl> - rep = * parsedRep ; <nl> + ty = resolveSILFunctionType ( fnRepr , options , coroutineKind , extInfo , <nl> + calleeConvention , witnessMethodProtocol ) ; <nl> + if ( ! ty | | ty - > hasError ( ) ) <nl> + return ty ; <nl> + } else { <nl> + FunctionType : : Representation rep = FunctionType : : Representation : : Swift ; <nl> + if ( attrs . hasConvention ( ) ) { <nl> + auto parsedRep = <nl> + llvm : : StringSwitch < Optional < FunctionType : : Representation > > ( <nl> + attrs . getConvention ( ) ) <nl> + . Case ( " swift " , FunctionType : : Representation : : Swift ) <nl> + . Case ( " block " , FunctionType : : Representation : : Block ) <nl> + . Case ( " thin " , FunctionType : : Representation : : Thin ) <nl> + . Case ( " c " , FunctionType : : Representation : : CFunctionPointer ) <nl> + . Default ( None ) ; <nl> + if ( ! parsedRep ) { <nl> + diagnose ( attrs . getLoc ( TAK_convention ) , diag : : unsupported_convention , <nl> + attrs . getConvention ( ) ) ; <nl> + rep = FunctionType : : Representation : : Swift ; <nl> + } else { <nl> + rep = * parsedRep ; <nl> + } <nl> } <nl> - } <nl> <nl> - / / @ autoclosure is only valid on parameters . <nl> - if ( ! isParam & & attrs . has ( TAK_autoclosure ) ) { <nl> - diagnose ( attrs . getLoc ( TAK_autoclosure ) , <nl> - isVariadicFunctionParam <nl> - ? diag : : attr_not_on_variadic_parameters <nl> - : diag : : attr_only_on_parameters , " @ autoclosure " ) ; <nl> - attrs . clearAttribute ( TAK_autoclosure ) ; <nl> - } <nl> - <nl> - auto * FuncTyInput = fnRepr - > getArgsTypeRepr ( ) ; <nl> - if ( ( ! FuncTyInput | | FuncTyInput - > getNumElements ( ) ! = 0 ) <nl> - & & attrs . has ( TAK_autoclosure ) ) { <nl> - diagnose ( attrs . getLoc ( TAK_autoclosure ) , <nl> - diag : : autoclosure_function_input_nonunit ) ; <nl> - attrs . clearAttribute ( TAK_autoclosure ) ; <nl> - } <nl> + / / @ autoclosure is only valid on parameters . <nl> + if ( ! isParam & & attrs . has ( TAK_autoclosure ) ) { <nl> + bool isVariadicFunctionParam = <nl> + options . is ( TypeResolverContext : : VariadicFunctionInput ) & & <nl> + ! options . hasBase ( TypeResolverContext : : EnumElementDecl ) ; <nl> + <nl> + diagnose ( attrs . getLoc ( TAK_autoclosure ) , <nl> + isVariadicFunctionParam ? diag : : attr_not_on_variadic_parameters <nl> + : diag : : attr_only_on_parameters , <nl> + " @ autoclosure " ) ; <nl> + attrs . clearAttribute ( TAK_autoclosure ) ; <nl> + } <nl> <nl> - / / @ noreturn has been replaced with a ' Never ' return type . <nl> - if ( attrs . has ( TAK_noreturn ) ) { <nl> - auto loc = attrs . getLoc ( TAK_noreturn ) ; <nl> - auto attrRange = getTypeAttrRangeWithAt ( Context , loc ) ; <nl> - auto resultRange = fnRepr - > getResultTypeRepr ( ) - > getSourceRange ( ) ; <nl> + / / @ noreturn has been replaced with a ' Never ' return type . <nl> + if ( attrs . has ( TAK_noreturn ) ) { <nl> + auto loc = attrs . 
getLoc ( TAK_noreturn ) ; <nl> + auto attrRange = getTypeAttrRangeWithAt ( Context , loc ) ; <nl> + auto resultRange = fnRepr - > getResultTypeRepr ( ) - > getSourceRange ( ) ; <nl> <nl> - diagnose ( loc , diag : : noreturn_not_supported ) <nl> - . fixItRemove ( attrRange ) <nl> - . fixItReplace ( resultRange , " Never " ) ; <nl> - } <nl> + diagnose ( loc , diag : : noreturn_not_supported ) <nl> + . fixItRemove ( attrRange ) <nl> + . fixItReplace ( resultRange , " Never " ) ; <nl> + } <nl> <nl> - / / Resolve the function type directly with these attributes . <nl> - FunctionType : : ExtInfo extInfo ( rep , <nl> - attrs . has ( TAK_noescape ) , <nl> - fnRepr - > throws ( ) ) ; <nl> + / / Resolve the function type directly with these attributes . <nl> + FunctionType : : ExtInfo extInfo ( rep , attrs . has ( TAK_noescape ) , <nl> + fnRepr - > throws ( ) ) ; <nl> <nl> - ty = resolveASTFunctionType ( fnRepr , options , extInfo ) ; <nl> - if ( ! ty | | ty - > hasError ( ) ) return ty ; <nl> + ty = resolveASTFunctionType ( fnRepr , options , extInfo ) ; <nl> + if ( ! ty | | ty - > hasError ( ) ) <nl> + return ty ; <nl> + } <nl> } <nl> <nl> auto instanceOptions = options ; <nl> Type TypeResolver : : resolveAttributedType ( TypeAttributes & attrs , <nl> } <nl> <nl> if ( hasFunctionAttr & & ! fnRepr ) { <nl> - / / @ autoclosure usually auto - implies @ noescape , don ' t complain about both <nl> - / / of them . <nl> - if ( attrs . has ( TAK_autoclosure ) ) <nl> + if ( attrs . has ( TAK_autoclosure ) ) { <nl> + / / @ autoclosure usually auto - implies @ noescape , <nl> + / / don ' t complain about both of them . <nl> attrs . clearAttribute ( TAK_noescape ) ; <nl> + / / @ autoclosure is going to be diagnosed when type of <nl> + / / the parameter is validated , because that attribute <nl> + / / applies to the declaration now . <nl> + attrs . clearAttribute ( TAK_autoclosure ) ; <nl> + } <nl> <nl> for ( auto i : FunctionAttrs ) { <nl> if ( ! attrs . has ( i ) ) <nl> bool TypeResolver : : resolveASTFunctionTypeParams ( <nl> } <nl> <nl> bool autoclosure = false ; <nl> - if ( auto * ATR = dyn_cast < AttributedTypeRepr > ( eltTypeRepr ) ) { <nl> - / / Make sure that parameter itself is of a function type , otherwise <nl> - / / the problem would already be diagnosed by ` resolveAttributedType ` <nl> - / / but attributes would stay unchanged . So as a recovery let ' s drop <nl> - / / ' autoclosure ' attribute from the resolved parameter . <nl> - autoclosure = <nl> - ty - > is < FunctionType > ( ) & & ATR - > getAttrs ( ) . has ( TAK_autoclosure ) ; <nl> - } <nl> + if ( auto * ATR = dyn_cast < AttributedTypeRepr > ( eltTypeRepr ) ) <nl> + autoclosure = ATR - > getAttrs ( ) . has ( TAK_autoclosure ) ; <nl> <nl> ValueOwnership ownership ; <nl> <nl> mmm a / test / attr / attr_autoclosure . swift <nl> ppp b / test / attr / attr_autoclosure . 
swift <nl> let _ : ( @ autoclosure ( escaping ) ( ) - > ( ) ) - > ( ) <nl> <nl> / / escaping is the name of param type <nl> let _ : ( @ autoclosure ( escaping ) - > ( ) ) - > ( ) / / expected - error { { use of undeclared type ' escaping ' } } <nl> - / / expected - error @ - 1 { { argument type of @ autoclosure parameter must be ' ( ) ' } } <nl> <nl> / / Migration <nl> / / expected - error @ + 1 { { attribute can only be applied to types , not declarations } } <nl> func rdar_47586626 ( ) { <nl> foo ( s ) / / ok <nl> bar ( s ) / / ok <nl> } <nl> + <nl> + protocol P_47586626 { <nl> + typealias F = ( ) - > Int <nl> + typealias G < T > = ( ) - > T <nl> + <nl> + func foo ( _ : @ autoclosure F ) <nl> + func bar < T > ( _ : @ autoclosure G < T > ) <nl> + } <nl>
Merge pull request from xedin / cleanup - autoclosure - diags
apple/swift
0a076199926ac5320318706f73a65c8fe4cdc773
2019-02-06T00:05:44Z
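The apple/swift commit above reworks where @autoclosure is diagnosed: per the comment added in the diff, the attribute is now checked when the parameter declaration is validated rather than during type resolution, and the test added to attr_autoclosure.swift applies the attribute through a function-type typealias. A minimal Swift sketch of that usage pattern, assuming a compiler that includes this change; the names Thunk, evaluate and makeValue are illustrative and do not appear in the commit:

// Sketch only: mirrors the typealias pattern exercised by the added test.
typealias Thunk = () -> Int

// @autoclosure is applied through the typealias; per the diff comment, the
// attribute is validated on the parameter declaration itself.
func evaluate(_ makeValue: @autoclosure Thunk) -> Int {
    return makeValue()        // the wrapped expression is evaluated lazily here
}

let result = evaluate(2 + 3)  // caller passes a plain expression; the compiler wraps it in a closure

Under the earlier behavior the attribute could not be applied through a typealias like this, which appears to be what the new P_47586626 protocol requirements in the test guard against.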
mmm a / CHANGELOG <nl> ppp b / CHANGELOG <nl> <nl> devel <nl> mmm - - <nl> <nl> + * Fixed issue # 9795 . Fixed NOT IN clause in ArangoSearch . <nl> + <nl> * The graph viewer of the web interface now tries to find a vertex document of <nl> all available vertex collections before it aborts . <nl> <nl> mmm a / arangod / IResearch / IResearchFilterFactory . cpp <nl> ppp b / arangod / IResearch / IResearchFilterFactory . cpp <nl> arangodb : : Result fromInArray ( irs : : boolean_filter * filter , QueryContext const & ct <nl> if ( filter ) { <nl> filter = arangodb : : aql : : NODE_TYPE_OPERATOR_BINARY_NIN = = node . type <nl> ? & static_cast < irs : : boolean_filter & > ( <nl> - filter - > add < irs : : Not > ( ) . filter < irs : : And > ( ) ) <nl> + filter - > add < irs : : Not > ( ) . filter < irs : : Or > ( ) ) <nl> : & static_cast < irs : : boolean_filter & > ( filter - > add < irs : : Or > ( ) ) ; <nl> filter - > boost ( filterCtx . boost ) ; <nl> } <nl> arangodb : : Result fromIn ( irs : : boolean_filter * filter , QueryContext const & ctx , <nl> <nl> filter = arangodb : : aql : : NODE_TYPE_OPERATOR_BINARY_NIN = = node . type <nl> ? & static_cast < irs : : boolean_filter & > ( <nl> - filter - > add < irs : : Not > ( ) . filter < irs : : And > ( ) ) <nl> + filter - > add < irs : : Not > ( ) . filter < irs : : Or > ( ) ) <nl> : & static_cast < irs : : boolean_filter & > ( filter - > add < irs : : Or > ( ) ) ; <nl> filter - > boost ( filterCtx . boost ) ; <nl> <nl> mmm a / tests / IResearch / IResearchFilterIn - test . cpp <nl> ppp b / tests / IResearch / IResearchFilterIn - test . cpp <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / simple attribute <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a " ) ) . term ( " 2 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a " ) ) . term ( " 3 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / simple offset <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " [ 1 ] " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " [ 1 ] " ) ) . term ( " 2 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " [ 1 ] " ) ) . term ( " 3 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / complex attribute name <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 2 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . 
term ( " 3 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / complex attribute name , offset <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleStringIdentity ( " a . b . c [ 323 ] . e . f " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / complex attribute name , offset <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 1 . 5 ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleStringIdentity ( " a . b . c [ 323 ] . e . f " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / complex attribute name , offset , analyzer <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c [ 323 ] . e . f " , " test_analyzer " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / complex attribute name , offset , analyzer , boost <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 2 . 5 ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c [ 323 ] . e . f " , " test_analyzer " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / heterogeneous array values <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleStringIdentity ( " quick . brown . fox " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / heterogeneous array values , analyzer <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " quick . brown . fox " , " test_analyzer " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / heterogeneous array values , analyzer , boost <nl> { <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 1 . 5 ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " quick . brown . fox " , " test_analyzer " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> arangodb : : aql : : AqlValueHintDouble { 5 . 6 } ) ) ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . 
field ( mangleStringIdentity ( " a . b . c . e [ 4 ] . f [ 5 ] . g [ 3 ] . g . a " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( " x " , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleNumeric ( " a . b . c . e . f " ) ) . term ( term - > value ( ) ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 3 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( " x " , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c . e . f " , " test_analyzer " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( " x " , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 3 . 5 ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c . e . f " , " test_analyzer " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( var . name , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleNumeric ( " a . b . c . e . f " ) ) . term ( term - > value ( ) ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 3 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( var . name , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c . e . f " , " test_analyzer " ) ) <nl> . term ( " 1 " ) ; <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> ctx . vars . emplace ( var . name , value ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 1 . 5 ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleString ( " a . b . c . e . f " , " test_analyzer " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . 
filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> EXPECT_TRUE ( expected = = * begin ) ; <nl> } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> EXPECT_TRUE ( expected = = * begin ) ; <nl> } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> EXPECT_TRUE ( expected = = * begin ) ; <nl> } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> - EXPECT_TRUE ( 1 . 
5f = = andNode - > boost ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> + EXPECT_TRUE ( 1 . 5f = = orNode - > boost ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> EXPECT_TRUE ( expected = = * begin ) ; <nl> } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> dynamic_cast < arangodb : : iresearch : : ByExpression const * > ( & * begin ) ) ; <nl> } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> EXPECT_TRUE ( 1 = = actual . size ( ) ) ; <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { EXPECT_TRUE ( irs : : empty ( ) = = * begin ) ; } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / 3rd filter <nl> { EXPECT_TRUE ( irs : : all ( ) = = * + + begin ) ; } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & notNode = dynamic_cast < irs : : Not & > ( * actual . begin ( ) ) ; <nl> EXPECT_TRUE ( irs : : Not : : type ( ) = = notNode . type ( ) ) ; <nl> <nl> - auto const * andNode = dynamic_cast < irs : : And const * > ( notNode . 
filter ( ) ) ; <nl> - EXPECT_TRUE ( andNode ) ; <nl> - EXPECT_TRUE ( irs : : And : : type ( ) = = andNode - > type ( ) ) ; <nl> - EXPECT_TRUE ( 3 = = andNode - > size ( ) ) ; <nl> + auto const * orNode = dynamic_cast < irs : : Or const * > ( notNode . filter ( ) ) ; <nl> + EXPECT_TRUE ( orNode ) ; <nl> + EXPECT_TRUE ( irs : : Or : : type ( ) = = orNode - > type ( ) ) ; <nl> + EXPECT_TRUE ( 3 = = orNode - > size ( ) ) ; <nl> <nl> - auto begin = andNode - > begin ( ) ; <nl> + auto begin = orNode - > begin ( ) ; <nl> <nl> / / 1st filter <nl> { EXPECT_TRUE ( irs : : empty ( ) = = * begin ) ; } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> / / 3rd filter <nl> { EXPECT_TRUE ( irs : : all ( ) = = * + + begin ) ; } <nl> <nl> - EXPECT_TRUE ( andNode - > end ( ) = = + + begin ) ; <nl> + EXPECT_TRUE ( orNode - > end ( ) = = + + begin ) ; <nl> } <nl> } <nl> } <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & term = stream . attributes ( ) . get < irs : : term_attribute > ( ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) <nl> . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) <nl> TEST_F ( IResearchFilterInTest , BinaryNotIn ) { <nl> auto & term = stream . attributes ( ) . get < irs : : term_attribute > ( ) ; <nl> <nl> irs : : Or expected ; <nl> - auto & root = expected . add < irs : : Not > ( ) . filter < irs : : And > ( ) ; <nl> + auto & root = expected . add < irs : : Not > ( ) . filter < irs : : Or > ( ) ; <nl> root . boost ( 2 . 5 ) ; <nl> root . add < irs : : by_term > ( ) . field ( mangleStringIdentity ( " a . b . c . e . f " ) ) . term ( " 1 " ) ; <nl> root . add < irs : : by_term > ( ) <nl> mmm a / tests / IResearch / IResearchQueryIn - test . cpp <nl> ppp b / tests / IResearch / IResearchQueryIn - test . cpp <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 1 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d . value IN [ true ] SORT BM25 ( d ) ASC , " <nl> - " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value IN [ true ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . 
resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value NOT IN [ true ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test bool via [ ] <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 1 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d [ ' value ' ] IN [ true , false ] SORT BM25 ( d ) " <nl> - " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] IN [ true , false ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] NOT IN [ true , false ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . 
resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test numeric <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> insertedDocs [ 11 ] . slice ( ) , <nl> insertedDocs [ 13 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d . value IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> - " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value NOT IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test numeric , limit 2 <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> insertedDocs [ 8 ] . slice ( ) , <nl> insertedDocs [ 11 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d . value IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> - " TFIDF ( d ) DESC , d . seq LIMIT 2 RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . 
isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq LIMIT 2 RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value NOT IN [ 123 , 1234 ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq LIMIT 2 RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + / / this also must not be there ( it is not in expected due to LIMIT clause ) <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( insertedDocs [ 13 ] . slice ( ) , resolved , true ) ) ; <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , 2 ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test numeric via [ ] <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> insertedDocs [ 11 ] . slice ( ) , <nl> insertedDocs [ 13 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d [ ' value ' ] IN [ 123 , 1234 ] SORT BM25 ( d ) " <nl> - " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] IN [ 123 , 1234 ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . 
data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] NOT IN [ 123 , 1234 ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test null <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 0 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d . value IN [ null ] SORT BM25 ( d ) ASC , " <nl> - " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value IN [ null ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value NOT IN [ null ] SORT BM25 ( d ) ASC , " <nl> + " TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . 
isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test null via [ ] <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 0 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d [ ' value ' ] IN [ null , null ] SORT BM25 ( d ) " <nl> - " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] IN [ null , null ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] NOT IN [ null , null ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test object <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 2 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d . 
value IN [ \ " abc \ " , \ " xyz \ " ] SORT BM25 ( d ) " <nl> - " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value IN [ \ " abc \ " , \ " xyz \ " ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d . value NOT IN [ \ " abc \ " , \ " xyz \ " ] SORT BM25 ( d ) " <nl> + " ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / test string via [ ] <nl> TEST_F ( IResearchQueryInTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 2 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH d [ ' value ' ] IN [ \ " abc \ " , \ " xyz \ " ] SORT " <nl> - " BM25 ( d ) ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] IN [ \ " abc \ " , \ " xyz \ " ] SORT " <nl> + " BM25 ( d ) ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . 
value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH d [ ' value ' ] NOT IN [ \ " abc \ " , \ " xyz \ " ] SORT " <nl> + " BM25 ( d ) ASC , TFIDF ( d ) DESC , d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> } <nl> mmm a / tests / IResearch / IResearchQueryInRange - test . cpp <nl> ppp b / tests / IResearch / IResearchQueryInRange - test . cpp <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 1 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . value , false , true , false , true ) " <nl> - " SORT d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . value , false , true , false , true ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . 
size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . value , false , true , false , true ) ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . value > = null & & d . value < = null <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 0 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . value , null , null , true , true ) " <nl> - " SORT d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . value , null , null , true , true ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . value , null , null , true , true ) ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . 
size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . value > null & & d . value < = null <nl> { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> vocbase , <nl> " FOR d IN testView SEARCH IN_RANGE ( d . value , null , null , false , true ) " <nl> " SORT d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . value , null , null , false , true ) ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > = ' A ' & & d . name < = ' A ' <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 6 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' A ' , true , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . 
size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' A ' , true , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' A ' , ' A ' , true , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > = ' B ' & & d . name < = ' A ' <nl> { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' B ' , ' A ' , true , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' B ' , ' A ' , true , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . 
size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' B ' , ' A ' , true , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > = ' A ' & & d . name < = ' E ' <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 8 ] . slice ( ) , insertedDocs [ 9 ] . slice ( ) , <nl> insertedDocs [ 10 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , true , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , true , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' A ' , ' E ' , true , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . 
size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > = ' A ' & & d . name < ' E ' <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 8 ] . slice ( ) , <nl> insertedDocs [ 9 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , true , false ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , true , false ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' A ' , ' E ' , true , false ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > ' A ' & & d . name < = ' E ' <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 9 ] . slice ( ) , <nl> insertedDocs [ 10 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , false , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . 
size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , false , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' A ' , ' E ' , false , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . name > ' A ' & & d . name < ' E ' <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 8 ] . slice ( ) , <nl> insertedDocs [ 9 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , false , false ) " <nl> - " SORT d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . name , ' A ' , ' E ' , false , false ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . 
size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . name , ' A ' , ' E ' , false , false ) ) " <nl> + " SORT d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . seq > = 5 & & d . seq < = - 1 <nl> { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . seq , 5 , - 1 , true , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . seq , 5 , - 1 , true , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . seq , 5 , - 1 , true , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . 
size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . seq > = 1 & & d . seq < = 5 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 9 ] . slice ( ) , insertedDocs [ 10 ] . slice ( ) , <nl> insertedDocs [ 11 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , true , true ) SORT d . seq " <nl> - " RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , true , true ) SORT d . seq " <nl> + " RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . seq , 1 , 5 , true , true ) ) SORT d . seq " <nl> + " RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . seq > - 2 & & d . seq < = 5 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 9 ] . slice ( ) , insertedDocs [ 10 ] . slice ( ) , <nl> insertedDocs [ 11 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . seq , - 2 , 5 , false , true ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . 
size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . seq , - 2 , 5 , false , true ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . seq , - 2 , 5 , false , true ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . seq > 1 & & d . seq < 5 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 9 ] . slice ( ) , <nl> insertedDocs [ 10 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , false , false ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , false , false ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . 
size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . seq , 1 , 5 , false , false ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . seq > = 1 & & d . seq < 5 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> insertedDocs [ 9 ] . slice ( ) , <nl> insertedDocs [ 10 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , true , false ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . seq , 1 , 5 , true , false ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . seq , 1 , 5 , true , false ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . value > 3 & & d . 
value < 4 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 3 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . value , 3 , 4 , false , false ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . value , 3 , 4 , false , false ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . value , 3 , 4 , false , false ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> <nl> / / d . value > - 4 & & d . value < - 3 <nl> TEST_F ( IResearchQueryInRangeTest , test ) { <nl> std : : vector < arangodb : : velocypack : : Slice > expected = { <nl> insertedDocs [ 3 ] . slice ( ) , <nl> } ; <nl> - auto result = arangodb : : tests : : executeQuery ( <nl> - vocbase , <nl> - " FOR d IN testView SEARCH IN_RANGE ( d . value , - 4 , - 3 , false , false ) SORT " <nl> - " d . seq RETURN d " ) ; <nl> - ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> - auto slice = result . data - > slice ( ) ; <nl> - EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> - size_t i = 0 ; <nl> - <nl> - for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> - auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> - <nl> - EXPECT_TRUE ( ( i < expected . 
size ( ) ) ) ; <nl> - EXPECT_TRUE ( ( 0 = = arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> - resolved , true ) ) ) ; <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH IN_RANGE ( d . value , - 4 , - 3 , false , false ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + EXPECT_LT ( i , expected . size ( ) ) ; <nl> + EXPECT_EQ ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( expected [ i + + ] , <nl> + resolved , true ) ) ; <nl> + } <nl> + EXPECT_EQ ( i , expected . size ( ) ) ; <nl> + } <nl> + / / NOT <nl> + { <nl> + auto result = arangodb : : tests : : executeQuery ( <nl> + vocbase , <nl> + " FOR d IN testView SEARCH NOT ( IN_RANGE ( d . value , - 4 , - 3 , false , false ) ) SORT " <nl> + " d . seq RETURN d " ) ; <nl> + ASSERT_TRUE ( result . result . ok ( ) ) ; <nl> + auto slice = result . data - > slice ( ) ; <nl> + EXPECT_TRUE ( slice . isArray ( ) ) ; <nl> + size_t i = 0 ; <nl> + for ( arangodb : : velocypack : : ArrayIterator itr ( slice ) ; itr . valid ( ) ; + + itr ) { <nl> + auto const resolved = itr . value ( ) . resolveExternals ( ) ; <nl> + for ( const auto & u : expected ) { <nl> + EXPECT_NE ( 0 , arangodb : : basics : : VelocyPackHelper : : compare ( u , resolved , true ) ) ; <nl> + } <nl> + + + i ; <nl> + } <nl> + EXPECT_EQ ( i , ( insertedDocs . size ( ) - expected . size ( ) ) ) ; <nl> } <nl> - <nl> - EXPECT_TRUE ( ( i = = expected . size ( ) ) ) ; <nl> } <nl> } <nl> mmm a / tests / js / common / aql / aql - view - arangosearch - cluster . inc <nl> ppp b / tests / js / common / aql / aql - view - arangosearch - cluster . inc <nl> <nl> } <nl> } , <nl> <nl> - testViewCollectionOptions : function ( ) { <nl> + testViewCollectionOptions : function ( ) { <nl> var result = db . _query ( " FOR doc IN CompoundView SEARCH doc . a = = ' foo ' OPTIONS { waitForSync : true , collections : [ ' UnitTestsCollection ' ] } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 10 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeEqualityFilter : function ( ) { <nl> + testAttributeEqualityFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . a = = ' foo ' OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 10 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testMultipleAttributeEqualityFilter : function ( ) { <nl> + testMultipleAttributeEqualityFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . a = = ' foo ' & & doc . b = = ' bar ' OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 5 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testMultipleAttributeEqualityFilterSortAttribute : function ( ) { <nl> + testMultipleAttributeEqualityFilterSortAttribute : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . a = = ' foo ' & & doc . b = = ' bar ' OPTIONS { waitForSync : true } SORT doc . c RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . 
length , 5 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testMultipleAttributeEqualityFilterSortAttributeDesc : function ( ) { <nl> + testMultipleAttributeEqualityFilterSortAttributeDesc : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . a = = ' foo ' AND doc . b = = ' bar ' OPTIONS { waitForSync : true } SORT doc . c DESC RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 5 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeLessFilter : function ( ) { <nl> + testAttributeLessFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c < 2 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 8 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeLeqFilter : function ( ) { <nl> + testAttributeLeqFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c < = 2 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 12 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeGeqFilter : function ( ) { <nl> + testAttributeGeqFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c > = 2 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 12 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeGreaterFilter : function ( ) { <nl> + testAttributeGreaterFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c > 2 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 8 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeOpenIntervalFilter : function ( ) { <nl> + testAttributeOpenIntervalFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c > 1 AND doc . c < 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 4 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeClosedIntervalFilter : function ( ) { <nl> + testAttributeClosedIntervalFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c > = 1 AND doc . c < = 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 12 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeIntervalExclusionFilter : function ( ) { <nl> + testAttributeIntervalExclusionFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c < 1 OR doc . c > 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 8 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeNeqFilter : function ( ) { <nl> + testAttributeNeqFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . a ! = ' foo ' OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 18 ) ; / / include documents without attribute ' a ' <nl> <nl> } ) ; <nl> } , <nl> <nl> - testStartsWithFilter : function ( ) { <nl> + testStartsWithFilter : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH STARTS_WITH ( doc . a , ' fo ' ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . 
length , 10 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testStartsWithFilter2 : function ( ) { <nl> + testStartsWithFilter2 : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH STARTS_WITH ( doc . b , ' ba ' ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 10 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testStartsWithFilterSort : function ( ) { <nl> + testStartsWithFilterSort : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH STARTS_WITH ( doc . b , ' ba ' ) & & doc . c = = 0 OPTIONS { waitForSync : true } SORT doc . b RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 2 ) ; <nl> <nl> assertEqual ( result [ 1 ] . c , 0 ) ; <nl> } , <nl> <nl> - testPhraseFilter : function ( ) { <nl> + testPhraseFilter : function ( ) { <nl> var result0 = db . _query ( " FOR doc IN UnitTestsView SEARCH PHRASE ( doc . text , ' quick brown fox jumps ' , ' text_en ' ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result0 . length , 1 ) ; <nl> <nl> assertEqual ( result3 [ 0 ] . name , ' full ' ) ; <nl> } , <nl> <nl> - testExistsFilter : function ( ) { <nl> + testExistsFilter : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " full " ) ; <nl> expected . add ( " half " ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByAnalyzer : function ( ) { <nl> + testExistsFilterByAnalyzer : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " full " ) ; <nl> expected . add ( " half " ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByIdentityAnalyzer : function ( ) { <nl> + testExistsFilterByIdentityAnalyzer : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH EXISTS ( doc . text , ' analyzer ' ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( 0 , result . length ) ; <nl> } , <nl> <nl> - testExistsFilterByContextAnalyzer : function ( ) { <nl> + testExistsFilterByContextAnalyzer : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " full " ) ; <nl> expected . add ( " half " ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByString : function ( ) { <nl> + testExistsFilterByString : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " full " ) ; <nl> expected . add ( " half " ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByType : function ( ) { <nl> + testExistsFilterByType : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH EXISTS ( doc . text , ' type ' ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByTypeNull : function ( ) { <nl> + testExistsFilterByTypeNull : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " null " ) ; <nl> <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByTypeBool : function ( ) { <nl> + testExistsFilterByTypeBool : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . add ( " bool " ) ; <nl> <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testExistsFilterByTypeNumeric : function ( ) { <nl> + testExistsFilterByTypeNumeric : function ( ) { <nl> var expected = new Set ( ) ; <nl> expected . 
add ( " numeric " ) ; <nl> <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testViewInInnerLoop : function ( ) { <nl> + testViewInInnerLoop : function ( ) { <nl> var expected = new Set ( ) ; / / FIXME is there a better way to compare objects in js ? <nl> expected . add ( JSON . stringify ( { a : " foo " , b : " bar " , c : 0 } ) ) ; <nl> expected . add ( JSON . stringify ( { a : " foo " , b : " baz " , c : 0 } ) ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testViewInInnerLoopMultipleFilters : function ( ) { <nl> + testViewInInnerLoopMultipleFilters : function ( ) { <nl> var expected = new Set ( ) ; / / FIXME is there a better way to compare objects in js ? <nl> expected . add ( JSON . stringify ( { a : " foo " , b : " bar " , c : 0 } ) ) ; <nl> expected . add ( JSON . stringify ( { a : " foo " , b : " baz " , c : 0 } ) ) ; <nl> <nl> assertEqual ( expected . size , 0 ) ; <nl> } , <nl> <nl> - testViewInInnerLoopSortByAttribute : function ( ) { <nl> + testViewInInnerLoopSortByAttribute : function ( ) { <nl> var expected = [ ] ; <nl> expected . push ( { a : " bar " , b : " foo " , c : 1 } ) ; <nl> expected . push ( { a : " baz " , b : " foo " , c : 1 } ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testJoinTwoViewsSortByAttribute : function ( ) { <nl> + testJoinTwoViewsSortByAttribute : function ( ) { <nl> var expected = [ ] ; <nl> expected . push ( { a : " bar " , b : " foo " , c : 1 } ) ; <nl> expected . push ( { a : " baz " , b : " foo " , c : 1 } ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testWithKeywordForViewInGraph : function ( ) { <nl> + testWithKeywordForViewInGraph : function ( ) { <nl> var results = [ ] ; <nl> <nl> results [ 0 ] = db . _query ( <nl> <nl> } ) ; <nl> } , <nl> <nl> - testViewInSubquery : function ( ) { <nl> + testViewInSubquery : function ( ) { <nl> var entitiesData = [ <nl> { <nl> " _key " : " person1 " , <nl> <nl> links . drop ( ) ; <nl> } , <nl> <nl> - testAttributeInRangeOpenInterval : function ( ) { <nl> + testAttributeInRangeOpenInterval : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH IN_RANGE ( doc . c , 1 , 3 , false , false ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 4 ) ; <nl> <nl> } ) ; <nl> } , <nl> <nl> - testAttributeInRangeClosedInterval : function ( ) { <nl> + testAttributeInRangeClosedInterval : function ( ) { <nl> var result = db . _query ( " FOR doc IN UnitTestsView SEARCH IN_RANGE ( doc . c , 1 , 3 , true , true ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> assertEqual ( result . length , 12 ) ; <nl> <nl> assertEqual ( res . a , " foo " ) ; <nl> assertTrue ( res . score > 1 & & res . score < 2 ) ; <nl> } ) ; <nl> - } <nl> + } , <nl> + testAttributeNotInRangeOpenInterval : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH NOT ( IN_RANGE ( doc . c , 1 , 3 , false , false ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 24 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c < = 1 | | res . c > = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeNotInRangeClosedInterval : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH NOT ( IN_RANGE ( doc . c , 1 , 3 , true , true ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 16 ) ; <nl> + result . 
forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c < 1 | | res . c > 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeInRange : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c IN 1 . . 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 12 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c > = 1 | | res . c < = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeNotInRange : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c NOT IN 1 . . 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 16 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c < 1 | | res . c > 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeInArray : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c IN [ 1 , 3 ] OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> <nl> + assertEqual ( result . length , 8 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = 1 | | res . c = = = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeNotInArray : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c NOT IN [ 1 , 3 ] OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 20 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c ! = = 1 & & res . c ! = = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeInExpression : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 IN FLATTEN ( c ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , db . UnitTestsCollection . toArray ( ) . length ) ; <nl> + } , <nl> + testAttributeNotInExpression : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 NOT IN FLATTEN ( c ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 0 ) ; <nl> + } , <nl> + testAttributeInExpressionNonDet : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 IN NOOPT ( FLATTEN ( c ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , db . UnitTestsCollection . toArray ( ) . length ) ; <nl> + } , <nl> + testAttributeNotInExpressionNonDet : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 NOT IN NOOPT ( FLATTEN ( c ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 0 ) ; <nl> + } <nl> } ; <nl> } <nl> } ( ) ) ; <nl> mmm a / tests / js / common / aql / aql - view - arangosearch - noncluster . js <nl> ppp b / tests / js / common / aql / aql - view - arangosearch - noncluster . js <nl> function iResearchAqlTestSuite ( ) { <nl> <nl> var result = db . _query ( " LET outer = ( FOR out1 IN UnitTestsCollection FILTER out1 . a = = ' foo ' & & out1 . c = = 0 RETURN out1 ) FOR a IN outer FOR d IN UnitTestsView SEARCH d . a = = a . a & & d . c = = a . c & & d . b = = a . b OPTIONS { waitForSync : true } SORT d . b ASC RETURN d " ) . 
toArray ( ) ; <nl> <nl> - assertEqual ( result . length , expected . length ) ; <nl> + assertEqual ( result . length , expected . length ) ; <nl> var i = 0 ; <nl> result . forEach ( function ( res ) { <nl> var doc = expected [ i + + ] ; <nl> function iResearchAqlTestSuite ( ) { <nl> assertEqual ( res . a , " foo " ) ; <nl> assertTrue ( res . score > 1 & & res . score < 2 ) ; <nl> } ) ; <nl> + } , <nl> + testAttributeInRange : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c IN 1 . . 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 12 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c > = 1 | | res . c < = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeNotInRange : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c NOT IN 1 . . 3 OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 16 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c < 1 | | res . c > 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeInArray : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c IN [ 1 , 3 ] OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 8 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = 1 | | res . c = = = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeNotInArray : function ( ) { <nl> + var result = db . _query ( " FOR doc IN UnitTestsView SEARCH doc . c NOT IN [ 1 , 3 ] OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 20 ) ; <nl> + result . forEach ( function ( res ) { <nl> + assertTrue ( res . c = = = undefined | | res . c ! = = 1 & & res . c ! = = 3 ) ; <nl> + } ) ; <nl> + } , <nl> + testAttributeInExpression : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 IN FLATTEN ( c ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , db . UnitTestsCollection . toArray ( ) . length ) ; <nl> + <nl> + } , <nl> + testAttributeNotInExpression : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 NOT IN FLATTEN ( c ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 0 ) ; <nl> + } , <nl> + testAttributeInExpressionNonDet : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 IN NOOPT ( FLATTEN ( c ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , db . UnitTestsCollection . toArray ( ) . length ) ; <nl> + <nl> + } , <nl> + testAttributeNotInExpressionNonDet : function ( ) { <nl> + var result = db . _query ( " FOR c IN [ [ [ 1 , 3 ] ] ] FOR doc IN UnitTestsView SEARCH 1 NOT IN NOOPT ( FLATTEN ( c ) ) OPTIONS { waitForSync : true } RETURN doc " ) . toArray ( ) ; <nl> + <nl> + assertEqual ( result . length , 0 ) ; <nl> } <nl> <nl> } ; <nl>
Bug fix / issue ( )
arangodb/arangodb
622d7a3edf668b4c7ea8333cf09b79f7e96960b1
2019-08-28T11:43:04Z
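The ArangoDB change above repeats one verification pattern for every IN_RANGE case: run the query, compare each returned document against the expected VelocyPack slices in order, then run the same condition wrapped in NOT(...) and check that none of the expected slices comes back and that the row count is the complement over all inserted documents. Below is a minimal C++ sketch of a helper that captures that pattern. It reuses the utilities already visible in the diff (arangodb::tests::executeQuery, arangodb::basics::VelocyPackHelper::compare, arangodb::velocypack::ArrayIterator); the helper name, the include paths and the exact executeQuery signature are assumptions for illustration, not part of the commit.

// Hedged sketch: factors out the repeated positive / negated IN_RANGE checks
// from the test above. Include paths and the executeQuery signature are
// assumptions; the called utilities are the ones used in the diff.
#include <string>
#include <vector>

#include "gtest/gtest.h"
#include "Basics/VelocyPackHelper.h"   // ArangoDB helper used in the diff
#include "IResearch/common.h"          // assumed location of arangodb::tests::executeQuery
#include "VocBase/vocbase.h"           // TRI_vocbase_t
#include <velocypack/Iterator.h>
#include <velocypack/Slice.h>

static void checkInRangeAndNegation(
    TRI_vocbase_t& vocbase, std::string const& condition,
    std::vector<arangodb::velocypack::Slice> const& expected, size_t totalDocs) {
  // Positive query: rows must match `expected`, in d.seq order.
  auto result = arangodb::tests::executeQuery(
      vocbase, "FOR d IN testView SEARCH " + condition + " SORT d.seq RETURN d");
  ASSERT_TRUE(result.result.ok());
  auto slice = result.data->slice();
  EXPECT_TRUE(slice.isArray());
  size_t i = 0;
  for (arangodb::velocypack::ArrayIterator itr(slice); itr.valid(); ++itr) {
    auto const resolved = itr.value().resolveExternals();
    EXPECT_LT(i, expected.size());
    EXPECT_EQ(0, arangodb::basics::VelocyPackHelper::compare(expected[i++], resolved, true));
  }
  EXPECT_EQ(i, expected.size());

  // Negated query: none of the expected slices may appear, and the number of
  // rows must be the complement of `expected` over all inserted documents.
  auto negated = arangodb::tests::executeQuery(
      vocbase, "FOR d IN testView SEARCH NOT(" + condition + ") SORT d.seq RETURN d");
  ASSERT_TRUE(negated.result.ok());
  auto negSlice = negated.data->slice();
  EXPECT_TRUE(negSlice.isArray());
  size_t j = 0;
  for (arangodb::velocypack::ArrayIterator itr(negSlice); itr.valid(); ++itr) {
    auto const resolved = itr.value().resolveExternals();
    for (auto const& u : expected) {
      EXPECT_NE(0, arangodb::basics::VelocyPackHelper::compare(u, resolved, true));
    }
    ++j;
  }
  EXPECT_EQ(j, totalDocs - expected.size());
}

A call such as checkInRangeAndNegation(vocbase, "IN_RANGE(d.seq, 1, 5, true, false)", expected, insertedDocs.size()) would stand in for one positive/negated pair of blocks from the test above.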
mmm a / tensorflow / compiler / mlir / hlo / lib / Dialect / mhlo / transforms / hlo_legalize_to_lhlo . cc <nl> ppp b / tensorflow / compiler / mlir / hlo / lib / Dialect / mhlo / transforms / hlo_legalize_to_lhlo . cc <nl> namespace { <nl> <nl> template < typename T > <nl> using BaseOpConversion = BufferAssignmentOpConversionPattern < T > ; <nl> - using StdReturnOpConverter = <nl> - detail : : BufferAssignmentReturnOpConverter < mlir : : ReturnOp , mlir : : ReturnOp , <nl> - lmhlo : : CopyOp , true > ; <nl> <nl> Value InsertDynamicAllocAndDealloc ( Location loc , Value result , <nl> Value shape_operand , <nl> struct HloToLhloReduceOpConverter : public BaseOpConversion < mhlo : : ReduceOp > { <nl> / / Copy over the operations inside the region . <nl> rewriter . inlineRegionBefore ( op . body ( ) , new_op . body ( ) , new_op . body ( ) . end ( ) ) ; <nl> <nl> - / / Create new block arguments with correct type . <nl> + / / Convert the region signature to memref and add extra result . <nl> auto & entry_block = new_op . body ( ) . front ( ) ; <nl> - int original_arg_count = entry_block . getNumArguments ( ) ; <nl> - for ( int i = 0 ; i < original_arg_count ; + + i ) { <nl> - auto old_arg = entry_block . getArgument ( i ) ; <nl> - auto old_type = old_arg . getType ( ) . cast < TensorType > ( ) ; <nl> + TypeConverter : : SignatureConversion sig_conversion ( <nl> + entry_block . getNumArguments ( ) + 1 ) ; <nl> + for ( auto arg : entry_block . getArguments ( ) ) { <nl> + auto old_type = arg . getType ( ) . cast < TensorType > ( ) ; <nl> auto new_type = <nl> MemRefType : : get ( old_type . getShape ( ) , old_type . getElementType ( ) ) ; <nl> - auto new_arg = entry_block . addArgument ( new_type ) ; <nl> - rewriter . replaceUsesOfBlockArgument ( old_arg , new_arg ) ; <nl> + sig_conversion . addInputs ( arg . getArgNumber ( ) , new_type ) ; <nl> } <nl> - / / Add an argument for the result . <nl> - entry_block . addArgument ( <nl> - entry_block . getArgument ( original_arg_count ) . getType ( ) ) ; <nl> - / / Remove the old arguments . <nl> - for ( int i = original_arg_count - 1 ; i > = 0 ; - - i ) { <nl> - entry_block . eraseArgument ( i ) ; <nl> - } <nl> - / / Insert terminator at the end . <nl> - rewriter . setInsertionPointToEnd ( & entry_block ) ; <nl> - rewriter . create < lmhlo : : TerminatorOp > ( loc ) ; <nl> + auto return_op = cast < mhlo : : ReturnOp > ( entry_block . getTerminator ( ) ) ; <nl> + auto result_type = return_op . results ( ) . front ( ) . getType ( ) . cast < TensorType > ( ) ; <nl> + sig_conversion . addInputs ( { MemRefType : : get ( result_type . getShape ( ) , <nl> + result_type . getElementType ( ) ) } ) ; <nl> + rewriter . applySignatureConversion ( & new_op . body ( ) , sig_conversion ) ; <nl> <nl> rewriter . replaceOp ( op , ArrayRef < Value > ( buffer_args ) . slice ( operands . size ( ) ) ) ; <nl> <nl> struct HloToLhloReduceOpConverter : public BaseOpConversion < mhlo : : ReduceOp > { <nl> } <nl> } ; <nl> <nl> + / / Legalize mhlo . return to a lmhlo . copy and lmhlo . terminator . This functionality <nl> + / / is provided by mlir buffer assignment , so use the pattern from there . <nl> + / / TODO ( DFKI ) : Move this out of detail . 
<nl> + using HloToLhloReturnOpConverter = detail : : BufferAssignmentReturnOpConverter < <nl> + mhlo : : ReturnOp , lmhlo : : TerminatorOp , lmhlo : : CopyOp , false > ; <nl> + <nl> class HloToLhloTensorLoadOpConverter <nl> : public BaseOpConversion < mlir : : TensorLoadOp > { <nl> public : <nl> class HloToLhloTensorLoadOpConverter <nl> } <nl> } ; <nl> <nl> - / / TODO ( b / 137624192 ) : Rewrite into a copy and elide copy if possible . <nl> class HloToLhloTensorStoreOpConverter <nl> : public BaseOpConversion < mlir : : TensorStoreOp > { <nl> public : <nl> void populateHLOToLHLOConversionPattern ( <nl> HloToLhloOpConverter < mhlo : : SubOp > , <nl> HloToLhloOpConverter < mhlo : : TanhOp > , <nl> HloToLhloReduceOpConverter , <nl> + HloToLhloReturnOpConverter , <nl> HloToLhloTensorLoadOpConverter , <nl> HloToLhloTensorStoreOpConverter <nl> > ( context , bufferAssignment , converter ) ; <nl> mmm a / tensorflow / compiler / mlir / hlo / tests / hlo - legalize - to - lhlo . mlir <nl> ppp b / tensorflow / compiler / mlir / hlo / tests / hlo - legalize - to - lhlo . mlir <nl> func @ conv ( % input : tensor < 3x5x5x3xf32 > , % filter : tensor < 2x2x3x4xf32 > ) - > tensor <nl> } : ( tensor < 2x2x3x4xf32 > , tensor < 3x5x5x3xf32 > ) - > tensor < 3x5x5x4xf32 > <nl> return % out : tensor < 3x5x5x4xf32 > <nl> } <nl> + <nl> + / / mmm - - <nl> + <nl> + / / BOTH - LABEL : func @ reduce <nl> + func @ reduce ( % arg0 : tensor < 1x8xf32 > , % arg1 : tensor < f32 > ) - > tensor < 1xf32 > { <nl> + / / BOTH : % [ [ OUT : . * ] ] = alloc ( ) : memref < 1xf32 > <nl> + / / BOTH : " lmhlo . reduce " ( % { { . + } } , % { { . + } } , % [ [ OUT ] ] ) ( { <nl> + / / BOTH : ^ bb0 ( % [ [ ARG1 : . * ] ] : memref < f32 > , % [ [ ARG2 : . * ] ] : memref < f32 > , <nl> + / / BOTH - SAME : % [ [ ARG3 : . * ] ] : memref < f32 > ) : <nl> + / / BOTH : % [ [ TMP : . * ] ] = alloc ( ) : memref < f32 > <nl> + / / BOTH : " lmhlo . add " ( % [ [ ARG1 ] ] , % [ [ ARG2 ] ] , % [ [ TMP ] ] ) <nl> + / / BOTH : " lmhlo . copy " ( % [ [ TMP ] ] , % [ [ ARG3 ] ] ) <nl> + / / BOTH : " lmhlo . terminator " ( ) : ( ) - > ( ) <nl> + / / BOTH : } ) { dimensions = dense < 1 > : tensor < 1xi64 > } <nl> + / / BOTH - SAME : : ( memref < 1x8xf32 > , memref < f32 > , memref < 1xf32 > ) - > ( ) <nl> + % 0 = " mhlo . reduce " ( % arg0 , % arg1 ) ( { <nl> + ^ bb0 ( % arg2 : tensor < f32 > , % arg3 : tensor < f32 > ) : / / no predecessors <nl> + % 1 = mhlo . add % arg2 , % arg3 : tensor < f32 > <nl> + " mhlo . return " ( % 1 ) : ( tensor < f32 > ) - > ( ) <nl> + } ) { dimensions = dense < 1 > : tensor < 1xi64 > } <nl> + : ( tensor < 1x8xf32 > , tensor < f32 > ) - > tensor < 1xf32 > <nl> + return % 0 : tensor < 1xf32 > <nl> + } <nl> mmm a / tensorflow / compiler / mlir / hlo / tests / lhlo - copy - removal . mlir <nl> ppp b / tensorflow / compiler / mlir / hlo / tests / lhlo - copy - removal . mlir <nl> func @ must_be_removed_second ( % arg0 : memref < 2x2xf32 > , <nl> dealloc % 0 : memref < 2x2xf32 > <nl> " lmhlo . terminator " ( ) : ( ) - > ( ) <nl> } <nl> + <nl> + / / mmm - - <nl> + <nl> + / / CHECK - LABEL : func @ reduce <nl> + func @ reduce ( % arg0 : memref < 1x8xf32 > , % arg1 : memref < f32 > , % arg2 : memref < 1xf32 > ) { <nl> + % 0 = alloc ( ) : memref < 1xf32 > <nl> + " lmhlo . reduce " ( % arg0 , % arg1 , % 0 ) ( { <nl> + / / CHECK : ^ bb0 ( % [ [ ARG0 : . * ] ] : memref < f32 > , % [ [ ARG1 : . * ] ] : memref < f32 > , <nl> + / / CHECK - SAME : % [ [ ARG2 : . 
* ] ] : memref < f32 > ) <nl> + ^ bb0 ( % arg3 : memref < f32 > , % arg4 : memref < f32 > , % arg5 : memref < f32 > ) : <nl> + % 1 = alloc ( ) : memref < f32 > <nl> + / / CHECK : " lmhlo . add " ( % [ [ ARG0 ] ] , % [ [ ARG1 ] ] , % [ [ ARG2 ] ] ) <nl> + " lmhlo . add " ( % arg3 , % arg4 , % 1 ) <nl> + : ( memref < f32 > , memref < f32 > , memref < f32 > ) - > ( ) <nl> + / / CHECK - NOT : lmhlo . copy <nl> + " lmhlo . copy " ( % 1 , % arg5 ) : ( memref < f32 > , memref < f32 > ) - > ( ) <nl> + " lmhlo . terminator " ( ) : ( ) - > ( ) <nl> + } ) { dimensions = dense < 1 > : tensor < 1xi64 > } <nl> + : ( memref < 1x8xf32 > , memref < f32 > , memref < 1xf32 > ) - > ( ) <nl> + " lmhlo . copy " ( % 0 , % arg2 ) : ( memref < 1xf32 > , memref < 1xf32 > ) - > ( ) <nl> + return <nl> + } <nl>
Fix mhlo to lmhlo conversion for ReduceOp .
tensorflow/tensorflow
33a4c1a0abaf93656992d11205eabcdb21980bc9
2020-07-16T11:44:00Z
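The ReduceOp fix above swaps manual block-argument surgery for a TypeConverter::SignatureConversion: every tensor argument of the reduction body becomes a memref argument, one extra memref argument is appended for the result buffer, and mhlo.return is then lowered by the reused BufferAssignmentReturnOpConverter into lmhlo.copy plus lmhlo.terminator, which is what the new FileCheck case expects. A condensed C++ sketch of the signature-conversion step follows; it mirrors the calls from the diff but assumes the mid-2020 MLIR API layout, so treat the include paths as approximate (TensorType/MemRefType later moved to mlir/IR/BuiltinTypes.h).

// Sketch of the region signature conversion used by the ReduceOp lowering
// above. Function name and parameters are illustrative; the MLIR calls are
// the ones appearing in the diff.
#include "mlir/IR/Block.h"
#include "mlir/IR/StandardTypes.h"              // mid-2020 location of TensorType/MemRefType
#include "mlir/Transforms/DialectConversion.h"  // TypeConverter, ConversionPatternRewriter

namespace {

// Rewrites the entry block of `body` so that each tensor argument becomes a
// memref argument and one extra memref argument is appended for the output
// buffer that the lowered lmhlo.copy writes into.
void convertReduceBodySignature(mlir::Region& body,
                                mlir::MemRefType resultBufferType,
                                mlir::ConversionPatternRewriter& rewriter) {
  mlir::Block& entry = body.front();
  mlir::TypeConverter::SignatureConversion signature(entry.getNumArguments() + 1);
  for (mlir::BlockArgument arg : entry.getArguments()) {
    auto oldType = arg.getType().cast<mlir::TensorType>();
    signature.addInputs(arg.getArgNumber(),
                        mlir::MemRefType::get(oldType.getShape(),
                                              oldType.getElementType()));
  }
  signature.addInputs({resultBufferType});  // extra result buffer argument
  rewriter.applySignatureConversion(&body, signature);
}

}  // namespace

The terminator itself is not touched here; the registered HloToLhloReturnOpConverter pattern rewrites mhlo.return into the lmhlo.copy / lmhlo.terminator pair that the reduce test case checks for, and the later copy-removal pass can then fold that copy away.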
mmm a / src / CMakeLists . txt <nl> ppp b / src / CMakeLists . txt <nl> set ( INTERNAL_HEADERS <nl> $ { SOURCES_DIR } / internal / catch_case_sensitive . hpp <nl> $ { SOURCES_DIR } / internal / catch_clara . hpp <nl> $ { SOURCES_DIR } / internal / catch_commandline . hpp <nl> - $ { SOURCES_DIR } / internal / catch_common . hpp <nl> + $ { SOURCES_DIR } / internal / catch_source_line_info . hpp <nl> $ { SOURCES_DIR } / internal / catch_compiler_capabilities . hpp <nl> $ { SOURCES_DIR } / catch_config . hpp <nl> $ { SOURCES_DIR } / internal / catch_config_uncaught_exceptions . hpp <nl> set ( IMPL_SOURCES <nl> $ { SOURCES_DIR } / matchers / internal / catch_matchers_combined_tu . cpp <nl> $ { SOURCES_DIR } / internal / catch_clara . cpp <nl> $ { SOURCES_DIR } / internal / catch_commandline . cpp <nl> - $ { SOURCES_DIR } / internal / catch_common . cpp <nl> + $ { SOURCES_DIR } / internal / catch_source_line_info . cpp <nl> $ { SOURCES_DIR } / catch_config . cpp <nl> $ { SOURCES_DIR } / internal / catch_console_colour . cpp <nl> $ { SOURCES_DIR } / internal / catch_context . cpp <nl> mmm a / src / catch2 / catch_all . hpp <nl> ppp b / src / catch2 / catch_all . hpp <nl> <nl> # include < catch2 / internal / catch_case_sensitive . hpp > <nl> # include < catch2 / internal / catch_clara . hpp > <nl> # include < catch2 / internal / catch_commandline . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / internal / catch_compiler_capabilities . hpp > <nl> # include < catch2 / internal / catch_config_uncaught_exceptions . hpp > <nl> # include < catch2 / internal / catch_console_colour . hpp > <nl> <nl> # include < catch2 / internal / catch_run_context . hpp > <nl> # include < catch2 / internal / catch_section . hpp > <nl> # include < catch2 / internal / catch_singletons . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_startup_exception_registry . hpp > <nl> # include < catch2 / internal / catch_stream . hpp > <nl> # include < catch2 / internal / catch_stream_end_stop . hpp > <nl> mmm a / src / catch2 / catch_assertion_info . hpp <nl> ppp b / src / catch2 / catch_assertion_info . hpp <nl> <nl> # define CATCH_ASSERTION_INFO_HPP_INCLUDED <nl> <nl> # include < catch2 / internal / catch_result_type . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> <nl> namespace Catch { <nl> mmm a / src / catch2 / catch_assertion_result . hpp <nl> ppp b / src / catch2 / catch_assertion_result . hpp <nl> <nl> # ifndef CATCH_ASSERTION_RESULT_HPP_INCLUDED <nl> # define CATCH_ASSERTION_RESULT_HPP_INCLUDED <nl> <nl> - # include < string > <nl> # include < catch2 / catch_assertion_info . hpp > <nl> # include < catch2 / internal / catch_result_type . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> # include < catch2 / internal / catch_lazy_expr . hpp > <nl> <nl> + # include < string > <nl> + <nl> namespace Catch { <nl> <nl> struct AssertionResultData <nl> mmm a / src / catch2 / catch_section_info . hpp <nl> ppp b / src / catch2 / catch_section_info . hpp <nl> <nl> # ifndef CATCH_SECTION_INFO_HPP_INCLUDED <nl> # define CATCH_SECTION_INFO_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . 
hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> # include < catch2 / catch_totals . hpp > <nl> <nl> mmm a / src / catch2 / catch_tag_alias . hpp <nl> ppp b / src / catch2 / catch_tag_alias . hpp <nl> <nl> # ifndef CATCH_TAG_ALIAS_HPP_INCLUDED <nl> # define CATCH_TAG_ALIAS_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> <nl> # include < string > <nl> <nl> mmm a / src / catch2 / catch_tag_alias_autoregistrar . hpp <nl> ppp b / src / catch2 / catch_tag_alias_autoregistrar . hpp <nl> <nl> # ifndef CATCH_TAG_ALIAS_AUTOREGISTRAR_HPP_INCLUDED <nl> # define CATCH_TAG_ALIAS_AUTOREGISTRAR_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> <nl> namespace Catch { <nl> <nl> mmm a / src / catch2 / catch_test_case_info . hpp <nl> ppp b / src / catch2 / catch_test_case_info . hpp <nl> <nl> # ifndef CATCH_TEST_CASE_INFO_HPP_INCLUDED <nl> # define CATCH_TEST_CASE_INFO_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_noncopyable . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> # include < catch2 / internal / catch_test_registry . hpp > <nl> mmm a / src / catch2 / generators / catch_generators . hpp <nl> ppp b / src / catch2 / generators / catch_generators . hpp <nl> <nl> # define CATCH_GENERATORS_HPP_INCLUDED <nl> <nl> # include < catch2 / interfaces / catch_interfaces_generatortracker . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> <nl> # include < vector > <nl> mmm a / src / catch2 / interfaces / catch_interfaces_reporter . hpp <nl> ppp b / src / catch2 / interfaces / catch_interfaces_reporter . hpp <nl> <nl> # define CATCH_INTERFACES_REPORTER_HPP_INCLUDED <nl> <nl> # include < catch2 / catch_section_info . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / catch_totals . hpp > <nl> # include < catch2 / catch_assertion_result . hpp > <nl> # include < catch2 / internal / catch_message_info . hpp > <nl> mmm a / src / catch2 / internal / catch_console_colour . hpp <nl> ppp b / src / catch2 / internal / catch_console_colour . hpp <nl> <nl> # ifndef CATCH_CONSOLE_COLOUR_HPP_INCLUDED <nl> # define CATCH_CONSOLE_COLOUR_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < iosfwd > <nl> <nl> namespace Catch { <nl> <nl> mmm a / src / catch2 / internal / catch_enforce . hpp <nl> ppp b / src / catch2 / internal / catch_enforce . hpp <nl> <nl> # ifndef CATCH_ENFORCE_HPP_INCLUDED <nl> # define CATCH_ENFORCE_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_compiler_capabilities . hpp > <nl> # include < catch2 / internal / catch_stream . hpp > <nl> <nl> mmm a / src / catch2 / internal / catch_message_info . hpp <nl> ppp b / src / catch2 / internal / catch_message_info . hpp <nl> <nl> # define CATCH_MESSAGE_INFO_HPP_INCLUDED <nl> <nl> # include < catch2 / internal / catch_result_type . 
hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / interfaces / catch_interfaces_capture . hpp > <nl> <nl> # include < string > <nl> similarity index 95 % <nl> rename from src / catch2 / internal / catch_common . cpp <nl> rename to src / catch2 / internal / catch_source_line_info . cpp <nl> mmm a / src / catch2 / internal / catch_common . cpp <nl> ppp b / src / catch2 / internal / catch_source_line_info . cpp <nl> <nl> / / https : / / www . boost . org / LICENSE_1_0 . txt ) <nl> <nl> / / SPDX - License - Identifier : BSL - 1 . 0 <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> <nl> # include < cstring > <nl> # include < ostream > <nl> similarity index 92 % <nl> rename from src / catch2 / internal / catch_common . hpp <nl> rename to src / catch2 / internal / catch_source_line_info . hpp <nl> mmm a / src / catch2 / internal / catch_common . hpp <nl> ppp b / src / catch2 / internal / catch_source_line_info . hpp <nl> <nl> / / https : / / www . boost . org / LICENSE_1_0 . txt ) <nl> <nl> / / SPDX - License - Identifier : BSL - 1 . 0 <nl> - # ifndef CATCH_COMMON_HPP_INCLUDED <nl> - # define CATCH_COMMON_HPP_INCLUDED <nl> + # ifndef CATCH_SOURCE_LINE_INFO_HPP_INCLUDED <nl> + # define CATCH_SOURCE_LINE_INFO_HPP_INCLUDED <nl> <nl> # include < catch2 / internal / catch_compiler_capabilities . hpp > <nl> <nl> + # include < cstddef > <nl> + # include < iosfwd > <nl> + <nl> # define INTERNAL_CATCH_UNIQUE_NAME_LINE2 ( name , line ) name # # line <nl> # define INTERNAL_CATCH_UNIQUE_NAME_LINE ( name , line ) INTERNAL_CATCH_UNIQUE_NAME_LINE2 ( name , line ) <nl> # ifdef CATCH_CONFIG_COUNTER <nl> <nl> # define INTERNAL_CATCH_UNIQUE_NAME ( name ) INTERNAL_CATCH_UNIQUE_NAME_LINE ( name , __LINE__ ) <nl> # endif <nl> <nl> - # include < iosfwd > <nl> - <nl> / / We need a dummy global operator < < so we can bring it into Catch namespace later <nl> struct Catch_global_namespace_dummy { } ; <nl> std : : ostream & operator < < ( std : : ostream & , Catch_global_namespace_dummy ) ; <nl> namespace Catch { <nl> # define CATCH_INTERNAL_LINEINFO \ <nl> : : Catch : : SourceLineInfo ( __FILE__ , static_cast < std : : size_t > ( __LINE__ ) ) <nl> <nl> - # endif / / CATCH_COMMON_HPP_INCLUDED <nl> + # endif / / CATCH_SOURCE_LINE_INFO_HPP_INCLUDED <nl> mmm a / src / catch2 / internal / catch_stream . cpp <nl> ppp b / src / catch2 / internal / catch_stream . cpp <nl> <nl> / / https : / / www . boost . org / LICENSE_1_0 . txt ) <nl> <nl> / / SPDX - License - Identifier : BSL - 1 . 0 <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / internal / catch_enforce . hpp > <nl> # include < catch2 / internal / catch_stream . hpp > <nl> # include < catch2 / internal / catch_debug_console . hpp > <nl> mmm a / src / catch2 / internal / catch_test_case_tracker . hpp <nl> ppp b / src / catch2 / internal / catch_test_case_tracker . hpp <nl> <nl> # define CATCH_TEST_CASE_TRACKER_HPP_INCLUDED <nl> <nl> # include < catch2 / internal / catch_compiler_capabilities . hpp > <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> <nl> # include < string > <nl> # include < vector > <nl> mmm a / src / catch2 / internal / catch_test_registry . hpp <nl> ppp b / src / catch2 / internal / catch_test_registry . 
hpp <nl> <nl> # ifndef CATCH_TEST_REGISTRY_HPP_INCLUDED <nl> # define CATCH_TEST_REGISTRY_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> # include < catch2 / internal / catch_noncopyable . hpp > <nl> # include < catch2 / interfaces / catch_interfaces_testcase . hpp > <nl> # include < catch2 / internal / catch_compiler_capabilities . hpp > <nl> mmm a / src / catch2 / internal / catch_xmlwriter . hpp <nl> ppp b / src / catch2 / internal / catch_xmlwriter . hpp <nl> <nl> / / and reports that calls to XmlEncode ' s op < < are ambiguous between <nl> / / the declaration and definition . <nl> / / It also has to be in the header . <nl> - # include < catch2 / internal / catch_common . hpp > <nl> + # include < catch2 / internal / catch_source_line_info . hpp > <nl> <nl> <nl> # include < vector > <nl> mmm a / src / catch2 / matchers / catch_matchers . hpp <nl> ppp b / src / catch2 / matchers / catch_matchers . hpp <nl> <nl> # ifndef CATCH_MATCHERS_HPP_INCLUDED <nl> # define CATCH_MATCHERS_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / matchers / internal / catch_matchers_impl . hpp > <nl> <nl> # include < string > <nl> mmm a / src / catch2 / matchers / catch_matchers_predicate . hpp <nl> ppp b / src / catch2 / matchers / catch_matchers_predicate . hpp <nl> <nl> # ifndef CATCH_MATCHERS_PREDICATE_HPP_INCLUDED <nl> # define CATCH_MATCHERS_PREDICATE_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / matchers / catch_matchers . hpp > <nl> # include < catch2 / internal / catch_meta . hpp > <nl> <nl> mmm a / src / catch2 / matchers / catch_matchers_templated . hpp <nl> ppp b / src / catch2 / matchers / catch_matchers_templated . hpp <nl> <nl> # ifndef CATCH_MATCHERS_TEMPLATED_HPP_INCLUDED <nl> # define CATCH_MATCHERS_TEMPLATED_HPP_INCLUDED <nl> <nl> - # include < catch2 / internal / catch_common . hpp > <nl> # include < catch2 / matchers / catch_matchers . hpp > <nl> # include < catch2 / internal / catch_stringref . hpp > <nl> <nl>
Rename catch_common . hpp - > catch_source_line_info . hpp
catchorg/Catch2
72525a3053f601f00e7d897d66750b1f1e8527c9
2020-11-28T10:18:51Z
mmm a / src / ProtoGen . Test / TestPreprocessing . cs <nl> ppp b / src / ProtoGen . Test / TestPreprocessing . cs <nl> public void TestOneProtoFileWithBufferFile ( ) <nl> a . GetType ( " nunit . simple . Proto . MyMessageList " , true , true ) ; <nl> } <nl> } <nl> + <nl> + [ Test ] <nl> + public void TestProtoFileWithService ( ) <nl> + { <nl> + string test = new StackFrame ( false ) . GetMethod ( ) . Name ; <nl> + Setup ( ) ; <nl> + using ( TempFile source = TempFile . Attach ( test + " . cs " ) ) <nl> + using ( ProtoFile proto = new ProtoFile ( test + " . proto " , <nl> + @ " <nl> + import " " google / protobuf / csharp_options . proto " " ; <nl> + option ( google . protobuf . csharp_file_options ) . service_generator_type = GENERIC ; <nl> + <nl> + package nunit . simple ; <nl> + / / Test a very simple message . <nl> + message MyMessage { <nl> + optional string name = 1 ; <nl> + } <nl> + / / test a very simple service . <nl> + service TestService { <nl> + rpc Execute ( MyMessage ) returns ( MyMessage ) ; <nl> + } " ) ) <nl> + { <nl> + CopyInGoogleProtoFiles ( ) ; <nl> + <nl> + RunProtoGen ( 0 , proto . TempPath , " - ignore_google_protobuf : true " , " - nest_classes = false " ) ; <nl> + Assert . AreEqual ( 1 , Directory . GetFiles ( TempPath , " * . cs " ) . Length ) ; <nl> + <nl> + Assembly a = RunCsc ( 0 , source . TempPath ) ; <nl> + / / assert that the service type is in the expected namespace <nl> + Type t1 = a . GetType ( " nunit . simple . TestService " , true , true ) ; <nl> + Assert . IsTrue ( typeof ( IService ) . IsAssignableFrom ( t1 ) , " Expect an IService " ) ; <nl> + Assert . IsTrue ( t1 . IsAbstract , " Expect abstract class " ) ; <nl> + / / assert that the Stub subclass type is in the expected namespace <nl> + Type t2 = a . GetType ( " nunit . simple . TestService + Stub " , true , true ) ; <nl> + Assert . IsTrue ( t1 . IsAssignableFrom ( t2 ) , " Expect a sub of TestService " ) ; <nl> + Assert . IsFalse ( t2 . IsAbstract , " Expect concrete class " ) ; <nl> + } <nl> + } <nl> + <nl> + [ Test ] <nl> + public void TestProtoFileWithServiceInternal ( ) <nl> + { <nl> + string test = new StackFrame ( false ) . GetMethod ( ) . Name ; <nl> + Setup ( ) ; <nl> + using ( TempFile source = TempFile . Attach ( test + " . cs " ) ) <nl> + using ( ProtoFile proto = new ProtoFile ( test + " . proto " , <nl> + @ " <nl> + import " " google / protobuf / csharp_options . proto " " ; <nl> + option ( google . protobuf . csharp_file_options ) . service_generator_type = GENERIC ; <nl> + <nl> + package nunit . simple ; <nl> + / / Test a very simple message . <nl> + message MyMessage { <nl> + optional string name = 1 ; <nl> + } <nl> + / / test a very simple service . <nl> + service TestService { <nl> + rpc Execute ( MyMessage ) returns ( MyMessage ) ; <nl> + } " ) ) <nl> + { <nl> + CopyInGoogleProtoFiles ( ) ; <nl> + <nl> + RunProtoGen ( 0 , proto . TempPath , " - ignore_google_protobuf : true " , " - nest_classes = false " , " - public_classes = false " ) ; <nl> + Assert . AreEqual ( 1 , Directory . GetFiles ( TempPath , " * . cs " ) . Length ) ; <nl> + <nl> + Assembly a = RunCsc ( 0 , source . TempPath ) ; <nl> + / / assert that the service type is in the expected namespace <nl> + Type t1 = a . GetType ( " nunit . simple . TestService " , true , true ) ; <nl> + Assert . IsTrue ( typeof ( IService ) . IsAssignableFrom ( t1 ) , " Expect an IService " ) ; <nl> + Assert . IsTrue ( t1 . 
IsAbstract , " Expect abstract class " ) ; <nl> + / / assert that the Stub subclass type is in the expected namespace <nl> + Type t2 = a . GetType ( " nunit . simple . TestService + Stub " , true , true ) ; <nl> + Assert . IsTrue ( t1 . IsAssignableFrom ( t2 ) , " Expect a sub of TestService " ) ; <nl> + Assert . IsFalse ( t2 . IsAbstract , " Expect concrete class " ) ; <nl> + } <nl> + } <nl> + <nl> + private static void CopyInGoogleProtoFiles ( ) <nl> + { <nl> + string google = Path . Combine ( TempPath , " google \ \ protobuf " ) ; <nl> + Directory . CreateDirectory ( google ) ; <nl> + foreach ( string file in Directory . GetFiles ( Path . Combine ( OriginalWorkingDirectory , " google \ \ protobuf " ) ) ) <nl> + { <nl> + File . Copy ( file , Path . Combine ( google , Path . GetFileName ( file ) ) ) ; <nl> + } <nl> + } <nl> } <nl> } <nl> \ No newline at end of file <nl> mmm a / src / ProtoGen / ServiceGenerator . cs <nl> ppp b / src / ProtoGen / ServiceGenerator . cs <nl> public void Generate ( TextGenerator writer ) <nl> DescriptorUtil . GetQualifiedUmbrellaClassName ( Descriptor . File . CSharpOptions ) , <nl> Descriptor . Index ) ; <nl> writer . WriteLine ( " } " ) ; <nl> - writer . WriteLine ( " { 0 } pbd : : ServiceDescriptor DescriptorForType { { " , ClassAccessLevel ) ; <nl> + writer . WriteLine ( " public pbd : : ServiceDescriptor DescriptorForType { " ) ; <nl> writer . WriteLine ( " get { return Descriptor ; } " ) ; <nl> writer . WriteLine ( " } " ) ; <nl> <nl> public void Generate ( TextGenerator writer ) <nl> private void GenerateCallMethod ( TextGenerator writer ) <nl> { <nl> writer . WriteLine ( ) ; <nl> - writer . WriteLine ( " public void CallMethod ( " , ClassAccessLevel ) ; <nl> + writer . WriteLine ( " public void CallMethod ( " ) ; <nl> writer . WriteLine ( " pbd : : MethodDescriptor method , " ) ; <nl> writer . WriteLine ( " pb : : IRpcController controller , " ) ; <nl> writer . WriteLine ( " pb : : IMessage request , " ) ; <nl> private void GenerateStub ( TextGenerator writer ) <nl> foreach ( MethodDescriptor method in Descriptor . Methods ) <nl> { <nl> writer . WriteLine ( ) ; <nl> - writer . WriteLine ( " public override void { 0 } ( " , NameHelpers . UnderscoresToPascalCase ( method . Name ) ) ; <nl> + writer . WriteLine ( " { 0 } override void { 1 } ( " , ClassAccessLevel , <nl> + NameHelpers . UnderscoresToPascalCase ( method . Name ) ) ; <nl> writer . WriteLine ( " pb : : IRpcController controller , " ) ; <nl> writer . WriteLine ( " { 0 } request , " , GetClassName ( method . InputType ) ) ; <nl> writer . WriteLine ( " global : : System . Action < { 0 } > done ) { { " , GetClassName ( method . OutputType ) ) ; <nl>
Merge
protocolbuffers/protobuf
25981d4007eb5df42a25d8847efe805e05126202
2011-08-09T20:44:13Z
mmm a / src / arm / code - stubs - arm . cc <nl> ppp b / src / arm / code - stubs - arm . cc <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> } <nl> <nl> <nl> - static void CallStubInRecordCallTarget ( MacroAssembler * masm , CodeStub * stub ) { <nl> + static void CallStubInRecordCallTarget ( MacroAssembler * masm , CodeStub * stub , <nl> + bool is_super ) { <nl> / / r0 : number of arguments to the construct function <nl> / / r1 : the function to call <nl> / / r2 : feedback vector <nl> / / r3 : slot in feedback vector ( Smi ) <nl> - / / r4 : original constructor <nl> + / / r4 : original constructor ( for IsSuperConstructorCall ) <nl> FrameAndConstantPoolScope scope ( masm , StackFrame : : INTERNAL ) ; <nl> <nl> / / Number - of - arguments register must be smi - tagged to call out . <nl> __ SmiTag ( r0 ) ; <nl> __ Push ( r3 , r2 , r1 , r0 ) ; <nl> - __ Push ( r4 ) ; <nl> + if ( is_super ) { <nl> + __ Push ( r4 ) ; <nl> + } <nl> <nl> __ CallStub ( stub ) ; <nl> <nl> - __ Pop ( r4 ) ; <nl> + if ( is_super ) { <nl> + __ Pop ( r4 ) ; <nl> + } <nl> __ Pop ( r3 , r2 , r1 , r0 ) ; <nl> __ SmiUntag ( r0 ) ; <nl> } <nl> <nl> <nl> - static void GenerateRecordCallTarget ( MacroAssembler * masm ) { <nl> + static void GenerateRecordCallTarget ( MacroAssembler * masm , bool is_super ) { <nl> / / Cache the called function in a feedback vector slot . Cache states <nl> / / are uninitialized , monomorphic ( indicated by a JSFunction ) , and <nl> / / megamorphic . <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm ) { <nl> / / r1 : the function to call <nl> / / r2 : feedback vector <nl> / / r3 : slot in feedback vector ( Smi ) <nl> - / / r4 : original constructor <nl> + / / r4 : original constructor ( for IsSuperConstructorCall ) <nl> Label initialize , done , miss , megamorphic , not_array_function ; <nl> <nl> DCHECK_EQ ( * TypeFeedbackVector : : MegamorphicSentinel ( masm - > isolate ( ) ) , <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm ) { <nl> / / Create an AllocationSite if we don ' t already have it , store it in the <nl> / / slot . <nl> CreateAllocationSiteStub create_stub ( masm - > isolate ( ) ) ; <nl> - CallStubInRecordCallTarget ( masm , & create_stub ) ; <nl> + CallStubInRecordCallTarget ( masm , & create_stub , is_super ) ; <nl> __ b ( & done ) ; <nl> <nl> __ bind ( & not_array_function ) ; <nl> } <nl> <nl> CreateWeakCellStub create_stub ( masm - > isolate ( ) ) ; <nl> - CallStubInRecordCallTarget ( masm , & create_stub ) ; <nl> + CallStubInRecordCallTarget ( masm , & create_stub , is_super ) ; <nl> __ bind ( & done ) ; <nl> } <nl> <nl> void CallConstructStub : : Generate ( MacroAssembler * masm ) { <nl> __ b ( ne , & slow ) ; <nl> <nl> if ( RecordCallTarget ( ) ) { <nl> - GenerateRecordCallTarget ( masm ) ; <nl> + GenerateRecordCallTarget ( masm , IsSuperConstructorCall ( ) ) ; <nl> <nl> __ add ( r5 , r2 , Operand : : PointerOffsetFromSmiKey ( r3 ) ) ; <nl> if ( FLAG_pretenuring_call_new ) { <nl> mmm a / src / arm64 / code - stubs - arm64 . cc <nl> ppp b / src / arm64 / code - stubs - arm64 . 
cc <nl> void RegExpExecStub : : Generate ( MacroAssembler * masm ) { <nl> static void CallStubInRecordCallTarget ( MacroAssembler * masm , CodeStub * stub , <nl> Register argc , Register function , <nl> Register feedback_vector , Register index , <nl> - Register orig_construct ) { <nl> + Register orig_construct , bool is_super ) { <nl> FrameScope scope ( masm , StackFrame : : INTERNAL ) ; <nl> <nl> / / Number - of - arguments register must be smi - tagged to call out . <nl> __ SmiTag ( argc ) ; <nl> - __ Push ( argc , function , feedback_vector , index , orig_construct ) ; <nl> + if ( is_super ) { <nl> + __ Push ( argc , function , feedback_vector , index , orig_construct ) ; <nl> + } else { <nl> + __ Push ( argc , function , feedback_vector , index ) ; <nl> + } <nl> <nl> DCHECK ( feedback_vector . Is ( x2 ) & & index . Is ( x3 ) ) ; <nl> __ CallStub ( stub ) ; <nl> <nl> - __ Pop ( orig_construct , index , feedback_vector , function , argc ) ; <nl> + if ( is_super ) { <nl> + __ Pop ( orig_construct , index , feedback_vector , function , argc ) ; <nl> + } else { <nl> + __ Pop ( index , feedback_vector , function , argc ) ; <nl> + } <nl> __ SmiUntag ( argc ) ; <nl> } <nl> <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm , Register argc , <nl> Register function , <nl> Register feedback_vector , Register index , <nl> Register orig_construct , Register scratch1 , <nl> - Register scratch2 , Register scratch3 ) { <nl> + Register scratch2 , Register scratch3 , <nl> + bool is_super ) { <nl> ASM_LOCATION ( " GenerateRecordCallTarget " ) ; <nl> DCHECK ( ! AreAliased ( scratch1 , scratch2 , scratch3 , argc , function , <nl> feedback_vector , index , orig_construct ) ) ; <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm , Register argc , <nl> / / function : the function to call <nl> / / feedback_vector : the feedback vector <nl> / / index : slot in feedback vector ( smi ) <nl> - / / orig_construct : original constructor <nl> + / / orig_construct : original constructor ( for IsSuperConstructorCall ) <nl> Label initialize , done , miss , megamorphic , not_array_function ; <nl> <nl> DCHECK_EQ ( * TypeFeedbackVector : : MegamorphicSentinel ( masm - > isolate ( ) ) , <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm , Register argc , <nl> / / slot . <nl> CreateAllocationSiteStub create_stub ( masm - > isolate ( ) ) ; <nl> CallStubInRecordCallTarget ( masm , & create_stub , argc , function , <nl> - feedback_vector , index , orig_construct ) ; <nl> + feedback_vector , index , orig_construct , <nl> + is_super ) ; <nl> __ B ( & done ) ; <nl> <nl> __ Bind ( & not_array_function ) ; <nl> static void GenerateRecordCallTarget ( MacroAssembler * masm , Register argc , <nl> <nl> CreateWeakCellStub create_stub ( masm - > isolate ( ) ) ; <nl> CallStubInRecordCallTarget ( masm , & create_stub , argc , function , <nl> - feedback_vector , index , orig_construct ) ; <nl> + feedback_vector , index , orig_construct , is_super ) ; <nl> __ Bind ( & done ) ; <nl> } <nl> <nl> void CallConstructStub : : Generate ( MacroAssembler * masm ) { <nl> & slow ) ; <nl> <nl> if ( RecordCallTarget ( ) ) { <nl> - GenerateRecordCallTarget ( masm , x0 , function , x2 , x3 , x4 , x5 , x11 , x12 ) ; <nl> + GenerateRecordCallTarget ( masm , x0 , function , x2 , x3 , x4 , x5 , x11 , x12 , <nl> + IsSuperConstructorCall ( ) ) ; <nl> <nl> __ Add ( x5 , x2 , Operand : : UntagSmiAndScale ( x3 , kPointerSizeLog2 ) ) ; <nl> if ( FLAG_pretenuring_call_new ) { <nl>
[ arm ] Fix pushing of stale register in CallConstructStub .
v8/v8
85d3b16386849be735dd5c189eeec9c9f70204ac
2015-07-22T08:49:59Z
mmm a / src / btree / get_distribution . cc <nl> ppp b / src / btree / get_distribution . cc <nl> class get_distribution_traversal_helper_t : public btree_traversal_helper_t , pub <nl> void postprocess_internal_node ( buf_lock_t * internal_node_buf ) { <nl> const internal_node_t * node = reinterpret_cast < const internal_node_t * > ( internal_node_buf - > get_data_read ( ) ) ; <nl> <nl> - for ( int i = 0 ; i < node - > npairs ; i + + ) { <nl> + / * Notice , we iterate all but the last pair because the last pair <nl> + * doesn ' t actually have a key and we ' re looking for the split points . <nl> + * * / <nl> + for ( int i = 0 ; i < ( node - > npairs - 1 ) ; i + + ) { <nl> const btree_internal_pair * pair = internal_node : : get_pair_by_index ( node , i ) ; <nl> keys - > push_back ( store_key_t ( pair - > key . size , pair - > key . contents ) ) ; <nl> } <nl> mmm a / src / containers / intrusive_list . hpp <nl> ppp b / src / containers / intrusive_list . hpp <nl> class intrusive_list_t { <nl> public : <nl> intrusive_list_t ( ) : _head ( NULL ) , _tail ( NULL ) , _size ( 0 ) { } <nl> ~ intrusive_list_t ( ) { <nl> - rassert ( empty ( ) ) ; <nl> + / / rassert ( empty ( ) ) ; <nl> } <nl> <nl> bool empty ( ) { <nl> mmm a / src / memcached / btree / distribution . cc <nl> ppp b / src / memcached / btree / distribution . cc <nl> distribution_result_t memcached_distribution_get ( btree_slice_t * slice , int max_d <nl> get_btree_key_distribution ( slice , txn . get ( ) , superblock , max_depth , & key_count_out , & key_splits ) ; <nl> <nl> distribution_result_t res ; <nl> - debugf ( " key_count = % d , splits size : % lu \ n " , key_count_out , key_splits . size ( ) ) ; <nl> int keys_per_bucket = std : : max ( key_count_out / key_splits . size ( ) , 1ul ) ; <nl> res . key_counts [ key_to_str ( left_key ) ] = keys_per_bucket ; <nl> <nl> mmm a / src / memcached / protocol . cc <nl> ppp b / src / memcached / protocol . cc <nl> <nl> # include < errors . hpp > <nl> # include < boost / variant . hpp > <nl> + # include < boost / bind . hpp > <nl> <nl> # include " btree / operations . hpp " <nl> # include " btree / slice . hpp " <nl> <nl> # include " stl_utils . hpp " <nl> # include " serializer / config . hpp " <nl> <nl> + # include " btree / keys . hpp " <nl> <nl> write_message_t & operator < < ( write_message_t & msg , const intrusive_ptr_t < data_buffer_t > & buf ) { <nl> if ( buf ) { <nl> int deserialize ( read_stream_t * s , rget_result_t * iter ) { <nl> <nl> RDB_IMPL_SERIALIZABLE_1 ( get_query_t , key ) ; <nl> RDB_IMPL_SERIALIZABLE_4 ( rget_query_t , left_mode , left_key , right_mode , right_key ) ; <nl> - RDB_IMPL_SERIALIZABLE_1 ( distribution_get_query_t , max_depth ) ; <nl> + RDB_IMPL_SERIALIZABLE_2 ( distribution_get_query_t , max_depth , range ) ; <nl> RDB_IMPL_SERIALIZABLE_3 ( get_result_t , value , flags , cas ) ; <nl> RDB_IMPL_SERIALIZABLE_3 ( key_with_data_buffer_t , key , mcflags , value_provider ) ; <nl> RDB_IMPL_SERIALIZABLE_1 ( distribution_result_t , key_counts ) ; <nl> struct read_get_region_visitor_t : public boost : : static_visitor < key_range_t > { <nl> rget . right_key <nl> ) ; <nl> } <nl> - key_range_t operator ( ) ( distribution_get_query_t ) { <nl> - return key_range_t : : universe ( ) ; <nl> + key_range_t operator ( ) ( distribution_get_query_t dst_get ) { <nl> + return dst_get . 
range ; <nl> } <nl> } ; <nl> <nl> struct read_shard_visitor_t : public boost : : static_visitor < memcached_protocol_t : <nl> return memcached_protocol_t : : read_t ( sub_rget , effective_time ) ; <nl> } <nl> memcached_protocol_t : : read_t operator ( ) ( distribution_get_query_t distribution_get ) { <nl> + distribution_get . range = region ; <nl> return memcached_protocol_t : : read_t ( distribution_get , effective_time ) ; <nl> } <nl> } ; <nl> struct read_visitor_t : public boost : : static_visitor < memcached_protocol_t : : read_ <nl> memcached_rget_slice ( btree , rget . left_mode , rget . left_key , rget . right_mode , rget . right_key , effective_time , txn , superblock ) ) ; <nl> } <nl> memcached_protocol_t : : read_response_t operator ( ) ( const distribution_get_query_t & dget ) { <nl> - return memcached_protocol_t : : read_response_t ( <nl> - memcached_distribution_get ( btree , dget . max_depth , dget . left_bound , effective_time , txn , superblock ) ) ; <nl> + distribution_result_t dstr = memcached_distribution_get ( btree , dget . max_depth , dget . range . left , effective_time , txn , superblock ) ; <nl> + <nl> + for ( std : : map < std : : string , int > : : iterator it = dstr . key_counts . begin ( ) ; <nl> + it ! = dstr . key_counts . end ( ) ; <nl> + / * increments done in loop * / ) { <nl> + if ( ! dget . range . contains_key ( store_key_t ( it - > first ) ) ) { <nl> + dstr . key_counts . erase ( it + + ) ; <nl> + } else { <nl> + + + it ; <nl> + } <nl> + } <nl> + <nl> + return memcached_protocol_t : : read_response_t ( dstr ) ; <nl> } <nl> <nl> <nl> mmm a / src / memcached / queries . hpp <nl> ppp b / src / memcached / queries . hpp <nl> typedef boost : : shared_ptr < one_way_iterator_t < key_with_data_buffer_t > > rget_resu <nl> <nl> / * ` distribution_get ` * / <nl> struct distribution_get_query_t { <nl> - distribution_get_query_t ( ) : max_depth ( 0 ) { } <nl> - explicit distribution_get_query_t ( int _max_depth ) : max_depth ( _max_depth ) { } <nl> + distribution_get_query_t ( ) <nl> + : max_depth ( 0 ) , range ( key_range_t : : universe ( ) ) <nl> + { } <nl> + explicit distribution_get_query_t ( int _max_depth ) <nl> + : max_depth ( _max_depth ) , range ( key_range_t : : universe ( ) ) <nl> + { } <nl> <nl> int max_depth ; <nl> - store_key_t left_bound ; <nl> + key_range_t range ; <nl> } ; <nl> <nl> struct distribution_result_t { <nl>
Fix a number of bugs with distribution queries .
rethinkdb/rethinkdb
a6e5db37da717f761554cb574f15c3988526302a
2012-04-26T22:32:46Z
mmm a / fdbserver / DataDistribution . actor . cpp <nl> ppp b / fdbserver / DataDistribution . actor . cpp <nl> struct TCServerInfo : public ReferenceCounted < TCServerInfo > { <nl> Promise < Void > wakeUpTracker ; <nl> bool inDesiredDC ; <nl> LocalityEntry localityEntry ; <nl> + Promise < Void > updated ; <nl> <nl> TCServerInfo ( StorageServerInterface ssi , ProcessClass processClass , bool inDesiredDC , Reference < LocalitySet > storageServerSet ) : id ( ssi . id ( ) ) , lastKnownInterface ( ssi ) , lastKnownClass ( processClass ) , dataInFlightToServer ( 0 ) , onInterfaceChanged ( interfaceChanged . getFuture ( ) ) , onRemoved ( removed . getFuture ( ) ) , inDesiredDC ( inDesiredDC ) { <nl> localityEntry = ( ( LocalityMap < UID > * ) storageServerSet . getPtr ( ) ) - > add ( ssi . locality , & id ) ; <nl> ACTOR Future < Void > updateServerMetrics ( TCServerInfo * server ) { <nl> when ( ErrorOr < GetPhysicalMetricsReply > rep = wait ( metricsRequest ) ) { <nl> if ( rep . present ( ) ) { <nl> server - > serverMetrics = rep ; <nl> + if ( server - > updated . canBeSet ( ) ) { <nl> + server - > updated . send ( Void ( ) ) ; <nl> + } <nl> return Void ( ) ; <nl> } <nl> metricsRequest = Never ( ) ; <nl> ACTOR Future < Void > storageServerTracker ( <nl> changes . get ( ) . send ( std : : make_pair ( server - > id , Optional < StorageServerInterface > ( ) ) ) ; <nl> } <nl> <nl> + if ( server - > updated . canBeSet ( ) ) { <nl> + server - > updated . send ( Void ( ) ) ; <nl> + } <nl> + <nl> / / Remove server from FF / serverList <nl> Void _ = wait ( removeStorageServer ( cx , server - > id , lock ) ) ; <nl> <nl> ACTOR Future < Void > storageRecruiter ( DDTeamCollection * self , Reference < AsyncVar < <nl> } <nl> <nl> ACTOR Future < Void > updateReplicasKey ( DDTeamCollection * self , Optional < Key > dcId ) { <nl> - Void _ = wait ( self - > initialFailureReactionDelay ) ; <nl> + std : : vector < Future < Void > > serverUpdates ; <nl> + <nl> + for ( auto & it : self - > server_info ) { <nl> + serverUpdates . push_back ( it . second - > updated . getFuture ( ) ) ; <nl> + } <nl> + <nl> + Void _ = wait ( self - > initialFailureReactionDelay & & waitForAll ( serverUpdates ) ) ; <nl> loop { <nl> while ( self - > zeroHealthyTeams - > get ( ) | | self - > processingUnhealthy - > get ( ) ) { <nl> TraceEvent ( " DDUpdatingStalled " , self - > masterId ) . detail ( " DcId " , printable ( dcId ) ) . detail ( " ZeroHealthy " , self - > zeroHealthyTeams - > get ( ) ) . detail ( " ProcessingUnhealthy " , self - > processingUnhealthy - > get ( ) ) ; <nl> ACTOR Future < Void > dataDistributionTeamCollection ( <nl> Void _ = wait ( self - > readyToStart | | error ) ; <nl> TraceEvent ( " DDTeamCollectionReadyToStart " , self - > masterId ) . detail ( " Primary " , self - > primary ) ; <nl> <nl> - self - > addActor . send ( storageRecruiter ( self , db ) ) ; <nl> - self - > addActor . send ( monitorStorageServerRecruitment ( self ) ) ; <nl> - self - > addActor . send ( waitServerListChange ( self , serverRemoved . getFuture ( ) ) ) ; <nl> - self - > addActor . send ( trackExcludedServers ( self ) ) ; <nl> - <nl> if ( self - > badTeamRemover . isReady ( ) ) { <nl> self - > badTeamRemover = removeBadTeams ( self ) ; <nl> self - > addActor . send ( self - > badTeamRemover ) ; <nl> } <nl> <nl> if ( self - > includedDCs . size ( ) ) { <nl> + / / start this actor before any potential recruitments can happen <nl> self - > addActor . 
send ( updateReplicasKey ( self , self - > includedDCs [ 0 ] ) ) ; <nl> } <nl> + <nl> + self - > addActor . send ( storageRecruiter ( self , db ) ) ; <nl> + self - > addActor . send ( monitorStorageServerRecruitment ( self ) ) ; <nl> + self - > addActor . send ( waitServerListChange ( self , serverRemoved . getFuture ( ) ) ) ; <nl> + self - > addActor . send ( trackExcludedServers ( self ) ) ; <nl> + <nl> / / SOMEDAY : Monitor FF / serverList for ( new ) servers that aren ' t in allServers and add or remove them <nl> <nl> loop choose { <nl>
fix : we do not know a region is fully replicated until all the initial storage servers have either been heard from or have been removed
apple/foundationdb
26c49f21be370f2c10d276c3fdc154adf4db461c
2018-11-13T01:39:40Z
mmm a / test / interface / log . py <nl> ppp b / test / interface / log . py <nl> <nl> log_1 = list ( r . db ( " rethinkdb " ) . table ( " logs " ) . order_by ( " timestamp " ) . run ( conn ) ) <nl> with open ( server . logfile_path , " a " ) as log_file : <nl> log_file . write ( " \ n " ) <nl> + log_file . flush ( ) <nl> log_2 = list ( r . db ( " rethinkdb " ) . table ( " logs " ) . order_by ( " timestamp " ) . run ( conn ) ) <nl> assert log_1 = = log_2 , ( log_1 , log_2 ) <nl> log_3 = list ( r . db ( " rethinkdb " ) . table ( " logs " ) . order_by ( " timestamp " ) . run ( conn ) ) <nl> log_4 = list ( r . db ( " rethinkdb " ) . table ( " logs " ) . order_by ( " timestamp " ) . run ( conn ) ) <nl> assert len ( log_3 ) > 0 <nl> - assert log_2 = = log_3 [ : - 1 ] , ( log_2 , log_3 ) <nl> + assert log_2 = = log_3 [ : - 1 ] , pprint . pformat ( { ' log_2 ' : log_2 , ' log_3 ' : log_3 } ) <nl> assert log_3 [ - 1 ] [ " level " ] = = " error " , log_3 [ - 1 ] [ " level " ] <nl> assert log_3 [ - 1 ] [ " message " ] = = " Failed to parse one or more lines from the log file , the contents of the ` logs ` system table will be incomplete . The following parse error occurred : cannot parse log message ( 2 ) while parsing \ " \ " " , log_3 [ - 1 ] [ " message " ] <nl> assert log_3 = = log_4 , ( log_3 , log_4 ) <nl>
flushing to make interface . log more reliable
rethinkdb/rethinkdb
5880e6aaebe151c0b222f06d92be28d33fe72e47
2015-12-07T23:02:29Z
mmm a / HISTORY . md <nl> ppp b / HISTORY . md <nl> <nl> * Add a new table property , " rocksdb . num . range - deletions " , which counts the number of range deletion tombstones in the table . <nl> * Improve the performance of iterators doing long range scans by using readahead , when using direct IO . <nl> * pin_top_level_index_and_filter ( default true ) in BlockBasedTableOptions can be used in combination with cache_index_and_filter_blocks to prefetch and pin the top - level index of partitioned index and filter blocks in cache . It has no impact when cache_index_and_filter_blocks is false . <nl> - * Avoid memcpy when reading mmap files with OpenReadOnly and max_open_files = = - 1 <nl> + * Avoid memcpy when reading mmap files with OpenReadOnly and max_open_files = = - 1 . <nl> + * Support dynamically changing ` ColumnFamilyOptions : : ttl ` via ` SetOptions ( ) ` . <nl> <nl> # # # Bug Fixes <nl> * fix deadlock with enable_pipelined_write = true and max_successive_merges > 0 <nl> mmm a / db / db_compaction_test . cc <nl> ppp b / db / db_compaction_test . cc <nl> TEST_F ( DBCompactionTest , LevelCompactExpiredTtlFiles ) { <nl> } <nl> Flush ( ) ; <nl> } <nl> - Flush ( ) ; <nl> dbfull ( ) - > TEST_WaitForCompact ( ) ; <nl> MoveFilesToLevel ( 3 ) ; <nl> ASSERT_EQ ( " 0 , 0 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> <nl> + / / Delete previously written keys . <nl> for ( int i = 0 ; i < kNumLevelFiles ; + + i ) { <nl> for ( int j = 0 ; j < kNumKeysPerFile ; + + j ) { <nl> - / / Overwrite previous keys with smaller , but predictable , values . <nl> ASSERT_OK ( Delete ( Key ( i * kNumKeysPerFile + j ) ) ) ; <nl> } <nl> Flush ( ) ; <nl> TEST_F ( DBCompactionTest , LevelCompactExpiredTtlFiles ) { <nl> env_ - > addon_time_ . fetch_add ( 36 * 60 * 60 ) ; / / 36 hours <nl> ASSERT_EQ ( " 0 , 2 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> <nl> - / / Just do a siimple write + flush so that the Ttl expired files get <nl> + / / Just do a simple write + flush so that the Ttl expired files get <nl> / / compacted . <nl> ASSERT_OK ( Put ( " a " , " 1 " ) ) ; <nl> Flush ( ) ; <nl> TEST_F ( DBCompactionTest , LevelCompactExpiredTtlFiles ) { <nl> / / All non - L0 files are deleted , as they contained only deleted data . <nl> ASSERT_EQ ( " 1 " , FilesPerLevel ( ) ) ; <nl> rocksdb : : SyncPoint : : GetInstance ( ) - > DisableProcessing ( ) ; <nl> + <nl> + / / Test dynamically changing ttl . <nl> + <nl> + env_ - > addon_time_ . store ( 0 ) ; <nl> + DestroyAndReopen ( options ) ; <nl> + <nl> + for ( int i = 0 ; i < kNumLevelFiles ; + + i ) { <nl> + for ( int j = 0 ; j < kNumKeysPerFile ; + + j ) { <nl> + ASSERT_OK ( <nl> + Put ( Key ( i * kNumKeysPerFile + j ) , RandomString ( & rnd , kValueSize ) ) ) ; <nl> + } <nl> + Flush ( ) ; <nl> + } <nl> + dbfull ( ) - > TEST_WaitForCompact ( ) ; <nl> + MoveFilesToLevel ( 3 ) ; <nl> + ASSERT_EQ ( " 0 , 0 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> + <nl> + / / Delete previously written keys . <nl> + for ( int i = 0 ; i < kNumLevelFiles ; + + i ) { <nl> + for ( int j = 0 ; j < kNumKeysPerFile ; + + j ) { <nl> + ASSERT_OK ( Delete ( Key ( i * kNumKeysPerFile + j ) ) ) ; <nl> + } <nl> + Flush ( ) ; <nl> + } <nl> + dbfull ( ) - > TEST_WaitForCompact ( ) ; <nl> + ASSERT_EQ ( " 2 , 0 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> + MoveFilesToLevel ( 1 ) ; <nl> + ASSERT_EQ ( " 0 , 2 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> + <nl> + / / Move time forward by 12 hours , and make sure that compaction still doesn ' t <nl> + / / trigger as ttl is set to 24 hours . <nl> + env_ - > addon_time_ . 
fetch_add ( 12 * 60 * 60 ) ; <nl> + ASSERT_OK ( Put ( " a " , " 1 " ) ) ; <nl> + Flush ( ) ; <nl> + dbfull ( ) - > TEST_WaitForCompact ( ) ; <nl> + ASSERT_EQ ( " 1 , 2 , 0 , 2 " , FilesPerLevel ( ) ) ; <nl> + <nl> + rocksdb : : SyncPoint : : GetInstance ( ) - > SetCallBack ( <nl> + " LevelCompactionPicker : : PickCompaction : Return " , [ & ] ( void * arg ) { <nl> + Compaction * compaction = reinterpret_cast < Compaction * > ( arg ) ; <nl> + ASSERT_TRUE ( compaction - > compaction_reason ( ) = = CompactionReason : : kTtl ) ; <nl> + } ) ; <nl> + rocksdb : : SyncPoint : : GetInstance ( ) - > EnableProcessing ( ) ; <nl> + <nl> + / / Dynamically change ttl to 10 hours . <nl> + / / This should trigger a ttl compaction , as 12 hours have already passed . <nl> + ASSERT_OK ( dbfull ( ) - > SetOptions ( { { " ttl " , " 36000 " } } ) ) ; <nl> + dbfull ( ) - > TEST_WaitForCompact ( ) ; <nl> + / / All non - L0 files are deleted , as they contained only deleted data . <nl> + ASSERT_EQ ( " 1 " , FilesPerLevel ( ) ) ; <nl> + rocksdb : : SyncPoint : : GetInstance ( ) - > DisableProcessing ( ) ; <nl> } <nl> <nl> TEST_F ( DBCompactionTest , CompactRangeDelayedByL0FileCount ) { <nl> mmm a / db / version_set . cc <nl> ppp b / db / version_set . cc <nl> void VersionStorageInfo : : ComputeCompactionScore ( <nl> } <nl> ComputeFilesMarkedForCompaction ( ) ; <nl> ComputeBottommostFilesMarkedForCompaction ( ) ; <nl> - if ( immutable_cf_options . ttl > 0 ) { <nl> - ComputeExpiredTtlFiles ( immutable_cf_options ) ; <nl> + if ( mutable_cf_options . ttl > 0 ) { <nl> + ComputeExpiredTtlFiles ( immutable_cf_options , mutable_cf_options . ttl ) ; <nl> } <nl> EstimateCompactionBytesNeeded ( mutable_cf_options ) ; <nl> } <nl> void VersionStorageInfo : : ComputeFilesMarkedForCompaction ( ) { <nl> } <nl> <nl> void VersionStorageInfo : : ComputeExpiredTtlFiles ( <nl> - const ImmutableCFOptions & ioptions ) { <nl> - assert ( ioptions . ttl > 0 ) ; <nl> + const ImmutableCFOptions & ioptions , const uint64_t ttl ) { <nl> + assert ( ttl > 0 ) ; <nl> <nl> expired_ttl_files_ . clear ( ) ; <nl> <nl> void VersionStorageInfo : : ComputeExpiredTtlFiles ( <nl> f - > fd . table_reader - > GetTableProperties ( ) ! = nullptr ) { <nl> auto creation_time = <nl> f - > fd . table_reader - > GetTableProperties ( ) - > creation_time ; <nl> - if ( creation_time > 0 & & <nl> - creation_time < ( current_time - ioptions . ttl ) ) { <nl> + if ( creation_time > 0 & & creation_time < ( current_time - ttl ) ) { <nl> expired_ttl_files_ . emplace_back ( level , f ) ; <nl> } <nl> } <nl> mmm a / db / version_set . h <nl> ppp b / db / version_set . h <nl> class VersionStorageInfo { <nl> <nl> / / This computes ttl_expired_files_ and is called by <nl> / / ComputeCompactionScore ( ) <nl> - void ComputeExpiredTtlFiles ( const ImmutableCFOptions & ioptions ) ; <nl> + void ComputeExpiredTtlFiles ( const ImmutableCFOptions & ioptions , <nl> + const uint64_t ttl ) ; <nl> <nl> / / This computes bottommost_files_marked_for_compaction_ and is called by <nl> / / ComputeCompactionScore ( ) or UpdateOldestSnapshot ( ) . <nl> mmm a / include / rocksdb / advanced_options . h <nl> ppp b / include / rocksdb / advanced_options . h <nl> struct AdvancedColumnFamilyOptions { <nl> / / Enabled only for level compaction for now . <nl> / / <nl> / / Default : 0 ( disabled ) <nl> + / / <nl> + / / Dynamically changeable through SetOptions ( ) API <nl> uint64_t ttl = 0 ; <nl> <nl> / / Create ColumnFamilyOptions with default values for all fields <nl> mmm a / options / cf_options . 
cc <nl> ppp b / options / cf_options . cc <nl> ImmutableCFOptions : : ImmutableCFOptions ( const ImmutableDBOptions & db_options , <nl> max_subcompactions ( db_options . max_subcompactions ) , <nl> memtable_insert_with_hint_prefix_extractor ( <nl> cf_options . memtable_insert_with_hint_prefix_extractor . get ( ) ) , <nl> - ttl ( cf_options . ttl ) , <nl> cf_paths ( cf_options . cf_paths ) { } <nl> <nl> / / Multiple two operands . If they overflow , return op1 . <nl> void MutableCFOptions : : Dump ( Logger * log ) const { <nl> max_bytes_for_level_base ) ; <nl> ROCKS_LOG_INFO ( log , " max_bytes_for_level_multiplier : % f " , <nl> max_bytes_for_level_multiplier ) ; <nl> + ROCKS_LOG_INFO ( log , " ttl : % " PRIu64 , <nl> + ttl ) ; <nl> std : : string result ; <nl> char buf [ 10 ] ; <nl> for ( const auto m : max_bytes_for_level_multiplier_additional ) { <nl> mmm a / options / cf_options . h <nl> ppp b / options / cf_options . h <nl> struct ImmutableCFOptions { <nl> <nl> const SliceTransform * memtable_insert_with_hint_prefix_extractor ; <nl> <nl> - uint64_t ttl ; <nl> - <nl> std : : vector < DbPath > cf_paths ; <nl> } ; <nl> <nl> struct MutableCFOptions { <nl> target_file_size_multiplier ( options . target_file_size_multiplier ) , <nl> max_bytes_for_level_base ( options . max_bytes_for_level_base ) , <nl> max_bytes_for_level_multiplier ( options . max_bytes_for_level_multiplier ) , <nl> + ttl ( options . ttl ) , <nl> max_bytes_for_level_multiplier_additional ( <nl> options . max_bytes_for_level_multiplier_additional ) , <nl> compaction_options_fifo ( options . compaction_options_fifo ) , <nl> struct MutableCFOptions { <nl> target_file_size_multiplier ( 0 ) , <nl> max_bytes_for_level_base ( 0 ) , <nl> max_bytes_for_level_multiplier ( 0 ) , <nl> + ttl ( 0 ) , <nl> compaction_options_fifo ( ) , <nl> max_sequential_skip_in_iterations ( 0 ) , <nl> paranoid_file_checks ( false ) , <nl> struct MutableCFOptions { <nl> int target_file_size_multiplier ; <nl> uint64_t max_bytes_for_level_base ; <nl> double max_bytes_for_level_multiplier ; <nl> + uint64_t ttl ; <nl> std : : vector < int > max_bytes_for_level_multiplier_additional ; <nl> CompactionOptionsFIFO compaction_options_fifo ; <nl> CompactionOptionsUniversal compaction_options_universal ; <nl> mmm a / options / options_helper . cc <nl> ppp b / options / options_helper . cc <nl> ColumnFamilyOptions BuildColumnFamilyOptions ( <nl> mutable_cf_options . max_bytes_for_level_base ; <nl> cf_opts . max_bytes_for_level_multiplier = <nl> mutable_cf_options . max_bytes_for_level_multiplier ; <nl> + cf_opts . ttl = mutable_cf_options . ttl ; <nl> <nl> cf_opts . max_bytes_for_level_multiplier_additional . clear ( ) ; <nl> for ( auto value : <nl> std : : unordered_map < std : : string , OptionTypeInfo > <nl> offsetof ( struct MutableCFOptions , compaction_options_universal ) } } , <nl> { " ttl " , <nl> { offset_of ( & ColumnFamilyOptions : : ttl ) , OptionType : : kUInt64T , <nl> - OptionVerificationType : : kNormal , false , 0 } } } ; <nl> + OptionVerificationType : : kNormal , true , <nl> + offsetof ( struct MutableCFOptions , ttl ) } } } ; <nl> <nl> std : : unordered_map < std : : string , OptionTypeInfo > <nl> OptionsHelper : : fifo_compaction_options_type_info = { <nl>
Allow ttl to be changed dynamically ( )
facebook/rocksdb
991120fa10716ea371463188f6638d0401c1a935
2018-07-16T21:27:53Z
mmm a / cocos / 2d / CCActionCamera . cpp <nl> ppp b / cocos / 2d / CCActionCamera . cpp <nl> void ActionCamera : : updateTransform ( ) <nl> / / OrbitCamera <nl> / / <nl> <nl> + OrbitCamera : : OrbitCamera ( ) <nl> + : _radius ( 0 . 0 ) <nl> + , _deltaRadius ( 0 . 0 ) <nl> + , _angleZ ( 0 . 0 ) <nl> + , _deltaAngleZ ( 0 . 0 ) <nl> + , _angleX ( 0 . 0 ) <nl> + , _deltaAngleX ( 0 . 0 ) <nl> + , _radZ ( 0 . 0 ) <nl> + , _radDeltaZ ( 0 . 0 ) <nl> + , _radX ( 0 . 0 ) <nl> + , _radDeltaX ( 0 . 0 ) <nl> + { <nl> + } <nl> + OrbitCamera : : ~ OrbitCamera ( ) <nl> + { <nl> + } <nl> + <nl> OrbitCamera * OrbitCamera : : create ( float t , float radius , float deltaRadius , float angleZ , float deltaAngleZ , float angleX , float deltaAngleX ) <nl> { <nl> OrbitCamera * obitCamera = new OrbitCamera ( ) ; <nl> mmm a / cocos / 2d / CCActionCamera . h <nl> ppp b / cocos / 2d / CCActionCamera . h <nl> class CC_DLL ActionCamera : public ActionInterval / / < NSCopying > <nl> * @ js NA <nl> * @ lua NA <nl> * / <nl> - virtual ~ ActionCamera ( ) { } <nl> + virtual ~ ActionCamera ( ) { } ; <nl> <nl> / / Overrides <nl> virtual void startWithTarget ( Node * target ) override ; <nl> class CC_DLL OrbitCamera : public ActionCamera / / < NSCopying > <nl> public : <nl> / * * creates a OrbitCamera action with radius , delta - radius , z , deltaZ , x , deltaX * / <nl> static OrbitCamera * create ( float t , float radius , float deltaRadius , float angleZ , float deltaAngleZ , float angleX , float deltaAngleX ) ; <nl> + <nl> + / * * positions the camera according to spherical coordinates * / <nl> + void sphericalRadius ( float * r , float * zenith , float * azimuth ) ; <nl> + <nl> + / / Overrides <nl> + OrbitCamera * clone ( ) const override ; <nl> + virtual void startWithTarget ( Node * target ) override ; <nl> + virtual void update ( float time ) override ; <nl> + <nl> + CC_CONSTRUCTOR_ACCESS : <nl> / * * <nl> * @ js ctor <nl> * / <nl> - OrbitCamera ( ) <nl> - : _radius ( 0 . 0 ) <nl> - , _deltaRadius ( 0 . 0 ) <nl> - , _angleZ ( 0 . 0 ) <nl> - , _deltaAngleZ ( 0 . 0 ) <nl> - , _angleX ( 0 . 0 ) <nl> - , _deltaAngleX ( 0 . 0 ) <nl> - , _radZ ( 0 . 0 ) <nl> - , _radDeltaZ ( 0 . 0 ) <nl> - , _radX ( 0 . 0 ) <nl> - , _radDeltaX ( 0 . 0 ) <nl> - { } <nl> + OrbitCamera ( ) ; <nl> / * * <nl> * @ js NA <nl> * @ lua NA <nl> * / <nl> - virtual ~ OrbitCamera ( ) { } <nl> + virtual ~ OrbitCamera ( ) ; <nl> <nl> / * * initializes a OrbitCamera action with radius , delta - radius , z , deltaZ , x , deltaX * / <nl> bool initWithDuration ( float t , float radius , float deltaRadius , float angleZ , float deltaAngleZ , float angleX , float deltaAngleX ) ; <nl> - / * * positions the camera according to spherical coordinates * / <nl> - void sphericalRadius ( float * r , float * zenith , float * azimuth ) ; <nl> - <nl> - / / Overrides <nl> - OrbitCamera * clone ( ) const override ; <nl> - virtual void startWithTarget ( Node * target ) override ; <nl> - virtual void update ( float time ) override ; <nl> <nl> protected : <nl> float _radius ; <nl> mmm a / cocos / 2d / CCActionEase . h <nl> ppp b / cocos / 2d / CCActionEase . 
h <nl> class CC_DLL ActionEase : public ActionInterval <nl> virtual void stop ( ) override ; <nl> virtual void update ( float time ) override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> ActionEase ( ) { } <nl> virtual ~ ActionEase ( ) ; <nl> / * * initializes the action * / <nl> bool initWithAction ( ActionInterval * action ) ; <nl> <nl> + protected : <nl> / * * The inner action * / <nl> ActionInterval * _inner ; <nl> private : <nl> class CC_DLL EaseRateAction : public ActionEase <nl> virtual EaseRateAction * clone ( ) const override = 0 ; <nl> virtual EaseRateAction * reverse ( ) const override = 0 ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseRateAction ( ) { } <nl> virtual ~ EaseRateAction ( ) ; <nl> / * * Initializes the action with the inner action and the rate parameter * / <nl> bool initWithAction ( ActionInterval * pAction , float fRate ) ; <nl> <nl> + protected : <nl> float _rate ; <nl> <nl> private : <nl> class CC_DLL EaseIn : public EaseRateAction <nl> virtual EaseIn * clone ( ) const override ; <nl> virtual EaseIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseIn ( ) { } <nl> virtual ~ EaseIn ( ) { } <nl> <nl> class CC_DLL EaseOut : public EaseRateAction <nl> virtual EaseOut * clone ( ) const override ; <nl> virtual EaseOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseOut ( ) { } <nl> virtual ~ EaseOut ( ) { } <nl> <nl> class CC_DLL EaseInOut : public EaseRateAction <nl> virtual EaseInOut * clone ( ) const override ; <nl> virtual EaseInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseInOut ( ) { } <nl> virtual ~ EaseInOut ( ) { } <nl> <nl> class CC_DLL EaseExponentialIn : public ActionEase <nl> virtual EaseExponentialIn * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseExponentialIn ( ) { } <nl> virtual ~ EaseExponentialIn ( ) { } <nl> <nl> class CC_DLL EaseExponentialOut : public ActionEase <nl> virtual EaseExponentialOut * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseExponentialOut ( ) { } <nl> virtual ~ EaseExponentialOut ( ) { } <nl> <nl> class CC_DLL EaseExponentialInOut : public ActionEase <nl> virtual EaseExponentialInOut * clone ( ) const override ; <nl> virtual EaseExponentialInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseExponentialInOut ( ) { } <nl> virtual ~ EaseExponentialInOut ( ) { } <nl> <nl> class CC_DLL EaseSineIn : public ActionEase <nl> virtual EaseSineIn * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseSineIn ( ) { } <nl> virtual ~ EaseSineIn ( ) { } <nl> <nl> class CC_DLL EaseSineOut : public ActionEase <nl> virtual EaseSineOut * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseSineOut ( ) { } <nl> virtual ~ EaseSineOut ( ) { } <nl> <nl> class CC_DLL EaseSineInOut : public ActionEase <nl> virtual EaseSineInOut * clone ( ) const override ; <nl> virtual EaseSineInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseSineInOut ( ) { } <nl> virtual ~ EaseSineInOut ( ) { } <nl> <nl> class 
CC_DLL EaseElastic : public ActionEase <nl> virtual EaseElastic * clone ( ) const override = 0 ; <nl> virtual EaseElastic * reverse ( ) const override = 0 ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseElastic ( ) { } <nl> virtual ~ EaseElastic ( ) { } <nl> / * * Initializes the action with the inner action and the period in radians ( default is 0 . 3 ) * / <nl> bool initWithAction ( ActionInterval * action , float period = 0 . 3f ) ; <nl> <nl> + protected : <nl> float _period ; <nl> <nl> private : <nl> class CC_DLL EaseElasticIn : public EaseElastic <nl> virtual EaseElasticIn * clone ( ) const override ; <nl> virtual EaseElastic * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseElasticIn ( ) { } <nl> virtual ~ EaseElasticIn ( ) { } <nl> <nl> class CC_DLL EaseElasticOut : public EaseElastic <nl> virtual EaseElasticOut * clone ( ) const override ; <nl> virtual EaseElastic * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseElasticOut ( ) { } <nl> virtual ~ EaseElasticOut ( ) { } <nl> <nl> class CC_DLL EaseElasticInOut : public EaseElastic <nl> virtual EaseElasticInOut * clone ( ) const override ; <nl> virtual EaseElasticInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseElasticInOut ( ) { } <nl> virtual ~ EaseElasticInOut ( ) { } <nl> <nl> class CC_DLL EaseBounce : public ActionEase <nl> virtual EaseBounce * clone ( ) const override = 0 ; <nl> virtual EaseBounce * reverse ( ) const override = 0 ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBounce ( ) { } <nl> virtual ~ EaseBounce ( ) { } <nl> <nl> class CC_DLL EaseBounceIn : public EaseBounce <nl> virtual EaseBounceIn * clone ( ) const override ; <nl> virtual EaseBounce * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBounceIn ( ) { } <nl> virtual ~ EaseBounceIn ( ) { } <nl> <nl> class CC_DLL EaseBounceOut : public EaseBounce <nl> virtual EaseBounceOut * clone ( ) const override ; <nl> virtual EaseBounce * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBounceOut ( ) { } <nl> virtual ~ EaseBounceOut ( ) { } <nl> <nl> class CC_DLL EaseBounceInOut : public EaseBounce <nl> virtual EaseBounceInOut * clone ( ) const override ; <nl> virtual EaseBounceInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBounceInOut ( ) { } <nl> virtual ~ EaseBounceInOut ( ) { } <nl> <nl> class CC_DLL EaseBackIn : public ActionEase <nl> virtual EaseBackIn * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBackIn ( ) { } <nl> virtual ~ EaseBackIn ( ) { } <nl> <nl> class CC_DLL EaseBackOut : public ActionEase <nl> virtual EaseBackOut * clone ( ) const override ; <nl> virtual ActionEase * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBackOut ( ) { } <nl> virtual ~ EaseBackOut ( ) { } <nl> <nl> class CC_DLL EaseBackInOut : public ActionEase <nl> virtual EaseBackInOut * clone ( ) const override ; <nl> virtual EaseBackInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBackInOut ( ) { } <nl> virtual ~ EaseBackInOut ( ) { } <nl> <nl> class EaseBezierAction : public cocos2d : : ActionEase <nl> <nl> virtual void setBezierParamer ( float p0 , float p1 , float p2 , float p3 ) 
; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseBezierAction ( ) { } <nl> virtual ~ EaseBezierAction ( ) { } <nl> <nl> + protected : <nl> float _p0 ; <nl> float _p1 ; <nl> float _p2 ; <nl> class EaseQuadraticActionIn : public cocos2d : : ActionEase <nl> virtual EaseQuadraticActionIn * clone ( ) const override ; <nl> virtual EaseQuadraticActionIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuadraticActionIn ( ) { } <nl> virtual ~ EaseQuadraticActionIn ( ) { } <nl> <nl> class EaseQuadraticActionOut : public cocos2d : : ActionEase <nl> virtual EaseQuadraticActionOut * clone ( ) const override ; <nl> virtual EaseQuadraticActionOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuadraticActionOut ( ) { } <nl> virtual ~ EaseQuadraticActionOut ( ) { } <nl> <nl> class EaseQuadraticActionInOut : public cocos2d : : ActionEase <nl> virtual EaseQuadraticActionInOut * clone ( ) const override ; <nl> virtual EaseQuadraticActionInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuadraticActionInOut ( ) { } <nl> virtual ~ EaseQuadraticActionInOut ( ) { } <nl> <nl> class EaseQuarticActionIn : public cocos2d : : ActionEase <nl> virtual EaseQuarticActionIn * clone ( ) const override ; <nl> virtual EaseQuarticActionIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuarticActionIn ( ) { } <nl> virtual ~ EaseQuarticActionIn ( ) { } <nl> <nl> class EaseQuarticActionOut : public cocos2d : : ActionEase <nl> virtual EaseQuarticActionOut * clone ( ) const override ; <nl> virtual EaseQuarticActionOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuarticActionOut ( ) { } <nl> virtual ~ EaseQuarticActionOut ( ) { } <nl> <nl> class EaseQuarticActionInOut : public cocos2d : : ActionEase <nl> virtual EaseQuarticActionInOut * clone ( ) const override ; <nl> virtual EaseQuarticActionInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuarticActionInOut ( ) { } <nl> virtual ~ EaseQuarticActionInOut ( ) { } <nl> <nl> class EaseQuinticActionIn : public cocos2d : : ActionEase <nl> virtual EaseQuinticActionIn * clone ( ) const override ; <nl> virtual EaseQuinticActionIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuinticActionIn ( ) { } <nl> virtual ~ EaseQuinticActionIn ( ) { } <nl> <nl> class EaseQuinticActionOut : public cocos2d : : ActionEase <nl> virtual EaseQuinticActionOut * clone ( ) const override ; <nl> virtual EaseQuinticActionOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuinticActionOut ( ) { } <nl> virtual ~ EaseQuinticActionOut ( ) { } <nl> <nl> class EaseQuinticActionInOut : public cocos2d : : ActionEase <nl> virtual EaseQuinticActionInOut * clone ( ) const override ; <nl> virtual EaseQuinticActionInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseQuinticActionInOut ( ) { } <nl> virtual ~ EaseQuinticActionInOut ( ) { } <nl> <nl> class EaseCircleActionIn : public cocos2d : : ActionEase <nl> virtual EaseCircleActionIn * clone ( ) const override ; <nl> virtual EaseCircleActionIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCircleActionIn ( ) { } <nl> virtual ~ EaseCircleActionIn ( ) { } <nl> <nl> class 
EaseCircleActionOut : public cocos2d : : ActionEase <nl> virtual EaseCircleActionOut * clone ( ) const override ; <nl> virtual EaseCircleActionOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCircleActionOut ( ) { } <nl> virtual ~ EaseCircleActionOut ( ) { } <nl> <nl> class EaseCircleActionInOut : public cocos2d : : ActionEase <nl> virtual EaseCircleActionInOut * clone ( ) const override ; <nl> virtual EaseCircleActionInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCircleActionInOut ( ) { } <nl> virtual ~ EaseCircleActionInOut ( ) { } <nl> <nl> class EaseCubicActionIn : public cocos2d : : ActionEase <nl> virtual EaseCubicActionIn * clone ( ) const override ; <nl> virtual EaseCubicActionIn * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCubicActionIn ( ) { } <nl> virtual ~ EaseCubicActionIn ( ) { } <nl> <nl> class EaseCubicActionOut : public cocos2d : : ActionEase <nl> virtual EaseCubicActionOut * clone ( ) const override ; <nl> virtual EaseCubicActionOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCubicActionOut ( ) { } <nl> virtual ~ EaseCubicActionOut ( ) { } <nl> <nl> class EaseCubicActionInOut : public cocos2d : : ActionEase <nl> virtual EaseCubicActionInOut * clone ( ) const override ; <nl> virtual EaseCubicActionInOut * reverse ( ) const override ; <nl> <nl> - protected : <nl> + CC_CONSTRUCTOR_ACCESS : <nl> EaseCubicActionInOut ( ) { } <nl> virtual ~ EaseCubicActionInOut ( ) { } <nl> <nl> mmm a / cocos / 2d / CCActionTween . h <nl> ppp b / cocos / 2d / CCActionTween . h <nl> class CC_DLL ActionTween : public ActionInterval <nl> public : <nl> / * * creates an initializes the action with the property name ( key ) , and the from and to parameters . * / <nl> static ActionTween * create ( float duration , const std : : string & key , float from , float to ) ; <nl> - / * * initializes the action with the property name ( key ) , and the from and to parameters . * / <nl> - bool initWithDuration ( float duration , const std : : string & key , float from , float to ) ; <nl> <nl> / / Overrides <nl> void startWithTarget ( Node * target ) override ; <nl> void update ( float dt ) override ; <nl> ActionTween * reverse ( ) const override ; <nl> ActionTween * clone ( ) const override ; <nl> + <nl> + CC_CONSTRUCTOR_ACCESS : <nl> + / * * initializes the action with the property name ( key ) , and the from and to parameters . * / <nl> + bool initWithDuration ( float duration , const std : : string & key , float from , float to ) ; <nl> <nl> protected : <nl> std : : string _key ; <nl>
Merge pull request from pandamicro/ConstructorAccess2
cocos2d/cocos2d-x
15e584a83cd549e12500ad4a83d3365126e32474
2014-03-28T10:03:35Z
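The cocos2d-x commit above replaces the protected: label in front of the ease-action constructors and destructors with CC_CONSTRUCTOR_ACCESS:, and moves ActionTween::initWithDuration under the same label. Below is a minimal C++ sketch of that pattern, assuming CC_CONSTRUCTOR_ACCESS is a macro that defaults to protected but can be redefined to public when script bindings or tests need to reach the constructors; the EaseExample class and its members are placeholders, not actual cocos2d-x API.

#ifndef CC_CONSTRUCTOR_ACCESS
#define CC_CONSTRUCTOR_ACCESS protected   // assumed default; redefine to public for bindings or tests
#endif

class EaseExample
{
public:
    // The create() factory stays the public way to obtain an instance.
    static EaseExample* create() { return new EaseExample(); }

CC_CONSTRUCTOR_ACCESS:   // expands to protected: unless redefined
    EaseExample() {}
    virtual ~EaseExample() {}

protected:
    float _rate = 0.0f;   // ordinary data members stay plainly protected
};

With the default definition this compiles to exactly what the old protected: label produced, so regular builds keep the same access control while a redefinition can open up construction where a binding layer needs it.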
mmm a / shell / mongo_vstudio . cpp <nl> ppp b / shell / mongo_vstudio . cpp <nl> const StringData _jscode_raw_utils = <nl> " if ( a = = b ) \ n " <nl> " return true ; \ n " <nl> " \ n " <nl> - " a = tojson ( a ) ; \ n " <nl> - " b = tojson ( b ) ; \ n " <nl> + " a = tojson ( a , false , true ) ; \ n " <nl> + " b = tojson ( b , false , true ) ; \ n " <nl> " \ n " <nl> " if ( a = = b ) \ n " <nl> " return true ; \ n " <nl> " \ n " <nl> " var clean = function ( s ) { \ n " <nl> - " s = s . replace ( / NumberInt \ \ ( ( \ \ d + ) \ \ ) / , \ " $ 1 \ " ) ; \ n " <nl> + " s = s . replace ( / NumberInt \ \ ( ( \ \ - ? \ \ d + ) \ \ ) / g , \ " $ 1 \ " ) ; \ n " <nl> " return s ; \ n " <nl> " } \ n " <nl> " \ n " <nl> mmm a / shell / utils . js <nl> ppp b / shell / utils . js <nl> friendlyEqual = function ( a , b ) { <nl> if ( a = = b ) <nl> return true ; <nl> <nl> - a = tojson ( a ) ; <nl> - b = tojson ( b ) ; <nl> + a = tojson ( a , false , true ) ; <nl> + b = tojson ( b , false , true ) ; <nl> <nl> if ( a = = b ) <nl> return true ; <nl> <nl> var clean = function ( s ) { <nl> - s = s . replace ( / NumberInt \ ( ( \ d + ) \ ) / , " $ 1 " ) ; <nl> + s = s . replace ( / NumberInt \ ( ( \ - ? \ d + ) \ ) / g , " $ 1 " ) ; <nl> return s ; <nl> } <nl> <nl>
more forgiving assert.eq
mongodb/mongo
91efa3fcd9881484d4bb5da14a307c0f06d4c4d5
2011-07-05T04:46:50Z
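The mongo commit above touches the clean() helper inside friendlyEqual (in both shell/utils.js and the generated shell/mongo_vstudio.cpp): the NumberInt pattern gains an optional leading minus sign and a g flag so every occurrence is rewritten, not just the first, and the two tojson calls now pass an extra argument. The snippet below mirrors just the regex part of that change, written in C++ with std::regex as a rough illustration rather than the shell's actual JavaScript; the sample document string is invented.

#include <iostream>
#include <regex>
#include <string>

int main() {
    std::string doc = "{ a: NumberInt(-3), b: NumberInt(7), c: NumberInt(7) }";

    // Old pattern: digits only, and (like a JS regex without the g flag)
    // only the first match gets rewritten.
    std::regex oldRe(R"(NumberInt\((\d+)\))");
    std::string oldClean = std::regex_replace(
        doc, oldRe, "$1", std::regex_constants::format_first_only);

    // New pattern: optional leading '-', and every occurrence is rewritten
    // (std::regex_replace replaces all matches by default, like the added g flag).
    std::regex newRe(R"(NumberInt\((-?\d+)\))");
    std::string newClean = std::regex_replace(doc, newRe, "$1");

    std::cout << oldClean << "\n";  // { a: NumberInt(-3), b: 7, c: NumberInt(7) }
    std::cout << newClean << "\n";  // { a: -3, b: 7, c: 7 }
    return 0;
}

The old pattern skipped NumberInt(-3) entirely and stopped after the first positive match, which is why two otherwise equal documents could still compare unequal; the new pattern strips every NumberInt wrapper, signed or not, before the string comparison.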
mmm a / test / cpp / tensorexpr / test_loopnest . cpp <nl> ppp b / test / cpp / tensorexpr / test_loopnest . cpp <nl> void testScheduleFunctionCall01 ( ) { <nl> ExpectAllNear ( d_v , d_ref , 1e - 5 ) ; <nl> } <nl> <nl> + void testScheduleInlineSimple ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + Buffer a_buf ( " a " , kFloat , { M , N } ) ; <nl> + Buffer b_buf ( " b " , kFloat , { N , K } ) ; <nl> + Buffer c_buf ( " c " , kFloat , { M , N } ) ; <nl> + Buffer d_buf ( " d " , kFloat , { M , K } ) ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return a_buf ( m , n ) * b_buf ( n , k ) ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return c_buf ( m , n ) * d_buf ( m , k ) + x - > call ( m , n , k ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + LoopNest l2 ( { y } ) ; <nl> + l2 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + l1 . prepareForCodegen ( ) ; <nl> + l2 . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + Stmt * stmt2 = IRSimplifier : : simplify ( l2 . root_stmt ( ) ) ; <nl> + <nl> + SimpleIREvaluator eval1 ( stmt1 , a_buf , b_buf , c_buf , d_buf , y ) ; <nl> + SimpleIREvaluator eval2 ( stmt2 , a_buf , b_buf , c_buf , d_buf , y ) ; <nl> + <nl> + PaddedBuffer < float > a_v ( M , N ) ; <nl> + PaddedBuffer < float > b_v ( N , K ) ; <nl> + PaddedBuffer < float > c_v ( M , N ) ; <nl> + PaddedBuffer < float > d_v ( M , K ) ; <nl> + <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + a_v ( i , j ) = i * i ; <nl> + } <nl> + } <nl> + for ( int i = 0 ; i < N ; i + + ) { <nl> + for ( int j = 0 ; j < K ; j + + ) { <nl> + b_v ( i , j ) = j * j ; <nl> + } <nl> + } <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + c_v ( i , j ) = i + j ; <nl> + } <nl> + } <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < K ; j + + ) { <nl> + d_v ( i , j ) = i * j ; <nl> + } <nl> + } <nl> + <nl> + PaddedBuffer < float > y_1 ( M , N , K ) ; <nl> + PaddedBuffer < float > y_2 ( M , N , K ) ; <nl> + <nl> + eval1 ( a_v , b_v , c_v , d_v , y_1 ) ; <nl> + eval2 ( a_v , b_v , c_v , d_v , y_2 ) ; <nl> + ExpectAllNear ( y_1 , y_2 , 1e - 5 ) ; <nl> + std : : ostringstream oss1 , oss2 ; <nl> + oss1 < < * stmt1 ; <nl> + oss2 < < * stmt2 ; <nl> + ASSERT_GT ( oss1 . str ( ) . size ( ) , oss2 . str ( ) . size ( ) ) ; <nl> + } <nl> + <nl> static std : : string remove_space ( const std : : string & str ) { <nl> std : : string str_new = str ; <nl> str_new . erase ( <nl> void InlineFunc01Helper ( const std : : vector < std : : string > & inline_order ) { <nl> LoopNest l ( { z } ) ; <nl> for ( const std : : string & order : inline_order ) { <nl> if ( order = = " x " ) { <nl> - l . computeInline ( l . getLoopBodyFor ( x ) ) ; <nl> + l . computeInline ( x - > buf ( ) ) ; <nl> } else if ( order = = " y " ) { <nl> - l . computeInline ( l . getLoopBodyFor ( y ) ) ; <nl> + l . 
computeInline ( y - > buf ( ) ) ; <nl> } else { <nl> throw std : : runtime_error ( " Invalid order : " + order ) ; <nl> } <nl> void InlineFunc01Helper ( const std : : vector < std : : string > & inline_order ) { <nl> } <nl> for ( int i = 0 ; i < N ; i + + ) { <nl> for ( int j = 0 ; j < K ; j + + ) { <nl> - a_v ( i , j ) = j * j ; <nl> + b_v ( i , j ) = j * j ; <nl> } <nl> } <nl> for ( int i = 0 ; i < M ; i + + ) { <nl> void testScheduleInlineFunc01 ( ) { <nl> InlineFunc01Helper ( { } ) ; <nl> } <nl> <nl> + / / Make sure we cache random vars if we should . <nl> + void testScheduleInlineRandom ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return Mod : : make ( Intrinsics : : make ( kRand , kInt ) , 5 ) ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return x - > call ( m , n , k ) + x - > call ( m , n , k ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + l1 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + / / would normally compare results but Rand isn ' t implemented in the <nl> + / / SimpleIREvaluator , even if we could seed it . <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + std : : ostringstream oss ; <nl> + oss < < * stmt1 ; <nl> + <nl> + / / Check the IR we produced <nl> + const std : : string & verification_pattern = <nl> + R " IR ( <nl> + # CHECK : for ( int m2 = 0 ; m2 < 4 ; m2 + + ) <nl> + # CHECK : for ( int n2 = 0 ; n2 < 5 ; n2 + + ) <nl> + # CHECK : for ( int k2 = 0 ; k2 < 6 ; k2 + + ) <nl> + # CHECK : int x = rand ( ) ; <nl> + # CHECK : y [ m2 , n2 , k2 ] = 2 * ( x % 5 ) ; ) IR " ; <nl> + torch : : jit : : testing : : FileCheck ( ) . run ( verification_pattern , oss . str ( ) ) ; <nl> + } <nl> + <nl> + / / Make sure we don ' t cache random vars that are not being inlined . <nl> + void testScheduleInlineRandomUnrelated ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return m * n * k ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return x - > call ( m , n , k ) + Intrinsics : : make ( kRand , kInt ) + <nl> + Intrinsics : : make ( kRand , kInt ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + l1 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + / / would normally compare results but Rand isn ' t implemented in the <nl> + / / SimpleIREvaluator , even if we could seed it . <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . 
root_stmt ( ) ) ; <nl> + std : : ostringstream oss ; <nl> + oss < < * stmt1 ; <nl> + <nl> + / / Check the IR we produced <nl> + const std : : string & verification_pattern = <nl> + R " IR ( <nl> + # CHECK : for ( int m2 = 0 ; m2 < 4 ; m2 + + ) <nl> + # CHECK : for ( int n2 = 0 ; n2 < 5 ; n2 + + ) <nl> + # CHECK : for ( int k2 = 0 ; k2 < 6 ; k2 + + ) <nl> + # CHECK : y [ m2 , n2 , k2 ] = ( ( n2 * m2 ) * k2 + ( rand ( ) ) ) + ( rand ( ) ) ; ) IR " ; <nl> + torch : : jit : : testing : : FileCheck ( ) . run ( verification_pattern , oss . str ( ) ) ; <nl> + } <nl> + <nl> + / / Make sure we generate the right number of random values = = the dimensionality <nl> + / / of the production tensor . <nl> + void testScheduleInlineRandomLowerDimensions ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Tensor * x = Compute ( " x " , { { M , " m1 " } } , [ & ] ( const VarHandle & m ) { <nl> + return Mod : : make ( Intrinsics : : make ( kRand , kInt ) , 5 ) ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return x - > call ( m ) + x - > call ( m ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + l1 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + / / would normally compare results but Rand isn ' t implemented in the <nl> + / / SimpleIREvaluator , even if we could seed it . <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + std : : ostringstream oss ; <nl> + oss < < * stmt1 ; <nl> + <nl> + / / Check the IR we produced <nl> + const std : : string & verification_pattern = <nl> + R " IR ( <nl> + # CHECK : for ( int m2 = 0 ; m2 < 4 ; m2 + + ) <nl> + # CHECK : int x = rand ( ) ; <nl> + # CHECK : for ( int n2 = 0 ; n2 < 5 ; n2 + + ) <nl> + # CHECK : for ( int k2 = 0 ; k2 < 6 ; k2 + + ) <nl> + # CHECK : y [ m2 , n2 , k2 ] = 2 * ( x % 5 ) ; ) IR " ; <nl> + torch : : jit : : testing : : FileCheck ( ) . run ( verification_pattern , oss . str ( ) ) ; <nl> + } <nl> + <nl> + / / Make sure we don ' t screw up intrinsics thinking they ' re rand . <nl> + void testScheduleInlineIntrinsics ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + Buffer a_buf ( " a " , kFloat , { M , N } ) ; <nl> + Buffer b_buf ( " b " , kFloat , { N , K } ) ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return a_buf ( m , n ) * b_buf ( n , k ) ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return Intrinsics : : make ( kSqrt , x - > call ( m , n , k ) ) ; <nl> + } ) ; <nl> + <nl> + PaddedBuffer < float > a_v ( M , N ) ; <nl> + PaddedBuffer < float > b_v ( N , K ) ; <nl> + <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + a_v ( i , j ) = i * i ; <nl> + } <nl> + } <nl> + for ( int i = 0 ; i < N ; i + + ) { <nl> + for ( int j = 0 ; j < K ; j + + ) { <nl> + b_v ( i , j ) = j * j ; <nl> + } <nl> + } <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + LoopNest l2 ( { y } ) ; <nl> + l2 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + l1 . 
prepareForCodegen ( ) ; <nl> + l2 . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + Stmt * stmt2 = IRSimplifier : : simplify ( l2 . root_stmt ( ) ) ; <nl> + <nl> + SimpleIREvaluator eval1 ( stmt1 , a_buf , b_buf , y ) ; <nl> + SimpleIREvaluator eval2 ( stmt2 , a_buf , b_buf , y ) ; <nl> + <nl> + PaddedBuffer < float > y_1 ( M , N , K ) ; <nl> + PaddedBuffer < float > y_2 ( M , N , K ) ; <nl> + <nl> + eval1 ( a_v , b_v , y_1 ) ; <nl> + eval2 ( a_v , b_v , y_2 ) ; <nl> + ExpectAllNear ( y_1 , y_2 , 1e - 5 ) ; <nl> + std : : ostringstream oss1 , oss2 ; <nl> + oss1 < < * stmt1 ; <nl> + oss2 < < * stmt2 ; <nl> + ASSERT_GT ( oss1 . str ( ) . size ( ) , oss2 . str ( ) . size ( ) ) ; <nl> + } <nl> + <nl> + / / Make sure we can handle rand and non - rand intrinsics . <nl> + void testScheduleInlineRandWithIntrinsics ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return Intrinsics : : make ( kRand , kFloat ) ; <nl> + } ) ; <nl> + Tensor * y = Compute ( <nl> + " y " , <nl> + { { M , " m2 " } , { N , " n2 " } , { K , " k2 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return Intrinsics : : make ( kSqrt , x - > call ( m , n , k ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + l1 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + <nl> + std : : ostringstream oss ; <nl> + oss < < * stmt1 ; <nl> + <nl> + / / Check the IR we produced <nl> + const std : : string & verification_pattern = <nl> + R " IR ( <nl> + # CHECK : for ( int m2 = 0 ; m2 < 4 ; m2 + + ) <nl> + # CHECK : for ( int n2 = 0 ; n2 < 5 ; n2 + + ) <nl> + # CHECK : for ( int k2 = 0 ; k2 < 6 ; k2 + + ) <nl> + # CHECK : float x = rand ( ) ; <nl> + # CHECK : y [ m2 , n2 , k2 ] = sqrt ( x ) ; ) IR " ; <nl> + torch : : jit : : testing : : FileCheck ( ) . run ( verification_pattern , oss . str ( ) ) ; <nl> + } <nl> + <nl> + / / Split a Compute then inline it into another compute . <nl> + void testScheduleSplitAThenInline ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 2 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 4 , & i_outer , & i_inner ) ; <nl> + ASSERT_THROWS_WITH ( l . computeInline ( a - > buf ( ) ) , " compound indices " ) ; <nl> + } <nl> + <nl> + / / Split a Compute then inline another Compute into it . 
<nl> + void testScheduleSplitBThenInline ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( b ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 3 , & i_outer , & i_inner ) ; <nl> + l . computeInline ( a - > buf ( ) ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + <nl> + std : : vector < int > output ( 6 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , b ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int i = 0 ; i < 6 ; + + i ) { <nl> + ASSERT_EQ ( output [ i ] , ( i + 8 ) * ( i + 8 ) ) ; <nl> + } <nl> + } <nl> + <nl> + / / Split a Compute twice then inline it . <nl> + void testScheduleSplitTwiceThenInline ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 2 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 4 , & i_outer , & i_inner ) ; <nl> + l . splitWithMask ( i_inner , 2 , & i_outer , & i_inner ) ; <nl> + ASSERT_THROWS_WITH ( l . computeInline ( a - > buf ( ) ) , " compound indices " ) ; <nl> + } <nl> + <nl> + / / Inline a Compute , then split . <nl> + void testScheduleInlineThenSplit ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + l . computeInline ( a - > buf ( ) ) ; <nl> + <nl> + std : : vector < For * > loops = NodeFinder < For > : : find ( l . root_stmt ( ) ) ; <nl> + l . splitWithMask ( loops . back ( ) , 3 , & i_outer , & i_inner ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + std : : vector < int > output ( 6 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , b ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int i = 0 ; i < 6 ; + + i ) { <nl> + ASSERT_EQ ( output [ i ] , ( i + 8 ) * ( i + 8 ) ) ; <nl> + } <nl> + } <nl> + <nl> + / / Split a Compute , inline it , then split the result . 
<nl> + void testScheduleSplitInlineThenSplit ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 16 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + auto loops = NodeFinder < For > : : find ( l . root_stmt ( ) ) ; <nl> + l . splitWithMask ( loops . back ( ) , 2 , & i_outer , & i_inner ) ; <nl> + l . computeInline ( a - > buf ( ) ) ; <nl> + <nl> + loops = NodeFinder < For > : : find ( l . root_stmt ( ) ) ; <nl> + l . splitWithMask ( loops . front ( ) , 2 , & i_outer , & i_inner ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + std : : vector < int > output ( 16 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , b ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int i = 0 ; i < 16 ; + + i ) { <nl> + ASSERT_EQ ( output [ i ] , ( i + 8 ) * ( i + 8 ) ) ; <nl> + } <nl> + } <nl> + <nl> + / / Oversplit a loop that is simplified out after inlining . <nl> + void testScheduleSplitInlineSimplify ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { <nl> + return ExprHandle ( 4 ) * i - ExprHandle ( 2 ) * i ; <nl> + } ) ; <nl> + Tensor * b = Compute ( " b " , { { 2 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j ) - ExprHandle ( 1 ) ; <nl> + } ) ; <nl> + <nl> + LoopNest loop ( { b } ) ; <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + <nl> + LoopNest l ( { b } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 4 , & i_outer , & i_inner ) ; <nl> + ASSERT_THROWS_WITH ( l . computeInline ( a - > buf ( ) ) , " compound indices " ) ; <nl> + } <nl> + <nl> + / / Inline a Compute with two consumers . <nl> + void testScheduleInlineThreeMixedOnce ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + Tensor * c = Compute ( <nl> + " c " , { { 4 , " k " } , { 3 , " l " } } , [ & ] ( const VarHandle & k , const VarHandle & l ) { <nl> + return a - > call ( k ) * b - > call ( l ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l ( { c } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . computeInline ( a - > buf ( ) ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + std : : vector < int > output ( 4 * 3 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , c ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int k = 0 ; k < 4 ; + + k ) { <nl> + for ( int l = 0 ; l < 3 ; + + l ) { <nl> + ASSERT_EQ ( output [ k * 3 + l ] , ( k ) * ( k ) * ( l + 8 ) * ( l + 8 ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Inline Compute A into B , then inline B into C . 
<nl> + void testScheduleInlineThreeMixedTwice ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + Tensor * c = Compute ( <nl> + " c " , { { 4 , " k " } , { 3 , " l " } } , [ & ] ( const VarHandle & k , const VarHandle & l ) { <nl> + return a - > call ( k ) * b - > call ( l ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l ( { c } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . computeInline ( a - > buf ( ) ) ; <nl> + l . computeInline ( b - > buf ( ) ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + std : : vector < int > output ( 4 * 3 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , c ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int k = 0 ; k < 4 ; + + k ) { <nl> + for ( int l = 0 ; l < 3 ; + + l ) { <nl> + ASSERT_EQ ( output [ k * 3 + l ] , ( k ) * ( k ) * ( l + 8 ) * ( l + 8 ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Inline a Compute that is both a producer and consumer . <nl> + void testScheduleInlineThreeMixedInner ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + Tensor * c = Compute ( <nl> + " c " , { { 4 , " k " } , { 3 , " l " } } , [ & ] ( const VarHandle & k , const VarHandle & l ) { <nl> + return a - > call ( k ) * b - > call ( l ) ; <nl> + } ) ; <nl> + <nl> + LoopNest l ( { c } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . computeInline ( b - > buf ( ) ) ; <nl> + l . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * s = IRSimplifier : : simplify ( l . root_stmt ( ) ) ; <nl> + std : : vector < int > output ( 4 * 3 , 0 ) ; <nl> + SimpleIREvaluator eval ( s , c ) ; <nl> + eval ( output ) ; <nl> + <nl> + for ( int k = 0 ; k < 4 ; + + k ) { <nl> + for ( int l = 0 ; l < 3 ; + + l ) { <nl> + ASSERT_EQ ( output [ k * 3 + l ] , ( k ) * ( k ) * ( l + 8 ) * ( l + 8 ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Split 3 Computes , then inline the first two into the last . <nl> + void testScheduleInlineThreeMixedSplit ( ) { <nl> + KernelScope kernel_scope ; <nl> + Tensor * a = <nl> + Compute ( " a " , { { 18 , " i " } } , [ & ] ( const VarHandle & i ) { return i * i ; } ) ; <nl> + Tensor * b = Compute ( " b " , { { 6 , " j " } } , [ & ] ( const VarHandle & j ) { <nl> + return a - > call ( j + ExprHandle ( 8 ) ) ; <nl> + } ) ; <nl> + Tensor * c = Compute ( <nl> + " c " , { { 4 , " k " } , { 3 , " l " } } , [ & ] ( const VarHandle & k , const VarHandle & l ) { <nl> + return a - > call ( k ) * b - > call ( l ) ; <nl> + } ) ; <nl> + <nl> + For * i_outer ; <nl> + For * i_inner ; <nl> + LoopNest l ( { c } ) ; <nl> + std : : vector < For * > loops = l . getLoopStmtsFor ( a ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 4 , & i_outer , & i_inner ) ; <nl> + loops = l . getLoopStmtsFor ( b ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 3 , & i_outer , & i_inner ) ; <nl> + loops = l . getLoopStmtsFor ( c ) ; <nl> + l . splitWithMask ( loops [ 0 ] , 2 , & i_outer , & i_inner ) ; <nl> + <nl> + ASSERT_THROWS_WITH ( l . 
computeInline ( a - > buf ( ) ) , " compound indices " ) ; <nl> + } <nl> + <nl> void testScheduleFuserStyle ( ) { <nl> KernelScope kernel_scope ; <nl> const int kVectorSize = 8 ; <nl> mmm a / test / cpp / tensorexpr / test_reductions . cpp <nl> ppp b / test / cpp / tensorexpr / test_reductions . cpp <nl> void testReduceRfactorLike ( ) { <nl> ASSERT_EQ ( out [ 0 ] , 99 * 50 ) ; <nl> } <nl> <nl> + void testReduceAsProducer ( ) { <nl> + KernelScope kernel_scope ; <nl> + <nl> + const int M = 10 ; <nl> + VarHandle m ( " m " , kInt ) ; <nl> + <nl> + Buffer a ( BufHandle ( " a " , { 2 , 3 } , kFloat ) ) ; <nl> + Buffer b ( BufHandle ( " b " , { 2 , 3 , m } , kFloat ) ) ; <nl> + <nl> + Tensor * c = Reduce ( " sum " , { { 2 , " l1 " } , { 3 , " n1 " } } , Sum ( ) , b , { { m , " m1 " } } ) ; <nl> + Tensor * d = Compute ( <nl> + " scale " , <nl> + { { 2 , " l2 " } , { 3 , " n1 " } } , <nl> + [ & ] ( const VarHandle & l , const VarHandle & n ) { <nl> + return c - > call ( l , n ) * a ( l , n ) ; <nl> + } ) ; <nl> + LoopNest loop ( { d } ) ; <nl> + loop . prepareForCodegen ( ) ; <nl> + Stmt * s = loop . root_stmt ( ) ; <nl> + s = IRSimplifier : : simplify ( s ) ; <nl> + <nl> + SimpleIREvaluator cg ( s , { a , b , d , m } ) ; <nl> + <nl> + std : : vector < float > aData ( 2 * 3 , 0 ) ; <nl> + std : : vector < float > bData ( 2 * 3 * M , 0 ) ; <nl> + std : : vector < float > dData ( 2 * 3 , 6 . 0f ) ; <nl> + <nl> + for ( int i = 0 ; i < 2 * 3 ; + + i ) { <nl> + aData [ i ] = 6 - i ; <nl> + for ( int j = 0 ; j < M ; + + j ) { <nl> + bData [ i * M + j ] = j ; <nl> + } <nl> + } <nl> + <nl> + cg . call ( { aData , bData , dData , M } ) ; <nl> + float expected = 0 ; <nl> + for ( int i = 0 ; i < M ; + + i ) { <nl> + expected + = i ; <nl> + } <nl> + for ( int i = 0 ; i < 2 * 3 ; + + i ) { <nl> + ASSERT_EQ ( dData [ i ] , expected * ( 6 - i ) ) ; <nl> + } <nl> + } <nl> + <nl> + void testReduceAsConsumer ( ) { <nl> + KernelScope kernel_scope ; <nl> + <nl> + const int M = 10 ; <nl> + VarHandle m ( " m " , kInt ) ; <nl> + <nl> + Buffer a ( BufHandle ( " a " , { 2 , 3 , m } , kFloat ) ) ; <nl> + Buffer b ( BufHandle ( " b " , { 2 , 3 , m } , kFloat ) ) ; <nl> + <nl> + Tensor * c = Compute ( <nl> + " scale " , <nl> + { { 2 , " l2 " } , { 3 , " n1 " } , { m , " m1 " } } , <nl> + [ & ] ( const VarHandle & l , const VarHandle & n , const VarHandle & m ) { <nl> + return b ( l , n , m ) * a ( l , n , m ) ; <nl> + } ) ; <nl> + Tensor * d = Reduce ( " sum " , { { 2 , " l1 " } } , Sum ( ) , c , { { 3 , " n1 " } , { m , " m1 " } } ) ; <nl> + LoopNest loop ( { d } ) ; <nl> + loop . prepareForCodegen ( ) ; <nl> + Stmt * s = loop . root_stmt ( ) ; <nl> + s = IRSimplifier : : simplify ( s ) ; <nl> + <nl> + SimpleIREvaluator cg ( s , { a , b , d , m } ) ; <nl> + <nl> + std : : vector < float > aData ( 2 * 3 * M , 0 ) ; <nl> + std : : vector < float > bData ( 2 * 3 * M , 0 ) ; <nl> + std : : vector < float > dData ( 2 , 6 . 0f ) ; <nl> + <nl> + for ( int i = 0 ; i < 2 * 3 ; + + i ) { <nl> + for ( int j = 0 ; j < M ; + + j ) { <nl> + bData [ i * M + j ] = j + 1 ; <nl> + aData [ i * M + j ] = 6 - i ; <nl> + } <nl> + } <nl> + <nl> + cg . 
call ( { aData , bData , dData , M } ) ; <nl> + float expected [ 2 ] = { 0 , 0 } ; <nl> + for ( int i = 0 ; i < 2 ; + + i ) { <nl> + for ( int j = 0 ; j < 3 ; + + j ) { <nl> + for ( int k = 0 ; k < M ; + + k ) { <nl> + expected [ i ] + = ( k + 1 ) * ( 6 - ( i * 3 + j ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + for ( int i = 0 ; i < 2 ; + + i ) { <nl> + ASSERT_EQ ( dData [ i ] , expected [ i ] ) ; <nl> + } <nl> + } <nl> + <nl> void testSplitReduceAxis ( ) { <nl> KernelScope kernel_scope ; <nl> <nl> void testReduceOverSplitRfactor ( ) { <nl> / / torch : : jit : : testing : : FileCheck ( ) . run ( verification_pattern , oss . str ( ) ) ; <nl> } <nl> <nl> + void testReduceInlineReduction ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Buffer a_buf ( " a " , kFloat , { M } ) ; <nl> + Buffer b_buf ( " b " , kFloat , { M , N , K } ) ; <nl> + <nl> + Tensor * x = Reduce ( " x " , { { M , " m1 " } } , Sum ( ) , b_buf , { { N , " n1 " } , { K , " k1 " } } ) ; <nl> + Tensor * y = Compute ( " y " , { { M , " m2 " } } , [ & ] ( const VarHandle & m ) { <nl> + return a_buf ( m ) + x - > call ( m ) ; <nl> + } ) ; <nl> + <nl> + PaddedBuffer < float > a_v ( M ) ; <nl> + PaddedBuffer < float > b_v ( M , N , K ) ; <nl> + <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + a_v ( i ) = i * i ; <nl> + } <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + for ( int k = 0 ; k < K ; k + + ) { <nl> + b_v ( i , j , k ) = j * j * k ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + ASSERT_THROWS_WITH ( <nl> + l1 . computeInline ( x - > buf ( ) ) , " cannot inline a reduction computation " ) ; <nl> + } <nl> + <nl> + void testReduceInlineConsumer ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Buffer a_buf ( " a " , kFloat , { M , N , K } ) ; <nl> + Buffer b_buf ( " b " , kFloat , { M , N , K } ) ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return a_buf ( m , n , k ) + b_buf ( m , n , k ) ; <nl> + } ) ; <nl> + Tensor * y = Reduce ( " y " , { { M , " m2 " } } , Sum ( ) , x , { { N , " n2 " } , { K , " k2 " } } ) ; <nl> + <nl> + PaddedBuffer < float > a_v ( M , N , K ) ; <nl> + PaddedBuffer < float > b_v ( M , N , K ) ; <nl> + <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + for ( int k = 0 ; k < K ; k + + ) { <nl> + a_v ( i , j , k ) = i * i + k ; <nl> + b_v ( i , j , k ) = j * j + k ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + LoopNest l2 ( { y } ) ; <nl> + l2 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + l1 . prepareForCodegen ( ) ; <nl> + l2 . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + Stmt * stmt2 = IRSimplifier : : simplify ( l2 . 
root_stmt ( ) ) ; <nl> + <nl> + SimpleIREvaluator eval1 ( stmt1 , a_buf , b_buf , y ) ; <nl> + SimpleIREvaluator eval2 ( stmt2 , a_buf , b_buf , y ) ; <nl> + <nl> + PaddedBuffer < float > y_1 ( M ) ; <nl> + PaddedBuffer < float > y_2 ( M ) ; <nl> + <nl> + eval1 ( a_v , b_v , y_1 ) ; <nl> + eval2 ( a_v , b_v , y_2 ) ; <nl> + ExpectAllNear ( y_1 , y_2 , 1e - 5 ) ; <nl> + std : : ostringstream oss1 , oss2 ; <nl> + oss1 < < * stmt1 ; <nl> + oss2 < < * stmt2 ; <nl> + ASSERT_GT ( oss1 . str ( ) . size ( ) , oss2 . str ( ) . size ( ) ) ; <nl> + } <nl> + <nl> + void testReduceInlineReducerInternal ( ) { <nl> + KernelScope kernel_scope ; <nl> + const int M = 4 ; <nl> + const int N = 5 ; <nl> + const int K = 6 ; <nl> + <nl> + Buffer a_buf ( " a " , kFloat , { M , N , K } ) ; <nl> + Buffer b_buf ( " b " , kFloat , { M , N , K } ) ; <nl> + <nl> + Tensor * x = Compute ( <nl> + " x " , <nl> + { { M , " m1 " } , { N , " n1 " } , { K , " k1 " } } , <nl> + [ & ] ( const VarHandle & m , const VarHandle & n , const VarHandle & k ) { <nl> + return a_buf ( m , n , k ) + b_buf ( m , n , k ) ; <nl> + } ) ; <nl> + <nl> + Reducer minimum ( ExprHandle ( 0 . f ) , [ & ] ( ExprHandle a , ExprHandle b ) { <nl> + return Add : : make ( ExprHandle ( 1 . f ) , Min : : make ( a , b , false ) ) ; <nl> + } ) ; <nl> + Tensor * y = Reduce ( " y " , { { M , " m2 " } } , minimum , x , { { N , " n2 " } , { K , " k2 " } } ) ; <nl> + <nl> + PaddedBuffer < float > a_v ( M , N , K ) ; <nl> + PaddedBuffer < float > b_v ( M , N , K ) ; <nl> + <nl> + for ( int i = 0 ; i < M ; i + + ) { <nl> + for ( int j = 0 ; j < N ; j + + ) { <nl> + for ( int k = 0 ; k < K ; k + + ) { <nl> + a_v ( i , j , k ) = i * i + k ; <nl> + b_v ( i , j , k ) = j * j + k ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + LoopNest l1 ( { y } ) ; <nl> + LoopNest l2 ( { y } ) ; <nl> + l2 . computeInline ( x - > buf ( ) ) ; <nl> + <nl> + l1 . prepareForCodegen ( ) ; <nl> + l2 . prepareForCodegen ( ) ; <nl> + <nl> + Stmt * stmt1 = IRSimplifier : : simplify ( l1 . root_stmt ( ) ) ; <nl> + Stmt * stmt2 = IRSimplifier : : simplify ( l2 . root_stmt ( ) ) ; <nl> + <nl> + SimpleIREvaluator eval1 ( stmt1 , a_buf , b_buf , y ) ; <nl> + SimpleIREvaluator eval2 ( stmt2 , a_buf , b_buf , y ) ; <nl> + <nl> + PaddedBuffer < float > y_1 ( M ) ; <nl> + PaddedBuffer < float > y_2 ( M ) ; <nl> + <nl> + eval1 ( a_v , b_v , y_1 ) ; <nl> + eval2 ( a_v , b_v , y_2 ) ; <nl> + ExpectAllNear ( y_1 , y_2 , 1e - 5 ) ; <nl> + std : : ostringstream oss1 , oss2 ; <nl> + oss1 < < * stmt1 ; <nl> + oss2 < < * stmt2 ; <nl> + ASSERT_GT ( oss1 . str ( ) . size ( ) , oss2 . str ( ) . size ( ) ) ; <nl> + } <nl> + <nl> } / / namespace jit <nl> } / / namespace torch <nl> mmm a / test / cpp / tensorexpr / test_simplify . cpp <nl> ppp b / test / cpp / tensorexpr / test_simplify . cpp <nl> using SimpleIRExprEval = ExprEval < SimpleIREvaluator > ; <nl> ASSERT_EQ ( node_ - > name_hint ( ) , name ) ; \ <nl> } <nl> <nl> + # define IS_RAND ( node ) \ <nl> + { \ <nl> + auto * node_ = dynamic_cast < const Intrinsics * > ( node ) ; \ <nl> + ASSERT_NE ( nullptr , node_ ) ; \ <nl> + ASSERT_EQ ( node_ - > op_type ( ) , kRand ) ; \ <nl> + } <nl> + <nl> void testConstantFoldSimple ( ) { <nl> KernelScope kernel_scope ; <nl> ExprHandle a ( 2 . 0f ) ; <nl> void testHashEquivalence ( ) { <nl> ASSERT_NE ( hasher . hash ( f5 . 
node ( ) ) , ( size_t ) 0 ) ; <nl> } <nl> <nl> + void testHashEquivalenceRand ( ) { <nl> + KernelScope kernel_scope ; <nl> + ExprHandle f = <nl> + Intrinsics : : make ( kRand , kFloat ) + Intrinsics : : make ( kRand , kInt ) ; <nl> + <nl> + const Add * root = f . AsNode < Add > ( ) ; <nl> + ASSERT_NE ( root , nullptr ) ; <nl> + <nl> + HashProvider hasher ; <nl> + auto hash_f = hasher . hash ( f . node ( ) ) ; <nl> + auto hash_l = hasher . hash ( root - > lhs ( ) ) ; <nl> + auto hash_r = hasher . hash ( root - > rhs ( ) ) ; <nl> + <nl> + / / Root not equal to either branch . <nl> + ASSERT_NE ( hash_f , hash_l ) ; <nl> + ASSERT_NE ( hash_f , hash_r ) ; <nl> + / / and branches are NOT equal . <nl> + ASSERT_NE ( hash_l , hash_r ) ; <nl> + } <nl> + <nl> void testHashEquivalenceAfterFolding ( ) { <nl> KernelScope kernel_scope ; <nl> VarHandle x ( " x " , kFloat ) ; <nl> void testSimplifyEliminateZeroLengthAlloc ( ) { <nl> } <nl> } <nl> <nl> + void testDontSimplifyRand ( ) { <nl> + KernelScope kernel_scope ; <nl> + <nl> + { <nl> + / / rand ( ) + rand ( ) = rand ( ) + rand ( ) NOT 2 * rand ( ) . <nl> + ExprHandle body = <nl> + Intrinsics : : make ( kRand , kInt ) + Intrinsics : : make ( kRand , kInt ) ; <nl> + ExprHandle simplified = IRSimplifier : : simplify ( body ) ; <nl> + IS_NODE_WITH_NAME ( Add , simplified . node ( ) , add ) ; <nl> + IS_RAND ( add - > lhs ( ) ) ; <nl> + IS_RAND ( add - > rhs ( ) ) ; <nl> + } <nl> + <nl> + { <nl> + / / rand ( ) - rand ( ) = rand ( ) - rand ( ) NOT 0 . <nl> + ExprHandle body = <nl> + Intrinsics : : make ( kRand , kFloat ) - Intrinsics : : make ( kRand , kFloat ) ; <nl> + ExprHandle simplified = IRSimplifier : : simplify ( body ) ; <nl> + IS_NODE_WITH_NAME ( Sub , simplified . node ( ) , sub ) ; <nl> + IS_RAND ( sub - > lhs ( ) ) ; <nl> + IS_RAND ( sub - > rhs ( ) ) ; <nl> + } <nl> + <nl> + { <nl> + / / rand ( ) * rand ( ) = rand ( ) * rand ( ) . <nl> + ExprHandle body = <nl> + Intrinsics : : make ( kRand , kInt ) * Intrinsics : : make ( kRand , kInt ) ; <nl> + ExprHandle simplified = IRSimplifier : : simplify ( body ) ; <nl> + IS_NODE_WITH_NAME ( Mul , simplified . node ( ) , mul ) ; <nl> + IS_RAND ( mul - > lhs ( ) ) ; <nl> + IS_RAND ( mul - > rhs ( ) ) ; <nl> + } <nl> + } <nl> + <nl> } / / namespace jit <nl> } / / namespace torch <nl> mmm a / test / cpp / tensorexpr / tests . h <nl> ppp b / test / cpp / tensorexpr / tests . 
h <nl> namespace jit { <nl> _ ( SplitWithMaskWithLoopOptions ) \ <nl> _ ( ScheduleBroadcastAddBuffer ) \ <nl> _ ( ScheduleFunctionCall01 ) \ <nl> + _ ( ScheduleInlineSimple ) \ <nl> _ ( ScheduleInlineFunc01 ) \ <nl> + _ ( ScheduleInlineRandom ) \ <nl> + _ ( ScheduleInlineRandomUnrelated ) \ <nl> + _ ( ScheduleInlineRandomLowerDimensions ) \ <nl> + _ ( ScheduleInlineIntrinsics ) \ <nl> + _ ( ScheduleInlineRandWithIntrinsics ) \ <nl> + _ ( ScheduleSplitAThenInline ) \ <nl> + _ ( ScheduleSplitBThenInline ) \ <nl> + _ ( ScheduleSplitTwiceThenInline ) \ <nl> + _ ( ScheduleInlineThenSplit ) \ <nl> + _ ( ScheduleSplitInlineThenSplit ) \ <nl> + _ ( ScheduleSplitInlineSimplify ) \ <nl> + _ ( ScheduleInlineThreeMixedOnce ) \ <nl> + _ ( ScheduleInlineThreeMixedTwice ) \ <nl> + _ ( ScheduleInlineThreeMixedInner ) \ <nl> + _ ( ScheduleInlineThreeMixedSplit ) \ <nl> _ ( ScheduleFuserStyle ) \ <nl> _ ( ScheduleFuserThreeArg ) \ <nl> _ ( ScheduleDynamicShape2D ) \ <nl> namespace jit { <nl> _ ( ReduceAnyAll ) \ <nl> _ ( ReduceMatmul2D ) \ <nl> _ ( ReduceRfactorLike ) \ <nl> + _ ( ReduceAsProducer ) \ <nl> + _ ( ReduceAsConsumer ) \ <nl> + _ ( SplitReduceAxis ) \ <nl> + _ ( SplitNonReduceAxis ) \ <nl> + _ ( ReorderedReductionInitializer ) \ <nl> _ ( ReduceRfactor ) \ <nl> _ ( Reduce3DRfactorInternal ) \ <nl> _ ( Reduce3DRfactorInner ) \ <nl> namespace jit { <nl> _ ( ReduceOverSplitMask ) \ <nl> _ ( ReduceSplitRfactor ) \ <nl> _ ( ReduceOverSplitRfactor ) \ <nl> - _ ( SplitReduceAxis ) \ <nl> - _ ( SplitNonReduceAxis ) \ <nl> + _ ( ReduceInlineReduction ) \ <nl> + _ ( ReduceInlineConsumer ) \ <nl> + _ ( ReduceInlineReducerInternal ) \ <nl> _ ( TypeTest01 ) \ <nl> _ ( TypePropagation ) \ <nl> _ ( Cond01 ) \ <nl> namespace jit { <nl> _ ( UnFoldableExpr ) \ <nl> _ ( HashSimple ) \ <nl> _ ( HashEquivalence ) \ <nl> + _ ( HashEquivalenceRand ) \ <nl> _ ( HashEquivalenceAfterFolding ) \ <nl> _ ( HashDifferenceTypes ) \ <nl> _ ( HashLargeExpression ) \ <nl> namespace jit { <nl> _ ( SimplifyEliminateEmptyFor ) \ <nl> _ ( SimplifyFlattenBlock ) \ <nl> _ ( SimplifyEliminateZeroLengthAlloc ) \ <nl> + _ ( DontSimplifyRand ) \ <nl> _ ( RegisterizerSimple ) \ <nl> _ ( RegisterizerLoop ) \ <nl> _ ( RegisterizerLoopFixedLoad ) \ <nl> mmm a / torch / csrc / jit / tensorexpr / analysis . h <nl> ppp b / torch / csrc / jit / tensorexpr / analysis . h <nl> class VarFinder : public IRVisitor { <nl> return nf . vars ( ) ; <nl> } <nl> <nl> + static std : : unordered_set < const Var * > find ( const Expr * e ) { <nl> + VarFinder nf ; <nl> + e - > accept ( & nf ) ; <nl> + return nf . vars ( ) ; <nl> + } <nl> + <nl> const std : : unordered_set < const Var * > & vars ( ) { <nl> return vars_ ; <nl> } <nl> mmm a / torch / csrc / jit / tensorexpr / function . cpp <nl> ppp b / torch / csrc / jit / tensorexpr / function . cpp <nl> Tensor * Reduce ( <nl> reduce_args ) ; <nl> } <nl> <nl> + Tensor * Reduce ( <nl> + const std : : string & func_name , <nl> + const std : : vector < DimArg > & dim_args , <nl> + const Reducer & reducer , <nl> + Tensor * tensor , <nl> + const std : : vector < DimArg > & reduce_args ) { <nl> + return Reduce ( <nl> + func_name , <nl> + dim_args , <nl> + reducer , <nl> + [ & ] ( ParameterList & p ) { return tensor - > call ( p ) ; } , <nl> + reduce_args ) ; <nl> + } <nl> + <nl> } / / namespace tensorexpr <nl> } / / namespace jit <nl> } / / namespace torch <nl> mmm a / torch / csrc / jit / tensorexpr / hash_provider . cpp <nl> ppp b / torch / csrc / jit / tensorexpr / hash_provider . 
cpp <nl> void HashProvider : : visit ( const BaseCallNode * v ) { <nl> putHash ( v , hash ) ; <nl> } <nl> <nl> + void HashProvider : : visit ( const Intrinsics * v ) { <nl> + CACHE_GUARD ( ) ; <nl> + / / calls to rand are not symbolic and have a different value each time , they <nl> + / / should not hash to anything and this is the best we can do . <nl> + if ( v - > op_type ( ) = = kRand ) { <nl> + putHash ( v , ( SimplifierHashType ) rand ( ) ) ; <nl> + return ; <nl> + } <nl> + <nl> + SimplifierHashType hash ( te_hash ( v - > func_name ( ) ) ) ; <nl> + for ( int i = 0 ; i < v - > nparams ( ) ; i + + ) { <nl> + v - > param ( i ) - > accept ( this ) ; <nl> + hash = hash_combine ( hash , hashOf ( v - > param ( i ) ) ) ; <nl> + } <nl> + <nl> + putHash ( v , hash ) ; <nl> + } <nl> + <nl> void HashProvider : : visit ( const Allocate * v ) { <nl> CACHE_GUARD ( ) ; <nl> const Var * buffer_var = v - > buffer_var ( ) ; <nl> mmm a / torch / csrc / jit / tensorexpr / hash_provider . h <nl> ppp b / torch / csrc / jit / tensorexpr / hash_provider . h <nl> class TORCH_API HashProvider : public IRVisitor { <nl> void visit ( const Broadcast * v ) override ; <nl> void visit ( const IfThenElse * v ) override ; <nl> void visit ( const BaseCallNode * v ) override ; <nl> + void visit ( const Intrinsics * v ) override ; <nl> void visit ( const Allocate * v ) override ; <nl> void visit ( const Free * v ) override ; <nl> void visit ( const Cond * v ) override ; <nl> mmm a / torch / csrc / jit / tensorexpr / kernel . cpp <nl> ppp b / torch / csrc / jit / tensorexpr / kernel . cpp <nl> Stmt * TensorExprKernel : : generateStmt ( BackendType backendType ) { <nl> if ( ! l . hasLoopBodyFor ( p . second ) | | hasReduction ) { <nl> continue ; <nl> } <nl> - Stmt * loop = l . getLoopBodyFor ( p . second ) ; <nl> - if ( torch : : jit : : tensorexpr : : HasRand ( loop ) . has_rand ( ) ) { <nl> - l . computeInlineWithRandom ( loop ) ; <nl> - } else { <nl> - l . computeInline ( loop ) ; <nl> - } <nl> + l . computeInline ( p . second - > buf ( ) ) ; <nl> } <nl> if ( backendType = = kCudaCodeGen ) { <nl> for ( size_t i = 0 ; i < flatTensorOutputs_ . size ( ) ; i + + ) { <nl> Stmt * TensorExprKernel : : generateStmt ( BackendType backendType ) { <nl> <nl> / / For every output tensor we ' ve created a flattened 1D tensor - let ' s <nl> / / mark the original output tensor with computeInline <nl> - l . computeInline ( l . getLoopBodyFor ( tensorOutputs_ [ i ] ) ) ; <nl> + l . computeInline ( tensorOutputs_ [ i ] - > buf ( ) ) ; <nl> <nl> int loopLevels = getTECudaPointwiseLoopLevels ( ) ; <nl> const int kDefaultLoopLevels = 2 ; <nl> mmm a / torch / csrc / jit / tensorexpr / loopnest . cpp <nl> ppp b / torch / csrc / jit / tensorexpr / loopnest . cpp <nl> <nl> <nl> # include < c10 / util / Logging . h > <nl> # include < c10 / util / string_utils . h > <nl> + <nl> + # include < torch / csrc / jit / tensorexpr / analysis . h > <nl> # include < torch / csrc / jit / tensorexpr / bounds_inference . h > <nl> # include < torch / csrc / jit / tensorexpr / eval . h > <nl> # include < torch / csrc / jit / tensorexpr / expr . h > <nl> class Flattener : public IRMutator { <nl> } <nl> } ; <nl> <nl> - class FunctionInliner : public IRMutator { <nl> - public : <nl> - FunctionInliner ( const std : : vector < Function * > & funcs ) : funcs_ ( funcs ) { <nl> - for ( Function * func : funcs ) { <nl> - / / TODO : Support multiple - output functions <nl> - if ( func - > func_vars ( ) . size ( ) ! 
= 1 ) { <nl> - throw unimplemented_lowering ( ) ; <nl> - } <nl> - func_var_set_ . insert ( func - > func_var ( 0 ) - > base_handle ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - protected : <nl> - bool should_inline ( Function * func ) const { <nl> - return func_var_set_ . count ( func - > func_var ( 0 ) - > base_handle ( ) ) > 0 ; <nl> - } <nl> - <nl> - / / For the target function , insert the caller / callee pair into the replacement <nl> - / / mapping . <nl> - const Expr * mutate ( const FunctionCall * v ) override { <nl> - Function * func = v - > tensor ( ) - > function ( ) ; <nl> - const Buf * buf = v - > tensor ( ) - > buf ( ) ; <nl> - / / TODO : Support multiple - output functions <nl> - if ( func - > func_vars ( ) . size ( ) ! = 1 ) { <nl> - throw unimplemented_lowering ( ) ; <nl> - } <nl> - <nl> - if ( should_inline ( func ) ) { <nl> - / / Insert the caller / callee pair into the mapping . <nl> - for ( size_t i = 0 ; i < buf - > ndim ( ) ; i + + ) { <nl> - const Var * func_callee_arg = dynamic_cast < const Var * > ( func - > arg ( i ) ) ; <nl> - const Expr * func_caller_param = v - > param ( i ) ; <nl> - auto iter = inline_mapping_ . find ( func_callee_arg ) ; <nl> - if ( iter ! = inline_mapping_ . end ( ) ) { <nl> - throw std : : runtime_error ( <nl> - " Duplicated variables : " + func_callee_arg - > name_hint ( ) ) ; <nl> - } <nl> - inline_mapping_ [ func_callee_arg ] = func_caller_param ; <nl> - } <nl> - <nl> - / / Call the actual replacement . <nl> - const Expr * body = func - > body ( v - > tensor ( ) - > output_index ( ) ) ; <nl> - const Expr * result = body - > accept_mutator ( this ) ; <nl> - <nl> - / / Remove the caller / callee relationship . <nl> - for ( size_t i = 0 ; i < buf - > ndim ( ) ; i + + ) { <nl> - const Var * func_callee_arg = dynamic_cast < const Var * > ( func - > arg ( i ) ) ; <nl> - auto iter = inline_mapping_ . find ( func_callee_arg ) ; <nl> - if ( iter = = inline_mapping_ . end ( ) ) { <nl> - throw std : : runtime_error ( <nl> - " Var already removed : " + func_callee_arg - > name_hint ( ) ) ; <nl> - } <nl> - inline_mapping_ . erase ( iter ) ; <nl> - } <nl> - return result ; <nl> - } else { <nl> - return IRMutator : : mutate ( v ) ; <nl> - } <nl> - } <nl> - <nl> - / / Replace the target variable with the caller expressions . <nl> - const Expr * mutate ( const Var * v ) override { <nl> - auto iter = inline_mapping_ . find ( v ) ; <nl> - if ( iter = = inline_mapping_ . end ( ) ) { <nl> - return IRMutator : : mutate ( v ) ; <nl> - } else { <nl> - const Expr * expr = iter - > second ; <nl> - / / Continue to transform the value from the lookup table . <nl> - return expr - > accept_mutator ( this ) ; <nl> - } <nl> - } <nl> - <nl> - / / Remove the buffer write the inlined function . <nl> - Stmt * mutate ( const Store * v ) override { <nl> - if ( func_var_set_ . count ( v - > base_handle ( ) ) > 0 ) { <nl> - return nullptr ; <nl> - } else { <nl> - return IRMutator : : mutate ( v ) ; <nl> - } <nl> - } <nl> - <nl> - private : <nl> - std : : unordered_map < const Var * , const Expr * > inline_mapping_ ; <nl> - std : : vector < Function * > funcs_ ; <nl> - std : : unordered_set < const Var * > func_var_set_ ; <nl> - } ; <nl> - <nl> - / / Inlining for functions containing rand ( ) . Since rand ( ) is stateful we can ' t <nl> - / / simply inline it everywhere , or else we may generate new randoms where we <nl> - / / should us a previously generated one . 
As a contrived example : <nl> - / / % 1 = rand ( ) <nl> - / / % 2 = % 1 + 1 <nl> - / / % 3 = % 1 - 1 <nl> - / / % 4 = % 2 - % 3 <nl> - / / Fully inlining this expr would , incorrectly , yield : <nl> - / / % 4 = ( rand ( ) + 1 ) - ( rand ( ) - 1 ) <nl> - / / when in fact the two uses of % 1 should cancel . To avoid this issue , we <nl> - / / instead generate : <nl> - / / % 4 = ( let x = rand ( ) ; ( x + 1 ) - ( x - 1 ) ) <nl> - / / <nl> - / / The overall approach is to replace every rand ( ) intrinsic with a newly <nl> - / / generated variable , and then bind those variables to rand ( ) calls in the <nl> - / / body of the innermost control structure . <nl> - class RandomInliner : public FunctionInliner { <nl> - public : <nl> - explicit RandomInliner ( const std : : vector < Function * > & funcs ) <nl> - : FunctionInliner ( funcs ) { } <nl> - <nl> - using FunctionInliner : : mutate ; <nl> - <nl> - / / Bind random vars in the true and false branches of a conditional . <nl> - Stmt * mutate ( const Cond * v ) override { <nl> - const Expr * cond = v - > condition ( ) ; <nl> - Stmt * true_stmt = v - > true_stmt ( ) ; <nl> - Stmt * false_stmt = v - > false_stmt ( ) ; <nl> - <nl> - const Expr * cond_new = cond - > accept_mutator ( this ) ; <nl> - Stmt * true_new = true_stmt ? true_stmt - > accept_mutator ( this ) : true_stmt ; <nl> - true_new = bind_random_vars ( true_new ) ; <nl> - Stmt * false_new = <nl> - false_stmt ? false_stmt - > accept_mutator ( this ) : false_stmt ; <nl> - false_new = bind_random_vars ( false_new ) ; <nl> - <nl> - if ( cond_new = = cond & & true_new = = true_stmt & & false_new = = false_stmt ) { <nl> - return const_cast < Cond * > ( v ) ; / / NOLINT <nl> - } <nl> - return new Cond ( cond_new , true_new , false_new ) ; <nl> - } <nl> - <nl> - / / Bind random vars in the innermost loop where they are used . <nl> - Stmt * mutate ( const For * v ) override { <nl> - const Var * var = v - > var ( ) ; <nl> - const Expr * start = v - > start ( ) ; <nl> - const Expr * stop = v - > stop ( ) ; <nl> - Stmt * body = v - > body ( ) ; <nl> - LoopOptions loop_options = v - > loop_options ( ) ; <nl> - <nl> - Stmt * orig_body = Stmt : : clone ( body ) ; <nl> - Stmt * new_body = orig_body - > accept_mutator ( this ) ; <nl> - new_body = bind_random_vars ( new_body ) ; <nl> - if ( new_body = = orig_body ) { <nl> - return const_cast < For * > ( v ) ; / / NOLINT <nl> - } <nl> - if ( new_body = = nullptr ) { <nl> - return nullptr ; <nl> - } <nl> - return new For ( var , start , stop , new_body , loop_options ) ; <nl> - } <nl> - <nl> - / / Inline calls containing rand ( ) . Create a new random variable for each <nl> - / / call being inlined , and remember which function is currently being inlined <nl> - / / so we can look up the right variable to replace it with . <nl> - const Expr * mutate ( const FunctionCall * v ) override { <nl> - if ( ! should_inline ( v - > tensor ( ) - > function ( ) ) ) { <nl> - return v ; <nl> - } <nl> - Function * prev_func = current_func_ ; <nl> - current_func_ = v - > tensor ( ) - > function ( ) ; <nl> - <nl> - / / Remember the calling args ; if we find another call with different args , <nl> - / / bail out because this case is too complicated . <nl> - auto it = call_args_ . find ( current_func_ ) ; <nl> - if ( it = = call_args_ . end ( ) ) { <nl> - call_args_ . emplace ( current_func_ , std : : cref ( v - > params ( ) ) ) ; <nl> - } else { <nl> - if ( v - > params ( ) ! = it - > second . 
get ( ) ) { <nl> - throw std : : runtime_error ( " Complex indexing pattern in rand ( ) tensor " ) ; <nl> - } <nl> - } <nl> - <nl> - / / Assign a new random variable for this function , if needed . <nl> - if ( ! random_vars_ . count ( current_func_ ) ) { <nl> - const std : : string & name = current_func_ - > func_var ( 0 ) - > name_hint ( ) ; <nl> - random_vars_ . emplace ( current_func_ , new Var ( name , v - > dtype ( ) ) ) ; <nl> - } <nl> - const Expr * result = FunctionInliner : : mutate ( v ) ; <nl> - current_func_ = prev_func ; <nl> - return result ; <nl> - } <nl> - <nl> - / / Replace rand ( ) intrinsics . <nl> - const Expr * mutate ( const Intrinsics * v ) override { <nl> - if ( v - > op_type ( ) ! = kRand ) { <nl> - return v ; <nl> - } <nl> - if ( ! current_func_ ) { <nl> - return v ; <nl> - } <nl> - auto it = random_vars_ . find ( current_func_ ) ; <nl> - if ( it = = random_vars_ . end ( ) ) { <nl> - return v ; <nl> - } <nl> - return it - > second ; <nl> - } <nl> - <nl> - private : <nl> - / / Emit let statements for all encountered random vars , thenclear them . <nl> - Stmt * bind_random_vars ( Stmt * s ) { <nl> - if ( random_vars_ . empty ( ) ) { <nl> - return s ; <nl> - } <nl> - <nl> - Block * b = dynamic_cast < Block * > ( s ) ; <nl> - if ( ! b ) { <nl> - b = new Block ( { s } ) ; <nl> - } <nl> - <nl> - / / Make sure theres something in the block , will be simplified out later . <nl> - if ( b - > empty ( ) ) { <nl> - b - > append_stmt ( new Block ( { } ) ) ; <nl> - } <nl> - <nl> - Stmt * first = b - > stmts ( ) . front ( ) ; <nl> - for ( auto const & p : random_vars_ ) { <nl> - Var * v = p . second ; <nl> - b - > insert_stmt_before ( <nl> - new Let ( v , new Intrinsics ( kRand , v - > dtype ( ) ) ) , first ) ; <nl> - } <nl> - random_vars_ . clear ( ) ; <nl> - return b ; <nl> - } <nl> - <nl> - / / Track the function currently being inlined . <nl> - Function * current_func_ = nullptr ; <nl> - <nl> - / / Map functions being inlined to the generated random variable . <nl> - std : : unordered_map < Function * , Var * > random_vars_ ; <nl> - <nl> - / / Remember arguments of calls containing rand , and force all calls to have <nl> - / / the same argument list . We use pointer equality of Exprs , which is <nl> - / / extremely strict but works for simple cases . 
<nl> - using ArgVec = std : : reference_wrapper < const std : : vector < const Expr * > > ; <nl> - std : : unordered_map < Function * , ArgVec > call_args_ ; <nl> - } ; <nl> - <nl> - static Stmt * InjectInlines ( <nl> - Stmt * stmt , <nl> - const std : : vector < Function * > & inlined_funcs ) { <nl> - FunctionInliner inliner ( inlined_funcs ) ; <nl> - Stmt * stmt_old = stmt ; <nl> - Stmt * stmt_new = stmt_old - > accept_mutator ( & inliner ) ; <nl> - return stmt_new ; <nl> - } <nl> - <nl> - static Stmt * InlineRandom ( Stmt * stmt , const std : : vector < Function * > & funcs ) { <nl> - RandomInliner inliner ( funcs ) ; <nl> - return stmt - > accept_mutator ( & inliner ) ; <nl> - } <nl> - <nl> class DepTracker : public IRVisitor { <nl> public : <nl> std : : vector < Tensor * > findUsedTensors ( Tensor * tensor ) { <nl> Stmt * LoopNest : : lowerToStmt ( Tensor * t ) { <nl> return body ; <nl> } <nl> <nl> + class FunctionInliner : public IRMutator { <nl> + public : <nl> + FunctionInliner ( Store * producer ) <nl> + : buf_ ( producer - > buf ( ) ) , producer_ ( producer ) { <nl> + for ( auto * i : producer - > indices ( ) ) { <nl> + const Var * index_var = dynamic_cast < const Var * > ( i ) ; <nl> + if ( index_var = = nullptr ) { <nl> + throw std : : logic_error ( " cannot inline Buf with compound indices " ) ; <nl> + } <nl> + index_vars_ . insert ( index_var ) ; <nl> + } <nl> + } <nl> + <nl> + protected : <nl> + / / For the target function , insert the caller / callee pair into the replacement <nl> + / / mapping . <nl> + const Expr * mutate ( const FunctionCall * v ) override { <nl> + Function * func = v - > tensor ( ) - > function ( ) ; <nl> + const Buf * buf = v - > tensor ( ) - > buf ( ) ; <nl> + if ( buf ! = buf_ ) { <nl> + return IRMutator : : mutate ( v ) ; <nl> + } <nl> + <nl> + / / TODO : Support multiple - output functions <nl> + if ( func - > func_vars ( ) . size ( ) ! = 1 ) { <nl> + throw unimplemented_lowering ( ) ; <nl> + } <nl> + <nl> + std : : vector < const Var * > index_vars ; <nl> + for ( size_t i = 0 ; i < buf - > ndim ( ) ; i + + ) { <nl> + const Var * func_callee_arg = dynamic_cast < const Var * > ( func - > arg ( i ) ) ; <nl> + const Expr * func_caller_param = v - > param ( i ) ; <nl> + auto iter = inline_mapping_ . find ( func_callee_arg ) ; <nl> + if ( iter ! = inline_mapping_ . end ( ) ) { <nl> + throw std : : runtime_error ( <nl> + " Duplicated variables : " + func_callee_arg - > name_hint ( ) ) ; <nl> + } <nl> + inline_mapping_ [ func_callee_arg ] = func_caller_param ; <nl> + index_vars . push_back ( func_callee_arg ) ; <nl> + } <nl> + <nl> + / / Call the actual replacement . <nl> + const Expr * body = producer_ - > value ( ) ; <nl> + const Expr * result = body - > accept_mutator ( this ) ; <nl> + <nl> + / / Remove the caller / callee relationship . <nl> + for ( auto * v : index_vars ) { <nl> + for ( auto & pair : random_bindings_ ) { <nl> + if ( pair . second . erase ( v ) ) { <nl> + const Expr * inlined = inline_mapping_ [ v ] ; <nl> + for ( auto * nv : VarFinder : : find ( inlined ) ) { <nl> + pair . second . insert ( nv ) ; <nl> + } <nl> + } <nl> + } <nl> + inline_mapping_ . erase ( v ) ; <nl> + } <nl> + return result ; <nl> + } <nl> + <nl> + / / Replace the target variable with the caller expressions . <nl> + const Expr * mutate ( const Var * v ) override { <nl> + auto iter = inline_mapping_ . find ( v ) ; <nl> + if ( iter = = inline_mapping_ . 
end ( ) ) { <nl> + return v ; <nl> + } else { <nl> + const Expr * expr = iter - > second ; <nl> + / / Continue to transform the value from the lookup table . <nl> + return expr - > accept_mutator ( this ) ; <nl> + } <nl> + } <nl> + <nl> + / / Handle random intrinsics which should be cached . <nl> + const Expr * mutate ( const Intrinsics * v ) override { <nl> + if ( ! in_producer_ | | v - > op_type ( ) ! = kRand ) { <nl> + return IRMutator : : mutate ( v ) ; <nl> + } <nl> + <nl> + const std : : string & name = buf_ - > name_hint ( ) ; <nl> + Var * new_var = new Var ( name , v - > dtype ( ) ) ; <nl> + random_bindings_ [ new Let ( new_var , v ) ] = index_vars_ ; <nl> + return new_var ; <nl> + } <nl> + <nl> + / / Remove the buffer write the inlined function . <nl> + Stmt * mutate ( const Store * v ) override { <nl> + if ( v = = producer_ ) { <nl> + in_producer_ = true ; <nl> + producer_ = dynamic_cast < const Store * > ( IRMutator : : mutate ( v ) ) ; <nl> + in_producer_ = false ; <nl> + return nullptr ; <nl> + } else { <nl> + return IRMutator : : mutate ( v ) ; <nl> + } <nl> + } <nl> + <nl> + / / Any Random Instrinsics that were turned into vars must be inserted here . <nl> + Stmt * mutate ( const Block * v ) override { <nl> + std : : vector < Stmt * > stmts ; <nl> + for ( Stmt * stmt : * v ) { <nl> + Stmt * stmt_new = stmt - > accept_mutator ( this ) ; <nl> + if ( ! stmt_new ) { <nl> + continue ; <nl> + } <nl> + <nl> + if ( stmt = = stmt_new ) { <nl> + stmt_new = Stmt : : clone ( stmt ) ; <nl> + } <nl> + <nl> + stmts . push_back ( stmt_new ) ; <nl> + } <nl> + <nl> + return Block : : make ( stmts ) ; <nl> + } <nl> + <nl> + Stmt * mutate ( const For * v ) override { <nl> + For * res = dynamic_cast < For * > ( IRMutator : : mutate ( v ) ) ; <nl> + if ( ! res ) { <nl> + return nullptr ; <nl> + } <nl> + <nl> + / / Find any random bindings that should be inserted in this loops body . <nl> + std : : vector < Let * > bindings_this_loop ; <nl> + const Var * fv = v - > var ( ) ; <nl> + for ( auto & pair : random_bindings_ ) { <nl> + auto & index_var = pair . second ; <nl> + if ( index_var . erase ( fv ) ) { <nl> + bindings_this_loop . push_back ( pair . first ) ; <nl> + } <nl> + } <nl> + <nl> + for ( auto * l : bindings_this_loop ) { <nl> + res - > body ( ) - > prepend_stmt ( l ) ; <nl> + random_bindings_ . erase ( l ) ; <nl> + } <nl> + return res ; <nl> + } <nl> + <nl> + private : <nl> + const Buf * buf_ ; <nl> + const Store * producer_ ; <nl> + <nl> + / / Index Vars present in the producer . <nl> + std : : unordered_set < const Var * > index_vars_ ; <nl> + <nl> + std : : unordered_map < const Var * , const Expr * > inline_mapping_ ; <nl> + <nl> + / / In the producer ' s scope - we need to bind any calls to rand ( ) . <nl> + bool in_producer_ = false ; <nl> + std : : unordered_map < Let * , std : : unordered_set < const Var * > > random_bindings_ ; <nl> + } ; <nl> + <nl> void LoopNest : : computeInline ( Stmt * s ) { <nl> - / / TODO : check if ` s ` is a body of a loop <nl> - inlined_functions_ . insert ( stmt_to_tensor_ . at ( s ) - > function ( ) ) ; <nl> + auto * s_store = dynamic_cast < Store * > ( s ) ; <nl> + if ( s_store = = nullptr ) { <nl> + throw std : : logic_error ( " Could not find buffer producer to inline " ) ; <nl> + } <nl> + computeInline ( s_store - > buf ( ) ) ; <nl> } <nl> <nl> - void LoopNest : : computeInlineWithRandom ( Stmt * s ) { <nl> - inlined_random_functions_ . insert ( stmt_to_tensor_ . 
at ( s ) - > function ( ) ) ; <nl> + void LoopNest : : computeInline ( const Buf * b ) { <nl> + for ( auto * t : output_tensors_ ) { <nl> + if ( b = = t - > buf ( ) ) { <nl> + throw std : : logic_error ( " Can ' t inline producers of output Tensors " ) ; <nl> + } <nl> + } <nl> + <nl> + / / Find producers . <nl> + Store * relevant_store { nullptr } ; <nl> + auto stores = NodeFinder < Store > : : find ( root_stmt_ ) ; <nl> + for ( auto * s : stores ) { <nl> + if ( s - > buf ( ) = = b ) { <nl> + auto reductions = NodeFinder < ReduceOp > : : find ( s ) ; <nl> + if ( ! reductions . empty ( ) ) { <nl> + throw std : : logic_error ( " cannot inline a reduction computation " ) ; <nl> + } <nl> + if ( relevant_store ! = nullptr ) { <nl> + throw std : : logic_error ( " cannot inline Buf with multiple Tensors " ) ; <nl> + } <nl> + relevant_store = s ; <nl> + } <nl> + } <nl> + <nl> + FunctionInliner inliner ( relevant_store ) ; <nl> + root_stmt_ = root_stmt_ - > accept_mutator ( & inliner ) ; <nl> + <nl> + / / No longer computing this intermediate tensor , so don ' t alloc it . <nl> + for ( auto * t : intermediate_tensors_ ) { <nl> + if ( b = = t - > buf ( ) ) { <nl> + intermediate_tensors_ . erase ( t ) ; <nl> + break ; <nl> + } <nl> + } <nl> + <nl> + for ( auto it = temp_bufs_ . begin ( ) ; it ! = temp_bufs_ . end ( ) ; + + it ) { <nl> + if ( b = = * it ) { <nl> + temp_bufs_ . erase ( it ) ; <nl> + break ; <nl> + } <nl> + } <nl> } <nl> <nl> / / TODO : Unify with DepTracker <nl> Stmt * LoopNest : : insertAllocFree ( Stmt * stmt ) { <nl> <nl> / / TODO : Fix the traversal , currently the order is non - deterministic <nl> for ( Tensor * tensor : intermediate_tensors_ ) { <nl> - if ( inlined_functions_ . count ( tensor - > function ( ) ) | | <nl> - inlined_random_functions_ . count ( tensor - > function ( ) ) ) { <nl> - / / No need to allocate memory for intermediate tensors . <nl> - continue ; <nl> - } <nl> if ( output_tensors_ . count ( tensor ) > 0 ) { <nl> / / No need to allocate memory if the tensors are given as input / output . <nl> continue ; <nl> Stmt * LoopNest : : insertAllocFree ( Stmt * stmt ) { <nl> } <nl> <nl> void LoopNest : : prepareForCodegen ( ) { <nl> - std : : vector < Function * > inlined_functions_vec ( <nl> - inlined_functions_ . begin ( ) , inlined_functions_ . end ( ) ) ; <nl> - std : : vector < Function * > inlined_randoms_vec ( <nl> - inlined_random_functions_ . begin ( ) , inlined_random_functions_ . end ( ) ) ; <nl> - root_stmt_ = InjectInlines ( root_stmt_ , inlined_functions_vec ) ; <nl> - root_stmt_ = InlineRandom ( root_stmt_ , inlined_randoms_vec ) ; <nl> - <nl> / / Expand reduction ops . <nl> ReductionExpander reduceExpander ; <nl> root_stmt_ = reduceExpander . expand ( root_stmt_ ) ; <nl> mmm a / torch / csrc / jit / tensorexpr / loopnest . h <nl> ppp b / torch / csrc / jit / tensorexpr / loopnest . h <nl> class TORCH_API LoopNest { <nl> return root_stmt_ ; <nl> } <nl> <nl> + / / These Tensor - based loop / stmt accessors are valid only as long as no <nl> + / / transformations have been made . 
<nl> std : : vector < For * > getLoopStmtsFor ( Tensor * ) const ; <nl> Stmt * getLoopBodyFor ( Tensor * ) const ; <nl> bool hasLoopBodyFor ( Tensor * ) const ; <nl> <nl> void vectorize ( Stmt * ) ; <nl> + <nl> void computeInline ( Stmt * s ) ; <nl> - void computeInlineWithRandom ( Stmt * s ) ; <nl> - void prepareForCodegen ( ) ; <nl> + void computeInline ( const Buf * b ) ; <nl> + <nl> void splitWithTail ( For * f , int factor , For * * outer , For * * inner , For * * tail ) ; <nl> void splitWithMask ( For * f , int factor , For * * outer , For * * inner ) ; <nl> + <nl> void reorderAxis ( For * a , For * b ) ; <nl> + <nl> static void unroll ( For * f , Stmt * * unrolled ) ; <nl> static void normalize ( For * f , For * * normalized ) ; <nl> <nl> void setGPUBlockIndex ( For * f , int idx ) ; <nl> void setGPUThreadIndex ( For * f , int idx ) ; <nl> - void setBufferMap ( <nl> - For * f , <nl> - const std : : unordered_map < std : : string , const Buf * > & map ) ; <nl> <nl> / / Insert a temporary computation of statement S in the scope of loop AT . <nl> / / S is assumed to be a Store or a Block containing a Store . Along with the <nl> class TORCH_API LoopNest { <nl> const Var * reduction_var , <nl> Block * insertion_point = nullptr / * optional * / ) ; <nl> <nl> + void setBufferMap ( <nl> + For * f , <nl> + const std : : unordered_map < std : : string , const Buf * > & map ) ; <nl> + <nl> + void prepareForCodegen ( ) ; <nl> + <nl> private : <nl> std : : vector < Tensor * > findAllNeededTensors ( <nl> const std : : vector < Tensor * > & tensors ) ; <nl> Stmt * lowerToStmt ( Tensor * t ) ; <nl> Stmt * insertAllocFree ( Stmt * stmt ) ; <nl> <nl> - std : : unordered_set < Function * > inlined_functions_ ; <nl> - std : : unordered_set < Function * > inlined_random_functions_ ; <nl> std : : unordered_map < Tensor * , Stmt * > tensor_to_stmt_ ; <nl> std : : unordered_map < Stmt * , Tensor * > stmt_to_tensor_ ; <nl> Stmt * root_stmt_ ; <nl> mmm a / torch / csrc / jit / tensorexpr / tensor . h <nl> ppp b / torch / csrc / jit / tensorexpr / tensor . h <nl> TORCH_API Tensor * Reduce ( <nl> const Buffer & buffer , <nl> const std : : vector < DimArg > & reduce_args ) ; <nl> <nl> + / / Overload for the common case of all dimensions of a prevously Computed <nl> + / / Tensor . <nl> + TORCH_API Tensor * Reduce ( <nl> + const std : : string & func_name , <nl> + const std : : vector < DimArg > & dim_args , <nl> + const Reducer & reducer , <nl> + Tensor * tensor , <nl> + const std : : vector < DimArg > & reduce_args ) ; <nl> + <nl> class FunctionCall : public CallNode < FunctionCall > { <nl> public : <nl> using BaseClass = CallNode < FunctionCall > ; <nl>
[ NNC ] make inlining immediate ( take 2 ) and fix bugs ( )
pytorch/pytorch
70aecd2a7f48dfe7180002fb1d9b9694ac1fcb7f
2020-09-03T23:49:24Z
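
A minimal standalone C++ sketch of the substitution idea behind the new FunctionInliner above: a consumer's read of the producer buffer is replaced by the producer's defining expression, with the producer's index variable bound to the caller-side index, so the intermediate buffer never needs to be allocated. The types and names here are invented for illustration and are not the torch::jit::tensorexpr API.

#include <cstddef>
#include <iostream>
#include <string>

// Toy model of inlining: the producer defines p[i] = a[i] + b[i]; a consumer
// that reads p[j * 2] gets the producer body with i replaced by j * 2.
struct ToyInliner {
    std::string producer_index_var;  // the producer's index variable, e.g. "i"
    std::string producer_body;       // the producer's defining expression

    // Purely textual substitution, good enough for this illustration.
    std::string inlineCall(const std::string& caller_index) const {
        std::string body = producer_body;
        std::size_t pos = 0;
        while ((pos = body.find(producer_index_var, pos)) != std::string::npos) {
            body.replace(pos, producer_index_var.size(), caller_index);
            pos += caller_index.size();
        }
        return body;
    }
};

int main() {
    ToyInliner inliner{"i", "a[i] + b[i]"};
    std::cout << "p[j * 2]  becomes  " << inliner.inlineCall("j * 2") << "\n";
    return 0;
}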
mmm a / core / ustring . cpp <nl> ppp b / core / ustring . cpp <nl> String String : : get_extension ( ) const { <nl> } <nl> <nl> String String : : plus_file ( const String & p_file ) const { <nl> - <nl> - if ( length ( ) > 0 & & operator [ ] ( length ( ) - 1 ) = = ' / ' ) <nl> + if ( empty ( ) ) <nl> + return p_file ; <nl> + if ( operator [ ] ( length ( ) - 1 ) = = ' / ' | | p_file . operator [ ] ( 0 ) = = ' / ' ) <nl> return * this + p_file ; <nl> else <nl> return * this + " / " + p_file ; <nl>
Merge pull request from neikeq / pr - plus_file
godotengine/godot
52666b88b39a91655553ba6ab5bbe47c09861e28
2017-01-14T18:53:35Z
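
The patch above tightens String::plus_file so that an empty base returns the file unchanged and no duplicate separator is inserted when either side already supplies one. A self-contained C++ sketch of the same joining rule, using std::string rather than Godot's String class:

#include <cassert>
#include <string>

// Join a base path and a file name with exactly one '/' between them.
std::string plus_file(const std::string& base, const std::string& file) {
    if (base.empty())
        return file;                       // nothing to join onto
    if (base.back() == '/' || (!file.empty() && file.front() == '/'))
        return base + file;                // a separator is already present
    return base + "/" + file;
}

int main() {
    assert(plus_file("res://scenes", "main.tscn") == "res://scenes/main.tscn");
    assert(plus_file("res://scenes/", "main.tscn") == "res://scenes/main.tscn");
    assert(plus_file("res://scenes", "/main.tscn") == "res://scenes/main.tscn");
    assert(plus_file("", "main.tscn") == "main.tscn");
    return 0;
}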
mmm a / tensorflow / core / graph / mkl_layout_pass . cc <nl> ppp b / tensorflow / core / graph / mkl_layout_pass . cc <nl> class MklLayoutRewritePass : public GraphOptimizationPass { <nl> CopyAttrsLRN , LrnRewrite } ) ; <nl> rinfo_ . push_back ( { csinfo_ . lrn_grad , <nl> mkl_op_registry : : GetMklOpName ( csinfo_ . lrn_grad ) , <nl> - CopyAttrsLRN , LrnRewrite } ) ; <nl> + CopyAttrsLRN , LrnGradRewrite } ) ; <nl> rinfo_ . push_back ( { csinfo_ . max_pool , <nl> mkl_op_registry : : GetMklOpName ( csinfo_ . max_pool ) , <nl> CopyAttrsPooling , NonDepthBatchWisePoolRewrite } ) ; <nl> rinfo_ . push_back ( { csinfo_ . max_pool_grad , <nl> mkl_op_registry : : GetMklOpName ( csinfo_ . max_pool_grad ) , <nl> - CopyAttrsPooling , AlwaysRewrite } ) ; <nl> + CopyAttrsPooling , MaxpoolGradRewrite } ) ; <nl> <nl> rinfo_ . push_back ( { csinfo_ . maximum , <nl> mkl_op_registry : : GetMklOpName ( csinfo_ . maximum ) , <nl> class MklLayoutRewritePass : public GraphOptimizationPass { <nl> return false ; <nl> } <nl> <nl> + static bool LrnGradRewrite ( const Node * n ) { <nl> + CHECK_NOTNULL ( n ) ; <nl> + bool do_rewrite = false ; <nl> + <nl> + for ( const Edge * e : n - > in_edges ( ) ) { <nl> + if ( e - > dst ( ) - > type_string ( ) = = csinfo_ . lrn_grad & & e - > dst_input ( ) = = 2 & & <nl> + e - > src ( ) - > type_string ( ) = = mkl_op_registry : : GetMklOpName ( csinfo_ . lrn ) & & e - > src_output ( ) = = 0 ) { <nl> + do_rewrite = true ; <nl> + break ; <nl> + } <nl> + } <nl> + return do_rewrite ; <nl> + } <nl> + <nl> + static bool MaxpoolGradRewrite ( const Node * n ) { <nl> + CHECK_NOTNULL ( n ) ; <nl> + bool do_rewrite = false ; <nl> + for ( const Edge * e : n - > in_edges ( ) ) { <nl> + if ( e - > dst ( ) - > type_string ( ) = = csinfo_ . max_pool_grad & & <nl> + e - > dst_input ( ) = = 1 & & e - > src ( ) - > type_string ( ) = = mkl_op_registry : : GetMklOpName ( csinfo_ . max_pool ) & & <nl> + e - > src_output ( ) = = 0 ) { <nl> + do_rewrite = true ; <nl> + break ; <nl> + } <nl> + } <nl> + return do_rewrite ; <nl> + } <nl> + <nl> static bool AddNRewrite ( const Node * n ) { <nl> CHECK_NOTNULL ( n ) ; <nl> <nl> Status MklLayoutRewritePass : : SetUpInputs ( <nl> / / TODO ( nhasabni ) We should move this to mkl_util . h . <nl> void MklLayoutRewritePass : : GetDummyWorkspaceTensorNode ( <nl> std : : unique_ptr < Graph > * g , Node * * out , Node * orig_node ) { <nl> - / / We use a tensor of shape { 1 } and value 0 to represent <nl> - / / dummy float tensor . We need this as a dummy workspace tensor . <nl> - / / Workspace tensor has type uint8 . <nl> - const DataType dt = DataTypeToEnum < uint8 > : : v ( ) ; <nl> - TensorProto proto ; <nl> - proto . set_dtype ( dt ) ; <nl> - float zero [ 1 ] = { 0 } ; <nl> - proto . set_tensor_content ( string ( reinterpret_cast < char * > ( & zero ) , 4 ) ) ; <nl> - TensorShape dummy_shape ( { 1 } ) ; <nl> - dummy_shape . AsProto ( proto . mutable_tensor_shape ( ) ) ; <nl> - TF_CHECK_OK ( NodeBuilder ( ( * g ) - > NewName ( " DMT " ) , " Const " ) <nl> - . Attr ( " value " , proto ) <nl> - . Attr ( " dtype " , dt ) <nl> - . Device ( orig_node - > def ( ) . device ( ) ) / / We place this node on <nl> - / / same the device as the <nl> - / / device of the original <nl> - / / node . <nl> - . Finalize ( & * * g , out ) ) ; <nl> - <nl> - / / If number of inputs to the original node is > 0 , then we add <nl> - / / control dependency between 1st input ( index 0 ) of the original node and <nl> - / / the dummy Mkl node . 
This is needed because control - flow ops such as Enter , <nl> - / / Merge , etc , require frame_name of the dummy Mkl node to be same as the <nl> - / / rewritten node . Adding control edge between 1st input of the original node <nl> - / / and the dummy Mkl node ensures that the dummy node is in the same frame <nl> - / / as the original node . Choosing 1st input is not necessary - any input of <nl> - / / the original node is fine because all the inputs of a node are always in <nl> - / / the same frame . <nl> - if ( orig_node - > num_inputs ( ) > 0 ) { <nl> - Node * orig_input0 = nullptr ; <nl> - TF_CHECK_OK ( <nl> - orig_node - > input_node ( 0 , const_cast < const Node * * > ( & orig_input0 ) ) ) ; <nl> - / / Allow duplicate while adding control edge as it would fail ( return <nl> - / / NULL ) if we try to add duplicate edge . <nl> - CHECK_NOTNULL ( ( * g ) - > AddControlEdge ( orig_input0 , * out , true ) ) ; <nl> - } <nl> - <nl> - ( * out ) - > set_assigned_device_name ( orig_node - > assigned_device_name ( ) ) ; <nl> + / / We use uint8 tensor of shape 8 with content { 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 } to represent <nl> + / / workspace tensor . <nl> + GetDummyMklTensorNode ( g , out , orig_node ) ; <nl> } <nl> <nl> void MklLayoutRewritePass : : AddWorkSpaceEdgeIfNeeded ( <nl> mmm a / tensorflow / core / graph / mkl_layout_pass_test . cc <nl> ppp b / tensorflow / core / graph / mkl_layout_pass_test . cc <nl> TEST_F ( MklLayoutPassTest , LRN_Negative2 ) { <nl> " node { name : ' E ' op : ' Zeta ' attr { key : ' T ' value { type : DT_FLOAT } } " <nl> " input : [ ' A ' , ' D ' ] } " ) ; <nl> EXPECT_EQ ( DoMklLayoutOptimizationPass ( ) , <nl> - " A ( Input ) ; B ( Input ) ; C ( Input ) ; D ( _MklLRNGrad ) ; DMT / _0 ( Const ) ; " <nl> - " DMT / _1 ( Const ) ; DMT / _2 ( Const ) ; DMT / _3 ( Const ) ; DMT / _4 ( Const ) ; E ( Zeta ) | " <nl> - " A - > D ; A - > E ; A : control - > DMT / _0 : control ; A : control - > DMT / _1 : control ; " <nl> - " A : control - > DMT / _2 : control ; A : control - > DMT / _3 : control ; " <nl> - " A : control - > DMT / _4 : control ; B - > D : 1 ; C - > D : 2 ; D - > E : 1 ; DMT / _0 - > D : 3 ; " <nl> - " DMT / _1 - > D : 7 ; DMT / _2 - > D : 4 ; DMT / _3 - > D : 5 ; DMT / _4 - > D : 6 " ) ; <nl> + " A ( Input ) ; B ( Input ) ; C ( Input ) ; D ( LRNGrad ) ; " <nl> + " E ( Zeta ) | A - > D ; A - > E ; B - > D : 1 ; C - > D : 2 ; D - > E : 1 " ) ; <nl> } <nl> <nl> / * Test LRN - > LRNGrad negative case , where single LRN feeds <nl> TEST_F ( MklLayoutPassTest , LRN_Negative3 ) { <nl> " input : [ ' E ' , ' F ' ] } " ) ; <nl> EXPECT_EQ ( DoMklLayoutOptimizationPass ( ) , <nl> " A ( Input ) ; B ( _MklLRN ) ; C ( Input ) ; D ( Input ) ; DMT / _0 ( Const ) ; DMT / _1 ( Const ) ; " <nl> - " DMT / _2 ( Const ) ; DMT / _3 ( Const ) ; DMT / _4 ( Const ) ; DMT / _5 ( Const ) ; " <nl> - " DMT / _6 ( Const ) ; E ( _MklLRNGrad ) ; F ( _MklLRNGrad ) ; G ( Zeta ) | A - > B ; " <nl> - " A : control - > DMT / _0 : control ; B - > E : 2 ; " <nl> - " B - > F : 1 ; B : 1 - > E : 3 ; B : 2 - > E : 6 ; B : 2 - > F : 5 ; B : 3 - > E : 7 ; C - > E ; C - > F ; " <nl> - " C : control - > DMT / _1 : control ; C : control - > DMT / _2 : control ; " <nl> - " C : control - > DMT / _3 : control ; C : control - > DMT / _4 : control ; " <nl> - " C : control - > DMT / _5 : control ; C : control - > DMT / _6 : control ; " <nl> - " D - > E : 1 ; D - > F : 2 ; DMT / _0 - > B : 1 ; DMT / _1 - > E : 4 ; DMT / _2 - > E : 5 ; DMT / _3 - > F : 3 ; " <nl> - " DMT / _4 - > F : 7 ; DMT / _5 - > F : 4 ; DMT 
/ _6 - > F : 6 ; E - > G ; F - > G : 1 " ) ; <nl> + " DMT / _2 ( Const ) ; E ( _MklLRNGrad ) ; F ( LRNGrad ) ; G ( Zeta ) | A - > B ; " <nl> + " A : control - > DMT / _0 : control ; B - > E : 2 ; B - > F : 1 ; B : 1 - > E : 3 ; B : 2 - > E : 6 ; " <nl> + " B : 3 - > E : 7 ; C - > E ; C - > F ; C : control - > DMT / _1 : control ; " <nl> + " C : control - > DMT / _2 : control ; D - > E : 1 ; D - > F : 2 ; DMT / _0 - > B : 1 ; " <nl> + " DMT / _1 - > E : 4 ; DMT / _2 - > E : 5 ; E - > G ; F - > G : 1 " ) ; <nl> } <nl> <nl> / * Test MaxPool - > MaxPoolGrad replacement by workspace + rewrite nodes . * / <nl> TEST_F ( MklLayoutPassTest , NodeWorkspace_MaxPool_Negative2 ) { <nl> " node { name : ' E ' op : ' Zeta ' attr { key : ' T ' value { type : DT_FLOAT } } " <nl> " input : [ ' A ' , ' D ' ] } " ) ; <nl> EXPECT_EQ ( DoMklLayoutOptimizationPass ( ) , <nl> - " A ( Input ) ; B ( Input ) ; C ( Input ) ; D ( _MklMaxPoolGrad ) ; DMT / _0 ( Const ) ; " <nl> - " DMT / _1 ( Const ) ; DMT / _2 ( Const ) ; DMT / _3 ( Const ) ; DMT / _4 ( Const ) ; E ( Zeta ) | " <nl> - " A - > D ; A - > E ; A : control - > DMT / _0 : control ; A : control - > DMT / _1 : control ; " <nl> - " A : control - > DMT / _2 : control ; A : control - > DMT / _3 : control ; " <nl> - " A : control - > DMT / _4 : control ; B - > D : 1 ; C - > D : 2 ; D - > E : 1 ; DMT / _0 - > D : 3 ; " <nl> - " DMT / _1 - > D : 7 ; DMT / _2 - > D : 4 ; DMT / _3 - > D : 5 ; DMT / _4 - > D : 6 " ) ; <nl> + " A ( Input ) ; B ( Input ) ; C ( Input ) ; D ( MaxPoolGrad ) ; " <nl> + " E ( Zeta ) | A - > D ; A - > E ; B - > D : 1 ; C - > D : 2 ; D - > E : 1 " ) ; <nl> } <nl> <nl> / / Test MaxPool handling for batch - wise pooling ( NCHW ) <nl> mmm a / tensorflow / core / kernels / mkl_reshape_op . cc <nl> ppp b / tensorflow / core / kernels / mkl_reshape_op . cc <nl> class MklReshapeOp : public OpKernel { <nl> / / If Tensorflow ' s data format and the underlying format maintained by <nl> / / MKLDNN are equivalent ( both are NHWC or both are NCHW ) , then we can <nl> / / safely return true . <nl> + / / @ todo : Future do not force skip reorder for all blocked format . Use <nl> + / / blocking_desc_is_equal ( ) for checking all the stride arrays in <nl> + / / mkl - dnn / blob / master / src / common / type_helpers . hpp <nl> auto input_mkl_md = mkl_shape_input . GetMklLayout ( ) ; <nl> - if ( mkl_shape_input . GetTfDataFormat ( ) = = input_mkl_md . data . format ) { <nl> + if ( mkl_shape_input . GetTfDataFormat ( ) = = input_mkl_md . data . format & & <nl> + mkl_shape_input . GetTfDataFormat ( ) ! = memory : : format : : blocked ) { <nl> ret = true ; <nl> } <nl> <nl>
Don ' t rewrite LRNGrad and MaxPoolGrad to MKL versions if the corresponding fwd operators are not present , since these MKL grad operators require a workspace
tensorflow/tensorflow
6743cb8ab362025229c79163c597dcec5ee70402
2018-07-20T23:15:24Z
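
The new LrnGradRewrite and MaxpoolGradRewrite predicates above only rewrite a gradient node to its MKL variant when the relevant input slot is fed by output 0 of the corresponding MKL forward op, because the MKL gradient kernels need the workspace that only the MKL forward op produces. A self-contained C++ sketch of that edge-scan check over a toy graph representation (not TensorFlow's Graph and Node classes):

#include <iostream>
#include <string>
#include <vector>

// Toy graph edge: producing node's op name and output slot, plus the
// consuming node's op name and input slot.
struct Edge {
    std::string src_op;
    int src_output;
    std::string dst_op;
    int dst_input;
};

// Rewrite the grad op only if the workspace-carrying input slot is fed by
// output 0 of the corresponding MKL forward op.
bool ShouldRewriteGrad(const std::vector<Edge>& in_edges,
                       const std::string& grad_op,
                       int workspace_input_slot,
                       const std::string& mkl_forward_op) {
    for (const Edge& e : in_edges) {
        if (e.dst_op == grad_op && e.dst_input == workspace_input_slot &&
            e.src_op == mkl_forward_op && e.src_output == 0) {
            return true;
        }
    }
    return false;
}

int main() {
    std::vector<Edge> mkl_fed = {{"_MklMaxPool", 0, "MaxPoolGrad", 1}};
    std::cout << ShouldRewriteGrad(mkl_fed, "MaxPoolGrad", 1, "_MklMaxPool")
              << "\n";  // 1: MKL forward producer present, rewrite the grad
    std::vector<Edge> plain = {{"Input", 0, "MaxPoolGrad", 1}};
    std::cout << ShouldRewriteGrad(plain, "MaxPoolGrad", 1, "_MklMaxPool")
              << "\n";  // 0: no MKL forward producer, keep the vanilla grad
    return 0;
}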
mmm a / src / mongo / db / s / check_sharding_index_command . cpp <nl> ppp b / src / mongo / db / s / check_sharding_index_command . cpp <nl> <nl> # include " mongo / db / commands . h " <nl> # include " mongo / db / db_raii . h " <nl> # include " mongo / db / keypattern . h " <nl> - # include " mongo / db / s / collection_sharding_state . h " <nl> <nl> namespace mongo { <nl> - <nl> - using std : : string ; <nl> - using std : : unique_ptr ; <nl> - <nl> namespace { <nl> <nl> class CheckShardingIndex : public ErrmsgCommandDeprecated { <nl> class CheckShardingIndex : public ErrmsgCommandDeprecated { <nl> return " Internal command . \ n " ; <nl> } <nl> <nl> - virtual bool supportsWriteConcern ( const BSONObj & cmd ) const override { <nl> + bool supportsWriteConcern ( const BSONObj & cmd ) const override { <nl> return false ; <nl> } <nl> <nl> class CheckShardingIndex : public ErrmsgCommandDeprecated { <nl> return AllowedOnSecondary : : kNever ; <nl> } <nl> <nl> - virtual void addRequiredPrivileges ( const std : : string & dbname , <nl> - const BSONObj & cmdObj , <nl> - std : : vector < Privilege > * out ) const { <nl> + void addRequiredPrivileges ( const std : : string & dbname , <nl> + const BSONObj & cmdObj , <nl> + std : : vector < Privilege > * out ) const override { <nl> ActionSet actions ; <nl> actions . addAction ( ActionType : : find ) ; <nl> out - > push_back ( Privilege ( parseResourcePattern ( dbname , cmdObj ) , actions ) ) ; <nl> } <nl> <nl> - virtual std : : string parseNs ( const std : : string & dbname , const BSONObj & cmdObj ) const { <nl> + std : : string parseNs ( const std : : string & dbname , const BSONObj & cmdObj ) const override { <nl> return CommandHelpers : : parseNsFullyQualified ( cmdObj ) ; <nl> } <nl> <nl> class CheckShardingIndex : public ErrmsgCommandDeprecated { <nl> const std : : string & dbname , <nl> const BSONObj & jsobj , <nl> std : : string & errmsg , <nl> - BSONObjBuilder & result ) { <nl> + BSONObjBuilder & result ) override { <nl> const NamespaceString nss = NamespaceString ( parseNs ( dbname , jsobj ) ) ; <nl> <nl> BSONObj keyPattern = jsobj . getObjectField ( " keyPattern " ) ; <nl> class CheckShardingIndex : public ErrmsgCommandDeprecated { <nl> } <nl> <nl> AutoGetCollectionForReadCommand autoColl ( opCtx , nss ) ; <nl> - CollectionShardingState : : get ( opCtx , nss ) - > checkShardVersionOrThrow_DEPRECATED ( opCtx ) ; <nl> - <nl> Collection * const collection = autoColl . getCollection ( ) ; <nl> if ( ! collection ) { <nl> errmsg = " ns not found " ; <nl>
SERVER - 47474 Remove direct call to checkShardVersionOrThrow from CheckShardingIndex
mongodb/mongo
dd910a6ed371aaa41a04611ab3b0cf79ac1fce7b
2020-04-13T11:54:53Z
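
Alongside removing the deprecated shard-version check, the diff above replaces redundant 'virtual' with 'override' on the command's overriding methods. A short self-contained C++ sketch of why that annotation helps, with a simplified toy signature rather than the real Command interface: the compiler rejects an intended override whose signature no longer matches the base class.

#include <iostream>
#include <memory>

struct Command {
    virtual bool supportsWriteConcern() const { return true; }
    virtual ~Command() = default;
};

struct CheckIndexCommand : Command {
    // 'override' makes the intent explicit: if the base signature drifted
    // (say the 'const' were dropped there), this declaration would fail to
    // compile instead of silently adding a new, unrelated virtual function.
    bool supportsWriteConcern() const override { return false; }
};

int main() {
    std::unique_ptr<Command> c = std::make_unique<CheckIndexCommand>();
    std::cout << c->supportsWriteConcern() << "\n";  // 0: the override runs
    return 0;
}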
mmm a / tensorflow / core / profiler / profiler_service . proto <nl> ppp b / tensorflow / core / profiler / profiler_service . proto <nl> <nl> syntax = " proto3 " ; <nl> + <nl> package tensorflow ; <nl> <nl> import " tensorflow / core / framework / graph . proto " ; <nl> - import " tensorflow / core / protobuf / config . proto " ; <nl> import " tensorflow / core / profiler / op_profile . proto " ; <nl> + import " tensorflow / core / protobuf / config . proto " ; <nl> <nl> / / The ProfilerService service retrieves performance information about <nl> / / the programs running on connected devices over a period of time . <nl> service ProfilerService { <nl> / / Starts a profiling session , blocks until it completes , and returns data . <nl> - rpc Profile ( ProfileRequest ) returns ( ProfileResponse ) { <nl> - } <nl> + rpc Profile ( ProfileRequest ) returns ( ProfileResponse ) { } <nl> / / Collects profiling data and returns user - friendly metrics . <nl> - rpc Monitor ( MonitorRequest ) returns ( MonitorResponse ) { <nl> - } <nl> + rpc Monitor ( MonitorRequest ) returns ( MonitorResponse ) { } <nl> } <nl> <nl> message ProfileOptions { <nl> message MonitorRequest { <nl> / / information , step time information , etc . Do not use this option if the TPU <nl> / / host is being very heavily used . <nl> int32 monitoring_level = 2 ; <nl> + / / True to display timestamp in monitoring result . <nl> + bool timestamp = 3 ; <nl> <nl> - / / next - field : 3 <nl> + / / next - field : 4 <nl> } <nl> <nl> message MonitorResponse { <nl> mmm a / tensorflow / core / profiler / rpc / client / capture_profile . cc <nl> ppp b / tensorflow / core / profiler / rpc / client / capture_profile . cc <nl> Status StartTracing ( const tensorflow : : string & service_addr , <nl> return status ; <nl> } <nl> <nl> - MonitorRequest PopulateMonitorRequest ( int duration_ms , int monitoring_level ) { <nl> + MonitorRequest PopulateMonitorRequest ( int duration_ms , int monitoring_level , <nl> + bool timestamp ) { <nl> MonitorRequest request ; <nl> request . set_duration_ms ( duration_ms ) ; <nl> request . set_monitoring_level ( monitoring_level ) ; <nl> + request . set_timestamp ( timestamp ) ; <nl> return request ; <nl> } <nl> <nl> - / / Repeatedly collects profiles and shows user - friendly metrics for <nl> - / / ' num_queries ' time ( s ) . <nl> void StartMonitoring ( const tensorflow : : string & service_addr , int duration_ms , <nl> int monitoring_level , int num_queries ) { <nl> for ( int query = 0 ; query < num_queries ; + + query ) { <nl> MonitorRequest request = <nl> - PopulateMonitorRequest ( duration_ms , monitoring_level ) ; <nl> + PopulateMonitorRequest ( duration_ms , monitoring_level , <nl> + / * timestamp = * / false ) ; <nl> <nl> : : grpc : : ClientContext context ; <nl> : : grpc : : ChannelArguments channel_args ; <nl>
Added a timestamp option to the TensorFlow TPU profiler monitoring result .
tensorflow/tensorflow
ee2cbcfe298cce108cce9c7a3f8456d798d230fa
2019-04-10T23:54:44Z
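
The change above threads a new timestamp flag from MonitorRequest through PopulateMonitorRequest, with StartMonitoring currently passing false. A toy C++ mirror of that request-population pattern, using a plain struct in place of the generated protobuf class; the field and function names simply follow the diff:

#include <iostream>

// Stand-in for the generated MonitorRequest protobuf message.
struct MonitorRequest {
    int duration_ms = 0;
    int monitoring_level = 0;
    bool timestamp = false;  // new field: include timestamps in the result
};

MonitorRequest PopulateMonitorRequest(int duration_ms, int monitoring_level,
                                      bool timestamp) {
    MonitorRequest request;
    request.duration_ms = duration_ms;
    request.monitoring_level = monitoring_level;
    request.timestamp = timestamp;
    return request;
}

int main() {
    // A caller that wants timestamps in the monitoring output flips the flag.
    MonitorRequest req = PopulateMonitorRequest(1000, 2, /*timestamp=*/true);
    std::cout << req.duration_ms << " " << req.monitoring_level << " "
              << req.timestamp << "\n";
    return 0;
}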
mmm a / stdlib / public / SDK / Foundation / URLComponents . swift <nl> ppp b / stdlib / public / SDK / Foundation / URLComponents . swift <nl> public struct URLComponents : ReferenceConvertible , Hashable , Equatable , _Mutabl <nl> set { _applyMutation { $ 0 . queryItems = newValue } } <nl> } <nl> <nl> + / / / Returns an array of query items for this ` URLComponents ` , in the order in which they appear in the original query string . Any percent - encoding in a query item name or value is retained <nl> + / / / <nl> + / / / The setter combines an array containing any number of ` URLQueryItem ` s , each of which represents a single key - value pair , into a query string and sets the ` URLComponents ` query property . This property assumes the query item names and values are already correctly percent - encoded , and that the query item names do not contain the query item delimiter characters ' & ' and ' = ' . Attempting to set an incorrectly percent - encoded query item or a query item name with the query item delimiter characters ' & ' and ' = ' will cause a ` fatalError ` . <nl> + @ available ( macOS 10 . 13 , iOS 11 . 0 , tvOS 11 . 0 , watchOS 4 . 0 , * ) <nl> + public var percentEncodedQueryItems : [ URLQueryItem ] ? { <nl> + get { return _handle . map { $ 0 . percentEncodedQueryItems } } <nl> + set { _applyMutation { $ 0 . percentEncodedQueryItems = newValue } } <nl> + } <nl> + <nl> public var hashValue : Int { <nl> return _handle . map { $ 0 . hash } <nl> } <nl> mmm a / test / stdlib / TestURL . swift <nl> ppp b / test / stdlib / TestURL . swift <nl> class TestURL : TestURLSuper { <nl> <nl> func testURLComponents ( ) { <nl> / / Not meant to be a test of all URL components functionality , just some basic bridging stuff <nl> - let s = " http : / / www . apple . com / us / search / ipad ? src = globalnav " <nl> - let components = URLComponents ( string : s ) ! <nl> + let s = " http : / / www . apple . com / us / search / ipad ? src = global % 7Cnav " <nl> + var components = URLComponents ( string : s ) ! <nl> expectNotNil ( components ) <nl> <nl> expectNotNil ( components . host ) <nl> class TestURL : TestURLSuper { <nl> <nl> expectEqual ( " src " , first . name ) <nl> expectNotNil ( first . value ) <nl> - expectEqual ( " globalnav " , first . value ) <nl> + expectEqual ( " global | nav " , first . value ) <nl> } <nl> - } <nl> + <nl> + if # available ( OSX 10 . 13 , iOS 11 . 0 , * ) { <nl> + components . percentEncodedQuery = " name1 % E2 % 80 % A2 = value1 % E2 % 80 % A2 & name2 % E2 % 80 % A2 = value2 % E2 % 80 % A2 " <nl> + var qi = components . queryItems ! <nl> + expectNotNil ( qi ) <nl> + <nl> + expectEqual ( 2 , qi . count ) <nl> + <nl> + expectEqual ( " name1 • " , qi [ 0 ] . name ) <nl> + expectNotNil ( qi [ 0 ] . value ) <nl> + expectEqual ( " value1 • " , qi [ 0 ] . value ) <nl> + <nl> + expectEqual ( " name2 • " , qi [ 1 ] . name ) <nl> + expectNotNil ( qi [ 1 ] . value ) <nl> + expectEqual ( " value2 • " , qi [ 1 ] . value ) <nl> + <nl> + qi = components . percentEncodedQueryItems ! <nl> + expectNotNil ( qi ) <nl> + <nl> + expectEqual ( 2 , qi . count ) <nl> + <nl> + expectEqual ( " name1 % E2 % 80 % A2 " , qi [ 0 ] . name ) <nl> + expectNotNil ( qi [ 0 ] . value ) <nl> + expectEqual ( " value1 % E2 % 80 % A2 " , qi [ 0 ] . value ) <nl> + <nl> + expectEqual ( " name2 % E2 % 80 % A2 " , qi [ 1 ] . name ) <nl> + expectNotNil ( qi [ 0 ] . value ) <nl> + expectEqual ( " value2 % E2 % 80 % A2 " , qi [ 1 ] . value ) <nl> + <nl> + qi [ 0 ] . 
name = " % E2 % 80 % A2name1 " <nl> + qi [ 0 ] . value = " % E2 % 80 % A2value1 " <nl> + qi [ 1 ] . name = " % E2 % 80 % A2name2 " <nl> + qi [ 1 ] . value = " % E2 % 80 % A2value2 " <nl> + <nl> + components . percentEncodedQueryItems = qi <nl> + <nl> + expectEqual ( " % E2 % 80 % A2name1 = % E2 % 80 % A2value1 & % E2 % 80 % A2name2 = % E2 % 80 % A2value2 " , components . percentEncodedQuery ) <nl> + } <nl> + } <nl> <nl> func testURLResourceValues ( ) { <nl> <nl>
URLComponents is missing percentEncodedQueryItems . I added it and modified the URLComponents test code to make sure it works .
apple/swift
a7d3efd6f8d08102957393609e4662c7df17aed9
2018-06-28T21:10:47Z
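
The new percentEncodedQueryItems property differs from queryItems only in whether the '%XX' escapes in item names and values are decoded. Since the code in this collection is mostly C++, here is a small self-contained C++ sketch of that distinction using a toy query-string parser (not Foundation's implementation):

#include <cctype>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Decode %XX escapes; everything else passes through unchanged.
std::string PercentDecode(const std::string& s) {
    std::string out;
    for (std::size_t i = 0; i < s.size(); ++i) {
        if (s[i] == '%' && i + 2 < s.size() &&
            std::isxdigit(static_cast<unsigned char>(s[i + 1])) &&
            std::isxdigit(static_cast<unsigned char>(s[i + 2]))) {
            out += static_cast<char>(std::stoi(s.substr(i + 1, 2), nullptr, 16));
            i += 2;
        } else {
            out += s[i];
        }
    }
    return out;
}

// Split "a=1&b=2" into (name, value) pairs, optionally decoding each part.
std::vector<std::pair<std::string, std::string>>
QueryItems(const std::string& query, bool decode) {
    std::vector<std::pair<std::string, std::string>> items;
    std::size_t start = 0;
    while (start <= query.size()) {
        std::size_t amp = query.find('&', start);
        if (amp == std::string::npos) amp = query.size();
        std::string item = query.substr(start, amp - start);
        std::size_t eq = item.find('=');
        std::string name = item.substr(0, eq);
        std::string value = (eq == std::string::npos) ? "" : item.substr(eq + 1);
        if (decode) { name = PercentDecode(name); value = PercentDecode(value); }
        items.emplace_back(name, value);
        start = amp + 1;
    }
    return items;
}

int main() {
    const std::string query = "src=global%7Cnav&name1%E2%80%A2=value1%E2%80%A2";
    for (auto& [n, v] : QueryItems(query, /*decode=*/false))
        std::cout << n << " = " << v << "\n";  // raw, percent-encoded items
    for (auto& [n, v] : QueryItems(query, /*decode=*/true))
        std::cout << n << " = " << v << "\n";  // decoded items ("global|nav")
    return 0;
}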
mmm a / stdlib / public / core / CMakeLists . txt <nl> ppp b / stdlib / public / core / CMakeLists . txt <nl> set ( SWIFTLIB_SOURCES <nl> CollectionOfOne . swift <nl> DiscontiguousSlice . swift <nl> Diffing . swift <nl> + FloatingPointRandom . swift <nl> Mirror . swift <nl> PlaygroundDisplay . swift <nl> CommandLine . swift <nl> mmm a / stdlib / public / core / FloatingPoint . swift <nl> ppp b / stdlib / public / core / FloatingPoint . swift <nl> extension BinaryFloatingPoint where Self . RawSignificand : FixedWidthInteger { <nl> guard exact else { return nil } <nl> self = value_ <nl> } <nl> - <nl> - / / / Returns a random value within the specified range , using the given <nl> - / / / generator as a source for randomness . <nl> - / / / <nl> - / / / Use this method to generate a floating - point value within a specific <nl> - / / / range when you are using a custom random number generator . This example <nl> - / / / creates three new values in the range ` 10 . 0 . . < 20 . 0 ` . <nl> - / / / <nl> - / / / for _ in 1 . . . 3 { <nl> - / / / print ( Double . random ( in : 10 . 0 . . < 20 . 0 , using : & myGenerator ) ) <nl> - / / / } <nl> - / / / / / Prints " 18 . 1900709259179 " <nl> - / / / / / Prints " 14 . 2286325689993 " <nl> - / / / / / Prints " 13 . 1485686260762 " <nl> - / / / <nl> - / / / The ` random ( in : using : ) ` static method chooses a random value from a <nl> - / / / continuous uniform distribution in ` range ` , and then converts that value <nl> - / / / to the nearest representable value in this type . Depending on the size <nl> - / / / and span of ` range ` , some concrete values may be represented more <nl> - / / / frequently than others . <nl> - / / / <nl> - / / / - Note : The algorithm used to create random values may change in a future <nl> - / / / version of Swift . If you ' re passing a generator that results in the <nl> - / / / same sequence of floating - point values each time you run your program , <nl> - / / / that sequence may change when your program is compiled using a <nl> - / / / different version of Swift . <nl> - / / / <nl> - / / / - Parameters : <nl> - / / / - range : The range in which to create a random value . <nl> - / / / ` range ` must be finite and non - empty . <nl> - / / / - generator : The random number generator to use when creating the <nl> - / / / new random value . <nl> - / / / - Returns : A random value within the bounds of ` range ` . <nl> - @ inlinable <nl> - public static func random < T : RandomNumberGenerator > ( <nl> - in range : Range < Self > , <nl> - using generator : inout T <nl> - ) - > Self { <nl> - _precondition ( <nl> - ! range . isEmpty , <nl> - " Can ' t get random value with an empty range " <nl> - ) <nl> - let delta = range . upperBound - range . lowerBound <nl> - / / TODO : this still isn ' t quite right , because the computation of delta <nl> - / / can overflow ( e . g . if . upperBound = . maximumFiniteMagnitude and <nl> - / / . lowerBound = - . upperBound ) ; this should be re - written with an <nl> - / / algorithm that handles that case correctly , but this precondition <nl> - / / is an acceptable short - term fix . <nl> - _precondition ( <nl> - delta . isFinite , <nl> - " There is no uniform distribution on an infinite range " <nl> - ) <nl> - let rand : Self . RawSignificand <nl> - if Self . RawSignificand . bitWidth = = Self . significandBitCount + 1 { <nl> - rand = generator . next ( ) <nl> - } else { <nl> - let significandCount = Self . significandBitCount + 1 <nl> - let maxSignificand : Self . 
RawSignificand = 1 < < significandCount <nl> - / / Rather than use . next ( upperBound : ) , which has to work with arbitrary <nl> - / / upper bounds , and therefore does extra work to avoid bias , we can take <nl> - / / a shortcut because we know that maxSignificand is a power of two . <nl> - rand = generator . next ( ) & ( maxSignificand - 1 ) <nl> - } <nl> - let unitRandom = Self . init ( rand ) * ( Self . ulpOfOne / 2 ) <nl> - let randFloat = delta * unitRandom + range . lowerBound <nl> - if randFloat = = range . upperBound { <nl> - return Self . random ( in : range , using : & generator ) <nl> - } <nl> - return randFloat <nl> - } <nl> - <nl> - / / / Returns a random value within the specified range . <nl> - / / / <nl> - / / / Use this method to generate a floating - point value within a specific <nl> - / / / range . This example creates three new values in the range <nl> - / / / ` 10 . 0 . . < 20 . 0 ` . <nl> - / / / <nl> - / / / for _ in 1 . . . 3 { <nl> - / / / print ( Double . random ( in : 10 . 0 . . < 20 . 0 ) ) <nl> - / / / } <nl> - / / / / / Prints " 18 . 1900709259179 " <nl> - / / / / / Prints " 14 . 2286325689993 " <nl> - / / / / / Prints " 13 . 1485686260762 " <nl> - / / / <nl> - / / / The ` random ( ) ` static method chooses a random value from a continuous <nl> - / / / uniform distribution in ` range ` , and then converts that value to the <nl> - / / / nearest representable value in this type . Depending on the size and span <nl> - / / / of ` range ` , some concrete values may be represented more frequently than <nl> - / / / others . <nl> - / / / <nl> - / / / This method is equivalent to calling ` random ( in : using : ) ` , passing in the <nl> - / / / system ' s default random generator . <nl> - / / / <nl> - / / / - Parameter range : The range in which to create a random value . <nl> - / / / ` range ` must be finite and non - empty . <nl> - / / / - Returns : A random value within the bounds of ` range ` . <nl> - @ inlinable <nl> - public static func random ( in range : Range < Self > ) - > Self { <nl> - var g = SystemRandomNumberGenerator ( ) <nl> - return Self . random ( in : range , using : & g ) <nl> - } <nl> - <nl> - / / / Returns a random value within the specified range , using the given <nl> - / / / generator as a source for randomness . <nl> - / / / <nl> - / / / Use this method to generate a floating - point value within a specific <nl> - / / / range when you are using a custom random number generator . This example <nl> - / / / creates three new values in the range ` 10 . 0 . . . 20 . 0 ` . <nl> - / / / <nl> - / / / for _ in 1 . . . 3 { <nl> - / / / print ( Double . random ( in : 10 . 0 . . . 20 . 0 , using : & myGenerator ) ) <nl> - / / / } <nl> - / / / / / Prints " 18 . 1900709259179 " <nl> - / / / / / Prints " 14 . 2286325689993 " <nl> - / / / / / Prints " 13 . 1485686260762 " <nl> - / / / <nl> - / / / The ` random ( in : using : ) ` static method chooses a random value from a <nl> - / / / continuous uniform distribution in ` range ` , and then converts that value <nl> - / / / to the nearest representable value in this type . Depending on the size <nl> - / / / and span of ` range ` , some concrete values may be represented more <nl> - / / / frequently than others . <nl> - / / / <nl> - / / / - Note : The algorithm used to create random values may change in a future <nl> - / / / version of Swift . 
If you ' re passing a generator that results in the <nl> - / / / same sequence of floating - point values each time you run your program , <nl> - / / / that sequence may change when your program is compiled using a <nl> - / / / different version of Swift . <nl> - / / / <nl> - / / / - Parameters : <nl> - / / / - range : The range in which to create a random value . Must be finite . <nl> - / / / - generator : The random number generator to use when creating the <nl> - / / / new random value . <nl> - / / / - Returns : A random value within the bounds of ` range ` . <nl> - @ inlinable <nl> - public static func random < T : RandomNumberGenerator > ( <nl> - in range : ClosedRange < Self > , <nl> - using generator : inout T <nl> - ) - > Self { <nl> - _precondition ( <nl> - ! range . isEmpty , <nl> - " Can ' t get random value with an empty range " <nl> - ) <nl> - let delta = range . upperBound - range . lowerBound <nl> - / / TODO : this still isn ' t quite right , because the computation of delta <nl> - / / can overflow ( e . g . if . upperBound = . maximumFiniteMagnitude and <nl> - / / . lowerBound = - . upperBound ) ; this should be re - written with an <nl> - / / algorithm that handles that case correctly , but this precondition <nl> - / / is an acceptable short - term fix . <nl> - _precondition ( <nl> - delta . isFinite , <nl> - " There is no uniform distribution on an infinite range " <nl> - ) <nl> - let rand : Self . RawSignificand <nl> - if Self . RawSignificand . bitWidth = = Self . significandBitCount + 1 { <nl> - rand = generator . next ( ) <nl> - let tmp : UInt8 = generator . next ( ) & 1 <nl> - if rand = = Self . RawSignificand . max & & tmp = = 1 { <nl> - return range . upperBound <nl> - } <nl> - } else { <nl> - let significandCount = Self . significandBitCount + 1 <nl> - let maxSignificand : Self . RawSignificand = 1 < < significandCount <nl> - rand = generator . next ( upperBound : maxSignificand + 1 ) <nl> - if rand = = maxSignificand { <nl> - return range . upperBound <nl> - } <nl> - } <nl> - let unitRandom = Self . init ( rand ) * ( Self . ulpOfOne / 2 ) <nl> - let randFloat = delta * unitRandom + range . lowerBound <nl> - return randFloat <nl> - } <nl> - <nl> - / / / Returns a random value within the specified range . <nl> - / / / <nl> - / / / Use this method to generate a floating - point value within a specific <nl> - / / / range . This example creates three new values in the range <nl> - / / / ` 10 . 0 . . . 20 . 0 ` . <nl> - / / / <nl> - / / / for _ in 1 . . . 3 { <nl> - / / / print ( Double . random ( in : 10 . 0 . . . 20 . 0 ) ) <nl> - / / / } <nl> - / / / / / Prints " 18 . 1900709259179 " <nl> - / / / / / Prints " 14 . 2286325689993 " <nl> - / / / / / Prints " 13 . 1485686260762 " <nl> - / / / <nl> - / / / The ` random ( ) ` static method chooses a random value from a continuous <nl> - / / / uniform distribution in ` range ` , and then converts that value to the <nl> - / / / nearest representable value in this type . Depending on the size and span <nl> - / / / of ` range ` , some concrete values may be represented more frequently than <nl> - / / / others . <nl> - / / / <nl> - / / / This method is equivalent to calling ` random ( in : using : ) ` , passing in the <nl> - / / / system ' s default random generator . <nl> - / / / <nl> - / / / - Parameter range : The range in which to create a random value . Must be finite . <nl> - / / / - Returns : A random value within the bounds of ` range ` . 
<nl> - @ inlinable <nl> - public static func random ( in range : ClosedRange < Self > ) - > Self { <nl> - var g = SystemRandomNumberGenerator ( ) <nl> - return Self . random ( in : range , using : & g ) <nl> - } <nl> } <nl> new file mode 100644 <nl> index 000000000000 . . 8aeb16eb3603 <nl> mmm / dev / null <nl> ppp b / stdlib / public / core / FloatingPointRandom . swift <nl> <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2020 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See https : / / swift . org / LICENSE . txt for license information <nl> + / / See https : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + extension BinaryFloatingPoint where Self . RawSignificand : FixedWidthInteger { <nl> + <nl> + / / / Returns a random value within the specified range , using the given <nl> + / / / generator as a source for randomness . <nl> + / / / <nl> + / / / Use this method to generate a floating - point value within a specific <nl> + / / / range when you are using a custom random number generator . This example <nl> + / / / creates three new values in the range ` 10 . 0 . . < 20 . 0 ` . <nl> + / / / <nl> + / / / for _ in 1 . . . 3 { <nl> + / / / print ( Double . random ( in : 10 . 0 . . < 20 . 0 , using : & myGenerator ) ) <nl> + / / / } <nl> + / / / / / Prints " 18 . 1900709259179 " <nl> + / / / / / Prints " 14 . 2286325689993 " <nl> + / / / / / Prints " 13 . 1485686260762 " <nl> + / / / <nl> + / / / The ` random ( in : using : ) ` static method chooses a random value from a <nl> + / / / continuous uniform distribution in ` range ` , and then converts that value <nl> + / / / to the nearest representable value in this type . Depending on the size <nl> + / / / and span of ` range ` , some concrete values may be represented more <nl> + / / / frequently than others . <nl> + / / / <nl> + / / / - Note : The algorithm used to create random values may change in a future <nl> + / / / version of Swift . If you ' re passing a generator that results in the <nl> + / / / same sequence of floating - point values each time you run your program , <nl> + / / / that sequence may change when your program is compiled using a <nl> + / / / different version of Swift . <nl> + / / / <nl> + / / / - Parameters : <nl> + / / / - range : The range in which to create a random value . <nl> + / / / ` range ` must be finite and non - empty . <nl> + / / / - generator : The random number generator to use when creating the <nl> + / / / new random value . <nl> + / / / - Returns : A random value within the bounds of ` range ` . <nl> + @ inlinable <nl> + public static func random < T : RandomNumberGenerator > ( <nl> + in range : Range < Self > , <nl> + using generator : inout T <nl> + ) - > Self { <nl> + _precondition ( <nl> + ! range . isEmpty , <nl> + " Can ' t get random value with an empty range " <nl> + ) <nl> + let delta = range . upperBound - range . lowerBound <nl> + / / TODO : this still isn ' t quite right , because the computation of delta <nl> + / / can overflow ( e . g . if . upperBound = . maximumFiniteMagnitude and <nl> + / / . lowerBound = - . 
upperBound ) ; this should be re - written with an <nl> + / / algorithm that handles that case correctly , but this precondition <nl> + / / is an acceptable short - term fix . <nl> + _precondition ( <nl> + delta . isFinite , <nl> + " There is no uniform distribution on an infinite range " <nl> + ) <nl> + let rand : Self . RawSignificand <nl> + if Self . RawSignificand . bitWidth = = Self . significandBitCount + 1 { <nl> + rand = generator . next ( ) <nl> + } else { <nl> + let significandCount = Self . significandBitCount + 1 <nl> + let maxSignificand : Self . RawSignificand = 1 < < significandCount <nl> + / / Rather than use . next ( upperBound : ) , which has to work with arbitrary <nl> + / / upper bounds , and therefore does extra work to avoid bias , we can take <nl> + / / a shortcut because we know that maxSignificand is a power of two . <nl> + rand = generator . next ( ) & ( maxSignificand - 1 ) <nl> + } <nl> + let unitRandom = Self . init ( rand ) * ( Self . ulpOfOne / 2 ) <nl> + let randFloat = delta * unitRandom + range . lowerBound <nl> + if randFloat = = range . upperBound { <nl> + return Self . random ( in : range , using : & generator ) <nl> + } <nl> + return randFloat <nl> + } <nl> + <nl> + / / / Returns a random value within the specified range . <nl> + / / / <nl> + / / / Use this method to generate a floating - point value within a specific <nl> + / / / range . This example creates three new values in the range <nl> + / / / ` 10 . 0 . . < 20 . 0 ` . <nl> + / / / <nl> + / / / for _ in 1 . . . 3 { <nl> + / / / print ( Double . random ( in : 10 . 0 . . < 20 . 0 ) ) <nl> + / / / } <nl> + / / / / / Prints " 18 . 1900709259179 " <nl> + / / / / / Prints " 14 . 2286325689993 " <nl> + / / / / / Prints " 13 . 1485686260762 " <nl> + / / / <nl> + / / / The ` random ( ) ` static method chooses a random value from a continuous <nl> + / / / uniform distribution in ` range ` , and then converts that value to the <nl> + / / / nearest representable value in this type . Depending on the size and span <nl> + / / / of ` range ` , some concrete values may be represented more frequently than <nl> + / / / others . <nl> + / / / <nl> + / / / This method is equivalent to calling ` random ( in : using : ) ` , passing in the <nl> + / / / system ' s default random generator . <nl> + / / / <nl> + / / / - Parameter range : The range in which to create a random value . <nl> + / / / ` range ` must be finite and non - empty . <nl> + / / / - Returns : A random value within the bounds of ` range ` . <nl> + @ inlinable <nl> + public static func random ( in range : Range < Self > ) - > Self { <nl> + var g = SystemRandomNumberGenerator ( ) <nl> + return Self . random ( in : range , using : & g ) <nl> + } <nl> + <nl> + / / / Returns a random value within the specified range , using the given <nl> + / / / generator as a source for randomness . <nl> + / / / <nl> + / / / Use this method to generate a floating - point value within a specific <nl> + / / / range when you are using a custom random number generator . This example <nl> + / / / creates three new values in the range ` 10 . 0 . . . 20 . 0 ` . <nl> + / / / <nl> + / / / for _ in 1 . . . 3 { <nl> + / / / print ( Double . random ( in : 10 . 0 . . . 20 . 0 , using : & myGenerator ) ) <nl> + / / / } <nl> + / / / / / Prints " 18 . 1900709259179 " <nl> + / / / / / Prints " 14 . 2286325689993 " <nl> + / / / / / Prints " 13 . 
1485686260762 " <nl> + / / / <nl> + / / / The ` random ( in : using : ) ` static method chooses a random value from a <nl> + / / / continuous uniform distribution in ` range ` , and then converts that value <nl> + / / / to the nearest representable value in this type . Depending on the size <nl> + / / / and span of ` range ` , some concrete values may be represented more <nl> + / / / frequently than others . <nl> + / / / <nl> + / / / - Note : The algorithm used to create random values may change in a future <nl> + / / / version of Swift . If you ' re passing a generator that results in the <nl> + / / / same sequence of floating - point values each time you run your program , <nl> + / / / that sequence may change when your program is compiled using a <nl> + / / / different version of Swift . <nl> + / / / <nl> + / / / - Parameters : <nl> + / / / - range : The range in which to create a random value . Must be finite . <nl> + / / / - generator : The random number generator to use when creating the <nl> + / / / new random value . <nl> + / / / - Returns : A random value within the bounds of ` range ` . <nl> + @ inlinable <nl> + public static func random < T : RandomNumberGenerator > ( <nl> + in range : ClosedRange < Self > , <nl> + using generator : inout T <nl> + ) - > Self { <nl> + _precondition ( <nl> + ! range . isEmpty , <nl> + " Can ' t get random value with an empty range " <nl> + ) <nl> + let delta = range . upperBound - range . lowerBound <nl> + / / TODO : this still isn ' t quite right , because the computation of delta <nl> + / / can overflow ( e . g . if . upperBound = . maximumFiniteMagnitude and <nl> + / / . lowerBound = - . upperBound ) ; this should be re - written with an <nl> + / / algorithm that handles that case correctly , but this precondition <nl> + / / is an acceptable short - term fix . <nl> + _precondition ( <nl> + delta . isFinite , <nl> + " There is no uniform distribution on an infinite range " <nl> + ) <nl> + let rand : Self . RawSignificand <nl> + if Self . RawSignificand . bitWidth = = Self . significandBitCount + 1 { <nl> + rand = generator . next ( ) <nl> + let tmp : UInt8 = generator . next ( ) & 1 <nl> + if rand = = Self . RawSignificand . max & & tmp = = 1 { <nl> + return range . upperBound <nl> + } <nl> + } else { <nl> + let significandCount = Self . significandBitCount + 1 <nl> + let maxSignificand : Self . RawSignificand = 1 < < significandCount <nl> + rand = generator . next ( upperBound : maxSignificand + 1 ) <nl> + if rand = = maxSignificand { <nl> + return range . upperBound <nl> + } <nl> + } <nl> + let unitRandom = Self . init ( rand ) * ( Self . ulpOfOne / 2 ) <nl> + let randFloat = delta * unitRandom + range . lowerBound <nl> + return randFloat <nl> + } <nl> + <nl> + / / / Returns a random value within the specified range . <nl> + / / / <nl> + / / / Use this method to generate a floating - point value within a specific <nl> + / / / range . This example creates three new values in the range <nl> + / / / ` 10 . 0 . . . 20 . 0 ` . <nl> + / / / <nl> + / / / for _ in 1 . . . 3 { <nl> + / / / print ( Double . random ( in : 10 . 0 . . . 20 . 0 ) ) <nl> + / / / } <nl> + / / / / / Prints " 18 . 1900709259179 " <nl> + / / / / / Prints " 14 . 2286325689993 " <nl> + / / / / / Prints " 13 . 1485686260762 " <nl> + / / / <nl> + / / / The ` random ( ) ` static method chooses a random value from a continuous <nl> + / / / uniform distribution in ` range ` , and then converts that value to the <nl> + / / / nearest representable value in this type . 
Depending on the size and span <nl> + / / / of ` range ` , some concrete values may be represented more frequently than <nl> + / / / others . <nl> + / / / <nl> + / / / This method is equivalent to calling ` random ( in : using : ) ` , passing in the <nl> + / / / system ' s default random generator . <nl> + / / / <nl> + / / / - Parameter range : The range in which to create a random value . Must be finite . <nl> + / / / - Returns : A random value within the bounds of ` range ` . <nl> + @ inlinable <nl> + public static func random ( in range : ClosedRange < Self > ) - > Self { <nl> + var g = SystemRandomNumberGenerator ( ) <nl> + return Self . random ( in : range , using : & g ) <nl> + } <nl> + } <nl> mmm a / stdlib / public / core / GroupInfo . json <nl> ppp b / stdlib / public / core / GroupInfo . json <nl> <nl> " Floating " : [ <nl> " FloatingPoint . swift " , <nl> " FloatingPointParsing . swift " , <nl> - " FloatingPointTypes . swift " ] , <nl> + " FloatingPointTypes . swift " , <nl> + " FloatingPointRandom . swift " ] , <nl> " Vector " : [ <nl> " SIMDVector . swift " , <nl> " SIMDVectorTypes . swift " ] } <nl>
[ stdlib ] [ NFC ] FloatingPointRandom . swift created ( )
apple/swift
a62bf48e68956adc9b423fc03c6faac283512d36
2020-08-18T22:12:35Z
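
The half-open-range algorithm moved into FloatingPointRandom.swift draws significandBitCount + 1 random bits, scales them by ulpOfOne / 2 to get a value in [0, 1), maps that into the requested range, and rejects the rare result that rounds up to the upper bound. A C++ sketch of the same idea for double (53 significand bits); this illustrates the algorithm and is not Swift's implementation:

#include <cstdint>
#include <iostream>
#include <random>

// Uniform double in [lower, upper): take 53 random bits, scale them into
// [0, 1) by multiplying with 2^-53 (half of ulpOfOne for double), then map
// into the requested range. Re-draw if rounding lands on the upper bound.
double random_in_range(double lower, double upper, std::mt19937_64& gen) {
    const double delta = upper - lower;               // assumed finite
    for (;;) {
        const std::uint64_t bits = gen() >> 11;       // keep the top 53 bits
        const double unit = static_cast<double>(bits) * 0x1.0p-53;  // [0, 1)
        const double result = delta * unit + lower;
        if (result != upper)                          // reject rare round-up
            return result;
    }
}

int main() {
    std::mt19937_64 gen(42);
    for (int i = 0; i < 3; ++i)
        std::cout << random_in_range(10.0, 20.0, gen) << "\n";
    return 0;
}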
mmm a / hphp / hack / src / typing / typing_unify . ml <nl> ppp b / hphp / hack / src / typing / typing_unify . ml <nl> module TURecursive = Typing_unify_recursive <nl> ( * Most code - - notably the cases in unify_ - - do * not * need to thread through <nl> * unwrappedToptionX , since for example just because we know an array < foo , bar > <nl> * can ' t itself be null , that doesn ' t mean that foo and bar can ' t be null . <nl> + * <nl> + * If follow_bounds = false , only match generic parameters with themselves . <nl> + * If follow_bounds = true , look in lower and upper bounds of generic parameters , <nl> + * for example , to unify T and t if there are bounds T as t and T super t . <nl> * ) <nl> - let rec unify env ty1 ty2 = <nl> - unify_unwrapped env ~ unwrappedToption1 : false ty1 ~ unwrappedToption2 : false ty2 <nl> + let rec unify ? follow_bounds : ( follow_bounds = true ) env ty1 ty2 = <nl> + unify_unwrapped ~ follow_bounds env <nl> + ~ unwrappedToption1 : false ty1 ~ unwrappedToption2 : false ty2 <nl> <nl> ( * If result is ( env ' , ty ) then env ' extends env , <nl> * and ty1 < : ty and ty2 < : ty under env ' <nl> * <nl> * If unwrappedToptionX = true then elide Toption before recursing . <nl> * ) <nl> - and unify_unwrapped env ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 = <nl> + and unify_unwrapped ? follow_bounds : ( follow_bounds = true ) env <nl> + ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 = <nl> if ty1 = = ty2 then env , ty1 else <nl> match ty1 , ty2 with <nl> | ( _ , Tany ) , ty | ty , ( _ , Tany ) - > env , ty <nl> and unify_unwrapped env ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 = <nl> let n ' = Env . fresh ( ) in <nl> let env = Env . rename env n1 n ' in <nl> let env = Env . rename env n2 n ' in <nl> - let env , ty = <nl> - unify_unwrapped env unwrappedToption1 ty1 unwrappedToption2 ty2 in <nl> + let env , ty = unify_unwrapped ~ follow_bounds env <nl> + ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 in <nl> let env = TURecursive . add env n ' ty in <nl> env , ( r , Tvar n ' ) <nl> | ( r , Tvar n ) , ty2 <nl> and unify_unwrapped env ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 = <nl> let env , ty1 = Env . get_type env r n in <nl> let n ' = Env . fresh ( ) in <nl> let env = Env . rename env n n ' in <nl> - let env , ty = <nl> - unify_unwrapped env unwrappedToption1 ty1 unwrappedToption2 ty2 in <nl> + let env , ty = unify_unwrapped ~ follow_bounds env <nl> + ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 in <nl> let env = TURecursive . add env n ty in <nl> env , ( r , Tvar n ' ) <nl> | ( r1 , Tunresolved tyl1 ) , ( r2 , Tunresolved tyl2 ) - > <nl> and unify_unwrapped env ~ unwrappedToption1 ty1 ~ unwrappedToption2 ty2 = <nl> unify env ty1 ( r2 , Tmixed ) <nl> | ( r1 , ty1 ) , ( r2 , ty2 ) - > <nl> let r = unify_reason r1 r2 in <nl> - let env , ty = unify_ env r1 ty1 r2 ty2 in <nl> + let env , ty = unify_ ~ follow_bounds env r1 ty1 r2 ty2 in <nl> env , ( r , ty ) <nl> <nl> - and unify_ env r1 ty1 r2 ty2 = <nl> + and unify_ ? follow_bounds : ( follow_bounds = true ) env r1 ty1 r2 ty2 = <nl> match ty1 , ty2 with <nl> | Tprim x , Tprim y - > <nl> if x = = y then env , Tprim x <nl> and unify_ env r1 ty1 r2 ty2 = <nl> let env , ty2 = unify env ty2 ty4 in <nl> env , Tarraykind ( AKmap ( ty1 , ty2 ) ) <nl> | Tarraykind ( AKvec _ | AKmap _ ) , Tarraykind ( AKshape _ | AKtuple _ ) - > <nl> - unify_ env r2 ty2 r1 ty1 <nl> + unify_ ~ follow_bounds env r2 ty2 r1 ty1 <nl> | Tarraykind AKshape fdm1 , Tarraykind ( AKvec _ | AKmap _ ) - > <nl> Typing_arrays . 
fold_akshape_as_akmap_with_acc begin fun env ty2 ( r1 , ty1 ) - > <nl> unify_ env r1 ty1 r2 ty2 <nl> and unify_ env r1 ty1 r2 ty2 = <nl> | None - > env , None <nl> | Some ( env , cstr ) - > env , Some cstr in <nl> env , Tabstract ( ak1 , tcstr ) <nl> + <nl> | Tabstract ( AKdependent ( expr_dep , _ ) , <nl> Some ( _ , Tclass ( ( _ , x ) as id , _ ) as ty ) ) , _ - > <nl> let class_ = Env . get_class env x in <nl> and unify_ env r1 ty1 r2 ty2 = <nl> env , Tclass ( ( post , class_name ) , tylist ) <nl> | ( Tclass ( ( post , class_name ) , tylist ) , Tabstract ( AKenum enum_name , _ ) ) <nl> when String . compare enum_name class_name = 0 - > <nl> - env , Tclass ( ( post , class_name ) , tylist ) <nl> + env , Tclass ( ( post , class_name ) , tylist ) <nl> + <nl> + ( * If we are trying to unify a type parameter T with another type t it ' s <nl> + possible that we can get there through subtyping in both directions . <nl> + For example we might have T as C , T super C , and we ' re asked <nl> + to unify T with C . This should succeed . We don ' t apply this <nl> + transitively , but assume that the type parameter environment is <nl> + already closed under transitivity . This is ensured by <nl> + Typing_subtype . add_constraint . * ) <nl> + <nl> + | Tabstract ( AKgeneric x , _ ) , _ <nl> + when generic_param_matches ~ follow_bounds env x ( r2 , ty2 ) - > <nl> + env , ty2 <nl> + <nl> + | _ , Tabstract ( AKgeneric x , _ ) <nl> + when generic_param_matches ~ follow_bounds env x ( r1 , ty1 ) - > <nl> + env , ty1 <nl> + <nl> | ( Tany | Tmixed | Tarraykind _ | Tprim _ | Toption _ <nl> | Tvar _ | Tabstract ( _ , _ ) | Tclass ( _ , _ ) | Ttuple _ | Tanon ( _ , _ ) <nl> | Tfun _ | Tunresolved _ | Tobject | Tshape _ ) , _ - > <nl> and unify_ env r1 ty1 r2 ty2 = <nl> TUtils . simplified_uerror env ( r1 , ty1 ) ( r2 , ty2 ) ; <nl> env , Tany <nl> <nl> + ( * Use unify to check if two types are the same . We use this in <nl> + * generic_param_matches below , but we set follow_bounds = false so that we <nl> + * don ' t end up recursing back through generic_param_matches from unify * ) <nl> + and is_same_type env ty_sub ty_super = <nl> + Errors . try_ <nl> + ( fun ( ) - > ignore ( unify ~ follow_bounds : false env ty_sub ty_super ) ; true ) <nl> + ( fun _ - > false ) <nl> + <nl> + ( * This deals with the situation where we have an implied equality between <nl> + * type parameters in the type parameter environment . We ' re trying to unify <nl> + * x ( a type parameter ) with ty , and checking to see if we have ty as both <nl> + * an upper and lower bound of x in the environment . We don ' t need to look <nl> + * any further ( e . g . consider a cycle T1 as T2 as T3 as T1 ) because <nl> + * Typing_subtype . add_constraint already computes transitive closure . <nl> + * ) <nl> + and generic_param_matches ~ follow_bounds env x ty = <nl> + let lower = Env . get_lower_bounds env x in <nl> + let upper = Env . get_upper_bounds env x in <nl> + let mem_bounds = List . exists ~ f : ( fun ty ' - > is_same_type env ty ty ' ) in <nl> + follow_bounds & & mem_bounds lower & & mem_bounds upper <nl> + <nl> and unify_arities ~ ellipsis_is_variadic anon_arity func_arity : bool = <nl> match anon_arity , func_arity with <nl> | Fellipsis a_min , Fvariadic ( f_min , _ ) when ellipsis_is_variadic - > <nl> new file mode 100644 <nl> index 00000000000 . . f811aa389e8 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / constraints_transitive . php <nl> <nl> + < ? 
hh / / strict <nl> + / / Copyright 2004 - present Facebook . All Rights Reserved . <nl> + <nl> + class MyList < T > { } <nl> + class Two < T1 , T2 > { } <nl> + <nl> + / * We need to use SubType . add_constraint not Env . add_constraint to make this <nl> + work <nl> + function foo < T1 as T2 , T2 as T3 , T3 as T1 > ( MyList < T1 > $ x ) : MyList < T3 > { <nl> + hh_show_env ( ) ; <nl> + return $ x ; <nl> + } <nl> + * / <nl> + <nl> + class Boo { <nl> + public static function bar < T1 , T2 , T3 > ( MyList < T1 > $ x ) : MyList < T3 > <nl> + where T1 as T2 , T2 as T3 , T3 as T1 { <nl> + / / hh_show_env ( ) ; <nl> + return $ x ; <nl> + } <nl> + } <nl> + <nl> + class Bee { <nl> + public static function hey < T1 , T2 , T3 , Ta , Tb > ( MyList < Two < T3 , T1 > > $ x ) : MyList < Tb > <nl> + where T1 as T3 , T3 as T1 , Two < T1 , T1 > as Tb , Tb as Two < T3 , T1 > { <nl> + / / hh_show_env ( ) ; <nl> + return $ x ; <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / constraints_transitive . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl> new file mode 100644 <nl> index 00000000000 . . e69de29bb2d <nl> new file mode 100644 <nl> index 00000000000 . . 497c6624107 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / eq_constraint . php <nl> <nl> + < ? hh / / strict <nl> + / / Copyright 2004 - present Facebook . All Rights Reserved . <nl> + <nl> + class Inv < Ti > { } <nl> + class B { } <nl> + class C < T > { <nl> + public function Foo1 < Tu as T super T > ( Inv < Tu > $ x ) : Inv < T > { <nl> + return $ x ; <nl> + } <nl> + public function Foo2 < Tu as T super T > ( Inv < T > $ x ) : Inv < Tu > { <nl> + return $ x ; <nl> + } <nl> + public function Boo1 < Tu as B super B > ( Inv < Tu > $ x ) : Inv < B > { <nl> + return $ x ; <nl> + } <nl> + public function Boo2 < Tu as B super B > ( Inv < B > $ x ) : Inv < Tu > { <nl> + return $ x ; <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / eq_constraint . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl> new file mode 100644 <nl> index 00000000000 . . 97c3fc47dd9 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / instanceof_invariant . php <nl> <nl> + < ? hh / / strict <nl> + / / Copyright 2004 - present Facebook . All Rights Reserved . <nl> + <nl> + class GeneralWidget < T > { } <nl> + class Widget < T > extends GeneralWidget < T > { } <nl> + function expect_int_widget ( Widget < int > $ w ) : void { } <nl> + function not_broken ( GeneralWidget < int > $ m ) : void { <nl> + if ( $ m instanceof Widget ) { <nl> + / / hh_show_env ( ) ; <nl> + expect_int_widget ( $ m ) ; <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / constraints / instanceof_invariant . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl>
Extend unification to respect implied equality constraints
facebook/hhvm
259c5f11707ed0cd44371f8c8ecd9f0ef061a79f
2016-12-01T23:43:34Z
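Editor's note — the record above describes extending unification so that a generic parameter `T` unifies with a type `t` when `t` appears among both the lower and upper bounds of `T`, relying on the bound environment already being transitively closed. The following is a minimal, hypothetical C++ sketch of that membership check only; the type names (`TypeEnv`, plain strings for types) are invented for illustration and the real implementation is the OCaml `generic_param_matches` shown in the diff.

```cpp
#include <algorithm>
#include <map>
#include <string>
#include <vector>

// Invented, simplified model of the check: a generic parameter matches a type
// when that type occurs in BOTH its lower and upper bounds. No transitive
// search is performed here, assuming (as the commit states) the environment is
// already closed under transitivity when constraints are added.
struct TypeEnv {
  std::map<std::string, std::vector<std::string>> lower_bounds;
  std::map<std::string, std::vector<std::string>> upper_bounds;
};

static bool contains(const std::vector<std::string>& bounds, const std::string& ty) {
  return std::find(bounds.begin(), bounds.end(), ty) != bounds.end();
}

bool generic_param_matches(const TypeEnv& env, const std::string& param,
                           const std::string& ty) {
  auto lo = env.lower_bounds.find(param);
  auto hi = env.upper_bounds.find(param);
  if (lo == env.lower_bounds.end() || hi == env.upper_bounds.end()) return false;
  return contains(lo->second, ty) && contains(hi->second, ty);
}
```

For example, with `C` recorded as both an upper bound (`T as C`) and a lower bound (`T super C`) of `T`, `generic_param_matches(env, "T", "C")` returns true, which is the case the commit allows unification to succeed on.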
mmm a / imgui . h <nl> ppp b / imgui . h <nl> <nl> - / / dear imgui , v1 . 60 WIP <nl> + / / dear imgui , v1 . 60 <nl> / / ( headers ) <nl> <nl> / / See imgui . cpp file for documentation . <nl> <nl> # include < stddef . h > / / ptrdiff_t , NULL <nl> # include < string . h > / / memset , memmove , memcpy , strlen , strchr , strcpy , strcmp <nl> <nl> - # define IMGUI_VERSION " 1 . 60 WIP " <nl> + # define IMGUI_VERSION " 1 . 60 " <nl> <nl> / / Define attributes of all API symbols declarations , e . g . for DLL under Windows . <nl> # ifndef IMGUI_API <nl>
Version 1 . 60 ( missed the string ) . Will retag .
ocornut/imgui
dd079fe6e6aec0c7a71d8a441079b8de23f4ca56
2018-04-09T11:52:24Z
mmm a / src / buffer_cache / mock . cc <nl> ppp b / src / buffer_cache / mock . cc <nl> void mock_buf_lock_t : : release ( ) { <nl> internal_buf - > lock . unlock ( ) ; <nl> if ( deleted ) internal_buf - > destroy ( ) ; <nl> acquired = false ; <nl> - delete this ; <nl> } <nl> <nl> void mock_buf_lock_t : : release_if_acquired ( ) { <nl> mock_buf_lock_t : : mock_buf_lock_t ( mock_transaction_t * txn , block_id_t block_id , a <nl> } <nl> <nl> mock_buf_lock_t : : ~ mock_buf_lock_t ( ) { <nl> + release_if_acquired ( ) ; <nl> } <nl> <nl> mock_buf_lock_t : : mock_buf_lock_t ( mock_transaction_t * txn ) : <nl> internal_buf ( NULL ) , <nl> access ( txn - > access ) , <nl> dirty ( false ) , <nl> - deleted ( false ) <nl> + deleted ( false ) , <nl> + acquired ( true ) <nl> { <nl> rassert ( access = = rwi_write ) ; <nl> <nl>
Fixed some very broken stuff about mock_buf_t .
rethinkdb/rethinkdb
d9607a12c16fb32aed55b3b523a48dd086763a4f
2012-04-11T23:53:06Z
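Editor's note — the fix above removes `delete this` from `release()` and makes the destructor call `release_if_acquired()`. A minimal, hypothetical C++ sketch of that RAII shape (not rethinkdb's actual types) is shown below: manual release only unlocks and clears a flag, and the destructor releases whatever the caller did not.

```cpp
#include <mutex>

// Illustrative stand-in for the lock-guard pattern the commit moves toward.
class scoped_buf_lock {
 public:
  explicit scoped_buf_lock(std::mutex& m) : mutex_(m), acquired_(true) {
    mutex_.lock();
  }
  scoped_buf_lock(const scoped_buf_lock&) = delete;
  scoped_buf_lock& operator=(const scoped_buf_lock&) = delete;

  // Idempotent manual release; crucially, no `delete this`.
  void release() {
    if (acquired_) {
      mutex_.unlock();
      acquired_ = false;
    }
  }

  // Destructor covers the "release_if_acquired" case automatically.
  ~scoped_buf_lock() { release(); }

 private:
  std::mutex& mutex_;
  bool acquired_;
};
```

The design point mirrored here is that ownership of the lock's lifetime stays with the stack object, so callers may release early if they want to, but forgetting to do so is no longer a leak or a double-delete.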
mmm a / include / swift / SIL / SILConstants . h <nl> ppp b / include / swift / SIL / SILConstants . h <nl> class SILValue ; <nl> class SILBuilder ; <nl> class SerializedSILLoader ; <nl> <nl> - struct APIntSymbolicValue ; <nl> - struct ArraySymbolicValue ; <nl> + struct SymbolicArrayStorage ; <nl> struct DerivedAddressValue ; <nl> struct EnumWithPayloadSymbolicValue ; <nl> struct SymbolicValueMemoryObject ; <nl> class SymbolicValue { <nl> <nl> / / / This represents an index * into * a memory object . <nl> RK_DerivedAddress , <nl> + <nl> + / / / This represents the internal storage of an array . <nl> + RK_ArrayStorage , <nl> + <nl> + / / / This represents an array . <nl> + RK_Array , <nl> } ; <nl> <nl> union { <nl> class SymbolicValue { <nl> / / / When this SymbolicValue is of " DerivedAddress " kind , this pointer stores <nl> / / / information about the memory object and access path of the access . <nl> DerivedAddressValue * derivedAddress ; <nl> + <nl> + / / The following fields are for representing an Array . <nl> + / / <nl> + / / In Swift , an array is a non - trivial struct that stores a reference to an <nl> + / / internal storage : _ContiguousArrayStorage . Though arrays have value <nl> + / / semantics in Swift , it is not the case in SIL . In SIL , an array can be <nl> + / / mutated by taking the address of the internal storage i . e . , through a <nl> + / / shared , mutable pointer to the internal storage of the array . In fact , <nl> + / / this is how an array initialization is lowered in SIL . Therefore , the <nl> + / / symbolic representation of an array is an addressable " memory cell " <nl> + / / ( i . e . , a SymbolicValueMemoryObject ) containing the array storage . The <nl> + / / array storage is modeled by the type : SymbolicArrayStorage . This <nl> + / / representation of the array enables obtaining the address of the internal <nl> + / / storage and modifying the array through that address . Array operations <nl> + / / such as ` append ` that mutate an array must clone the internal storage of <nl> + / / the array , following the semantics of the Swift implementation of those <nl> + / / operations . <nl> + <nl> + / / / Representation of array storage ( RK_ArrayStorage ) . SymbolicArrayStorage <nl> + / / / is a container for a sequence of symbolic values . <nl> + SymbolicArrayStorage * arrayStorage ; <nl> + <nl> + / / / When this symbolic value is of an " Array " kind , this stores a memory <nl> + / / / object that contains a SymbolicArrayStorage value . <nl> + SymbolicValueMemoryObject * array ; <nl> } value ; <nl> <nl> RepresentationKind representationKind : 8 ; <nl> class SymbolicValue { <nl> / / / This value represents the address of , or into , a memory object . <nl> Address , <nl> <nl> + / / / This represents an internal array storage . <nl> + ArrayStorage , <nl> + <nl> + / / / This represents an array value . <nl> + Array , <nl> + <nl> / / / These values are generally only seen internally to the system , external <nl> / / / clients shouldn ' t have to deal with them . <nl> UninitMemory <nl> class SymbolicValue { <nl> / / / Return just the memory object for an address value . <nl> SymbolicValueMemoryObject * getAddressValueMemoryObject ( ) const ; <nl> <nl> + / / / Create a symbolic array storage containing \ c elements . 
<nl> + static SymbolicValue <nl> + getSymbolicArrayStorage ( ArrayRef < SymbolicValue > elements , CanType elementType , <nl> + SymbolicValueAllocator & allocator ) ; <nl> + <nl> + / / / Create a symbolic array using the given symbolic array storage , which <nl> + / / / contains the array elements . <nl> + static SymbolicValue getArray ( Type arrayType , SymbolicValue arrayStorage , <nl> + SymbolicValueAllocator & allocator ) ; <nl> + <nl> + / / / Return the elements stored in this SymbolicValue of " ArrayStorage " kind . <nl> + ArrayRef < SymbolicValue > getStoredElements ( CanType & elementType ) const ; <nl> + <nl> + / / / Return the symbolic value representing the internal storage of this array . <nl> + SymbolicValue getStorageOfArray ( ) const ; <nl> + <nl> + / / / Return the symbolic value representing the address of the element of this <nl> + / / / array at the given \ c index . The return value is a derived address whose <nl> + / / / base is the memory object \ c value . array ( which contains the array <nl> + / / / storage ) and whose accesspath is \ c index . <nl> + SymbolicValue getAddressOfArrayElement ( SymbolicValueAllocator & allocator , <nl> + unsigned index ) const ; <nl> + <nl> + / / / Return the type of this array symbolic value . <nl> + Type getArrayType ( ) const ; <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - = = = / / <nl> / / Helpers <nl> <nl> struct SymbolicValueMemoryObject { <nl> SymbolicValueMemoryObject ( const SymbolicValueMemoryObject & ) = delete ; <nl> void operator = ( const SymbolicValueMemoryObject & ) = delete ; <nl> } ; <nl> - <nl> } / / end namespace swift <nl> <nl> # endif <nl> mmm a / lib / SIL / SILConstants . cpp <nl> ppp b / lib / SIL / SILConstants . cpp <nl> void SymbolicValue : : print ( llvm : : raw_ostream & os , unsigned indent ) const { <nl> case RK_DerivedAddress : { <nl> SmallVector < unsigned , 4 > accessPath ; <nl> SymbolicValueMemoryObject * memObject = getAddressValue ( accessPath ) ; <nl> - os < < " Address [ " < < memObject - > getType ( ) < < " ] " ; <nl> + os < < " address [ " < < memObject - > getType ( ) < < " ] " ; <nl> interleave ( accessPath . begin ( ) , accessPath . end ( ) , <nl> [ & ] ( unsigned idx ) { os < < idx ; } , [ & ] ( ) { os < < " , " ; } ) ; <nl> os < < " \ n " ; <nl> break ; <nl> } <nl> + case RK_ArrayStorage : { <nl> + CanType elementType ; <nl> + ArrayRef < SymbolicValue > elements = getStoredElements ( elementType ) ; <nl> + os < < " elements type : " < < elementType < < " size : " < < elements . size ( ) ; <nl> + switch ( elements . size ( ) ) { <nl> + case 0 : <nl> + os < < " contents [ ] \ n " ; <nl> + return ; <nl> + default : <nl> + os < < " contents [ \ n " ; <nl> + for ( auto elt : elements ) <nl> + elt . print ( os , indent + 2 ) ; <nl> + os . indent ( indent ) < < " ] \ n " ; <nl> + return ; <nl> + } <nl> + } <nl> + case RK_Array : { <nl> + os < < getArrayType ( ) < < " : \ n " ; <nl> + getStorageOfArray ( ) . 
print ( os , indent ) ; <nl> + } <nl> } <nl> } <nl> <nl> SymbolicValue : : Kind SymbolicValue : : getKind ( ) const { <nl> case RK_DirectAddress : <nl> case RK_DerivedAddress : <nl> return Address ; <nl> + case RK_ArrayStorage : <nl> + return ArrayStorage ; <nl> + case RK_Array : <nl> + return Array ; <nl> } <nl> llvm_unreachable ( " covered switch " ) ; <nl> } <nl> <nl> - / / / Clone this SymbolicValue into the specified ASTContext and return the new <nl> + / / / Clone this SymbolicValue into the specified allocator and return the new <nl> / / / version . This only works for valid constants . <nl> SymbolicValue <nl> SymbolicValue : : cloneInto ( SymbolicValueAllocator & allocator ) const { <nl> SymbolicValue : : cloneInto ( SymbolicValueAllocator & allocator ) const { <nl> results . push_back ( elt . cloneInto ( allocator ) ) ; <nl> return getAggregate ( results , allocator ) ; <nl> } <nl> - case RK_EnumWithPayload : <nl> - return getEnumWithPayload ( getEnumValue ( ) , getEnumPayloadValue ( ) , allocator ) ; <nl> + case RK_EnumWithPayload : { <nl> + return getEnumWithPayload ( <nl> + getEnumValue ( ) , getEnumPayloadValue ( ) . cloneInto ( allocator ) , allocator ) ; <nl> + } <nl> case RK_DirectAddress : <nl> case RK_DerivedAddress : { <nl> SmallVector < unsigned , 4 > accessPath ; <nl> auto * memObject = getAddressValue ( accessPath ) ; <nl> auto * newMemObject = SymbolicValueMemoryObject : : create ( <nl> - memObject - > getType ( ) , memObject - > getValue ( ) , allocator ) ; <nl> + memObject - > getType ( ) , memObject - > getValue ( ) . cloneInto ( allocator ) , <nl> + allocator ) ; <nl> return getAddress ( newMemObject , accessPath , allocator ) ; <nl> } <nl> + case RK_ArrayStorage : { <nl> + CanType elementType ; <nl> + ArrayRef < SymbolicValue > oldElements = getStoredElements ( elementType ) ; <nl> + SmallVector < SymbolicValue , 4 > clonedElements ; <nl> + clonedElements . reserve ( oldElements . size ( ) ) ; <nl> + for ( auto elem : oldElements ) <nl> + clonedElements . push_back ( elem . cloneInto ( allocator ) ) ; <nl> + return getSymbolicArrayStorage ( clonedElements , elementType , allocator ) ; <nl> + } <nl> + case RK_Array : { <nl> + SymbolicValue clonedStorage = getStorageOfArray ( ) . cloneInto ( allocator ) ; <nl> + return getArray ( getArrayType ( ) , clonedStorage , allocator ) ; <nl> + } <nl> } <nl> llvm_unreachable ( " covered switch " ) ; <nl> } <nl> SymbolicValueMemoryObject * SymbolicValue : : getAddressValueMemoryObject ( ) const { <nl> return value . derivedAddress - > memoryObject ; <nl> } <nl> <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / Arrays <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + namespace swift { <nl> + <nl> + / / / Representation of the internal storage of an array . This is a container for <nl> + / / / a sequence of symbolic values corresponding to the elements of an array . 
<nl> + struct SymbolicArrayStorage final <nl> + : private llvm : : TrailingObjects < SymbolicArrayStorage , SymbolicValue > { <nl> + friend class llvm : : TrailingObjects < SymbolicArrayStorage , SymbolicValue > ; <nl> + <nl> + const CanType elementType ; <nl> + <nl> + const unsigned numElements ; <nl> + <nl> + static SymbolicArrayStorage * create ( ArrayRef < SymbolicValue > elements , <nl> + CanType elementType , <nl> + SymbolicValueAllocator & allocator ) { <nl> + auto byteSize = <nl> + SymbolicArrayStorage : : totalSizeToAlloc < SymbolicValue > ( elements . size ( ) ) ; <nl> + auto rawMem = allocator . allocate ( byteSize , alignof ( SymbolicArrayStorage ) ) ; <nl> + <nl> + / / Placement initialize the object . <nl> + auto * storage = <nl> + : : new ( rawMem ) SymbolicArrayStorage ( elementType , elements . size ( ) ) ; <nl> + std : : uninitialized_copy ( elements . begin ( ) , elements . end ( ) , <nl> + storage - > getTrailingObjects < SymbolicValue > ( ) ) ; <nl> + return storage ; <nl> + } <nl> + <nl> + / / / Return the stored elements . <nl> + ArrayRef < SymbolicValue > getElements ( ) const { <nl> + return { getTrailingObjects < SymbolicValue > ( ) , numElements } ; <nl> + } <nl> + <nl> + / / This is used by the llvm : : TrailingObjects base class . <nl> + size_t numTrailingObjects ( OverloadToken < SymbolicValue > ) const { <nl> + return numElements ; <nl> + } <nl> + <nl> + private : <nl> + SymbolicArrayStorage ( ) = delete ; <nl> + SymbolicArrayStorage ( const SymbolicArrayStorage & ) = delete ; <nl> + SymbolicArrayStorage ( CanType elementType , unsigned numElements ) <nl> + : elementType ( elementType ) , numElements ( numElements ) { } <nl> + } ; <nl> + } / / namespace swift <nl> + / / end namespace swift <nl> + <nl> + SymbolicValue <nl> + SymbolicValue : : getSymbolicArrayStorage ( ArrayRef < SymbolicValue > elements , <nl> + CanType elementType , <nl> + SymbolicValueAllocator & allocator ) { <nl> + / / TODO : Could compress the empty array representation if there were a reason <nl> + / / to . <nl> + auto * arrayStorage = <nl> + SymbolicArrayStorage : : create ( elements , elementType , allocator ) ; <nl> + SymbolicValue result ; <nl> + result . representationKind = RK_ArrayStorage ; <nl> + result . value . arrayStorage = arrayStorage ; <nl> + return result ; <nl> + } <nl> + <nl> + ArrayRef < SymbolicValue > <nl> + SymbolicValue : : getStoredElements ( CanType & elementType ) const { <nl> + assert ( getKind ( ) = = ArrayStorage ) ; <nl> + elementType = value . arrayStorage - > elementType ; <nl> + return value . arrayStorage - > getElements ( ) ; <nl> + } <nl> + <nl> + SymbolicValue SymbolicValue : : getArray ( Type arrayType , <nl> + SymbolicValue arrayStorage , <nl> + SymbolicValueAllocator & allocator ) { <nl> + assert ( arrayStorage . getKind ( ) = = ArrayStorage ) ; <nl> + SymbolicValue result ; <nl> + result . representationKind = RK_Array ; <nl> + result . value . array = <nl> + SymbolicValueMemoryObject : : create ( arrayType , arrayStorage , allocator ) ; <nl> + return result ; <nl> + } <nl> + <nl> + SymbolicValue <nl> + SymbolicValue : : getAddressOfArrayElement ( SymbolicValueAllocator & allocator , <nl> + unsigned index ) const { <nl> + assert ( getKind ( ) = = Array ) ; <nl> + return SymbolicValue : : getAddress ( value . array , { index } , allocator ) ; <nl> + } <nl> + <nl> + SymbolicValue SymbolicValue : : getStorageOfArray ( ) const { <nl> + assert ( getKind ( ) = = Array ) ; <nl> + return value . 
array - > getValue ( ) ; <nl> + } <nl> + <nl> + Type SymbolicValue : : getArrayType ( ) const { <nl> + assert ( getKind ( ) = = Array ) ; <nl> + return value . array - > getType ( ) ; <nl> + } <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / Higher level code <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> static SymbolicValue getIndexedElement ( SymbolicValue aggregate , <nl> if ( aggregate . getKind ( ) = = SymbolicValue : : UninitMemory ) <nl> return SymbolicValue : : getUninitMemory ( ) ; <nl> <nl> - assert ( aggregate . getKind ( ) = = SymbolicValue : : Aggregate & & <nl> + assert ( ( aggregate . getKind ( ) = = SymbolicValue : : Aggregate | | <nl> + aggregate . getKind ( ) = = SymbolicValue : : ArrayStorage ) & & <nl> " the accessPath is invalid for this type " ) ; <nl> <nl> unsigned elementNo = accessPath . front ( ) ; <nl> <nl> - SymbolicValue elt = aggregate . getAggregateValue ( ) [ elementNo ] ; <nl> + SymbolicValue elt ; <nl> Type eltType ; <nl> - if ( auto * decl = type - > getStructOrBoundGenericStruct ( ) ) { <nl> - eltType = decl - > getStoredProperties ( ) [ elementNo ] - > getType ( ) ; <nl> - } else if ( auto tuple = type - > getAs < TupleType > ( ) ) { <nl> - assert ( elementNo < tuple - > getNumElements ( ) & & " invalid index " ) ; <nl> - eltType = tuple - > getElement ( elementNo ) . getType ( ) ; <nl> + <nl> + if ( aggregate . getKind ( ) = = SymbolicValue : : ArrayStorage ) { <nl> + CanType arrayEltTy ; <nl> + elt = aggregate . getStoredElements ( arrayEltTy ) [ elementNo ] ; <nl> + eltType = arrayEltTy ; <nl> } else { <nl> - llvm_unreachable ( " the accessPath is invalid for this type " ) ; <nl> + elt = aggregate . getAggregateValue ( ) [ elementNo ] ; <nl> + if ( auto * decl = type - > getStructOrBoundGenericStruct ( ) ) { <nl> + eltType = decl - > getStoredProperties ( ) [ elementNo ] - > getType ( ) ; <nl> + } else if ( auto tuple = type - > getAs < TupleType > ( ) ) { <nl> + assert ( elementNo < tuple - > getNumElements ( ) & & " invalid index " ) ; <nl> + eltType = tuple - > getElement ( elementNo ) . getType ( ) ; <nl> + } else { <nl> + llvm_unreachable ( " the accessPath is invalid for this type " ) ; <nl> + } <nl> } <nl> <nl> return getIndexedElement ( elt , accessPath . drop_front ( ) , eltType ) ; <nl> static SymbolicValue setIndexedElement ( SymbolicValue aggregate , <nl> aggregate = SymbolicValue : : getAggregate ( newElts , allocator ) ; <nl> } <nl> <nl> - assert ( aggregate . getKind ( ) = = SymbolicValue : : Aggregate & & <nl> + assert ( ( aggregate . getKind ( ) = = SymbolicValue : : Aggregate | | <nl> + aggregate . getKind ( ) = = SymbolicValue : : ArrayStorage ) & & <nl> " the accessPath is invalid for this type " ) ; <nl> <nl> unsigned elementNo = accessPath . front ( ) ; <nl> <nl> - ArrayRef < SymbolicValue > oldElts = aggregate . getAggregateValue ( ) ; <nl> + ArrayRef < SymbolicValue > oldElts ; <nl> Type eltType ; <nl> - if ( auto * decl = type - > getStructOrBoundGenericStruct ( ) ) { <nl> - eltType = decl - > getStoredProperties ( ) [ elementNo ] - > getType ( ) ; <nl> - } else if ( auto tuple = type - > getAs < TupleType > ( ) ) { <nl> - assert ( elementNo < tuple - > getNumElements ( ) & & " invalid index " ) ; <nl> - eltType = tuple - > getElement ( elementNo ) . getType ( ) ; <nl> + <nl> + if ( aggregate . getKind ( ) = = SymbolicValue : : ArrayStorage ) { <nl> + CanType arrayEltTy ; <nl> + oldElts = aggregate . 
getStoredElements ( arrayEltTy ) ; <nl> + eltType = arrayEltTy ; <nl> } else { <nl> - llvm_unreachable ( " the accessPath is invalid for this type " ) ; <nl> + oldElts = aggregate . getAggregateValue ( ) ; <nl> + if ( auto * decl = type - > getStructOrBoundGenericStruct ( ) ) { <nl> + eltType = decl - > getStoredProperties ( ) [ elementNo ] - > getType ( ) ; <nl> + } else if ( auto tuple = type - > getAs < TupleType > ( ) ) { <nl> + assert ( elementNo < tuple - > getNumElements ( ) & & " invalid index " ) ; <nl> + eltType = tuple - > getElement ( elementNo ) . getType ( ) ; <nl> + } else { <nl> + llvm_unreachable ( " the accessPath is invalid for this type " ) ; <nl> + } <nl> } <nl> <nl> / / Update the indexed element of the aggregate . <nl> static SymbolicValue setIndexedElement ( SymbolicValue aggregate , <nl> setIndexedElement ( newElts [ elementNo ] , accessPath . drop_front ( ) , newElement , <nl> eltType , allocator ) ; <nl> <nl> - aggregate = SymbolicValue : : getAggregate ( newElts , allocator ) ; <nl> + if ( aggregate . getKind ( ) = = SymbolicValue : : Aggregate ) <nl> + return SymbolicValue : : getAggregate ( newElts , allocator ) ; <nl> + <nl> + return aggregate = SymbolicValue : : getSymbolicArrayStorage ( <nl> + newElts , eltType - > getCanonicalType ( ) , allocator ) ; <nl> return aggregate ; <nl> } <nl> <nl> mmm a / lib / SILOptimizer / Utils / ConstExpr . cpp <nl> ppp b / lib / SILOptimizer / Utils / ConstExpr . cpp <nl> evaluateAndCacheCall ( SILFunction & fn , SubstitutionMap substitutionMap , <nl> / / general framework . <nl> <nl> enum class WellKnownFunction { <nl> + / / Array . init ( ) <nl> + ArrayInitEmpty , <nl> + / / Array . _allocateUninitializedArray <nl> + AllocateUninitializedArray , <nl> + / / Array . append ( _ : ) <nl> + ArrayAppendElement , <nl> / / String . init ( ) <nl> StringInitEmpty , <nl> / / String . init ( _builtinStringLiteral : utf8CodeUnitCount : isASCII : ) <nl> enum class WellKnownFunction { <nl> } ; <nl> <nl> static llvm : : Optional < WellKnownFunction > classifyFunction ( SILFunction * fn ) { <nl> + if ( fn - > hasSemanticsAttr ( " array . init . empty " ) ) <nl> + return WellKnownFunction : : ArrayInitEmpty ; <nl> + if ( fn - > hasSemanticsAttr ( " array . uninitialized_intrinsic " ) ) <nl> + return WellKnownFunction : : AllocateUninitializedArray ; <nl> + if ( fn - > hasSemanticsAttr ( " array . append_element " ) ) <nl> + return WellKnownFunction : : ArrayAppendElement ; <nl> if ( fn - > hasSemanticsAttr ( " string . init_empty " ) ) <nl> return WellKnownFunction : : StringInitEmpty ; <nl> / / There are two string initializers in the standard library with the <nl> SymbolicValue ConstExprFunctionState : : computeConstantValue ( SILValue value ) { <nl> if ( isa < CopyValueInst > ( value ) | | isa < BeginBorrowInst > ( value ) ) <nl> return getConstantValue ( cast < SingleValueInstruction > ( value ) - > getOperand ( 0 ) ) ; <nl> <nl> + / / Builtin . RawPointer and addresses have the same representation . <nl> + if ( auto * p2ai = dyn_cast < PointerToAddressInst > ( value ) ) <nl> + return getConstantValue ( p2ai - > getOperand ( ) ) ; <nl> + <nl> + / / Indexing a pointer moves the deepest index of the access path it represents <nl> + / / within a memory object . For example , if a pointer p represents the access <nl> + / / path [ 1 , 2 ] within a memory object , p + 1 represents [ 1 , 3 ] <nl> + if ( auto * ia = dyn_cast < IndexAddrInst > ( value ) ) { <nl> + auto index = getConstantValue ( ia - > getOperand ( 1 ) ) ; <nl> + if ( ! index . 
isConstant ( ) ) <nl> + return index ; <nl> + auto basePtr = getConstantValue ( ia - > getOperand ( 0 ) ) ; <nl> + if ( basePtr . getKind ( ) ! = SymbolicValue : : Address ) <nl> + return basePtr ; <nl> + <nl> + SmallVector < unsigned , 4 > accessPath ; <nl> + auto * memObject = basePtr . getAddressValue ( accessPath ) ; <nl> + assert ( ! accessPath . empty ( ) & & " Can ' t index a non - indexed address " ) ; <nl> + accessPath . back ( ) + = index . getIntegerValue ( ) . getLimitedValue ( ) ; <nl> + return SymbolicValue : : getAddress ( memObject , accessPath , <nl> + evaluator . getAllocator ( ) ) ; <nl> + } <nl> + <nl> LLVM_DEBUG ( llvm : : dbgs ( ) < < " ConstExpr Unknown simple : " < < * value < < " \ n " ) ; <nl> <nl> / / Otherwise , we don ' t know how to handle this . <nl> extractStaticStringValue ( SymbolicValue staticString ) { <nl> return staticStringProps [ 0 ] . getStringValue ( ) ; <nl> } <nl> <nl> + / / / If the specified type is a Swift . Array of some element type , then return the <nl> + / / / element type . Otherwise , return a null Type . <nl> + static Type getArrayElementType ( Type ty ) { <nl> + if ( auto bgst = ty - > getAs < BoundGenericStructType > ( ) ) <nl> + if ( bgst - > getDecl ( ) = = bgst - > getASTContext ( ) . getArrayDecl ( ) ) <nl> + return bgst - > getGenericArgs ( ) [ 0 ] ; <nl> + return Type ( ) ; <nl> + } <nl> + <nl> / / / Given a call to a well known function , collect its arguments as constants , <nl> / / / fold it , and return None . If any of the arguments are not constants , marks <nl> / / / the call ' s results as Unknown , and return an Unknown with information about <nl> ConstExprFunctionState : : computeWellKnownCallResult ( ApplyInst * apply , <nl> ( SILInstruction * ) apply , <nl> UnknownReason : : createTrap ( message , evaluator . getAllocator ( ) ) ) ; <nl> } <nl> + case WellKnownFunction : : ArrayInitEmpty : { / / Array . init ( ) <nl> + assert ( conventions . getNumDirectSILResults ( ) = = 1 & & <nl> + conventions . getNumIndirectSILResults ( ) = = 0 & & <nl> + " unexpected Array . init ( ) signature " ) ; <nl> + <nl> + auto typeValue = getConstantValue ( apply - > getOperand ( 1 ) ) ; <nl> + if ( typeValue . getKind ( ) ! = SymbolicValue : : Metatype ) { <nl> + return typeValue . isConstant ( ) <nl> + ? getUnknown ( evaluator , ( SILInstruction * ) apply , <nl> + UnknownReason : : InvalidOperandValue ) <nl> + : typeValue ; <nl> + } <nl> + Type arrayType = typeValue . getMetatypeValue ( ) ; <nl> + <nl> + / / Create an empty SymbolicArrayStorage and then create a SymbolicArray <nl> + / / using it . <nl> + SymbolicValue arrayStorage = SymbolicValue : : getSymbolicArrayStorage ( <nl> + { } , getArrayElementType ( arrayType ) - > getCanonicalType ( ) , <nl> + evaluator . getAllocator ( ) ) ; <nl> + auto arrayVal = SymbolicValue : : getArray ( arrayType , arrayStorage , <nl> + evaluator . getAllocator ( ) ) ; <nl> + setValue ( apply , arrayVal ) ; <nl> + return None ; <nl> + } <nl> + case WellKnownFunction : : AllocateUninitializedArray : { <nl> + / / This function has this signature : <nl> + / / func _allocateUninitializedArray < Element > ( _ builtinCount : Builtin . Word ) <nl> + / / - > ( Array < Element > , Builtin . RawPointer ) <nl> + assert ( conventions . getNumParameters ( ) = = 1 & & <nl> + conventions . getNumDirectSILResults ( ) = = 2 & & <nl> + conventions . getNumIndirectSILResults ( ) = = 0 & & <nl> + " unexpected _allocateUninitializedArray signature " ) ; <nl> + <nl> + / / Figure out the allocation size . 
<nl> + auto numElementsSV = getConstantValue ( apply - > getOperand ( 1 ) ) ; <nl> + if ( ! numElementsSV . isConstant ( ) ) <nl> + return numElementsSV ; <nl> + <nl> + unsigned numElements = numElementsSV . getIntegerValue ( ) . getLimitedValue ( ) ; <nl> + <nl> + / / Allocating uninitialized arrays is supported only in flow - sensitive mode . <nl> + / / TODO : the top - level mode in the interpreter should be phased out . <nl> + if ( ! fn ) <nl> + return getUnknown ( evaluator , ( SILInstruction * ) apply , <nl> + UnknownReason : : Default ) ; <nl> + <nl> + SmallVector < SymbolicValue , 8 > elementConstants ; <nl> + / / Set array elements to uninitialized state . Subsequent stores through <nl> + / / their addresses will initialize the elements . <nl> + elementConstants . assign ( numElements , SymbolicValue : : getUninitMemory ( ) ) ; <nl> + <nl> + Type arrayType = apply - > getType ( ) . castTo < TupleType > ( ) - > getElementType ( 0 ) ; <nl> + Type arrayEltType = getArrayElementType ( arrayType ) ; <nl> + assert ( arrayEltType & & " Couldn ' t understand Swift . Array type ? " ) ; <nl> + <nl> + / / Create a SymbolicArrayStorage with \ c elements and then create a <nl> + / / SymbolicArray using it . <nl> + SymbolicValueAllocator & allocator = evaluator . getAllocator ( ) ; <nl> + SymbolicValue arrayStorage = SymbolicValue : : getSymbolicArrayStorage ( <nl> + elementConstants , arrayEltType - > getCanonicalType ( ) , allocator ) ; <nl> + SymbolicValue array = <nl> + SymbolicValue : : getArray ( arrayType , arrayStorage , allocator ) ; <nl> + <nl> + / / Construct return value for this call , which is a pair consisting of the <nl> + / / address of the first element of the array and the array . <nl> + SymbolicValue storageAddress = array . getAddressOfArrayElement ( allocator , 0 ) ; <nl> + setValue ( apply , <nl> + SymbolicValue : : getAggregate ( { array , storageAddress } , allocator ) ) ; <nl> + return None ; <nl> + } <nl> + case WellKnownFunction : : ArrayAppendElement : { <nl> + / / This function has the following signature in SIL : <nl> + / / ( @ in Element , @ inout Array < Element > ) - > ( ) <nl> + assert ( conventions . getNumParameters ( ) = = 2 & & <nl> + conventions . getNumDirectSILResults ( ) = = 0 & & <nl> + conventions . getNumIndirectSILResults ( ) = = 0 & & <nl> + " unexpected Array . append ( _ : ) signature " ) ; <nl> + / / Get the element to be appended which is passed indirectly ( @ in ) . <nl> + SymbolicValue elementAddress = getConstantValue ( apply - > getOperand ( 1 ) ) ; <nl> + if ( ! elementAddress . isConstant ( ) ) <nl> + return elementAddress ; <nl> + <nl> + auto invalidOperand = [ & ] ( ) { <nl> + return getUnknown ( evaluator , ( SILInstruction * ) apply , <nl> + UnknownReason : : InvalidOperandValue ) ; <nl> + } ; <nl> + if ( elementAddress . getKind ( ) ! = SymbolicValue : : Address ) { <nl> + / / TODO : store the operand number in the error message here . <nl> + return invalidOperand ( ) ; <nl> + } <nl> + <nl> + SmallVector < unsigned , 4 > elementAP ; <nl> + SymbolicValue element = <nl> + elementAddress . getAddressValue ( elementAP ) - > getValue ( ) ; <nl> + <nl> + / / Get the array value . The array is passed @ inout . <nl> + SymbolicValue arrayAddress = getConstantValue ( apply - > getOperand ( 2 ) ) ; <nl> + if ( ! arrayAddress . isConstant ( ) ) <nl> + return arrayAddress ; <nl> + if ( arrayAddress . getKind ( ) ! 
= SymbolicValue : : Address ) <nl> + return invalidOperand ( ) ; <nl> + <nl> + SmallVector < unsigned , 4 > arrayAP ; <nl> + SymbolicValueMemoryObject * arrayMemoryObject = <nl> + arrayAddress . getAddressValue ( arrayAP ) ; <nl> + SymbolicValue arrayValue = arrayMemoryObject - > getValue ( ) ; <nl> + if ( arrayValue . getKind ( ) ! = SymbolicValue : : Array ) { <nl> + return invalidOperand ( ) ; <nl> + } <nl> + <nl> + / / Create a new array storage by appending the \ c element to the existing <nl> + / / storage , and create a new array using the new storage . <nl> + SymbolicValue arrayStorage = arrayValue . getStorageOfArray ( ) ; <nl> + CanType elementType ; <nl> + ArrayRef < SymbolicValue > oldElements = <nl> + arrayStorage . getStoredElements ( elementType ) ; <nl> + SmallVector < SymbolicValue , 4 > newElements ( oldElements . begin ( ) , <nl> + oldElements . end ( ) ) ; <nl> + newElements . push_back ( element ) ; <nl> + <nl> + SymbolicValueAllocator & allocator = evaluator . getAllocator ( ) ; <nl> + SymbolicValue newStorage = SymbolicValue : : getSymbolicArrayStorage ( <nl> + newElements , elementType , allocator ) ; <nl> + SymbolicValue newArray = SymbolicValue : : getArray ( arrayValue . getArrayType ( ) , <nl> + newStorage , allocator ) ; <nl> + arrayMemoryObject - > setIndexedElement ( arrayAP , newArray , allocator ) ; <nl> + return None ; <nl> + } <nl> case WellKnownFunction : : StringInitEmpty : { / / String . init ( ) <nl> assert ( conventions . getNumDirectSILResults ( ) = = 1 & & <nl> conventions . getNumIndirectSILResults ( ) = = 0 & & <nl> mmm a / test / SILOptimizer / constant_evaluable_subset_test . swift <nl> ppp b / test / SILOptimizer / constant_evaluable_subset_test . swift <nl> <nl> / / Run the ( mandatory ) passes on which constant evaluator depends , and test the <nl> / / constant evaluator on the SIL produced after the dependent passes are run . <nl> / / <nl> - / / RUN : not % target - sil - opt - silgen - cleanup - raw - sil - inst - lowering - allocbox - to - stack - mandatory - inlining - constexpr - limit 3000 - test - constant - evaluable - subset % t / constant_evaluable_subset_test_silgen . sil > / dev / null 2 > % t / error - output <nl> + / / RUN : not % target - sil - opt - silgen - cleanup - raw - sil - inst - lowering - allocbox - to - stack - mandatory - inlining - constexpr - limit 3000 - test - constant - evaluable - subset % t / constant_evaluable_subset_test_silgen . sil > % t / constant_evaluable_subset_test . sil 2 > % t / error - output <nl> / / <nl> / / RUN : % FileCheck % s < % t / error - output <nl> / / <nl> func testIndirectEnum ( _ nat : Nat ) - > Bool { <nl> func interpretIndirectEnum ( ) - > Bool { <nl> return testIndirectEnum ( . succ ( . 
zero ) ) <nl> } <nl> + <nl> + / / CHECK - LABEL : @ testEmptyArrayInit <nl> + / / CHECK - NOT : error : <nl> + @ _semantics ( " constant_evaluable " ) <nl> + func testEmptyArrayInit ( ) - > [ Int ] { <nl> + return Array < Int > ( ) <nl> + } <nl> + <nl> + @ _semantics ( " test_driver " ) <nl> + func interpretEmptyArrayInit ( ) - > [ Int ] { <nl> + return testEmptyArrayInit ( ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : @ testEmptyArrayLiteral <nl> + / / CHECK - NOT : error : <nl> + @ _semantics ( " constant_evaluable " ) <nl> + func testEmptyArrayLiteral ( ) - > [ Int ] { <nl> + return [ ] <nl> + } <nl> + <nl> + @ _semantics ( " test_driver " ) <nl> + func interpretEmptyArrayLiteral ( ) - > [ Int ] { <nl> + return testEmptyArrayLiteral ( ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : @ testArrayLiteral <nl> + / / CHECK - NOT : error : <nl> + @ _semantics ( " constant_evaluable " ) <nl> + func testArrayLiteral ( _ x : Int , _ y : Int ) - > [ Int ] { <nl> + return [ x , y , 4 ] <nl> + } <nl> + <nl> + @ _semantics ( " test_driver " ) <nl> + func interpretArrayLiteral ( ) - > [ Int ] { <nl> + return testArrayLiteral ( 2 , 3 ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : @ testArrayAppend <nl> + / / CHECK - NOT : error : <nl> + @ _semantics ( " constant_evaluable " ) <nl> + func testArrayAppend ( _ x : Int ) - > [ Int ] { <nl> + var a : [ Int ] = [ ] <nl> + a . append ( x ) <nl> + return a <nl> + } <nl> + <nl> + @ _semantics ( " test_driver " ) <nl> + func interpretArrayAppend ( ) - > [ Int ] { <nl> + return testArrayAppend ( 25 ) <nl> + } <nl> + <nl> + / / CHECK - LABEL : @ testArrayAppendNonEmpty <nl> + / / CHECK - NOT : error : <nl> + @ _semantics ( " constant_evaluable " ) <nl> + func testArrayAppendNonEmpty ( _ x : String ) - > [ String ] { <nl> + var a : [ String ] = [ " ls " , " cat " , " echo " , " cd " ] <nl> + a . append ( x ) <nl> + return a <nl> + } <nl> + <nl> + @ _semantics ( " test_driver " ) <nl> + func interpretArrayAppendNonEmpty ( ) - > [ String ] { <nl> + return testArrayAppendNonEmpty ( " mkdir " ) <nl> + } <nl> mmm a / test / SILOptimizer / constant_evaluator_test . sil <nl> ppp b / test / SILOptimizer / constant_evaluator_test . sil <nl> sil @ interpretAssertionFailure : $ @ convention ( thin ) ( ) - > Never { <nl> / / CHECK : { { . * } } : [ [ @ LINE - 1 ] ] : { { . * } } : note : error - prefix : message <nl> unreachable <nl> } <nl> + <nl> + / / Tests for arrays . <nl> + <nl> + / / Array . init ( ) <nl> + sil [ serialized ] [ _semantics " array . init . empty " ] @ $ sS2ayxGycfC : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + <nl> + / / _allocateUninitializedArray < A > ( _ : ) <nl> + sil [ serialized ] [ always_inline ] [ _semantics " array . uninitialized_intrinsic " ] @ $ ss27_allocateUninitializedArrayySayxG_BptBwlF : $ @ convention ( thin ) < τ_0_0 > ( Builtin . Word ) - > ( @ owned Array < τ_0_0 > , Builtin . RawPointer ) <nl> + <nl> + sil @ interpretArrayInit : $ @ convention ( thin ) ( ) - > @ owned Array < Int > { <nl> + bb0 : <nl> + % 0 = metatype $ @ thin Array < Int > . Type <nl> + / / function_ref Array . init ( ) <nl> + % 1 = function_ref @ $ sS2ayxGycfC : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + % 2 = apply % 1 < Int > ( % 0 ) : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . 
Type ) - > @ owned Array < τ_0_0 > <nl> + return % 2 : $ Array < Int > <nl> + } / / CHECK : Returns Array < Int > <nl> + / / CHECK : size : 0 contents [ ] <nl> + <nl> + sil [ ossa ] @ interpretEmptyArrayLiteral : $ @ convention ( thin ) ( ) - > @ owned Array < String > { <nl> + bb0 : <nl> + % 0 = integer_literal $ Builtin . Word , 0 <nl> + / / function_ref _allocateUninitializedArray < A > ( _ : ) <nl> + % 1 = function_ref @ $ ss27_allocateUninitializedArrayySayxG_BptBwlF : $ @ convention ( thin ) < τ_0_0 > ( Builtin . Word ) - > ( @ owned Array < τ_0_0 > , Builtin . RawPointer ) <nl> + % 2 = apply % 1 < String > ( % 0 ) : $ @ convention ( thin ) < τ_0_0 > ( Builtin . Word ) - > ( @ owned Array < τ_0_0 > , Builtin . RawPointer ) <nl> + ( % 3 , % 4 ) = destructure_tuple % 2 : $ ( Array < String > , Builtin . RawPointer ) <nl> + return % 3 : $ Array < String > <nl> + } / / CHECK : Returns Array < String > <nl> + / / CHECK : size : 0 contents [ ] <nl> + <nl> + sil [ ossa ] @ initializeArrayWithLiterals : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > { <nl> + bb0 : <nl> + % 0 = integer_literal $ Builtin . Int64 , 11 / / element 1 <nl> + % 1 = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + % 2 = integer_literal $ Builtin . Int64 , 12 / / element 2 <nl> + % 3 = struct $ Int64 ( % 2 : $ Builtin . Int64 ) <nl> + % 4 = integer_literal $ Builtin . Int64 , 14 / / element 3 <nl> + % 5 = struct $ Int64 ( % 4 : $ Builtin . Int64 ) <nl> + <nl> + % 6 = integer_literal $ Builtin . Word , 3 / / array literal size <nl> + / / function_ref _allocateUninitializedArray < A > ( _ : ) <nl> + % 7 = function_ref @ $ ss27_allocateUninitializedArrayySayxG_BptBwlF : $ @ convention ( thin ) < τ_0_0 > ( Builtin . Word ) - > ( @ owned Array < τ_0_0 > , Builtin . RawPointer ) <nl> + % 8 = apply % 7 < Int64 > ( % 6 ) : $ @ convention ( thin ) < τ_0_0 > ( Builtin . Word ) - > ( @ owned Array < τ_0_0 > , Builtin . RawPointer ) <nl> + ( % 9 , % 10 ) = destructure_tuple % 8 : $ ( Array < Int64 > , Builtin . RawPointer ) <nl> + % 11 = pointer_to_address % 10 : $ Builtin . RawPointer to [ strict ] $ * Int64 <nl> + store % 1 to [ trivial ] % 11 : $ * Int64 <nl> + % 13 = integer_literal $ Builtin . Word , 1 / / Index : 1 <nl> + % 14 = index_addr % 11 : $ * Int64 , % 13 : $ Builtin . Word <nl> + store % 3 to [ trivial ] % 14 : $ * Int64 <nl> + % 16 = integer_literal $ Builtin . Word , 2 / / Index : 2 <nl> + % 17 = index_addr % 11 : $ * Int64 , % 16 : $ Builtin . Word <nl> + store % 5 to [ trivial ] % 17 : $ * Int64 <nl> + return % 9 : $ Array < Int64 > <nl> + } <nl> + <nl> + sil [ ossa ] @ interpretArrayLiteral : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > { <nl> + bb0 : <nl> + % 7 = function_ref @ initializeArrayWithLiterals : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > <nl> + % 8 = apply % 7 ( ) : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > <nl> + return % 8 : $ Array < Int64 > <nl> + } / / CHECK : Returns Array < Int64 > <nl> + / / CHECK : size : 3 <nl> + / / CHECK : agg : 1 elt : int : 11 <nl> + / / CHECK : agg : 1 elt : int : 12 <nl> + / / CHECK : agg : 1 elt : int : 14 <nl> + <nl> + / / Array . append ( _ : ) <nl> + sil [ serialized ] [ _semantics " array . append_element " ] @ $ sSa6appendyyxnF : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + <nl> + sil [ ossa ] @ interpretArrayAppend : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > { <nl> + % 0 = integer_literal $ Builtin . 
Int64 , 71 <nl> + % 1 = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + % 2 = alloc_stack $ Array < Int64 > , var , name " a " <nl> + % 3 = metatype $ @ thin Array < Int64 > . Type <nl> + / / function_ref Array . init ( ) <nl> + % 4 = function_ref @ $ sS2ayxGycfC : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + % 5 = apply % 4 < Int64 > ( % 3 ) : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + store % 5 to [ init ] % 2 : $ * Array < Int64 > <nl> + % 10 = alloc_stack $ Int64 <nl> + store % 1 to [ trivial ] % 10 : $ * Int64 <nl> + / / function_ref Array . append ( _ : ) <nl> + % 13 = function_ref @ $ sSa6appendyyxnF : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + % 14 = apply % 13 < Int64 > ( % 10 , % 2 ) : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + dealloc_stack % 10 : $ * Int64 <nl> + % 18 = load [ copy ] % 2 : $ * Array < Int64 > <nl> + destroy_addr % 2 : $ * Array < Int64 > <nl> + dealloc_stack % 2 : $ * Array < Int64 > <nl> + return % 18 : $ Array < Int64 > <nl> + } / / CHECK : Returns Array < Int64 > <nl> + / / CHECK : size : 1 <nl> + / / CHECK : agg : 1 elt : int : 71 <nl> + <nl> + sil [ ossa ] @ interpretArrayAppendNonEmpty : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > { <nl> + bb0 : <nl> + % 0 = integer_literal $ Builtin . Int64 , 100 <nl> + % 1 = struct $ Int64 ( % 0 : $ Builtin . Int64 ) <nl> + % 2 = alloc_stack $ Array < Int64 > , var , name " a " <nl> + % 3 = metatype $ @ thin Array < Int64 > . Type <nl> + % 4 = function_ref @ initializeArrayWithLiterals : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > <nl> + % 5 = apply % 4 ( ) : $ @ convention ( thin ) ( ) - > @ owned Array < Int64 > <nl> + store % 5 to [ init ] % 2 : $ * Array < Int64 > <nl> + % 10 = alloc_stack $ Int64 <nl> + store % 1 to [ trivial ] % 10 : $ * Int64 <nl> + / / function_ref Array . append ( _ : ) <nl> + % 13 = function_ref @ $ sSa6appendyyxnF : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + % 14 = apply % 13 < Int64 > ( % 10 , % 2 ) : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + dealloc_stack % 10 : $ * Int64 <nl> + % 18 = load [ copy ] % 2 : $ * Array < Int64 > <nl> + destroy_addr % 2 : $ * Array < Int64 > <nl> + dealloc_stack % 2 : $ * Array < Int64 > <nl> + return % 18 : $ Array < Int64 > <nl> + } / / CHECK : Returns Array < Int64 > <nl> + / / CHECK : size : 4 <nl> + / / CHECK : agg : 1 elt : int : 11 <nl> + / / CHECK : agg : 1 elt : int : 12 <nl> + / / CHECK : agg : 1 elt : int : 14 <nl> + / / CHECK : agg : 1 elt : int : 100 <nl> + <nl> + / / / Test appending of a static string to an array . The construction of a static <nl> + / / / string is a bit complicated due to the use of instructions like " ptrtoint " . <nl> + / / / This tests that array append works with such complex constant values as well . <nl> + sil @ interpretArrayAppendStaticString : $ @ convention ( thin ) ( ) - > @ owned Array < StaticString > { <nl> + % 0 = string_literal utf8 " constant " / / string to be appended . <nl> + <nl> + / / Initialize an empty array <nl> + % 2 = alloc_stack $ Array < StaticString > , var , name " a " <nl> + % 3 = metatype $ @ thin Array < StaticString > . Type <nl> + / / function_ref Array . 
init ( ) <nl> + % 4 = function_ref @ $ sS2ayxGycfC : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + % 5 = apply % 4 < StaticString > ( % 3 ) : $ @ convention ( method ) < τ_0_0 > ( @ thin Array < τ_0_0 > . Type ) - > @ owned Array < τ_0_0 > <nl> + store % 5 to % 2 : $ * Array < StaticString > <nl> + <nl> + / / Initialize a static string . <nl> + % 6 = integer_literal $ Builtin . Word , 8 <nl> + % 7 = builtin " ptrtoint_Word " ( % 0 : $ Builtin . RawPointer ) : $ Builtin . Word <nl> + % 8 = integer_literal $ Builtin . Int8 , 2 <nl> + % 9 = struct $ StaticString ( % 7 : $ Builtin . Word , % 6 : $ Builtin . Word , % 8 : $ Builtin . Int8 ) <nl> + <nl> + % 10 = alloc_stack $ StaticString <nl> + store % 9 to % 10 : $ * StaticString <nl> + / / function_ref Array . append ( _ : ) <nl> + % 13 = function_ref @ $ sSa6appendyyxnF : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + % 14 = apply % 13 < StaticString > ( % 10 , % 2 ) : $ @ convention ( method ) < τ_0_0 > ( @ in τ_0_0 , @ inout Array < τ_0_0 > ) - > ( ) <nl> + dealloc_stack % 10 : $ * StaticString <nl> + % 18 = load % 2 : $ * Array < StaticString > <nl> + destroy_addr % 2 : $ * Array < StaticString > <nl> + dealloc_stack % 2 : $ * Array < StaticString > <nl> + return % 18 : $ Array < StaticString > <nl> + } / / CHECK : Returns Array < StaticString > <nl> + / / CHECK : size : 1 <nl> + / / CHECK : string : " constant " <nl> + <nl> mmm a / test / SILOptimizer / pound_assert . swift <nl> ppp b / test / SILOptimizer / pound_assert . swift <nl> func evaluate < T > ( addressOnlyEnum : AddressOnlyEnum < T > ) - > Int { <nl> <nl> # assert ( evaluate ( addressOnlyEnum : . double ( IntContainer ( value : 1 ) ) ) = = 2 ) <nl> # assert ( evaluate ( addressOnlyEnum : . triple ( IntContainer ( value : 1 ) ) ) = = 3 ) <nl> + <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / Arrays <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + / / When the const - evaluator evaluates this struct , it forces evaluation of the <nl> + / / ` arr ` value . <nl> + struct ContainsArray { <nl> + let x : Int <nl> + let arr : [ Int ] <nl> + } <nl> + <nl> + func arrayInitEmptyTopLevel ( ) { <nl> + let c = ContainsArray ( x : 1 , arr : Array ( ) ) <nl> + # assert ( c . x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitEmptyLiteralTopLevel ( ) { <nl> + / / TODO : More work necessary for array initialization using literals to work <nl> + / / at the top level . <nl> + / / expected - note @ + 1 { { cannot evaluate expression as constant here } } <nl> + let c = ContainsArray ( x : 1 , arr : [ ] ) <nl> + / / expected - error @ + 1 { { # assert condition not constant } } <nl> + # assert ( c . x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitLiteral ( ) { <nl> + / / TODO : More work necessary for array initialization using literals to work <nl> + / / at the top level . <nl> + / / expected - note @ + 1 { { cannot evaluate expression as constant here } } <nl> + let c = ContainsArray ( x : 1 , arr : [ 2 , 3 , 4 ] ) <nl> + / / expected - error @ + 1 { { # assert condition not constant } } <nl> + # assert ( c . 
x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitNonConstantElementTopLevel ( x : Int ) { <nl> + / / expected - note @ + 1 { { cannot evaluate expression as constant here } } <nl> + let c = ContainsArray ( x : 1 , arr : [ x ] ) <nl> + / / expected - error @ + 1 { { # assert condition not constant } } <nl> + # assert ( c . x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitEmptyFlowSensitive ( ) - > ContainsArray { <nl> + return ContainsArray ( x : 1 , arr : Array ( ) ) <nl> + } <nl> + <nl> + func invokeArrayInitEmptyFlowSensitive ( ) { <nl> + # assert ( arrayInitEmptyFlowSensitive ( ) . x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitEmptyLiteralFlowSensitive ( ) - > ContainsArray { <nl> + return ContainsArray ( x : 1 , arr : [ ] ) <nl> + } <nl> + <nl> + func invokeArrayInitEmptyLiteralFlowSensitive ( ) { <nl> + # assert ( arrayInitEmptyLiteralFlowSensitive ( ) . x = = 1 ) <nl> + } <nl> + <nl> + func arrayInitLiteralFlowSensitive ( ) - > ContainsArray { <nl> + return ContainsArray ( x : 1 , arr : [ 2 , 3 , 4 ] ) <nl> + } <nl> + <nl> + func invokeArrayInitLiteralFlowSensitive ( ) { <nl> + # assert ( arrayInitLiteralFlowSensitive ( ) . x = = 1 ) <nl> + } <nl>
[ Constant Evaluator ] Support Array constants in the constant
apple/swift
874bb1df259607ffe3f03e294a41e7b6048738ed
2019-09-24T22:18:14Z
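Editor's note — the commit above models a Swift array constant as a memory cell holding an immutable storage value (element type plus elements), with element addresses expressed as a (cell, index) pair and `append` cloning the storage rather than mutating it. Below is an illustrative-only C++ sketch of that representation; the names (`ArrayStorage`, `MemoryCell`, `ElementAddress`) are invented and deliberately much simpler than the compiler's `SymbolicValue` machinery.

```cpp
#include <memory>
#include <string>
#include <vector>

// Invented stand-ins: strings play the role of symbolic element values.
struct ArrayStorage {
  std::string element_type;
  std::vector<std::string> elements;
};

// Stand-in for the addressable memory object that owns the array storage.
struct MemoryCell {
  std::shared_ptr<const ArrayStorage> storage;
};

// A derived address: base memory cell plus an access-path index.
struct ElementAddress {
  MemoryCell* cell;
  std::size_t index;
};

ElementAddress address_of_element(MemoryCell& arr, std::size_t i) {
  return {&arr, i};
}

// Append with value semantics: copy the old storage, add the element, and
// repoint the cell, analogous to how the interpreter models Array.append(_:).
void append(MemoryCell& arr, const std::string& element) {
  auto fresh = std::make_shared<ArrayStorage>(*arr.storage);
  fresh->elements.push_back(element);
  arr.storage = std::move(fresh);
}
```

Usage is straightforward: start from a cell whose storage holds the literal elements, hand out `address_of_element` results for stores through pointers, and route every mutation through `append`-style copies so earlier snapshots of the storage remain valid constants.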
mmm a / Documentation / Books / HTTP / Agency / README . md <nl> ppp b / Documentation / Books / HTTP / Agency / README . md <nl> <nl> HTTP Interface for Agency feature <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> - # # # Configuration <nl> + The Agency is the ArangoDB component which manages the entire ArangoDB cluster . <nl> + ArangoDB itself mainly uses the Agency as a central place to store the configuration <nl> + and the cluster nodes health management . It implements the Raft concensus protocol to act as <nl> + the single - source of truth for the entire cluster . You may know other software providing similar functionality e . g . _Apache Zookeeper_ , _etcd_ or _Consul_ . <nl> <nl> - At all times , i . e . regardless of the state of the agents and the current health of the RAFT consensus , one can invoke the configuration API : <nl> + To an end - user the Agency is essentially a fault - tolerant Key - Value Store with a simple REST - API . <nl> + It is possible to use the Agency API for a variety of use - cases , for example : <nl> <nl> - curl http : / / $ SERVER : $ PORT / _api / agency / config <nl> + - Centralized configuration repository <nl> + - Service discovery registry <nl> + - Distributed synchronization service <nl> + - Distributed Lock - Manager <nl> <nl> - Here , and in all subsequent calls , we assume that ` $ SERVER ` is <nl> - replaced by the server name and ` $ PORT ` is replaced by the port <nl> - number . We use ` curl ` throughout for the examples , but any client <nl> - library performing HTTP requests should do . <nl> - The output might look somewhat like this <nl> + * Note 1 * : To access the Agency API with authentication enabled , you need to include an authorization header <nl> + with every request . The authorization header _must_ contain a * superuser JWT Token * ; For more information see the [ authentication section ] ( . . / General / README . md # Authentication ) . <nl> <nl> - ` ` ` js <nl> - { <nl> - " term " : 1 , <nl> - " leaderId " : " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " , <nl> - " lastCommitted " : 1 , <nl> - " lastAcked " : { <nl> - " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " : 0 . 21 , <nl> - " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " : 0 . 21 , <nl> - " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " : 0 <nl> - } , <nl> - " configuration " : { <nl> - " pool " : { <nl> - " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " : " tcp : / / localhost : 8531 " , <nl> - " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " : " tcp : / / localhost : 8530 " , <nl> - " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " : " tcp : / / localhost : 8529 " <nl> - } , <nl> - " active " : [ <nl> - " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " , <nl> - " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " , <nl> - " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " <nl> - ] , <nl> - " id " : " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " , <nl> - " agency size " : 3 , <nl> - " pool size " : 3 , <nl> - " endpoint " : " tcp : / / localhost : 8529 " , <nl> - " min ping " : 0 . 5 , <nl> - " max ping " : 2 . 5 , <nl> - " supervision " : false , <nl> - " supervision frequency " : 5 , <nl> - " compaction step size " : 1000 , <nl> - " supervision grace period " : 120 <nl> - } <nl> - } <nl> - ` ` ` <nl> - <nl> - This is the actual output of a healthy agency . The configuration of the agency is found in the ` configuration ` section as you might have guessed . 
It is populated by static information on the startup parameters like ` agency size ` , the once generated ` unique id ` etc . It holds information on the invariants of the RAFT algorithm and data compaction . <nl> - <nl> - The remaining data reflect the variant entities in RAFT , as ` term ` and ` leaderId ` , also some debug information on how long the last leadership vote was received from any particular agency member . Low term numbers on a healthy network are an indication of good operation environemnt , while often increasing term numbers indicate , that the network environemnt and stability suggest to raise the RAFT parameters ` min ping ` and ' max ping ' accordingly . <nl> + * Note 2 * : The key - prefix ` / arango ` contains ArangoDBs internal configuration . You should _never_ change any values below the _arango_ key . <nl> <nl> # # # Key - Value store APIs <nl> <nl> - Generally , all document IO to and from the key - value store consists of JSON arrays . The outer Array is an envelope for multiple read or write transactions . The results are arrays are an envelope around the results corresponding to the order of the incoming transactions . <nl> + Generally , all document IO to and from the key - value store consists of JSON arrays . The outer array is an envelope for multiple read or write transactions . The results are arrays are an envelope around the results corresponding to the order of the incoming transactions . <nl> <nl> Consider the following write operation into a prestine agency : <nl> <nl> curl - L http : / / $ SERVER : $ PORT / _api / agency / read - d ' [ [ " / " ] ] ' <nl> ] <nl> ` ` ` <nl> <nl> - In the first step we commited a single transaction that commits the JSON document inside the inner transaction array to the agency . The result is ` [ 1 ] ` , which is the replicated log index . Repeated invocation will yield growing log numbers 2 , 3 , 4 , etc . <nl> + In the first step we committed a single transaction that commits the JSON document inside the inner transaction array to the agency . The result is ` [ 1 ] ` , which is the replicated log index . Repeated invocation will yield growing log numbers 2 , 3 , 4 , etc . <nl> <nl> - The read access is a complete access to the key - value store indicated by access to it ' s root element and returns the result as an array corresponding to the outermost array in the read transaction . <nl> + The read access is a complete access to the key - value store indicated by access to its root element and returns the result as an array corresponding to the outermost array in the read transaction . <nl> <nl> Let ' s dig in some deeper . <nl> <nl> The notifying POST requests are submitted immediately with any complete array of <nl> " / constants / euler " : { " op " : " create " , " new " : 2 . 718281828459046 } , <nl> " / constants / pi " : { " op " : " delete " } } } <nl> ` ` ` <nl> + <nl> + # # # Configuration <nl> + <nl> + At all times , i . e . regardless of the state of the agents and the current health of the RAFT consensus , one can invoke the configuration API : <nl> + <nl> + curl http : / / $ SERVER : $ PORT / _api / agency / config <nl> + <nl> + Here , and in all subsequent calls , we assume that ` $ SERVER ` is <nl> + replaced by the server name and ` $ PORT ` is replaced by the port <nl> + number . We use ` curl ` throughout for the examples , but any client <nl> + library performing HTTP requests should do . 
<nl> + The output might look somewhat like this <nl> + <nl> + ` ` ` js <nl> + { <nl> + " term " : 1 , <nl> + " leaderId " : " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " , <nl> + " lastCommitted " : 1 , <nl> + " lastAcked " : { <nl> + " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " : 0 . 21 , <nl> + " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " : 0 . 21 , <nl> + " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " : 0 <nl> + } , <nl> + " configuration " : { <nl> + " pool " : { <nl> + " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " : " tcp : / / localhost : 8531 " , <nl> + " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " : " tcp : / / localhost : 8530 " , <nl> + " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " : " tcp : / / localhost : 8529 " <nl> + } , <nl> + " active " : [ <nl> + " ac129027 - b440 - 4c4f - 84e9 - 75c042942171 " , <nl> + " c54dbb8a - 723d - 4c82 - 98de - 8c841a14a112 " , <nl> + " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " <nl> + ] , <nl> + " id " : " f5d11cde - 8468 - 4fd2 - 8747 - b4ef5c7dfa98 " , <nl> + " agency size " : 3 , <nl> + " pool size " : 3 , <nl> + " endpoint " : " tcp : / / localhost : 8529 " , <nl> + " min ping " : 0 . 5 , <nl> + " max ping " : 2 . 5 , <nl> + " supervision " : false , <nl> + " supervision frequency " : 5 , <nl> + " compaction step size " : 1000 , <nl> + " supervision grace period " : 120 <nl> + } <nl> + } <nl> + ` ` ` <nl> + <nl> + This is the actual output of a healthy agency . The configuration of the agency is found in the ` configuration ` section as you might have guessed . It is populated by static information on the startup parameters like ` agency size ` , the once generated ` unique id ` etc . It holds information on the invariants of the RAFT algorithm and data compaction . <nl> + <nl> + The remaining data reflect the variant entities in RAFT , as ` term ` and ` leaderId ` , also some debug information on how long the last leadership vote was received from any particular agency member . Low term numbers on a healthy network are an indication of good operation environemnt , while often increasing term numbers indicate , that the network environemnt and stability suggest to raise the RAFT parameters ` min ping ` and ' max ping ' accordingly . <nl> mmm a / Documentation / Books / HTTP / General / README . md <nl> ppp b / Documentation / Books / HTTP / General / README . md <nl> built - in dialog . <nl> <nl> # # # Authentication via JWT <nl> <nl> - To authenticate via JWT you must first obtain a JWT . To do so send a POST request to <nl> + ArangoDB uses a standard JWT based authentication method . <nl> + To authenticate via JWT you must first obtain a JWT token with a signature generated via HMAC with SHA - 256 . <nl> + The secret may either be set using ` - - server . jwt - secret ` or will be randomly generated upon server startup . <nl> <nl> - * / _open / auth * <nl> + For more information on JWT please consult RFC7519 and https : / / jwt . io <nl> + <nl> + # # # # User JWT - Token <nl> + <nl> + To authenticate with a specific user you need to supply a JWT token containing <nl> + the _preferred_username_ field with the username . <nl> + You can either let ArangoDB generate this token for you via an API call <nl> + or you can generate it yourself ( only if you know the JWT secret ) . <nl> + <nl> + ArangoDB offers a REST API to generate user tokens for you if you know the username and password . 
<nl> + To do so send a POST request to <nl> <nl> + * / _open / auth * <nl> containing * username * and * password * JSON - encoded like so : <nl> <nl> { " username " : " root " , " password " : " rootPassword " } <nl> <nl> - Upon success the endpoint will return a 200 OK and an answer containing the JWT in a JSON - <nl> + Upon success the endpoint will return a * * 200 OK * * and an answer containing the JWT in a JSON - <nl> encoded object like so : <nl> <nl> ` ` ` <nl> This JWT should then be used within the Authorization HTTP header in subsequent <nl> Authorization : bearer eyJhbGciOiJIUzI1NiI . . x6EfI <nl> ` ` ` <nl> <nl> - Please note that the JWT will expire after 1 month and needs to be updated . <nl> + Please note that the JWT will expire after 1 month and needs to be updated . We encode the expiration <nl> + date of the JWT token in the _exp_ field in unix time . <nl> + Please note that all JWT tokens must contain the _iss_ field with string value ` arangodb ` . <nl> + As an example the decoded JWT body would look like this : <nl> + <nl> + ` ` ` json <nl> + { <nl> + " exp " : 1540381557 , <nl> + " iat " : 1537789 . 55727901 , <nl> + " iss " : " arangodb " , <nl> + " preferred_username " : " root " <nl> + } <nl> + ` ` ` <nl> + <nl> + # # # # Superuser JWT - Token <nl> <nl> - ArangoDB uses a standard JWT authentication . The secret may either be set using <nl> - ` - - server . jwt - secret ` or will be randomly generated upon server startup . <nl> + To access specific internal APIs as well as Agency and DBServer instances a token generated via ` / open / auth ` is not <nl> + good enough . For these special APIs you will need to generate a special JWT token which grants superuser access . <nl> + Note that using superuser access for normal database operations is * * NOT advised * * . <nl> <nl> - For more information on JWT please consult RFC7519 and https : / / jwt . io <nl> + _Note_ : It is only possible to generate this JWT token with the knowledge of the JWT secret . <nl> + <nl> + For your convenience it is possible to generate this token via the [ ArangoDB starter CLI ] ( . . / . . / Manual / Programs / Starter / Security . html # using - authentication - tokens ) . <nl> + <nl> + Should you whish to generate the JWT token yourself with a tool of your choice , you need to include the correct body . <nl> + The body must contain the _iss_ field with string value ` arangodb ` and the ` server_id ` field with an arbirtrary string identifier : <nl> + <nl> + ` ` ` json <nl> + { <nl> + " exp " : 1537900279 , <nl> + " iat " : 1537800279 , <nl> + " iss " : " arangodb " , <nl> + " server_id " : " myclient " <nl> + } <nl> + ` ` ` <nl> + <nl> + For example to generate a token via the [ jwtgen tool ] ( https : / / www . npmjs . com / package / jwtgen ) ( note the lifetime of one hour ) : <nl> + <nl> + ` ` ` <nl> + jwtgen - s < my - secret > - e 3600 - v - a " HS256 " - c ' iss = arangodb ' - c ' server_id = myclient ' <nl> + curl - v - H " Authorization : bearer $ ( jwtgen - s < my - secret > - e 3600 - a " HS256 " - c ' iss = arangodb ' - c ' server_id = myclient ' ) " http : / / < database - ip > : 8529 / _api / version <nl> + ` ` ` <nl> <nl> Error Handling <nl> mmmmmmmmmmmm - - <nl> mmm a / arangod / Agency / Supervision . cpp <nl> ppp b / arangod / Agency / Supervision . 
cpp <nl> void Supervision : : shrinkCluster ( ) { <nl> / / Get servers from plan <nl> auto availServers = Job : : availableServers ( _snapshot ) ; <nl> <nl> + / / set by external service like Kubernetes / Starter / DCOS <nl> size_t targetNumDBServers ; <nl> std : : string const NDBServers ( " / Target / NumberOfDBServers " ) ; <nl> <nl> mmm a / arangod / Auth / TokenCache . cpp <nl> ppp b / arangod / Auth / TokenCache . cpp <nl> auth : : TokenCache : : Entry auth : : TokenCache : : validateJwtBody ( <nl> auth : : TokenCache : : Entry authResult ( " " , false , 0 ) ; <nl> if ( bodySlice . hasKey ( " preferred_username " ) ) { <nl> VPackSlice const usernameSlice = bodySlice . get ( " preferred_username " ) ; <nl> - if ( ! usernameSlice . isString ( ) ) { <nl> + if ( ! usernameSlice . isString ( ) | | usernameSlice . getStringLength ( ) = = 0 ) { <nl> return auth : : TokenCache : : Entry : : Unauthenticated ( ) ; <nl> } <nl> authResult . _username = usernameSlice . copyString ( ) ; <nl> + if ( _userManager = = nullptr | | ! _userManager - > userExists ( authResult . _username ) ) { <nl> + return auth : : TokenCache : : Entry : : Unauthenticated ( ) ; <nl> + } <nl> } else if ( bodySlice . hasKey ( " server_id " ) ) { <nl> / / mop : hmm . . . nothing to do here : D <nl> } else { <nl> mmm a / arangod / Auth / UserManager . cpp <nl> ppp b / arangod / Auth / UserManager . cpp <nl> Result auth : : UserManager : : accessUser ( std : : string const & user , <nl> return TRI_ERROR_USER_NOT_FOUND ; <nl> } <nl> <nl> + bool auth : : UserManager : : userExists ( std : : string const & user ) { <nl> + if ( user . empty ( ) ) { <nl> + return false ; <nl> + } <nl> + loadFromDB ( ) ; <nl> + <nl> + READ_LOCKER ( readGuard , _userCacheLock ) ; <nl> + UserMap : : iterator const & it = _userCache . find ( user ) ; <nl> + return it ! = _userCache . end ( ) ; <nl> + } <nl> + <nl> VPackBuilder auth : : UserManager : : serializeUser ( std : : string const & user ) { <nl> loadFromDB ( ) ; <nl> <nl> mmm a / arangod / Auth / UserManager . h <nl> ppp b / arangod / Auth / UserManager . h <nl> class UserManager { <nl> / / / Access user without modifying it <nl> Result accessUser ( std : : string const & user , ConstUserCallback & & ) ; <nl> <nl> + / / / @ brief does this user exists in the db <nl> + bool userExists ( std : : string const & user ) ; <nl> / / / Serialize user into legacy format for REST API <nl> velocypack : : Builder serializeUser ( std : : string const & user ) ; <nl> Result removeUser ( std : : string const & user ) ; <nl> mmm a / arangod / GeneralServer / GeneralCommTask . cpp <nl> ppp b / arangod / GeneralServer / GeneralCommTask . cpp <nl> bool resolveRequestContext ( GeneralRequest & req ) { <nl> <nl> TRI_ASSERT ( ! vocbase - > isDangling ( ) ) ; <nl> <nl> + std : : unique_ptr < VocbaseContext > guard ( VocbaseContext : : create ( req , * vocbase ) ) ; <nl> + if ( ! guard ) { <nl> + return false ; <nl> + } <nl> + <nl> / / the vocbase context is now responsible for releasing the vocbase <nl> - req . setRequestContext ( VocbaseContext : : create ( req , * vocbase ) , true ) ; <nl> + req . setRequestContext ( guard . get ( ) , true ) ; <nl> + guard . release ( ) ; <nl> <nl> / / the " true " means the request is the owner of the context <nl> return true ; <nl> mmm a / arangod / RestServer / VocbaseContext . cpp <nl> ppp b / arangod / RestServer / VocbaseContext . 
cpp <nl> VocbaseContext * VocbaseContext : : create ( GeneralRequest & req , TRI_vocbase_t & vocba <nl> } <nl> <nl> auth : : UserManager * um = auth - > userManager ( ) ; <nl> - TRI_ASSERT ( um ! = nullptr ) ; <nl> + if ( um = = nullptr ) { <nl> + LOG_TOPIC ( WARN , Logger : : AUTHENTICATION ) < < " users are not supported on this server " ; <nl> + return nullptr ; <nl> + } <nl> <nl> auth : : Level dbLvl = um - > databaseAuthLevel ( req . user ( ) , req . databaseName ( ) ) ; <nl> auth : : Level sysLvl = dbLvl ; <nl> new file mode 100644 <nl> index 00000000000 . . e23a46b47e0 <nl> mmm / dev / null <nl> ppp b / tests / js / client / authentication / auth - cluster . js <nl> <nl> + / * jshint globalstrict : false , strict : false * / <nl> + / * global fail , assertTrue * / <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test the authentication for cluster nodes <nl> + / / / <nl> + / / / @ file <nl> + / / / <nl> + / / / DISCLAIMER <nl> + / / / <nl> + / / / Copyright 2018 ArangoDB GmbH , Cologne , Germany <nl> + / / / <nl> + / / / Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + / / / you may not use this file except in compliance with the License . <nl> + / / / You may obtain a copy of the License at <nl> + / / / <nl> + / / / http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + / / / <nl> + / / / Unless required by applicable law or agreed to in writing , software <nl> + / / / distributed under the License is distributed on an " AS IS " BASIS , <nl> + / / / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + / / / See the License for the specific language governing permissions and <nl> + / / / limitations under the License . <nl> + / / / <nl> + / / / Copyright holder is triAGENS GmbH , Cologne , Germany <nl> + / / / <nl> + / / / @ author Simon Grätzer <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + const jsunity = require ( " jsunity " ) ; <nl> + const arango = require ( " @ arangodb " ) . arango ; <nl> + const db = require ( " internal " ) . db ; <nl> + const request = require ( ' @ arangodb / request ' ) ; <nl> + const crypto = require ( ' @ arangodb / crypto ' ) ; <nl> + const expect = require ( ' chai ' ) . expect ; <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief test suite <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + function AuthSuite ( ) { <nl> + ' use strict ' ; <nl> + var baseUrl = function ( endpoint ) { <nl> + return endpoint . replace ( / ^ tcp : / , ' http : ' ) . replace ( / ^ ssl : / , ' https : ' ) ; <nl> + } ; <nl> + <nl> + / / hardcoded in testsuite <nl> + const jwtSecret = ' haxxmann ' ; <nl> + / / const user = ' hackers @ arangodb . com ' ; <nl> + <nl> + / / supply " PRMR " or " AGNT " or " CRDN " <nl> + function getServersWithRole ( role ) { <nl> + var jwt = crypto . jwtEncode ( jwtSecret , { <nl> + " preferred_username " : " root " , <nl> + " iss " : " arangodb " , " exp " : Math . floor ( Date . 
now ( ) / 1000 ) + 3600 <nl> + } , ' HS256 ' ) ; <nl> + <nl> + var res = request . get ( { <nl> + url : baseUrl ( arango . getEndpoint ( ) ) + " / _admin / cluster / health " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 200 ) ; <nl> + expect ( res ) . to . have . property ( ' json ' ) ; <nl> + expect ( res . json ) . to . have . property ( ' Health ' ) ; <nl> + <nl> + return Object . keys ( res . json . Health ) . filter ( serverId = > { <nl> + return serverId . substr ( 0 , 4 ) = = = role ; <nl> + } ) . map ( serverId = > res . json . Health [ serverId ] ) ; <nl> + } <nl> + <nl> + return { <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief set up <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + setUp : function ( ) { <nl> + arango . reconnect ( arango . getEndpoint ( ) , db . _name ( ) , " root " , " " ) ; <nl> + / * <nl> + try { <nl> + users . remove ( user ) ; <nl> + } <nl> + catch ( err ) { <nl> + } * / <nl> + } , <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief tear down <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + tearDown : function ( ) { <nl> + / * try { <nl> + users . remove ( user ) ; <nl> + } <nl> + catch ( err ) { <nl> + } * / <nl> + } , <nl> + <nl> + testAccessUser : function ( ) { <nl> + const jwt = crypto . jwtEncode ( jwtSecret , { <nl> + " preferred_username " : " root " , <nl> + " iss " : " arangodb " , " exp " : Math . floor ( Date . now ( ) / 1000 ) + 3600 <nl> + } , ' HS256 ' ) ; <nl> + <nl> + let coordinators = getServersWithRole ( " CRDN " ) ; <nl> + expect ( coordinators ) . to . be . a ( ' array ' ) ; <nl> + expect ( coordinators . length ) . to . be . gt ( 0 ) ; <nl> + coordinators . forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 200 ) ; <nl> + } ) ; <nl> + <nl> + let dbservers = getServersWithRole ( " PRMR " ) ; <nl> + expect ( dbservers ) . to . be . a ( ' array ' ) ; <nl> + expect ( dbservers . length ) . to . be . gt ( 0 ) ; <nl> + dbservers . forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 401 ) ; <nl> + } ) ; <nl> + <nl> + let agencies = getServersWithRole ( " AGNT " ) ; <nl> + expect ( agencies ) . to . be . a ( ' array ' ) ; <nl> + expect ( agencies . length ) . to . be . gt ( 0 ) ; <nl> + agencies . 
forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 401 ) ; <nl> + } ) ; <nl> + } , <nl> + <nl> + testAccessSuperuser : function ( ) { <nl> + const jwt = crypto . jwtEncode ( jwtSecret , { <nl> + " server_id " : " arangosh " , <nl> + " iss " : " arangodb " , " exp " : Math . floor ( Date . now ( ) / 1000 ) + 3600 <nl> + } , ' HS256 ' ) ; <nl> + <nl> + let coordinators = getServersWithRole ( " CRDN " ) ; <nl> + expect ( coordinators ) . to . be . a ( ' array ' ) ; <nl> + expect ( coordinators . length ) . to . be . gt ( 0 ) ; <nl> + coordinators . forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 200 ) ; <nl> + } ) ; <nl> + <nl> + let dbservers = getServersWithRole ( " PRMR " ) ; <nl> + expect ( dbservers ) . to . be . a ( ' array ' ) ; <nl> + expect ( dbservers . length ) . to . be . gt ( 0 ) ; <nl> + dbservers . forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 200 ) ; <nl> + } ) ; <nl> + <nl> + let agencies = getServersWithRole ( " AGNT " ) ; <nl> + expect ( agencies ) . to . be . a ( ' array ' ) ; <nl> + expect ( agencies . length ) . to . be . gt ( 0 ) ; <nl> + agencies . forEach ( cc = > { <nl> + expect ( cc ) . to . have . property ( ' Endpoint ' ) ; <nl> + var res = request . get ( { <nl> + url : baseUrl ( cc . Endpoint ) + " / _api / version " , <nl> + auth : { <nl> + bearer : jwt , <nl> + } <nl> + } ) ; <nl> + expect ( res ) . to . be . an . instanceof ( request . Response ) ; <nl> + expect ( res ) . to . have . property ( ' statusCode ' , 200 ) ; <nl> + } ) ; <nl> + } <nl> + <nl> + } ; <nl> + } <nl> + <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief executes the test suite <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + jsunity . run ( AuthSuite ) ; <nl> + <nl> + return jsunity . done ( ) ; <nl> + <nl>
Fix crash on Agency / DBserver with user JWT tokens ( )
arangodb/arangodb
0a9afccde5a6f89b7c057b9d894353258dc04c93
2018-09-26T12:26:35Z
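The user-token and superuser-token flows documented and tested in the commit above can be sketched end to end in Python. This is a minimal illustration, not part of the commit: it assumes the third-party pyjwt and requests packages, a coordinator reachable at http://localhost:8529, and the test suite's hardcoded secret "haxxmann"; the claim names (iss, exp, preferred_username, server_id) follow the documentation excerpt.

```python
# Minimal sketch of the ArangoDB JWT flows described above.
# Assumptions: PyJWT >= 2 and requests installed, a server on localhost:8529,
# and a known JWT secret; adapt names and endpoints to your deployment.
import time

import jwt        # PyJWT
import requests

SECRET = "haxxmann"                 # value passed via --server.jwt-secret (assumed)
BASE = "http://localhost:8529"
now = int(time.time())

# 1) User token issued by the server via /_open/auth (no secret needed).
resp = requests.post(f"{BASE}/_open/auth",
                     json={"username": "root", "password": "rootPassword"})
resp.raise_for_status()
user_token = resp.json()["jwt"]     # response body carries the token in a "jwt" field

# 2) Equivalent user token generated locally (requires knowing the secret).
user_token_local = jwt.encode(
    {"preferred_username": "root", "iss": "arangodb", "iat": now, "exp": now + 3600},
    SECRET, algorithm="HS256")

# 3) Superuser token: no preferred_username, just an arbitrary server_id claim.
super_token = jwt.encode(
    {"server_id": "myclient", "iss": "arangodb", "iat": now, "exp": now + 3600},
    SECRET, algorithm="HS256")

# Any of the tokens is sent as a bearer token.
r = requests.get(f"{BASE}/_api/version",
                 headers={"Authorization": "bearer " + user_token})
print(r.status_code, r.json())
```

The distinction between the two token kinds is exactly what the new auth-cluster.js test exercises: user tokens are accepted by coordinators but rejected with 401 by DBServers and agents, while the superuser token is accepted everywhere.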
mmm a / xbmc / utils / GBMBufferObject . cpp <nl> ppp b / xbmc / utils / GBMBufferObject . cpp <nl> void CGBMBufferObject : : Register ( ) <nl> <nl> CGBMBufferObject : : CGBMBufferObject ( ) <nl> { <nl> - m_device = static_cast < CWinSystemGbmEGLContext * > ( CServiceBroker : : GetWinSystem ( ) ) - > GetGBMDevice ( ) ; <nl> + m_device = <nl> + static_cast < CWinSystemGbmEGLContext * > ( CServiceBroker : : GetWinSystem ( ) ) - > GetGBMDevice ( ) - > Get ( ) ; <nl> } <nl> <nl> CGBMBufferObject : : ~ CGBMBufferObject ( ) <nl> mmm a / xbmc / windowing / gbm / GBMUtils . cpp <nl> ppp b / xbmc / windowing / gbm / GBMUtils . cpp <nl> <nl> <nl> # include " utils / log . h " <nl> <nl> + # include < mutex > <nl> + <nl> using namespace KODI : : WINDOWING : : GBM ; <nl> <nl> - bool CGBMUtils : : CreateDevice ( int fd ) <nl> + namespace <nl> { <nl> - if ( m_device ) <nl> - CLog : : Log ( LOGWARNING , " CGBMUtils : : % s - device already created " , __FUNCTION__ ) ; <nl> + std : : once_flag flag ; <nl> + } <nl> <nl> - m_device = gbm_create_device ( fd ) ; <nl> - if ( ! m_device ) <nl> + bool CGBMUtils : : CreateDevice ( int fd ) <nl> + { <nl> + auto device = gbm_create_device ( fd ) ; <nl> + if ( ! device ) <nl> { <nl> - CLog : : Log ( LOGERROR , " CGBMUtils : : % s - failed to create device " , __FUNCTION__ ) ; <nl> + CLog : : Log ( LOGERROR , " CGBMUtils : : { } - failed to create device : { } " , __FUNCTION__ , <nl> + strerror ( errno ) ) ; <nl> return false ; <nl> } <nl> <nl> + m_device . reset ( new CGBMDevice ( device ) ) ; <nl> + <nl> return true ; <nl> } <nl> <nl> - void CGBMUtils : : DestroyDevice ( ) <nl> + CGBMUtils : : CGBMDevice : : CGBMDevice ( gbm_device * device ) : m_device ( device ) <nl> { <nl> - if ( ! m_device ) <nl> - CLog : : Log ( LOGWARNING , " CGBMUtils : : % s - device already destroyed " , __FUNCTION__ ) ; <nl> - <nl> - if ( m_device ) <nl> - { <nl> - gbm_device_destroy ( m_device ) ; <nl> - m_device = nullptr ; <nl> - } <nl> } <nl> <nl> - bool CGBMUtils : : CreateSurface ( int width , int height , uint32_t format , const uint64_t * modifiers , const int modifiers_count ) <nl> + bool CGBMUtils : : CGBMDevice : : CreateSurface ( <nl> + int width , int height , uint32_t format , const uint64_t * modifiers , const int modifiers_count ) <nl> { <nl> - if ( m_surface ) <nl> - CLog : : Log ( LOGWARNING , " CGBMUtils : : % s - surface already created " , __FUNCTION__ ) ; <nl> - <nl> + gbm_surface * surface { nullptr } ; <nl> # if defined ( HAS_GBM_MODIFIERS ) <nl> - m_surface = gbm_surface_create_with_modifiers ( m_device , <nl> - width , <nl> - height , <nl> - format , <nl> - modifiers , <nl> - modifiers_count ) ; <nl> + surface = gbm_surface_create_with_modifiers ( m_device , width , height , format , modifiers , <nl> + modifiers_count ) ; <nl> # endif <nl> - if ( ! m_surface ) <nl> + if ( ! surface ) <nl> { <nl> - m_surface = gbm_surface_create ( m_device , <nl> - width , <nl> - height , <nl> - format , <nl> - GBM_BO_USE_SCANOUT | GBM_BO_USE_RENDERING ) ; <nl> + surface = gbm_surface_create ( m_device , width , height , format , <nl> + GBM_BO_USE_SCANOUT | GBM_BO_USE_RENDERING ) ; <nl> } <nl> <nl> - if ( ! m_surface ) <nl> + if ( ! 
surface ) <nl> { <nl> - CLog : : Log ( LOGERROR , " CGBMUtils : : % s - failed to create surface " , __FUNCTION__ ) ; <nl> + CLog : : Log ( LOGERROR , " CGBMUtils : : { } - failed to create surface : { } " , __FUNCTION__ , <nl> + strerror ( errno ) ) ; <nl> return false ; <nl> } <nl> <nl> - CLog : : Log ( LOGDEBUG , " CGBMUtils : : % s - created surface with size % dx % d " , __FUNCTION__ , <nl> - width , <nl> - height ) ; <nl> + CLog : : Log ( LOGDEBUG , " CGBMUtils : : { } - created surface with size { } x { } " , __FUNCTION__ , width , <nl> + height ) ; <nl> + <nl> + m_surface . reset ( new CGBMSurface ( surface ) ) ; <nl> <nl> return true ; <nl> } <nl> <nl> - void CGBMUtils : : DestroySurface ( ) <nl> + CGBMUtils : : CGBMDevice : : CGBMSurface : : CGBMSurface ( gbm_surface * surface ) : m_surface ( surface ) <nl> { <nl> - if ( ! m_surface ) <nl> - CLog : : Log ( LOGWARNING , " CGBMUtils : : % s - surface already destroyed " , __FUNCTION__ ) ; <nl> + } <nl> <nl> - if ( m_surface ) <nl> - { <nl> - ReleaseBuffer ( ) ; <nl> + CGBMUtils : : CGBMDevice : : CGBMSurface : : CGBMSurfaceBuffer * CGBMUtils : : CGBMDevice : : CGBMSurface : : <nl> + LockFrontBuffer ( ) <nl> + { <nl> + m_buffers . emplace ( std : : make_unique < CGBMSurfaceBuffer > ( m_surface ) ) ; <nl> <nl> - gbm_surface_destroy ( m_surface ) ; <nl> - m_surface = nullptr ; <nl> + if ( ! static_cast < bool > ( gbm_surface_has_free_buffers ( m_surface ) ) ) <nl> + { <nl> + / * <nl> + * We want to use call_once here because we want it to be logged the first time that <nl> + * we have to release buffers . This means that the maximum amount of buffers had been reached . <nl> + * For mesa this should be 4 buffers but it may vary accross other implementations . <nl> + * / <nl> + std : : call_once ( <nl> + flag , [ this ] ( ) { CLog : : Log ( LOGDEBUG , " CGBMUtils - using { } buffers " , m_buffers . size ( ) ) ; } ) ; <nl> + <nl> + m_buffers . pop ( ) ; <nl> } <nl> + <nl> + return m_buffers . back ( ) . get ( ) ; <nl> } <nl> <nl> - struct gbm_bo * CGBMUtils : : LockFrontBuffer ( ) <nl> + CGBMUtils : : CGBMDevice : : CGBMSurface : : CGBMSurfaceBuffer : : CGBMSurfaceBuffer ( gbm_surface * surface ) <nl> + : m_surface ( surface ) , m_buffer ( gbm_surface_lock_front_buffer ( surface ) ) <nl> { <nl> - if ( m_next_bo ) <nl> - CLog : : Log ( LOGWARNING , " CGBMUtils : : % s - uneven surface buffer usage " , __FUNCTION__ ) ; <nl> - <nl> - m_next_bo = gbm_surface_lock_front_buffer ( m_surface ) ; <nl> - return m_next_bo ; <nl> } <nl> <nl> - void CGBMUtils : : ReleaseBuffer ( ) <nl> + CGBMUtils : : CGBMDevice : : CGBMSurface : : CGBMSurfaceBuffer : : ~ CGBMSurfaceBuffer ( ) <nl> { <nl> - if ( m_bo ) <nl> - gbm_surface_release_buffer ( m_surface , m_bo ) ; <nl> - <nl> - m_bo = m_next_bo ; <nl> - m_next_bo = nullptr ; <nl> + if ( m_surface & & m_buffer ) <nl> + gbm_surface_release_buffer ( m_surface , m_buffer ) ; <nl> } <nl> mmm a / xbmc / windowing / gbm / GBMUtils . h <nl> ppp b / xbmc / windowing / gbm / GBMUtils . h <nl> <nl> <nl> # pragma once <nl> <nl> + # include < memory > <nl> + # include < queue > <nl> + <nl> # include < gbm . h > <nl> <nl> namespace KODI <nl> namespace WINDOWING <nl> namespace GBM <nl> { <nl> <nl> + / * * <nl> + * @ brief A wrapper for gbm c classes to allow OOP and RAII . 
<nl> + * <nl> + * / <nl> class CGBMUtils <nl> { <nl> public : <nl> + CGBMUtils ( const CGBMUtils & ) = delete ; <nl> + CGBMUtils & operator = ( const CGBMUtils & ) = delete ; <nl> CGBMUtils ( ) = default ; <nl> ~ CGBMUtils ( ) = default ; <nl> + <nl> + / * * <nl> + * @ brief Create a gbm device for allocating buffers <nl> + * <nl> + * @ param fd The file descriptor for a backend device <nl> + * @ return true The device creation succeeded <nl> + * @ return false The device creation failed <nl> + * / <nl> bool CreateDevice ( int fd ) ; <nl> - void DestroyDevice ( ) ; <nl> - bool CreateSurface ( int width , int height , uint32_t format , const uint64_t * modifiers , const int modifiers_count ) ; <nl> - void DestroySurface ( ) ; <nl> - struct gbm_bo * LockFrontBuffer ( ) ; <nl> - void ReleaseBuffer ( ) ; <nl> - <nl> - struct gbm_device * GetDevice ( ) const { return m_device ; } <nl> - struct gbm_surface * GetSurface ( ) const { return m_surface ; } <nl> - <nl> - protected : <nl> - struct gbm_device * m_device = nullptr ; <nl> - struct gbm_surface * m_surface = nullptr ; <nl> - struct gbm_bo * m_bo = nullptr ; <nl> - struct gbm_bo * m_next_bo = nullptr ; <nl> + <nl> + / * * <nl> + * @ brief A wrapper for gbm_device to allow OOP and RAII <nl> + * <nl> + * / <nl> + class CGBMDevice <nl> + { <nl> + public : <nl> + CGBMDevice ( const CGBMDevice & ) = delete ; <nl> + CGBMDevice & operator = ( const CGBMDevice & ) = delete ; <nl> + explicit CGBMDevice ( gbm_device * device ) ; <nl> + ~ CGBMDevice ( ) = default ; <nl> + <nl> + / * * <nl> + * @ brief Create a gbm surface <nl> + * <nl> + * @ param width The width to use for the surface <nl> + * @ param height The height to use for the surface <nl> + * @ param format The format to use for the surface <nl> + * @ param modifiers The modifiers to use for the surface <nl> + * @ param modifiers_count The amount of modifiers in the modifiers param <nl> + * @ return true The surface creation succeeded <nl> + * @ return false The surface creation failed <nl> + * / <nl> + bool CreateSurface ( int width , <nl> + int height , <nl> + uint32_t format , <nl> + const uint64_t * modifiers , <nl> + const int modifiers_count ) ; <nl> + <nl> + / * * <nl> + * @ brief Get the underlying gbm_device <nl> + * <nl> + * @ return gbm_device * A pointer to the underlying gbm_device <nl> + * / <nl> + gbm_device * Get ( ) const { return m_device ; } <nl> + <nl> + / * * <nl> + * @ brief A wrapper for gbm_surface to allow OOP and RAII <nl> + * <nl> + * / <nl> + class CGBMSurface <nl> + { <nl> + public : <nl> + CGBMSurface ( const CGBMSurface & ) = delete ; <nl> + CGBMSurface & operator = ( const CGBMSurface & ) = delete ; <nl> + explicit CGBMSurface ( gbm_surface * surface ) ; <nl> + ~ CGBMSurface ( ) = default ; <nl> + <nl> + / * * <nl> + * @ brief Get the underlying gbm_surface <nl> + * <nl> + * @ return gbm_surface * A pointer to the underlying gbm_surface <nl> + * / <nl> + gbm_surface * Get ( ) const { return m_surface ; } <nl> + <nl> + / * * <nl> + * @ brief A wrapper for gbm_bo to allow OOP and RAII <nl> + * <nl> + * / <nl> + class CGBMSurfaceBuffer <nl> + { <nl> + public : <nl> + CGBMSurfaceBuffer ( const CGBMSurfaceBuffer & ) = delete ; <nl> + CGBMSurfaceBuffer & operator = ( const CGBMSurfaceBuffer & ) = delete ; <nl> + explicit CGBMSurfaceBuffer ( gbm_surface * surface ) ; <nl> + ~ CGBMSurfaceBuffer ( ) ; <nl> + <nl> + / * * <nl> + * @ brief Get the underlying gbm_bo <nl> + * <nl> + * @ return gbm_bo * A pointer to the underlying gbm_bo <nl> + * / <nl> + gbm_bo * Get ( ) 
const { return m_buffer ; } <nl> + <nl> + private : <nl> + gbm_surface * m_surface { nullptr } ; <nl> + gbm_bo * m_buffer { nullptr } ; <nl> + } ; <nl> + <nl> + / * * <nl> + * @ brief Lock the surface ' s current front buffer . <nl> + * <nl> + * @ return CGBMSurfaceBuffer * A pointer to a CGBMSurfaceBuffer object <nl> + * / <nl> + CGBMSurfaceBuffer * LockFrontBuffer ( ) ; <nl> + <nl> + private : <nl> + gbm_surface * m_surface { nullptr } ; <nl> + std : : queue < std : : unique_ptr < CGBMSurfaceBuffer > > m_buffers ; <nl> + } ; <nl> + <nl> + / * * <nl> + * @ brief Get the CGBMSurface object <nl> + * <nl> + * @ return CGBMSurface * A pointer to the CGBMSurface object <nl> + * / <nl> + CGBMDevice : : CGBMSurface * GetSurface ( ) const { return m_surface . get ( ) ; } <nl> + <nl> + private : <nl> + gbm_device * m_device { nullptr } ; <nl> + <nl> + struct CGBMSurfaceDeleter <nl> + { <nl> + void operator ( ) ( CGBMSurface * p ) const <nl> + { <nl> + if ( p ) <nl> + gbm_surface_destroy ( p - > Get ( ) ) ; <nl> + } <nl> + } ; <nl> + std : : unique_ptr < CGBMSurface , CGBMSurfaceDeleter > m_surface ; <nl> + } ; <nl> + <nl> + / * * <nl> + * @ brief Get the CGBMDevice object <nl> + * <nl> + * @ return CGBMDevice * A pointer to the CGBMDevice object <nl> + * / <nl> + CGBMUtils : : CGBMDevice * GetDevice ( ) const { return m_device . get ( ) ; } <nl> + <nl> + private : <nl> + struct CGBMDeviceDeleter <nl> + { <nl> + void operator ( ) ( CGBMDevice * p ) const <nl> + { <nl> + if ( p ) <nl> + gbm_device_destroy ( p - > Get ( ) ) ; <nl> + } <nl> + } ; <nl> + std : : unique_ptr < CGBMDevice , CGBMDeviceDeleter > m_device ; <nl> } ; <nl> <nl> } <nl> mmm a / xbmc / windowing / gbm / WinSystemGbm . cpp <nl> ppp b / xbmc / windowing / gbm / WinSystemGbm . cpp <nl> bool CWinSystemGbm : : InitWindowSystem ( ) <nl> <nl> bool CWinSystemGbm : : DestroyWindowSystem ( ) <nl> { <nl> - m_GBM - > DestroyDevice ( ) ; <nl> - <nl> CLog : : Log ( LOGDEBUG , " CWinSystemGbm : : % s - deinitialized DRM " , __FUNCTION__ ) ; <nl> <nl> m_libinput . reset ( ) ; <nl> bool CWinSystemGbm : : SetFullScreen ( bool fullScreen , RESOLUTION_INFO & res , bool bl <nl> <nl> if ( ! std : : dynamic_pointer_cast < CDRMAtomic > ( m_DRM ) ) <nl> { <nl> - bo = m_GBM - > LockFrontBuffer ( ) ; <nl> + bo = m_GBM - > GetDevice ( ) - > GetSurface ( ) - > LockFrontBuffer ( ) - > Get ( ) ; <nl> } <nl> <nl> auto result = m_DRM - > SetVideoMode ( res , bo ) ; <nl> <nl> - if ( ! std : : dynamic_pointer_cast < CDRMAtomic > ( m_DRM ) ) <nl> - { <nl> - m_GBM - > ReleaseBuffer ( ) ; <nl> - } <nl> - <nl> int delay = CServiceBroker : : GetSettingsComponent ( ) - > GetSettings ( ) - > GetInt ( " videoscreen . delayrefreshchange " ) ; <nl> if ( delay > 0 ) <nl> m_dispResetTimer . Set ( delay * 100 ) ; <nl> void CWinSystemGbm : : FlipPage ( bool rendered , bool videoLayer ) <nl> <nl> if ( rendered ) <nl> { <nl> - bo = m_GBM - > LockFrontBuffer ( ) ; <nl> + bo = m_GBM - > GetDevice ( ) - > GetSurface ( ) - > LockFrontBuffer ( ) - > Get ( ) ; <nl> } <nl> <nl> m_DRM - > FlipPage ( bo , rendered , videoLayer ) ; <nl> <nl> - if ( rendered ) <nl> - { <nl> - m_GBM - > ReleaseBuffer ( ) ; <nl> - } <nl> - <nl> if ( m_videoLayerBridge & & ! videoLayer ) <nl> { <nl> / / delete video layer bridge when video layer no longer is active <nl> mmm a / xbmc / windowing / gbm / WinSystemGbm . h <nl> ppp b / xbmc / windowing / gbm / WinSystemGbm . 
h <nl> class CWinSystemGbm : public CWinSystemBase <nl> std : : shared_ptr < CVideoLayerBridge > GetVideoLayerBridge ( ) const { return m_videoLayerBridge ; } ; <nl> void RegisterVideoLayerBridge ( std : : shared_ptr < CVideoLayerBridge > bridge ) { m_videoLayerBridge = bridge ; } ; <nl> <nl> - struct gbm_device * GetGBMDevice ( ) const { return m_GBM - > GetDevice ( ) ; } <nl> + CGBMUtils : : CGBMDevice * GetGBMDevice ( ) const { return m_GBM - > GetDevice ( ) ; } <nl> std : : shared_ptr < CDRMUtils > GetDrm ( ) const { return m_DRM ; } <nl> <nl> protected : <nl> mmm a / xbmc / windowing / gbm / WinSystemGbmEGLContext . cpp <nl> ppp b / xbmc / windowing / gbm / WinSystemGbmEGLContext . cpp <nl> bool CWinSystemGbmEGLContext : : InitWindowSystemEGL ( EGLint renderableType , EGLint <nl> return false ; <nl> } <nl> <nl> - if ( ! m_eglContext . CreatePlatformDisplay ( m_GBM - > GetDevice ( ) , m_GBM - > GetDevice ( ) ) ) <nl> + if ( ! m_eglContext . CreatePlatformDisplay ( m_GBM - > GetDevice ( ) - > Get ( ) , m_GBM - > GetDevice ( ) - > Get ( ) ) ) <nl> { <nl> return false ; <nl> } <nl> bool CWinSystemGbmEGLContext : : CreateNewWindow ( const std : : string & name , <nl> uint32_t format = m_eglContext . GetConfigAttrib ( EGL_NATIVE_VISUAL_ID ) ; <nl> std : : vector < uint64_t > * modifiers = m_DRM - > GetGuiPlaneModifiersForFormat ( format ) ; <nl> <nl> - if ( ! m_GBM - > CreateSurface ( res . iWidth , res . iHeight , format , modifiers - > data ( ) , modifiers - > size ( ) ) ) <nl> + if ( ! m_GBM - > GetDevice ( ) - > CreateSurface ( res . iWidth , res . iHeight , format , modifiers - > data ( ) , <nl> + modifiers - > size ( ) ) ) <nl> { <nl> CLog : : Log ( LOGERROR , " CWinSystemGbmEGLContext : : { } - failed to initialize GBM " , __FUNCTION__ ) ; <nl> return false ; <nl> bool CWinSystemGbmEGLContext : : CreateNewWindow ( const std : : string & name , <nl> / / This check + the reinterpret cast is for security reason , if the user has outdated platform header files which often is the case <nl> static_assert ( sizeof ( EGLNativeWindowType ) = = sizeof ( gbm_surface * ) , " Declaration specifier differs in size " ) ; <nl> <nl> - if ( ! m_eglContext . CreatePlatformSurface ( m_GBM - > GetSurface ( ) , reinterpret_cast < EGLNativeWindowType > ( m_GBM - > GetSurface ( ) ) ) ) <nl> + if ( ! m_eglContext . CreatePlatformSurface ( <nl> + m_GBM - > GetDevice ( ) - > GetSurface ( ) - > Get ( ) , <nl> + reinterpret_cast < EGLNativeWindowType > ( m_GBM - > GetDevice ( ) - > GetSurface ( ) - > Get ( ) ) ) ) <nl> { <nl> return false ; <nl> } <nl> bool CWinSystemGbmEGLContext : : CreateNewWindow ( const std : : string & name , <nl> bool CWinSystemGbmEGLContext : : DestroyWindow ( ) <nl> { <nl> m_eglContext . DestroySurface ( ) ; <nl> - m_GBM - > DestroySurface ( ) ; <nl> <nl> CLog : : Log ( LOGDEBUG , " CWinSystemGbmEGLContext : : { } - deinitialized GBM " , __FUNCTION__ ) ; <nl> return true ; <nl>
Merge pull request from lrusak / gbm - reorg
xbmc/xbmc
ef3ea1b27dbc27d4cce5e12946d98dae83c65ac2
2020-10-12T16:30:40Z
mmm a / arangod / Cluster / ClusterInfo . cpp <nl> ppp b / arangod / Cluster / ClusterInfo . cpp <nl> void ClusterInfo : : loadServers ( ) { <nl> < < " errorMessage : " < < result . errorMessage ( ) < < " body : " < < result . body ( ) ; <nl> } <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief Hand out copy of reboot ids <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + std : : unordered_map < ServerID , RebootId > ClusterInfo : : rebootIds ( ) const { <nl> + MUTEX_LOCKER ( mutexLocker , _serversProt . mutex ) ; <nl> + return _serversKnown . rebootIds ( ) ; <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief find the endpoint of a server from its ID . <nl> / / / If it is not found in the cache , the cache is reloaded once , if <nl> ClusterInfo : : ServersKnown : : serversKnown ( ) const noexcept { <nl> return _serversKnown ; <nl> } <nl> <nl> - std : : unordered_map < ServerID , RebootId > ClusterInfo : : ServersKnown : : rebootIds ( ) const noexcept { <nl> + std : : unordered_map < ServerID , RebootId > ClusterInfo : : ServersKnown : : rebootIds ( ) const { <nl> std : : unordered_map < ServerID , RebootId > rebootIds ; <nl> for ( auto const & it : _serversKnown ) { <nl> rebootIds . emplace ( it . first , it . second . rebootId ( ) ) ; <nl> mmm a / arangod / Cluster / ClusterInfo . h <nl> ppp b / arangod / Cluster / ClusterInfo . h <nl> class ClusterInfo final { <nl> public : <nl> explicit constexpr KnownServer ( RebootId rebootId ) : _rebootId ( rebootId ) { } <nl> <nl> - RebootId rebootId ( ) const noexcept { return _rebootId ; } <nl> + RebootId rebootId ( ) const { return _rebootId ; } <nl> <nl> private : <nl> RebootId _rebootId ; <nl> class ClusterInfo final { <nl> <nl> std : : unordered_map < ServerID , KnownServer > const & serversKnown ( ) const noexcept ; <nl> <nl> - std : : unordered_map < ServerID , RebootId > rebootIds ( ) const noexcept ; <nl> + std : : unordered_map < ServerID , RebootId > rebootIds ( ) const ; <nl> <nl> private : <nl> std : : unordered_map < ServerID , KnownServer > _serversKnown ; <nl> class ClusterInfo final { <nl> <nl> std : : unordered_map < ServerID , std : : string > getServerTimestamps ( ) ; <nl> <nl> + std : : unordered_map < ServerID , RebootId > rebootIds ( ) const ; <nl> + <nl> uint64_t getPlanVersion ( ) { <nl> READ_LOCKER ( guard , _planProt . lock ) ; <nl> return _planVersion ; <nl> class ClusterInfo final { <nl> <nl> struct ProtectionData { <nl> std : : atomic < bool > isValid ; <nl> - Mutex mutex ; <nl> + mutable Mutex mutex ; <nl> std : : atomic < uint64_t > wantedVersion ; <nl> std : : atomic < uint64_t > doneVersion ; <nl> arangodb : : basics : : ReadWriteLock lock ; <nl> mmm a / arangod / Cluster / ClusterMethods . cpp <nl> ppp b / arangod / Cluster / ClusterMethods . cpp <nl> <nl> / / / @ author Kaveh Vahedipour <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> + <nl> + # include " Cluster / ClusterTypes . h " <nl> # include " ClusterMethods . h " <nl> <nl> # include " Agency / TimeString . 
h " <nl> arangodb : : Result hotRestoreCoordinator ( VPackSlice const payload , VPackBuilder & r <nl> <nl> / / We keep the currently registered timestamps in Current / ServersRegistered , <nl> / / such that we can wait until all have reregistered and are up : <nl> - ci - > loadServers ( ) ; <nl> - std : : unordered_map < std : : string , std : : string > serverTimestamps = <nl> - ci - > getServerTimestamps ( ) ; <nl> + ci - > loadCurrentDBServers ( ) ; <nl> + auto const preServersKnown = ci - > rebootIds ( ) ; <nl> <nl> / / Restore all db servers <nl> std : : string previous ; <nl> arangodb : : Result hotRestoreCoordinator ( VPackSlice const payload , VPackBuilder & r <nl> return arangodb : : Result ( TRI_ERROR_HOT_RESTORE_INTERNAL , <nl> " Not all DBservers came back in time ! " ) ; <nl> } <nl> - ci - > loadServers ( ) ; <nl> - std : : unordered_map < std : : string , std : : string > newServerTimestamps = <nl> - ci - > getServerTimestamps ( ) ; <nl> + ci - > loadCurrentDBServers ( ) ; <nl> + auto const postServersKnown = ci - > rebootIds ( ) ; <nl> + if ( ci - > getCurrentDBServers ( ) . size ( ) < dbServers . size ( ) ) { <nl> + LOG_TOPIC ( " 8dce7 " , INFO , Logger : : BACKUP ) < < " Waiting for all db servers to return " ; <nl> + continue ; <nl> + } <nl> + <nl> / / Check timestamps of all dbservers : <nl> size_t good = 0 ; / / Count restarted servers <nl> for ( auto const & dbs : dbServers ) { <nl> - if ( serverTimestamps [ dbs ] ! = newServerTimestamps [ dbs ] ) { <nl> + if ( postServersKnown . at ( dbs ) ! = preServersKnown . at ( dbs ) ) { <nl> + + good ; <nl> } <nl> } <nl> mmm a / arangod / Cluster / ClusterTypes . h <nl> ppp b / arangod / Cluster / ClusterTypes . h <nl> <nl> <nl> # include < limits > <nl> # include < string > <nl> + # include < iostream > <nl> <nl> namespace arangodb { <nl> <nl> class RebootId { <nl> return RebootId { std : : numeric_limits < decltype ( _value ) > : : max ( ) } ; <nl> } <nl> <nl> + std : : ostream & print ( std : : ostream & o ) const { <nl> + o < < _value ; <nl> + return o ; <nl> + } <nl> + <nl> private : <nl> uint64_t _value ; <nl> } ; <nl> <nl> } / / namespace arangodb <nl> <nl> + inline std : : ostream & operator < < ( std : : ostream & o , arangodb : : RebootId const & r ) { <nl> + return r . print ( o ) ; <nl> + } <nl> + <nl> # endif / / ARANGOD_CLUSTER_CLUSTERTYPES_H <nl>
rebootIds instead of boot stamps ( )
arangodb/arangodb
dd10909dfc6d57ebb27753412935885fcaa733f0
2019-09-20T08:26:35Z
mmm a / drivers / gles2 / shader_compiler_gles2 . cpp <nl> ppp b / drivers / gles2 / shader_compiler_gles2 . cpp <nl> String ShaderCompilerGLES2 : : _dump_node_code ( SL : : Node * p_node , int p_level , Gener <nl> code + = " else \ n " ; <nl> code + = _dump_node_code ( cf_node - > blocks [ 1 ] , p_level + 1 , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> } <nl> - } else if ( cf_node - > flow_op = = SL : : FLOW_OP_SWITCH ) { <nl> - code + = _mktab ( p_level ) + " switch ( " + _dump_node_code ( cf_node - > expressions [ 0 ] , p_level , r_gen_code , p_actions , p_default_actions , p_assigning ) + " ) \ n " ; <nl> - code + = _dump_node_code ( cf_node - > blocks [ 0 ] , p_level + 1 , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> - } else if ( cf_node - > flow_op = = SL : : FLOW_OP_CASE ) { <nl> - code + = _mktab ( p_level ) + " case " + _dump_node_code ( cf_node - > expressions [ 0 ] , p_level , r_gen_code , p_actions , p_default_actions , p_assigning ) + " : \ n " ; <nl> - code + = _dump_node_code ( cf_node - > blocks [ 0 ] , p_level + 1 , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> - } else if ( cf_node - > flow_op = = SL : : FLOW_OP_DEFAULT ) { <nl> - code + = _mktab ( p_level ) + " default : \ n " ; <nl> - code + = _dump_node_code ( cf_node - > blocks [ 0 ] , p_level + 1 , r_gen_code , p_actions , p_default_actions , p_assigning ) ; <nl> } else if ( cf_node - > flow_op = = SL : : FLOW_OP_DO ) { <nl> code + = _mktab ( p_level ) ; <nl> code + = " do " ; <nl> mmm a / servers / visual / shader_language . cpp <nl> ppp b / servers / visual / shader_language . cpp <nl> Error ShaderLanguage : : _parse_block ( BlockNode * p_block , const Map < StringName , Bui <nl> _set_tkpos ( pos ) ; / / rollback <nl> } <nl> } else if ( tk . type = = TK_CF_SWITCH ) { <nl> + <nl> + if ( VisualServer : : get_singleton ( ) - > is_low_end ( ) ) { <nl> + _set_error ( " \ " switch \ " operator is supported only on high - end platform ! " ) ; <nl> + return ERR_PARSE_ERROR ; <nl> + } <nl> + <nl> / / switch ( ) { } <nl> tk = _get_token ( ) ; <nl> if ( tk . type ! = TK_PARENTHESIS_OPEN ) { <nl>
Removed switch operator from GLES2 shader back - end
godotengine/godot
6b7f8558d962bf67d4cc4bc1d539b925787fdc03
2019-11-02T09:43:32Z
mmm a / tools / export / blender25 / io_scene_dae / __init__ . py <nl> ppp b / tools / export / blender25 / io_scene_dae / __init__ . py <nl> class ExportDAE ( bpy . types . Operator , ExportHelper ) : <nl> description = " Export only objects on the active layers . " , <nl> default = True , <nl> ) <nl> - use_exclude_ctrl_bones = BoolProperty ( <nl> - name = " Exclude Control Bones " , <nl> - description = " Exclude skeleton bones with names that begin with ' ctrl ' . " , <nl> - default = True , <nl> - ) <nl> use_anim = BoolProperty ( <nl> name = " Export Animation " , <nl> description = " Export keyframe animation " , <nl>
Remove of use_exclude_ctrl_bones option
godotengine/godot
4210d5e4594fcf80bc121bccc916d411663d02bc
2015-09-18T09:52:34Z
mmm a / xbmc / GUIInfoManager . cpp <nl> ppp b / xbmc / GUIInfoManager . cpp <nl> int CGUIInfoManager : : TranslateSingleString ( const CStdString & strCondition ) <nl> CStdString platform = info [ 2 ] . name ; <nl> if ( platform = = " linux " ) return SYSTEM_PLATFORM_LINUX ; <nl> else if ( platform = = " windows " ) return SYSTEM_PLATFORM_WINDOWS ; <nl> - else if ( platform = = " osx " ) return SYSTEM_PLATFORM_OSX ; <nl> + else if ( platform = = " darwin " ) return SYSTEM_PLATFORM_DARWIN ; <nl> else if ( platform = = " osx " ) return SYSTEM_PLATFORM_DARWIN_OSX ; <nl> else if ( platform = = " ios " ) return SYSTEM_PLATFORM_DARWIN_IOS ; <nl> else if ( platform = = " atv2 " ) return SYSTEM_PLATFORM_DARWIN_ATV2 ; <nl> bool CGUIInfoManager : : GetBool ( int condition1 , int contextWindow , const CGUIListI <nl> # else <nl> bReturn = false ; <nl> # endif <nl> - else if ( condition = = SYSTEM_PLATFORM_OSX ) <nl> - / / TODO : rename SYSTEM_PLATFORM_OSX to SYSTEM_PLATFORM_DARWIN after eden release . <nl> + else if ( condition = = SYSTEM_PLATFORM_DARWIN ) <nl> # ifdef TARGET_DARWIN <nl> bReturn = true ; <nl> # else <nl> mmm a / xbmc / GUIInfoManager . h <nl> ppp b / xbmc / GUIInfoManager . h <nl> namespace INFO <nl> # define SYSTEM_PLATFORM_XBOX 740 <nl> # define SYSTEM_PLATFORM_LINUX 741 <nl> # define SYSTEM_PLATFORM_WINDOWS 742 <nl> - # define SYSTEM_PLATFORM_OSX 743 <nl> + # define SYSTEM_PLATFORM_DARWIN 743 <nl> # define SYSTEM_PLATFORM_DARWIN_OSX 744 <nl> # define SYSTEM_PLATFORM_DARWIN_IOS 745 <nl> # define SYSTEM_PLATFORM_DARWIN_ATV2 746 <nl>
Merge pull request from Memphiz / platformdarwin
xbmc/xbmc
cb4aaaa9171895399106bd9ac55ceb5549b1b33c
2012-06-01T07:20:09Z
mmm a / Marlin / planner . cpp <nl> ppp b / Marlin / planner . cpp <nl> void Planner : : check_axes_activity ( ) { <nl> * extruder - target extruder <nl> * / <nl> void Planner : : _buffer_line ( const float & a , const float & b , const float & c , const float & e , float fr_mm_s , const uint8_t extruder ) { <nl> - / / Calculate the buffer head after we push this byte <nl> - int next_buffer_head = next_block_index ( block_buffer_head ) ; <nl> - <nl> - / / If the buffer is full : good ! That means we are well ahead of the robot . <nl> - / / Rest here until there is room in the buffer . <nl> - while ( block_buffer_tail = = next_buffer_head ) idle ( ) ; <nl> <nl> / / The target position of the tool in absolute steps <nl> / / Calculate target position in absolute steps <nl> void Planner : : _buffer_line ( const float & a , const float & b , const float & c , const <nl> } <nl> # endif <nl> <nl> + / / Calculate the buffer head after we push this byte <nl> + int next_buffer_head = next_block_index ( block_buffer_head ) ; <nl> + <nl> + / / If the buffer is full : good ! That means we are well ahead of the robot . <nl> + / / Rest here until there is room in the buffer . <nl> + while ( block_buffer_tail = = next_buffer_head ) idle ( ) ; <nl> + <nl> / / Prepare to set up new block <nl> block_t * block = & block_buffer [ block_buffer_head ] ; <nl> <nl>
buffer_line can calculate while it ' s waiting
MarlinFirmware/Marlin
c7f22f688fa45298d8c24f9d480e2d8738b64591
2016-10-29T22:08:42Z
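The planner change above is a small but classic producer/consumer optimization: compute the next motion block before blocking on a full ring buffer, so the math overlaps with the stepper interrupt draining the buffer instead of happening after the wait. A generic sketch of the same idea (illustrative Python, not Marlin code; all names and numbers are made up):

```python
# Generic sketch: do the per-item computation *before* blocking on a full buffer,
# so the producer's work overlaps with the consumer draining it.
import queue
import threading
import time

BUFFER_SLOTS = 16
block_buffer = queue.Queue(maxsize=BUFFER_SLOTS)

def plan_block(move):
    """Stand-in for the target-step / feed-rate calculations in _buffer_line()."""
    time.sleep(0.001)                      # pretend this is expensive
    return {"steps": move * 80, "feedrate": 50.0}

def producer(moves):
    for move in moves:
        block = plan_block(move)           # compute even while the buffer is full
        block_buffer.put(block)            # only now wait for a free slot
    block_buffer.put(None)                 # sentinel: no more blocks

def consumer():
    while True:
        block = block_buffer.get()         # stand-in for the stepper ISR popping blocks
        if block is None:
            break
        time.sleep(0.002)                  # "execute" the block

t = threading.Thread(target=consumer)
t.start()
producer(range(100))
t.join()
```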
mmm a / tensorflow / g3doc / api_docs / python / contrib . distributions . md <nl> ppp b / tensorflow / g3doc / api_docs / python / contrib . distributions . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . from_params ( cls , make_safe = True , * * kwargs ) ` { # Distribution . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Distribution . get_batch_shape ( ) ` { # Distribution . get_batch_shape } <nl> Mode of the distribution . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Distribution . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . 
<nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . param_static_shapes ( cls , sample_shape ) ` { # Distribution . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Distribution . pdf ( value , name = ' pdf ' ) ` { # Distribution . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Binomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # Binomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Binomial . get_batch_shape ( ) ` { # Binomial . get_batch_shape } <nl> Name to prepend to all ops . <nl> Probability of success . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . 
Binomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Binomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Binomial . param_static_shapes ( cls , sample_shape ) ` { # Binomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Binomial . pdf ( value , name = ' pdf ' ) ` { # Binomial . pdf } <nl> Entropy of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . from_params ( cls , make_safe = True , * * kwargs ) ` { # Bernoulli . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . 
<nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Bernoulli . get_batch_shape ( ) ` { # Bernoulli . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Bernoulli . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . param_static_shapes ( cls , sample_shape ) ` { # Bernoulli . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Bernoulli . pdf ( value , name = ' pdf ' ) ` { # Bernoulli . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . from_params ( cls , make_safe = True , * * kwargs ) ` { # Beta . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . 
` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Beta . get_batch_shape ( ) ` { # Beta . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Beta . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . param_static_shapes ( cls , sample_shape ) ` { # Beta . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Beta . pdf ( value , name = ' pdf ' ) ` { # Beta . pdf } <nl> Cumulative distribution function . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . from_params ( cls , make_safe = True , * * kwargs ) ` { # Categorical . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . 
constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Categorical . get_batch_shape ( ) ` { # Categorical . get_batch_shape } <nl> Mean of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Categorical . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . param_static_shapes ( cls , sample_shape ) ` { # Categorical . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Categorical . pdf ( value , name = ' pdf ' ) ` { # Categorical . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . from_params ( cls , make_safe = True , * * kwargs ) ` { # Chi2 . 
from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Chi2 . get_batch_shape ( ) ` { # Chi2 . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Chi2 . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . param_static_shapes ( cls , sample_shape ) ` { # Chi2 . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . 
<nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Chi2 . pdf ( value , name = ' pdf ' ) ` { # Chi2 . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . from_params ( cls , make_safe = True , * * kwargs ) ` { # Exponential . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Exponential . get_batch_shape ( ) ` { # Exponential . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Exponential . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . 
Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . param_static_shapes ( cls , sample_shape ) ` { # Exponential . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Exponential . pdf ( value , name = ' pdf ' ) ` { # Exponential . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . from_params ( cls , make_safe = True , * * kwargs ) ` { # Gamma . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Gamma . 
get_batch_shape ( ) ` { # Gamma . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> <nl> # # # # ` tf . contrib . distributions . Gamma . name ` { # Gamma . name } <nl> <nl> - Name to prepend to all ops . <nl> + Name to prepend to all ops . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Gamma . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . param_static_shapes ( cls , sample_shape ) ` { # Gamma . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> <nl> <nl> - - - <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . from_params ( cls , make_safe = True , * * kwargs ) ` { # InverseGamma . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . 
If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . InverseGamma . get_batch_shape ( ) ` { # InverseGamma . get_batch_shape } <nl> The mode of an inverse gamma distribution is ` beta / ( alpha + 1 ) ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # InverseGamma . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . param_static_shapes ( cls , sample_shape ) ` { # InverseGamma . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . InverseGamma . pdf ( value , name = ' pdf ' ) ` { # InverseGamma . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . from_params ( cls , make_safe = True , * * kwargs ) ` { # Laplace . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . 
items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Laplace . get_batch_shape ( ) ` { # Laplace . get_batch_shape } <nl> Mode of this distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Laplace . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . param_static_shapes ( cls , sample_shape ) ` { # Laplace . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Laplace . pdf ( value , name = ' pdf ' ) ` { # Laplace . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . from_params ( cls , make_safe = True , * * kwargs ) ` { # Normal . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . 
<nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Normal . get_batch_shape ( ) ` { # Normal . get_batch_shape } <nl> Distribution parameter for the mean . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Normal . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . param_static_shapes ( cls , sample_shape ) ` { # Normal . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Normal . 
pdf ( value , name = ' pdf ' ) ` { # Normal . pdf } <nl> The entropy of Student t distribution ( s ) . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . from_params ( cls , make_safe = True , * * kwargs ) ` { # StudentT . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . StudentT . get_batch_shape ( ) ` { # StudentT . get_batch_shape } <nl> Locations of these Student ' s t distribution ( s ) . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # StudentT . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . param_static_shapes ( cls , sample_shape ) ` { # StudentT . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . 
TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . StudentT . pdf ( value , name = ' pdf ' ) ` { # StudentT . pdf } <nl> The entropy of Uniform distribution ( s ) . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . from_params ( cls , make_safe = True , * * kwargs ) ` { # Uniform . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Uniform . get_batch_shape ( ) ` { # Uniform . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Uniform . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . 
<nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . param_static_shapes ( cls , sample_shape ) ` { # Uniform . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Uniform . pdf ( value , name = ' pdf ' ) ` { # Uniform . pdf } <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalDiag . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . 
<nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiag . get_batch_shape ( ) ` { # MultivariateNormalDiag . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalDiag . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalDiag . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiag . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalDiag . pdf } <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalFull . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . 
` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalFull . get_batch_shape ( ) ` { # MultivariateNormalFull . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalFull . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalFull . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalFull . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalFull . pdf } <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalCholesky . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . 
param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . get_batch_shape ( ) ` { # MultivariateNormalCholesky . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalCholesky . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalCholesky . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . 
pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalCholesky . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . from_params ( cls , make_safe = True , * * kwargs ) ` { # Dirichlet . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Dirichlet . get_batch_shape ( ) ` { # Dirichlet . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Dirichlet . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . 
param_static_shapes ( cls , sample_shape ) ` { # Dirichlet . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Dirichlet . pdf ( value , name = ' pdf ' ) ` { # Dirichlet . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # DirichletMultinomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . DirichletMultinomial . get_batch_shape ( ) ` { # DirichletMultinomial . get_batch_shape } <nl> Parameter defining this distribution . <nl> <nl> # # # # ` tf . contrib . distributions . DirichletMultinomial . name ` { # DirichletMultinomial . name } <nl> <nl> - Name to prepend to all ops . 
<nl> + Name to prepend to all ops . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # DirichletMultinomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . param_static_shapes ( cls , sample_shape ) ` { # DirichletMultinomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> <nl> <nl> - - - <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # Multinomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . 
<nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Multinomial . get_batch_shape ( ) ` { # Multinomial . get_batch_shape } <nl> Name to prepend to all ops . <nl> Event probabilities . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Multinomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . param_static_shapes ( cls , sample_shape ) ` { # Multinomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Multinomial . pdf ( value , name = ' pdf ' ) ` { # Multinomial . pdf } <nl> Entropy of the distribution in nats . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . from_params ( cls , make_safe = True , * * kwargs ) ` { # WishartCholesky . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . 
from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartCholesky . get_batch_shape ( ) ` { # WishartCholesky . get_batch_shape } <nl> Mode of the distribution . <nl> Name prepended to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # WishartCholesky . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . param_static_shapes ( cls , sample_shape ) ` { # WishartCholesky . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartCholesky . pdf ( value , name = ' pdf ' ) ` { # WishartCholesky . pdf } <nl> Entropy of the distribution in nats . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . from_params ( cls , make_safe = True , * * kwargs ) ` { # WishartFull . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . 
<nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartFull . get_batch_shape ( ) ` { # WishartFull . get_batch_shape } <nl> Mode of the distribution . <nl> Name prepended to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # WishartFull . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . param_static_shapes ( cls , sample_shape ) ` { # WishartFull . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . 
distributions . WishartFull . pdf ( value , name = ' pdf ' ) ` { # WishartFull . pdf } <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . TransformedDistribution . from_params ( cls , make_safe = True , * * kwargs ) ` { # TransformedDistribution . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . TransformedDistribution . get_batch_shape ( ) ` { # TransformedDistribution . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . TransformedDistribution . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # TransformedDistribution . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . 
distributions . TransformedDistribution . param_static_shapes ( cls , sample_shape ) ` { # TransformedDistribution . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . TransformedDistribution . pdf ( value , name = ' pdf ' ) ` { # TransformedDistribution . pdf } <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalDiagPlusVDVT . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . get_batch_shape ( ) ` { # MultivariateNormalDiagPlusVDVT . get_batch_shape } <nl> Mode of each batch member . 
<nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalDiagPlusVDVT . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalDiagPlusVDVT . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalDiagPlusVDVT . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Bernoulli . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Bernoulli . md <nl> Entropy of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . from_params ( cls , make_safe = True , * * kwargs ) ` { # Bernoulli . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . 
If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Bernoulli . get_batch_shape ( ) ` { # Bernoulli . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Bernoulli . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Bernoulli . param_static_shapes ( cls , sample_shape ) ` { # Bernoulli . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Bernoulli . pdf ( value , name = ' pdf ' ) ` { # Bernoulli . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Dirichlet . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Dirichlet . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . from_params ( cls , make_safe = True , * * kwargs ) ` { # Dirichlet . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . 
param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Dirichlet . get_batch_shape ( ) ` { # Dirichlet . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Dirichlet . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Dirichlet . param_static_shapes ( cls , sample_shape ) ` { # Dirichlet . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Dirichlet . pdf ( value , name = ' pdf ' ) ` { # Dirichlet . 
pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Distribution . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . Distribution . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . from_params ( cls , make_safe = True , * * kwargs ) ` { # Distribution . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Distribution . get_batch_shape ( ) ` { # Distribution . get_batch_shape } <nl> Mode of the distribution . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Distribution . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . 
<nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Distribution . param_static_shapes ( cls , sample_shape ) ` { # Distribution . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Distribution . pdf ( value , name = ' pdf ' ) ` { # Distribution . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . MultivariateNormalCholesky . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard0 / tf . contrib . distributions . MultivariateNormalCholesky . md <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalCholesky . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . 
<nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . get_batch_shape ( ) ` { # MultivariateNormalCholesky . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalCholesky . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalCholesky . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalCholesky . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalCholesky . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . MultivariateNormalDiag . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . MultivariateNormalDiag . md <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalDiag . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . 
items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiag . get_batch_shape ( ) ` { # MultivariateNormalDiag . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalDiag . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiag . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalDiag . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiag . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalDiag . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . StudentT . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . StudentT . md <nl> The entropy of Student t distribution ( s ) . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . 
from_params ( cls , make_safe = True , * * kwargs ) ` { # StudentT . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . StudentT . get_batch_shape ( ) ` { # StudentT . get_batch_shape } <nl> Locations of these Student ' s t distribution ( s ) . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # StudentT . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . StudentT . param_static_shapes ( cls , sample_shape ) ` { # StudentT . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . 
Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . StudentT . pdf ( value , name = ' pdf ' ) ` { # StudentT . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . TransformedDistribution . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard1 / tf . contrib . distributions . TransformedDistribution . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . TransformedDistribution . from_params ( cls , make_safe = True , * * kwargs ) ` { # TransformedDistribution . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . TransformedDistribution . get_batch_shape ( ) ` { # TransformedDistribution . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . TransformedDistribution . 
param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # TransformedDistribution . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . TransformedDistribution . param_static_shapes ( cls , sample_shape ) ` { # TransformedDistribution . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . TransformedDistribution . pdf ( value , name = ' pdf ' ) ` { # TransformedDistribution . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Categorical . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Categorical . md <nl> Cumulative distribution function . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . from_params ( cls , make_safe = True , * * kwargs ) ` { # Categorical . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . 
<nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Categorical . get_batch_shape ( ) ` { # Categorical . get_batch_shape } <nl> Mean of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Categorical . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Categorical . param_static_shapes ( cls , sample_shape ) ` { # Categorical . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Categorical . pdf ( value , name = ' pdf ' ) ` { # Categorical . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Chi2 . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Chi2 . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . from_params ( cls , make_safe = True , * * kwargs ) ` { # Chi2 . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . 
constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Chi2 . get_batch_shape ( ) ` { # Chi2 . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Chi2 . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Chi2 . param_static_shapes ( cls , sample_shape ) ` { # Chi2 . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Chi2 . pdf ( value , name = ' pdf ' ) ` { # Chi2 . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Uniform . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . Uniform . md <nl> The entropy of Uniform distribution ( s ) . 
<nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . from_params ( cls , make_safe = True , * * kwargs ) ` { # Uniform . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Uniform . get_batch_shape ( ) ` { # Uniform . get_batch_shape } <nl> Mode of the distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Uniform . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Uniform . param_static_shapes ( cls , sample_shape ) ` { # Uniform . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . 
<nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Uniform . pdf ( value , name = ' pdf ' ) ` { # Uniform . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . WishartCholesky . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard2 / tf . contrib . distributions . WishartCholesky . md <nl> Entropy of the distribution in nats . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . from_params ( cls , make_safe = True , * * kwargs ) ` { # WishartCholesky . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartCholesky . get_batch_shape ( ) ` { # WishartCholesky . get_batch_shape } <nl> Mode of the distribution . <nl> Name prepended to all ops . 
<nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # WishartCholesky . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartCholesky . param_static_shapes ( cls , sample_shape ) ` { # WishartCholesky . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartCholesky . pdf ( value , name = ' pdf ' ) ` { # WishartCholesky . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Binomial . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Binomial . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Binomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # Binomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . 
If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Binomial . get_batch_shape ( ) ` { # Binomial . get_batch_shape } <nl> Name to prepend to all ops . <nl> Probability of success . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Binomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Binomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Binomial . param_static_shapes ( cls , sample_shape ) ` { # Binomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Binomial . pdf ( value , name = ' pdf ' ) ` { # Binomial . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . DirichletMultinomial . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . DirichletMultinomial . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # DirichletMultinomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . 
param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . DirichletMultinomial . get_batch_shape ( ) ` { # DirichletMultinomial . get_batch_shape } <nl> Parameter defining this distribution . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # DirichletMultinomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . DirichletMultinomial . param_static_shapes ( cls , sample_shape ) ` { # DirichletMultinomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . DirichletMultinomial . pdf ( value , name = ' pdf ' ) ` { # DirichletMultinomial . 
pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Exponential . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Exponential . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . from_params ( cls , make_safe = True , * * kwargs ) ` { # Exponential . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Exponential . get_batch_shape ( ) ` { # Exponential . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Exponential . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . 
<nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Exponential . param_static_shapes ( cls , sample_shape ) ` { # Exponential . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Exponential . pdf ( value , name = ' pdf ' ) ` { # Exponential . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Gamma . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Gamma . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . from_params ( cls , make_safe = True , * * kwargs ) ` { # Gamma . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . 
<nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Gamma . get_batch_shape ( ) ` { # Gamma . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Gamma . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Gamma . param_static_shapes ( cls , sample_shape ) ` { # Gamma . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Gamma . pdf ( value , name = ' pdf ' ) ` { # Gamma . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . InverseGamma . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . InverseGamma . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . from_params ( cls , make_safe = True , * * kwargs ) ` { # InverseGamma . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . 
from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . InverseGamma . get_batch_shape ( ) ` { # InverseGamma . get_batch_shape } <nl> The mode of an inverse gamma distribution is ` beta / ( alpha + 1 ) ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # InverseGamma . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . InverseGamma . param_static_shapes ( cls , sample_shape ) ` { # InverseGamma . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . InverseGamma . pdf ( value , name = ' pdf ' ) ` { # InverseGamma . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Multinomial . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . Multinomial . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . from_params ( cls , make_safe = True , * * kwargs ) ` { # Multinomial . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . 
<nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Multinomial . get_batch_shape ( ) ` { # Multinomial . get_batch_shape } <nl> Name to prepend to all ops . <nl> Event probabilities . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Multinomial . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Multinomial . param_static_shapes ( cls , sample_shape ) ` { # Multinomial . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . 
<nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Multinomial . pdf ( value , name = ' pdf ' ) ` { # Multinomial . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard3 / tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . md <nl> The entropies of these Multivariate Normals . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalDiagPlusVDVT . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . get_batch_shape ( ) ` { # MultivariateNormalDiagPlusVDVT . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . 
param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalDiagPlusVDVT . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalDiagPlusVDVT . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalDiagPlusVDVT . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalDiagPlusVDVT . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard6 / tf . contrib . distributions . Beta . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard6 / tf . contrib . distributions . Beta . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . from_params ( cls , make_safe = True , * * kwargs ) ` { # Beta . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . 
<nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Beta . get_batch_shape ( ) ` { # Beta . get_batch_shape } <nl> will be raised rather than returning ` NaN ` . <nl> Name to prepend to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Beta . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Beta . param_static_shapes ( cls , sample_shape ) ` { # Beta . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Beta . pdf ( value , name = ' pdf ' ) ` { # Beta . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard6 / tf . contrib . distributions . Laplace . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard6 / tf . contrib . distributions . Laplace . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . from_params ( cls , make_safe = True , * * kwargs ) ` { # Laplace . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . 
constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Laplace . get_batch_shape ( ) ` { # Laplace . get_batch_shape } <nl> Mode of this distribution . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Laplace . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Laplace . param_static_shapes ( cls , sample_shape ) ` { # Laplace . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Laplace . pdf ( value , name = ' pdf ' ) ` { # Laplace . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard7 / tf . contrib . distributions . MultivariateNormalFull . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard7 / tf . contrib . distributions . MultivariateNormalFull . md <nl> The entropies of these Multivariate Normals . 
<nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . from_params ( cls , make_safe = True , * * kwargs ) ` { # MultivariateNormalFull . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalFull . get_batch_shape ( ) ` { # MultivariateNormalFull . get_batch_shape } <nl> Mode of each batch member . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # MultivariateNormalFull . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . MultivariateNormalFull . param_static_shapes ( cls , sample_shape ) ` { # MultivariateNormalFull . 
param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . MultivariateNormalFull . pdf ( value , name = ' pdf ' ) ` { # MultivariateNormalFull . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard7 / tf . contrib . distributions . Normal . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard7 / tf . contrib . distributions . Normal . md <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> ` Tensor ` ` event_shape ` <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . from_params ( cls , make_safe = True , * * kwargs ) ` { # Normal . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Normal . get_batch_shape ( ) ` { # Normal . 
get_batch_shape } <nl> Distribution parameter for the mean . <nl> <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # Normal . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . Normal . param_static_shapes ( cls , sample_shape ) ` { # Normal . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . Normal . pdf ( value , name = ' pdf ' ) ` { # Normal . pdf } <nl> mmm a / tensorflow / g3doc / api_docs / python / functions_and_classes / shard9 / tf . contrib . distributions . WishartFull . md <nl> ppp b / tensorflow / g3doc / api_docs / python / functions_and_classes / shard9 / tf . contrib . distributions . WishartFull . md <nl> Entropy of the distribution in nats . <nl> Shape of a sample from a single distribution as a 1 - D int32 ` Tensor ` . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . from_params ( cls , make_safe = True , * * kwargs ) ` { # WishartFull . from_params } <nl> + <nl> + Given ( unconstrained ) parameters , return an instantiated distribution . <nl> + <nl> + Subclasses should implement a static method ` _safe_transforms ` that returns <nl> + a dict of parameter transforms , which will be used if ` make_safe = True ` . <nl> + <nl> + Example usage : <nl> + <nl> + ` ` ` <nl> + # Let ' s say we want a sample of size ( batch_size , 10 ) <nl> + shapes = MultiVariateNormalDiag . param_shapes ( [ batch_size , 10 ] ) <nl> + <nl> + # shapes has a Tensor shape for mu and sigma <nl> + # shapes = = { <nl> + # ' mu ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # ' sigma ' : tf . constant ( [ batch_size , 10 ] ) , <nl> + # } <nl> + <nl> + # Here we parameterize mu and sigma with the output of a linear <nl> + # layer . Note that sigma is unconstrained . <nl> + params = { } <nl> + for name , shape in shapes . items ( ) : <nl> + params [ name ] = linear ( x , shape [ 1 ] ) <nl> + <nl> + # Note that you can forward other kwargs to the ` Distribution ` , like <nl> + # ` allow_nan_stats ` or ` name ` . <nl> + mvn = MultiVariateNormalDiag . from_params ( * * params , allow_nan_stats = True ) <nl> + ` ` ` <nl> + <nl> + Distribution parameters may have constraints ( e . g . ` sigma ` must be positive <nl> + for a ` Normal ` distribution ) and the ` from_params ` method will apply default <nl> + parameter transforms . 
If a user wants to use their own transform , they can <nl> + apply it externally and set ` make_safe = False ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` make_safe ` < / b > : Whether the ` params ` should be constrained . If True , <nl> + ` from_params ` will apply default parameter transforms . If False , no <nl> + parameter transforms will be applied . <nl> + * < b > ` * * kwargs ` < / b > : dict of parameters for the distribution . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + A distribution parameterized by possibly transformed parameters in <nl> + ` kwargs ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` TypeError ` < / b > : if ` make_safe ` is ` True ` but ` _safe_transforms ` is not <nl> + implemented directly for ` cls ` . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartFull . get_batch_shape ( ) ` { # WishartFull . get_batch_shape } <nl> Mode of the distribution . <nl> Name prepended to all ops . <nl> <nl> <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . param_shapes ( cls , sample_shape , name = ' DistributionParamShapes ' ) ` { # WishartFull . param_shapes } <nl> + <nl> + Shapes of parameters given the desired shape of a call to ` sample ( ) ` . <nl> + <nl> + Subclasses should override static method ` _param_shapes ` . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` Tensor ` or python list / tuple . Desired shape of a call to <nl> + ` sample ( ) ` . <nl> + * < b > ` name ` < / b > : name to prepend ops with . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` Tensor ` shapes . <nl> + <nl> + <nl> + - - - <nl> + <nl> + # # # # ` tf . contrib . distributions . WishartFull . param_static_shapes ( cls , sample_shape ) ` { # WishartFull . param_static_shapes } <nl> + <nl> + param_shapes with static ( i . e . TensorShape ) shapes . <nl> + <nl> + # # # # # Args : <nl> + <nl> + <nl> + * < b > ` sample_shape ` < / b > : ` TensorShape ` or python list / tuple . Desired shape of a call <nl> + to ` sample ( ) ` . <nl> + <nl> + # # # # # Returns : <nl> + <nl> + ` dict ` of parameter name to ` TensorShape ` . <nl> + <nl> + # # # # # Raises : <nl> + <nl> + <nl> + * < b > ` ValueError ` < / b > : if ` sample_shape ` is a ` TensorShape ` and is not fully defined . <nl> + <nl> + <nl> - - - <nl> <nl> # # # # ` tf . contrib . distributions . WishartFull . pdf ( value , name = ' pdf ' ) ` { # WishartFull . pdf } <nl>
Update generated Python Op docs .
tensorflow/tensorflow
ce57cb78824905fcf850c7ce3fbe017225e0a6cc
2016-08-16T21:02:27Z
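The generated docs in the record above describe `param_shapes`, `param_static_shapes`, and `from_params` on `tf.contrib.distributions` classes. A minimal sketch of how they fit together, assuming a TensorFlow 1.x-era build that still ships `tf.contrib.distributions` with the `mu`/`sigma` parameterization shown in those docs (exact module paths and parameter names vary by version, so treat this as illustrative only):

```python
import tensorflow as tf

ds = tf.contrib.distributions
batch_size = 32

# Static parameter shapes for a sample of shape (batch_size, 10).
shapes = ds.Normal.param_static_shapes([batch_size, 10])
# e.g. {'mu': TensorShape([32, 10]), 'sigma': TensorShape([32, 10])}

# Unconstrained parameters (say, outputs of a linear layer); from_params
# with the default make_safe=True applies the built-in transform so that
# sigma ends up positive.
params = {name: tf.random_normal(shape.as_list())
          for name, shape in shapes.items()}
normal = ds.Normal.from_params(**params)

with tf.Session() as sess:
    print(sess.run(normal.mean()).shape)  # (32, 10)
```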
mmm a / atom / browser / api / atom_api_app . cc <nl> ppp b / atom / browser / api / atom_api_app . cc <nl> <nl> # include " base / environment . h " <nl> # include " base / files / file_path . h " <nl> # include " base / path_service . h " <nl> + # include " brightray / browser / brightray_paths . h " <nl> # include " native_mate / callback . h " <nl> # include " native_mate / dictionary . h " <nl> # include " native_mate / object_template_builder . h " <nl> <nl> <nl> # include " atom / common / node_includes . h " <nl> <nl> - # if defined ( OS_LINUX ) <nl> - # include " base / nix / xdg_util . h " <nl> - # endif <nl> - <nl> using atom : : Browser ; <nl> <nl> namespace mate { <nl> namespace api { <nl> <nl> namespace { <nl> <nl> + / / Return the path constant from string . <nl> + int GetPathConstant ( const std : : string & name ) { <nl> + if ( name = = " appData " ) <nl> + return brightray : : DIR_APP_DATA ; <nl> + else if ( name = = " userData " ) <nl> + return brightray : : DIR_USER_DATA ; <nl> + else if ( name = = " cache " ) <nl> + return brightray : : DIR_CACHE ; <nl> + else if ( name = = " userCache " ) <nl> + return brightray : : DIR_USER_CACHE ; <nl> + else if ( name = = " home " ) <nl> + return base : : DIR_HOME ; <nl> + else if ( name = = " temp " ) <nl> + return base : : DIR_TEMP ; <nl> + else if ( name = = " userDesktop " ) <nl> + return base : : DIR_USER_DESKTOP ; <nl> + else if ( name = = " exe " ) <nl> + return base : : FILE_EXE ; <nl> + else if ( name = = " module " ) <nl> + return base : : FILE_MODULE ; <nl> + else <nl> + return - 1 ; <nl> + } <nl> + <nl> class ResolveProxyHelper { <nl> public : <nl> ResolveProxyHelper ( const GURL & url , App : : ResolveProxyCallback callback ) <nl> void App : : OnFinishLaunching ( ) { <nl> Emit ( " ready " ) ; <nl> } <nl> <nl> - base : : FilePath App : : GetDataPath ( ) { <nl> + base : : FilePath App : : GetPath ( mate : : Arguments * args , const std : : string & name ) { <nl> + bool succeed = false ; <nl> base : : FilePath path ; <nl> - # if defined ( OS_LINUX ) <nl> - scoped_ptr < base : : Environment > env ( base : : Environment : : Create ( ) ) ; <nl> - path = base : : nix : : GetXDGDirectory ( env . get ( ) , <nl> - base : : nix : : kXdgConfigHomeEnvVar , <nl> - base : : nix : : kDotConfigDir ) ; <nl> - # else <nl> - PathService : : Get ( base : : DIR_APP_DATA , & path ) ; <nl> - # endif <nl> + int key = GetPathConstant ( name ) ; <nl> + if ( key > = 0 ) <nl> + succeed = PathService : : Get ( key , & path ) ; <nl> + if ( ! succeed ) <nl> + args - > ThrowError ( " Failed to get path " ) ; <nl> + return path ; <nl> + } <nl> <nl> - return path . Append ( base : : FilePath : : FromUTF8Unsafe ( <nl> - Browser : : Get ( ) - > GetName ( ) ) ) ; <nl> + void App : : SetPath ( mate : : Arguments * args , <nl> + const std : : string & name , <nl> + const base : : FilePath & path ) { <nl> + bool succeed = false ; <nl> + int key = GetPathConstant ( name ) ; <nl> + if ( key > = 0 ) <nl> + succeed = PathService : : Override ( key , path ) ; <nl> + if ( ! succeed ) <nl> + args - > ThrowError ( " Failed to set path " ) ; <nl> } <nl> <nl> void App : : ResolveProxy ( const GURL & url , ResolveProxyCallback callback ) { <nl> mate : : ObjectTemplateBuilder App : : GetObjectTemplateBuilder ( <nl> . SetMethod ( " setUserTasks " , <nl> base : : Bind ( & Browser : : SetUserTasks , browser ) ) <nl> # endif <nl> - . SetMethod ( " getDataPath " , & App : : GetDataPath ) <nl> + . SetMethod ( " setPath " , & App : : SetPath ) <nl> + . 
SetMethod ( " getPath " , & App : : GetPath ) <nl> . SetMethod ( " resolveProxy " , & App : : ResolveProxy ) <nl> . SetMethod ( " setDesktopName " , & App : : SetDesktopName ) ; <nl> } <nl> mmm a / atom / browser / api / atom_api_app . h <nl> ppp b / atom / browser / api / atom_api_app . h <nl> namespace base { <nl> class FilePath ; <nl> } <nl> <nl> + namespace mate { <nl> + class Arguments ; <nl> + } <nl> + <nl> namespace atom { <nl> <nl> namespace api { <nl> class App : public mate : : EventEmitter , <nl> v8 : : Isolate * isolate ) override ; <nl> <nl> private : <nl> - base : : FilePath GetDataPath ( ) ; <nl> + / / Get / Set the pre - defined path in PathService . <nl> + base : : FilePath GetPath ( mate : : Arguments * args , const std : : string & name ) ; <nl> + void SetPath ( mate : : Arguments * args , <nl> + const std : : string & name , <nl> + const base : : FilePath & path ) ; <nl> + <nl> void ResolveProxy ( const GURL & url , ResolveProxyCallback callback ) ; <nl> void SetDesktopName ( const std : : string & desktop_name ) ; <nl> <nl> mmm a / atom / browser / api / lib / app . coffee <nl> ppp b / atom / browser / api / lib / app . coffee <nl> bindings = process . atomBinding ' app ' <nl> app = bindings . app <nl> app . __proto__ = EventEmitter . prototype <nl> <nl> - app . getHomeDir = - > <nl> - process . env [ if process . platform is ' win32 ' then ' USERPROFILE ' else ' HOME ' ] <nl> - <nl> app . setApplicationMenu = ( menu ) - > <nl> require ( ' menu ' ) . setApplicationMenu menu <nl> <nl> if process . platform is ' darwin ' <nl> app . once ' ready ' , - > app . emit ' finish - launching ' <nl> app . terminate = app . quit <nl> app . exit = process . exit <nl> + app . getHomeDir = - > app . getPath ' home ' <nl> + app . getDataPath = - > app . getPath ' userData ' <nl> + app . setDataPath = ( path ) - > app . setPath ' userData ' , path <nl> <nl> # Only one App object pemitted . <nl> module . exports = app <nl> mmm a / atom / browser / default_app / main . js <nl> ppp b / atom / browser / default_app / main . js <nl> if ( option . file & & ! option . webdriver ) { <nl> app . setName ( packageJson . productName ) ; <nl> else if ( packageJson . name ) <nl> app . setName ( packageJson . name ) ; <nl> + app . setPath ( ' userData ' , path . join ( app . getPath ( ' appData ' ) , app . getName ( ) ) ) ; <nl> + app . setPath ( ' userCache ' , path . join ( app . getPath ( ' cache ' ) , app . getName ( ) ) ) ; <nl> } <nl> <nl> / / Run the app . <nl> mmm a / atom / browser / lib / chrome - extension . coffee <nl> ppp b / atom / browser / lib / chrome - extension . coffee <nl> getExtensionInfoFromPath = ( srcDirectory ) - > <nl> srcDirectory : srcDirectory <nl> extensionInfoMap [ manifest . name ] <nl> <nl> - # Load persistented extensions . <nl> - loadedExtensionsPath = path . join app . getDataPath ( ) , ' DevTools Extensions ' <nl> - <nl> - try <nl> - loadedExtensions = JSON . parse fs . readFileSync ( loadedExtensionsPath ) <nl> - loadedExtensions = [ ] unless Array . isArray loadedExtensions <nl> - # Preheat the extensionInfo cache . <nl> - getExtensionInfoFromPath srcDirectory for srcDirectory in loadedExtensions <nl> - catch e <nl> + # The loaded extensions cache and its persistent path . <nl> + loadedExtensions = null <nl> + loadedExtensionsPath = null <nl> <nl> # Persistent loaded extensions . <nl> app . on ' will - quit ' , - > <nl> app . 
once ' ready ' , - > <nl> protocol = require ' protocol ' <nl> BrowserWindow = require ' browser - window ' <nl> <nl> + # Load persistented extensions . <nl> + loadedExtensionsPath = path . join app . getDataPath ( ) , ' DevTools Extensions ' <nl> + <nl> + try <nl> + loadedExtensions = JSON . parse fs . readFileSync ( loadedExtensionsPath ) <nl> + loadedExtensions = [ ] unless Array . isArray loadedExtensions <nl> + # Preheat the extensionInfo cache . <nl> + getExtensionInfoFromPath srcDirectory for srcDirectory in loadedExtensions <nl> + catch e <nl> + <nl> # The chrome - extension : can map a extension URL request to real file path . <nl> protocol . registerProtocol ' chrome - extension ' , ( request ) - > <nl> parsed = url . parse request . url <nl> mmm a / atom / browser / lib / init . coffee <nl> ppp b / atom / browser / lib / init . coffee <nl> process . once ' BIND_DONE ' , - > <nl> else <nl> app . setDesktopName ' # { app . getName ( ) } . desktop ' <nl> <nl> + # Set the user path according to application ' s name . <nl> + app . setPath ' userData ' , path . join ( app . getPath ( ' appData ' ) , app . getName ( ) ) <nl> + app . setPath ' userCache ' , path . join ( app . getPath ( ' cache ' ) , app . getName ( ) ) <nl> + <nl> # Load the chrome extension support . <nl> require ' . / chrome - extension . js ' <nl> <nl> mmm a / docs / api / app . md <nl> ppp b / docs / api / app . md <nl> executed . It is possible that a window cancels the quitting by returning <nl> Quit the application directly , it will not try to close all windows so cleanup <nl> code will not run . <nl> <nl> - # # app . getDataPath ( ) <nl> + # # app . getPath ( name ) <nl> + <nl> + * ` name ` String <nl> + <nl> + Retrieves a path to a special directory or file associated with ` name ` . On <nl> + failure an ` Error ` would throw . <nl> + <nl> + You can request following paths by the names : <nl> + <nl> + * ` home ` : User ' s home directory <nl> + * ` appData ` : Per - user application data directory , by default it is pointed to : <nl> + * ` % APPDATA % ` on Windows <nl> + * ` $ XDG_CONFIG_HOME ` or ` ~ / . config ` on Linux <nl> + * ` ~ / Library / Application Support ` on OS X <nl> + * ` userData ` : The directory for storing your app ' s configuration files , by <nl> + default it is the ` appData ` directory appended with your app ' s name <nl> + * ` cache ` : Per - user application cache directory , by default it is pointed to : <nl> + * ` % APPDATA % ` on Window , which doesn ' t has a universal place for cache <nl> + * ` $ XDG_CACHE_HOME ` or ` ~ / . cache ` on Linux <nl> + * ` ~ / Library / Caches ` on OS X <nl> + * ` userCache ` : The directory for placing your app ' s caches , by default it is the <nl> + ` cache ` directory appended with your app ' s name <nl> + * ` temp ` : Temporary directory <nl> + * ` userDesktop ` : The current user ' s Desktop directory <nl> + * ` exe ` : The current executable file <nl> + * ` module ` : The ` libchromiumcontent ` library <nl> + <nl> + # # app . setPath ( name , path ) <nl> + <nl> + * ` name ` String <nl> + * ` path ` String <nl> + <nl> + Overrides the ` path ` to a special directory or file associated with ` name ` . if <nl> + the path specifies a directory that does not exist , the directory will be <nl> + created by this method . On failure an ` Error ` would throw . <nl> <nl> - Returns the path for storing configuration files , with app name appended . <nl> + You can only override paths of ` name ` s defined in ` app . getPath ` . 
<nl> <nl> - * ` % APPDATA % \ MyAppName ` on Windows <nl> - * ` ~ / . config / MyAppName ` on Linux <nl> - * ` ~ / Library / Application Support / MyAppName ` on OS X <nl> + By default web pages ' cookies and caches will be stored under ` userData ` <nl> + directory , if you want to change this location , you have to override the <nl> + ` userData ` path before the ` ready ` event of ` app ` module gets emitted . <nl> <nl> # # app . getVersion ( ) <nl> <nl> mmm a / vendor / brightray <nl> ppp b / vendor / brightray <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit 09dc5f11e9c83e6ff3c2b6b1b58ceb2b8eae35c4 <nl> + Subproject commit ad17292154ddd2436c377bbe2d10fa213338f4fa <nl>
Merge pull request from atom / set - data - path
electron/electron
e75950cb89c3dc133b4aabef84e09ef6a8d2e4d0
2015-01-19T05:53:29Z
mmm a / src / unity / python / turicreate / data_structures / sframe . py <nl> ppp b / src / unity / python / turicreate / data_structures / sframe . py <nl> def save ( self , filename , format = None ) : <nl> if format is None : <nl> if filename . endswith ( ( ' . csv ' , ' . csv . gz ' ) ) : <nl> format = ' csv ' <nl> + elif filename . endswith ( ( ' . json ' ) ) : <nl> + format = ' json ' <nl> else : <nl> format = ' binary ' <nl> else : <nl>
Add SFrame save auto detection for JSON Format ( )
apple/turicreate
7f9bae09043a7609d3af2d889a653b81e60f0977
2018-08-24T17:23:22Z
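A small usage sketch of the new extension-based detection added above (assumes the `turicreate` package; the file names are illustrative):

```python
import turicreate as tc

sf = tc.SFrame({'id': [1, 2, 3], 'score': [0.1, 0.5, 0.9]})

sf.save('scores.csv')     # ends with .csv    -> format='csv'
sf.save('scores.json')    # ends with .json   -> format='json' (the new branch)
sf.save('scores.sframe')  # anything else     -> format='binary'
```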
mmm a / benchmark / single - source / AngryPhonebook . swift <nl> ppp b / benchmark / single - source / AngryPhonebook . swift <nl> <nl> import TestsUtils <nl> import Foundation <nl> <nl> + let t : [ BenchmarkCategory ] = [ . validation , . api , . String ] <nl> + <nl> public let AngryPhonebook = [ <nl> BenchmarkInfo ( <nl> - name : " AngryPhonebook " , <nl> - runFunction : run_AngryPhonebook , <nl> - tags : [ . validation , . api , . String ] , <nl> - legacyFactor : 7 ) , <nl> + name : " AngryPhonebook . Latin " , <nl> + runFunction : { angryPhonebook ( $ 0 , latin ) } , <nl> + tags : t , <nl> + setUpFunction : { blackHole ( latin ) } ) , <nl> BenchmarkInfo ( <nl> - name : " AngryPhonebookCyrillic " , <nl> - runFunction : run_AngryPhonebookCyrillic , <nl> - tags : [ . validation , . api , . String ] ) , <nl> + name : " AngryPhonebook . Armenian " , <nl> + runFunction : { angryPhonebook ( $ 0 , armenian ) } , <nl> + tags : t , <nl> + setUpFunction : { blackHole ( armenian ) } ) , <nl> BenchmarkInfo ( <nl> - name : " AngryPhonebookArmenian " , <nl> - runFunction : run_AngryPhonebookArmenian , <nl> - tags : [ . validation , . api , . String ] ) <nl> + name : " AngryPhonebook . Cyrillic " , <nl> + runFunction : { angryPhonebook ( $ 0 , cyrillic ) } , <nl> + tags : t , <nl> + setUpFunction : { blackHole ( cyrillic ) } ) <nl> ] <nl> <nl> - let words = [ <nl> - " James " , " John " , " Robert " , " Michael " , " William " , " David " , " Richard " , " Joseph " , <nl> - " Charles " , " Thomas " , " Christopher " , " Daniel " , " Matthew " , " Donald " , " Anthony " , <nl> - " Paul " , " Mark " , " George " , " Steven " , " Kenneth " , " Andrew " , " Edward " , " Brian " , <nl> - " Joshua " , " Kevin " , " Ronald " , " Timothy " , " Jason " , " Jeffrey " , " Gary " , " Ryan " , <nl> - " Nicholas " , " Eric " , " Stephen " , " Jacob " , " Larry " , " Frank " ] <nl> - <nl> - @ inline ( never ) <nl> - public func run_AngryPhonebook ( _ N : Int ) { <nl> - / / Permute the names . <nl> - for _ in 1 . . . N { <nl> - for firstname in words { <nl> - for lastname in words { <nl> - _ = ( firstname . uppercased ( ) , lastname . lowercased ( ) ) <nl> - } <nl> - } <nl> - } <nl> - } <nl> + / / Workloads for various scripts . Always 20 names for 400 pairings . <nl> + / / To keep the performance of various scripts roughly comparable , aim for <nl> + / / a total length of approximately 120 characters . <nl> + / / E . g . : ` latin . joined ( separator : " " ) . count = = 118 ` <nl> <nl> - let cyrillicNames : [ String ] = [ <nl> - " Александр " , " Аркадий " , " Аня " , " Даниил " , " Дмитрий " , " Эдуард " , " Юрій 🇺 🇦 " , " Давид " , <nl> - " Анна " , " Дмитрий 🇺 🇸 " , " Евгений " , " 👍 🏼 Борис " , " Владимир " , " 👍 🏽 Артур " , " Антон " , <nl> - " Антон 👍 🏻 " , " Надія 👍 " , " Алёна " , " Алиса " , " Елена 🇷 🇺 " , " Елизавета 👍 🏾 " , " 👍 🏿 Инна " , <nl> - " Жанна 🇨 🇦 " , " Ульяна " , " Кристина " , " Ксения " , " 👍 🏿 👍 🏾 👍 🏽 👍 🏼 👍 🏻 👍 🇺 🇦 🇷 🇺 🇨 🇦 🇺 🇸 👨 ‍ 👩 ‍ 👧 ‍ 👦 " ] <nl> + let latin = [ <nl> + " James " , " John " , " Robert " , " Michael " , " William " , " David " , " Richard " , " Joseph " , <nl> + " Charles " , " Thomas " , " Jacob " , " Daniel " , " Matthew " , " Donald " , " Anthony " , <nl> + " Paul " , " Mark " , " George " , " Steven " , " Kenneth " ] <nl> <nl> - @ inline ( never ) <nl> - public func run_AngryPhonebookCyrillic ( _ N : Int ) { <nl> - / / Permute the names . <nl> - for _ in 1 . . . 
N { <nl> - for firstname in cyrillicNames { <nl> - for lastname in cyrillicNames { <nl> - _ = ( firstname . uppercased ( ) , lastname . lowercased ( ) ) <nl> - } <nl> - } <nl> - } <nl> - } <nl> + let armenian : [ String ] = [ <nl> + " Արմեն " , " Աննա " , " Հարութ " , " Միքայել " , " Մարիա " , " Դավիթ " , " Վարդան " , <nl> + " Նարինե " , " Տիգրան " , " Տաթևիկ " , " Թագուհի " , " Թամարա " , " Ազնաուր " , " Գրիգոր " , <nl> + " Կոմիտաս " , " Հայկ " , " Գառնիկ " , " Վահրամ " , " Վահագն " , " Գևորգ " ] <nl> <nl> - let armenianNames : [ String ] = [ <nl> - " Արմեն 🇦 🇲 " , " Աննա " , " Հարություն " , " Միքայել " , " Մարիա " , " Դավիթ " , " Վարդան " , " Նարինե " , <nl> - " Հռիփսիմե " , " Տիգրան 👍 " , " Տաթև " , " Ադամ " , " Ազատ " , " Ազնաւուր 🇨 🇦 " , " Գրիգոր " , " Անի " , <nl> - " Լիլիթ 👍 🏽 " , " Հայկ 👍 🏼 " , " Անդրանիկ " , " Գառնիկ 👨 ‍ 👩 ‍ 👧 ‍ 👦 " , " Վահրամ " , " Վահագն 👍 🏿 " , " Գևորգ " , <nl> - " Թագուհի 🇺 🇸 " , " Թամարա 👍 🏻 " , " Արամ " , " Արսեն " , " Կոմիտաս " , " 👍 🏿 👍 🏾 👍 🏽 👍 🏼 👍 🏻 👍 🇦 🇲 🇨 🇦 🇺 🇸 👨 ‍ 👩 ‍ 👧 ‍ 👦 " ] <nl> + let cyrillic : [ String ] = [ <nl> + " Ульяна " , " Аркадий " , " Аня " , " Даниил " , " Дмитрий " , " Эдуард " , " Юрій " , " Давид " , <nl> + " Анна " , " Дмитрий " , " Евгений " , " Борис " , " Ксения " , " Артур " , " Аполлон " , <nl> + " Соломон " , " Николай " , " Кристи " , " Надежда " , " Спартак " ] <nl> <nl> @ inline ( never ) <nl> - public func run_AngryPhonebookArmenian ( _ N : Int ) { <nl> + public func angryPhonebook ( _ N : Int , _ names : [ String ] ) { <nl> + assert ( names . count = = 20 ) <nl> / / Permute the names . <nl> for _ in 1 . . . N { <nl> - for firstname in armenianNames { <nl> - for lastname in armenianNames { <nl> - _ = ( firstname . uppercased ( ) , lastname . lowercased ( ) ) <nl> + for firstname in names { <nl> + for lastname in names { <nl> + blackHole ( ( firstname . uppercased ( ) , lastname . lowercased ( ) ) ) <nl> } <nl> } <nl> } <nl>
SR - 10855 : Consistent test data and code refactor
apple/swift
4483e849ef327d4e81126f6a13b0692d349f9694
2019-06-26T04:33:39Z
mmm a / examples / difftaichi / diffmpm . py <nl> ppp b / examples / difftaichi / diffmpm . py <nl> def forward ( total_steps = steps ) : <nl> # simulation <nl> for s in range ( total_steps - 1 ) : <nl> clear_grid ( ) <nl> - compute_actuation ( ) <nl> + compute_actuation ( s ) <nl> p2g ( s ) <nl> grid_op ( ) <nl> g2p ( s ) <nl> def backward ( ) : <nl> g2p . grad ( s ) <nl> grid_op . grad ( ) <nl> p2g . grad ( s ) <nl> - compute_actuation . grad ( ) <nl> + compute_actuation . grad ( s ) <nl> <nl> <nl> class Scene : <nl> mmm a / examples / difftaichi / diffmpm3d . py <nl> ppp b / examples / difftaichi / diffmpm3d . py <nl> def forward ( total_steps = steps ) : <nl> # simulation <nl> for s in range ( total_steps - 1 ) : <nl> clear_grid ( ) <nl> - compute_actuation ( ) <nl> + compute_actuation ( s ) <nl> p2g ( s ) <nl> grid_op ( ) <nl> g2p ( s ) <nl> def backward ( ) : <nl> g2p . grad ( s ) <nl> grid_op . grad ( ) <nl> p2g . grad ( s ) <nl> - compute_actuation . grad ( ) <nl> + compute_actuation . grad ( s ) <nl> <nl> <nl> class Scene : <nl> mmm a / examples / difftaichi / liquid . py <nl> ppp b / examples / difftaichi / liquid . py <nl> def forward ( total_steps = steps ) : <nl> # simulation <nl> for s in range ( total_steps - 1 ) : <nl> clear_grid ( ) <nl> - compute_actuation ( ) <nl> + compute_actuation ( s ) <nl> p2g ( s ) <nl> grid_op ( ) <nl> g2p ( s ) <nl> def backward ( ) : <nl> g2p . grad ( s ) <nl> grid_op . grad ( ) <nl> p2g . grad ( s ) <nl> - compute_actuation . grad ( ) <nl> + compute_actuation . grad ( s ) <nl> <nl> <nl> class Scene : <nl>
fixed missing compute_actuation parameters in mpm
taichi-dev/taichi
a0ab9c39f2dffe486edb302bb171ac92121695d5
2019-10-21T02:42:33Z
mmm a / test / stdlib / Runtime . swift <nl> ppp b / test / stdlib / Runtime . swift <nl> Reflection . test ( " CustomMirror " ) { <nl> var output = " " <nl> dump ( Brilliant ( 123 , " four five six " ) , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Brilliant ( 123 , four five six ) # 0 \ n " <nl> - expected + = " - first : 123 \ n " <nl> - expected + = " - second : four five six \ n " <nl> - expected + = " ▿ self : Brilliant ( 123 , four five six ) # 0 \ n " <nl> + let expected = <nl> + " ▿ Brilliant ( 123 , four five six ) # 0 \ n " + <nl> + " - first : 123 \ n " + <nl> + " - second : four five six \ n " + <nl> + " ▿ self : Brilliant ( 123 , four five six ) # 0 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CustomMirror " ) { <nl> var output = " " <nl> dump ( Brilliant ( 123 , " four five six " ) , & output , maxItems : 3 ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Brilliant ( 123 , four five six ) # 0 \ n " <nl> - expected + = " - first : 123 \ n " <nl> - expected + = " - second : four five six \ n " <nl> - expected + = " ( 1 more child ) \ n " <nl> + let expected = <nl> + " ▿ Brilliant ( 123 , four five six ) # 0 \ n " + <nl> + " - first : 123 \ n " + <nl> + " - second : four five six \ n " + <nl> + " ( 1 more child ) \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CustomMirror " ) { <nl> var output = " " <nl> dump ( Brilliant ( 123 , " four five six " ) , & output , maxItems : 2 ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Brilliant ( 123 , four five six ) # 0 \ n " <nl> - expected + = " - first : 123 \ n " <nl> - expected + = " ( 2 more children ) \ n " <nl> + let expected = <nl> + " ▿ Brilliant ( 123 , four five six ) # 0 \ n " + <nl> + " - first : 123 \ n " + <nl> + " ( 2 more children ) \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CustomMirror " ) { <nl> var output = " " <nl> dump ( Brilliant ( 123 , " four five six " ) , & output , maxItems : 1 ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Brilliant ( 123 , four five six ) # 0 \ n " <nl> - expected + = " ( 3 children ) \ n " <nl> + let expected = <nl> + " ▿ Brilliant ( 123 , four five six ) # 0 \ n " + <nl> + " ( 3 children ) \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CustomMirrorIsInherited " ) { <nl> var output = " " <nl> dump ( Irradiant ( ) , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Brilliant ( 400 , ) # 0 \ n " <nl> - expected + = " - first : 400 \ n " <nl> - expected + = " - second : \ n " <nl> - expected + = " ▿ self : Brilliant ( 400 , ) # 0 \ n " <nl> + let expected = <nl> + " ▿ Brilliant ( 400 , ) # 0 \ n " + <nl> + " - first : 400 \ n " + <nl> + " - second : \ n " + <nl> + " ▿ self : Brilliant ( 400 , ) # 0 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " TupleMirror " ) { <nl> let tuple = ( Brilliant ( 384 , " seven six eight " ) , Matte ( " nine " ) ) <nl> dump ( tuple , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 2 elements ) \ n " <nl> - expected + = " ▿ . 0 : Brilliant ( 384 , seven six eight ) # 0 \ n " <nl> - expected + = " - first : 384 \ n " <nl> - expected + = " - second : seven six eight \ n " <nl> - expected + = " ▿ self : Brilliant ( 384 , seven six eight ) # 0 \ n " <nl> - expected + = " ▿ . 1 : a . Matte \ n " <nl> - expected + = " - s : nine \ n " <nl> + let expected = <nl> + " ▿ ( 2 elements ) \ n " + <nl> + " ▿ . 
0 : Brilliant ( 384 , seven six eight ) # 0 \ n " + <nl> + " - first : 384 \ n " + <nl> + " - second : seven six eight \ n " + <nl> + " ▿ self : Brilliant ( 384 , seven six eight ) # 0 \ n " + <nl> + " ▿ . 1 : a . Matte \ n " + <nl> + " - s : nine \ n " <nl> <nl> expectEqual ( expected , output ) <nl> <nl> Reflection . test ( " TupleMirror " ) { <nl> let tuple = ( 1 , 2 . 5 , false , " three " ) <nl> dump ( tuple , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 4 elements ) \ n " <nl> - expected + = " - . 0 : 1 \ n " <nl> - expected + = " - . 1 : 2 . 5 \ n " <nl> - expected + = " - . 2 : false \ n " <nl> - expected + = " - . 3 : three \ n " <nl> + let expected = <nl> + " ▿ ( 4 elements ) \ n " + <nl> + " - . 0 : 1 \ n " + <nl> + " - . 1 : 2 . 5 \ n " + <nl> + " - . 2 : false \ n " + <nl> + " - . 3 : three \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " TupleMirror " ) { <nl> let tuple = ( 1 , ( " Hello " , " World " ) ) <nl> dump ( tuple , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 2 elements ) \ n " <nl> - expected + = " - . 0 : 1 \ n " <nl> - expected + = " ▿ . 1 : ( 2 elements ) \ n " <nl> - expected + = " - . 0 : Hello \ n " <nl> - expected + = " - . 1 : World \ n " <nl> + let expected = <nl> + " ▿ ( 2 elements ) \ n " + <nl> + " - . 0 : 1 \ n " + <nl> + " ▿ . 1 : ( 2 elements ) \ n " + <nl> + " - . 0 : Hello \ n " + <nl> + " - . 1 : World \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " String . UTF8View / Mirror " ) { <nl> var output = " " <nl> dump ( " \ u { 61 } \ u { 304b } \ u { 3099 } " . utf8 , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ n " <nl> - expected + = " - [ 0 ] : 97 \ n " <nl> - expected + = " - [ 1 ] : 227 \ n " <nl> - expected + = " - [ 2 ] : 129 \ n " <nl> - expected + = " - [ 3 ] : 139 \ n " <nl> - expected + = " - [ 4 ] : 227 \ n " <nl> - expected + = " - [ 5 ] : 130 \ n " <nl> - expected + = " - [ 6 ] : 153 \ n " <nl> + let expected = <nl> + " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ n " + <nl> + " - [ 0 ] : 97 \ n " + <nl> + " - [ 1 ] : 227 \ n " + <nl> + " - [ 2 ] : 129 \ n " + <nl> + " - [ 3 ] : 139 \ n " + <nl> + " - [ 4 ] : 227 \ n " + <nl> + " - [ 5 ] : 130 \ n " + <nl> + " - [ 6 ] : 153 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " String . UTF16View / Mirror " ) { <nl> var output = " " <nl> dump ( " \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } " . utf16 , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } \ n " <nl> - expected + = " - [ 0 ] : 97 \ n " <nl> - expected + = " - [ 1 ] : 12363 \ n " <nl> - expected + = " - [ 2 ] : 12441 \ n " <nl> - expected + = " - [ 3 ] : 55357 \ n " <nl> - expected + = " - [ 4 ] : 56357 \ n " <nl> + let expected = <nl> + " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } \ n " + <nl> + " - [ 0 ] : 97 \ n " + <nl> + " - [ 1 ] : 12363 \ n " + <nl> + " - [ 2 ] : 12441 \ n " + <nl> + " - [ 3 ] : 55357 \ n " + <nl> + " - [ 4 ] : 56357 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " String . UnicodeScalarView / Mirror " ) { <nl> var output = " " <nl> dump ( " \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } " . 
unicodeScalars , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } \ n " <nl> - expected + = " - [ 0 ] : \ u { 61 } \ n " <nl> - expected + = " - [ 1 ] : \ u { 304b } \ n " <nl> - expected + = " - [ 2 ] : \ u { 3099 } \ n " <nl> - expected + = " - [ 3 ] : \ u { 1f425 } \ n " <nl> + let expected = <nl> + " ▿ \ u { 61 } \ u { 304b } \ u { 3099 } \ u { 1f425 } \ n " + <nl> + " - [ 0 ] : \ u { 61 } \ n " + <nl> + " - [ 1 ] : \ u { 304b } \ n " + <nl> + " - [ 2 ] : \ u { 3099 } \ n " + <nl> + " - [ 3 ] : \ u { 1f425 } \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CGPoint " ) { <nl> var output = " " <nl> dump ( CGPoint ( x : 1 . 25 , y : 2 . 75 ) , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 1 . 25 , 2 . 75 ) \ n " <nl> - expected + = " - x : 1 . 25 \ n " <nl> - expected + = " - y : 2 . 75 \ n " <nl> + let expected = <nl> + " ▿ ( 1 . 25 , 2 . 75 ) \ n " + <nl> + " - x : 1 . 25 \ n " + <nl> + " - y : 2 . 75 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CGSize " ) { <nl> var output = " " <nl> dump ( CGSize ( width : 1 . 25 , height : 2 . 75 ) , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 1 . 25 , 2 . 75 ) \ n " <nl> - expected + = " - width : 1 . 25 \ n " <nl> - expected + = " - height : 2 . 75 \ n " <nl> + let expected = <nl> + " ▿ ( 1 . 25 , 2 . 75 ) \ n " + <nl> + " - width : 1 . 25 \ n " + <nl> + " - height : 2 . 75 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " CGRect " ) { <nl> size : CGSize ( width : 10 . 25 , height : 11 . 75 ) ) , <nl> & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ ( 1 . 25 , 2 . 25 , 10 . 25 , 11 . 75 ) \ n " <nl> - expected + = " ▿ origin : ( 1 . 25 , 2 . 25 ) \ n " <nl> - expected + = " - x : 1 . 25 \ n " <nl> - expected + = " - y : 2 . 25 \ n " <nl> - expected + = " ▿ size : ( 10 . 25 , 11 . 75 ) \ n " <nl> - expected + = " - width : 10 . 25 \ n " <nl> - expected + = " - height : 11 . 75 \ n " <nl> + let expected = <nl> + " ▿ ( 1 . 25 , 2 . 25 , 10 . 25 , 11 . 75 ) \ n " + <nl> + " ▿ origin : ( 1 . 25 , 2 . 25 ) \ n " + <nl> + " - x : 1 . 25 \ n " + <nl> + " - y : 2 . 25 \ n " + <nl> + " ▿ size : ( 10 . 25 , 11 . 75 ) \ n " + <nl> + " - width : 10 . 25 \ n " + <nl> + " - height : 11 . 75 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " Unmanaged / nil " ) { <nl> var optionalURL : Unmanaged < CFURL > ? = nil <nl> dump ( optionalURL , & output ) <nl> <nl> - var expected = " - nil \ n " <nl> + let expected = " - nil \ n " <nl> <nl> expectEqual ( expected , output ) <nl> } <nl> Reflection . test ( " Unmanaged / not - nil " ) { <nl> Unmanaged . passRetained ( CFURLCreateWithString ( nil , " http : / / llvm . org / " , nil ) ) <nl> dump ( optionalURL , & output ) <nl> <nl> - var expected = " " <nl> - expected + = " ▿ Swift . Unmanaged \ n " <nl> - expected + = " ▿ Some : Swift . Unmanaged \ n " <nl> - expected + = " - _value : http : / / llvm . org / # 0 \ n " <nl> + let expected = <nl> + " ▿ Swift . Unmanaged \ n " + <nl> + " ▿ Some : Swift . Unmanaged \ n " + <nl> + " - _value : http : / / llvm . org / # 0 \ n " <nl> <nl> expectEqual ( expected , output ) <nl> <nl>
Tests : use optimizer - friendly string concatenation
apple/swift
6f10c0954578cf318b02c748897244db9e74401f
2014-08-28T12:44:26Z
mmm a / s / strategy_shard . cpp <nl> ppp b / s / strategy_shard . cpp <nl> namespace mongo { <nl> <nl> BSONObj key = manager - > getShardKey ( ) . extractKey ( query ) ; <nl> BSONForEach ( e , key ) { <nl> - PRINT ( e ) ; <nl> - PRINT ( getGtLtOp ( e ) ) ; <nl> uassert ( 13465 , " shard key in upsert query must be an exact match " , getGtLtOp ( e ) = = BSONObj : : Equality ) ; <nl> } <nl> } <nl>
forgot to remove debugging code
mongodb/mongo
c58ddb10d4cd63e3fe21ee439b1595459ab1e3b9
2010-09-03T15:56:31Z
mmm a / include / swift / AST / DiagnosticsParse . def <nl> ppp b / include / swift / AST / DiagnosticsParse . def <nl> ERROR ( expected_rparen_tuple_pattern_list , none , <nl> " expected ' ) ' at end of tuple pattern " , ( ) ) <nl> ERROR ( untyped_pattern_ellipsis , none , <nl> " ' . . . ' cannot be applied to a subpattern which is not explicitly typed " , ( ) ) <nl> - ERROR ( non_func_decl_pattern_init , none , <nl> - " default argument is only permitted for a non - curried function parameter " , ( ) ) <nl> + ERROR ( no_default_arg_closure , none , <nl> + " default arguments are not allowed in closures " , ( ) ) <nl> + ERROR ( no_default_arg_subscript , none , <nl> + " default arguments are not allowed in subscripts " , ( ) ) <nl> + ERROR ( no_default_arg_curried , none , <nl> + " default arguments are not allowed in curried parameter lists " , ( ) ) <nl> ERROR ( var_not_allowed_in_pattern , none , <nl> " Use of ' var ' binding here is not allowed " , ( ) ) <nl> WARNING ( let_on_param_is_redundant , none , <nl> mmm a / lib / Parse / ParsePattern . cpp <nl> ppp b / lib / Parse / ParsePattern . cpp <nl> void Parser : : DefaultArgumentInfo : : setFunctionContext ( DeclContext * DC ) { <nl> static ParserStatus parseDefaultArgument ( Parser & P , <nl> Parser : : DefaultArgumentInfo * defaultArgs , <nl> unsigned argIndex , <nl> - ExprHandle * & init ) { <nl> + ExprHandle * & init , <nl> + Parser : : ParameterContextKind paramContext ) { <nl> SourceLoc equalLoc = P . consumeToken ( tok : : equal ) ; <nl> <nl> / / Enter a fresh default - argument context with a meaningless parent . <nl> static ParserStatus parseDefaultArgument ( Parser & P , <nl> defaultArgs - > ParsedContexts . push_back ( initDC ) ; <nl> } <nl> <nl> + Diag < > diagID = { DiagID ( ) } ; <nl> + switch ( paramContext ) { <nl> + case Parser : : ParameterContextKind : : Function : <nl> + case Parser : : ParameterContextKind : : Operator : <nl> + case Parser : : ParameterContextKind : : Initializer : <nl> + break ; <nl> + case Parser : : ParameterContextKind : : Closure : <nl> + diagID = diag : : no_default_arg_closure ; <nl> + break ; <nl> + case Parser : : ParameterContextKind : : Subscript : <nl> + diagID = diag : : no_default_arg_subscript ; <nl> + break ; <nl> + case Parser : : ParameterContextKind : : Curried : <nl> + diagID = diag : : no_default_arg_curried ; <nl> + break ; <nl> + } <nl> + <nl> + assert ( ( diagID . ID ! = DiagID ( ) ) = = ! defaultArgs & & <nl> + " Default arguments specified for an unexpected parameter list kind " ) ; <nl> + <nl> if ( ! defaultArgs ) { <nl> - auto inFlight = P . diagnose ( equalLoc , diag : : non_func_decl_pattern_init ) ; <nl> + auto inFlight = P . diagnose ( equalLoc , diagID ) ; <nl> if ( initR . isNonNull ( ) ) <nl> inFlight . fixItRemove ( SourceRange ( equalLoc , initR . get ( ) - > getEndLoc ( ) ) ) ; <nl> return ParserStatus ( ) ; <nl> Parser : : parseParameterClause ( SourceLoc & leftParenLoc , <nl> if ( Tok . is ( tok : : equal ) ) { <nl> param . EqualLoc = Tok . getLoc ( ) ; <nl> status | = parseDefaultArgument ( * this , defaultArgs , defaultArgIndex , <nl> - param . DefaultArg ) ; <nl> + param . DefaultArg , paramContext ) ; <nl> <nl> if ( param . EllipsisLoc . isValid ( ) ) { <nl> / / The range of the complete default argument . <nl> mapParsedParameters ( Parser & parser , <nl> } <nl> <nl> if ( param . DefaultArg ) { <nl> - if ( ! isFirstParameterClause ) { <nl> - / / Default arguments are only permitted on the first parameter clause . <nl> - parser . diagnose ( param . 
EqualLoc , diag : : non_func_decl_pattern_init ) <nl> - . fixItRemove ( SourceRange ( param . EqualLoc , <nl> - param . DefaultArg - > getExpr ( ) - > getEndLoc ( ) ) ) ; <nl> - } else { <nl> - result - > setDefaultArgumentKind ( getDefaultArgKind ( param . DefaultArg ) ) ; <nl> - result - > setDefaultValue ( param . DefaultArg ) ; <nl> - } <nl> + assert ( isFirstParameterClause & & <nl> + " Default arguments are only permitted on the first param clause " ) ; <nl> + result - > setDefaultArgumentKind ( getDefaultArgKind ( param . DefaultArg ) ) ; <nl> + result - > setDefaultValue ( param . DefaultArg ) ; <nl> } <nl> <nl> elements . push_back ( result ) ; <nl> mmm a / test / decl / subscript / subscripting . swift <nl> ppp b / test / decl / subscript / subscripting . swift <nl> protocol r23952125 { <nl> var c : Int / / expected - error { { property in protocol must have explicit { get } or { get set } specifier } } <nl> } <nl> <nl> - <nl> - <nl> + / / < rdar : / / problem / 16812341 > QoI : Poor error message when providing a default value for a subscript parameter <nl> + struct S4 { <nl> + subscript ( subs : Int = 0 ) - > Int { / / expected - error { { default arguments are not allowed in subscripts } } <nl> + get { <nl> + return 1 <nl> + } <nl> + } <nl> + } <nl> mmm a / test / expr / closure / default_args . swift <nl> ppp b / test / expr / closure / default_args . swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> func simple_default_args ( ) { <nl> - let _ : ( Int ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { default argument is only permitted for a non - curried function parameter } } { { 36 - 39 = } } <nl> - let _ : ( ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } expected - error { { default argument is only permitted for a non - curried function parameter } } { { 33 - 36 = } } <nl> + / / < rdar : / / problem / 22753605 > QoI : bad diagnostic when closure has default argument <nl> + let _ : ( Int ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { default arguments are not allowed in closures } } { { 36 - 39 = } } <nl> + let _ : ( ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } expected - error { { default arguments are not allowed in closures } } { { 33 - 36 = } } <nl> let _ : ( ) - > Int = { ( x : Int ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } <nl> } <nl> <nl>
Fix < rdar : / / problem / 22753605 > QoI : bad diagnostic when closure has default argument
apple/swift
f0377bee52eed0c6c8007a2149bc32e1e6454098
2016-01-23T07:25:47Z
mmm a / hphp / hack / test / unit / naming / naming_table_tests . ml <nl> ppp b / hphp / hack / test / unit / naming / naming_table_tests . ml <nl> let test_context_changes_funs ( ) = <nl> ( Naming_provider . get_fun_canon_name ctx " \ \ BAR " ) <nl> " Old function in context should NOT be accessible by canon name " ) <nl> <nl> - let text_context_changes_classes ( ) = <nl> + let test_context_changes_classes ( ) = <nl> run_naming_table_test <nl> ( fun ~ ctx ~ unbacked_naming_table : _ ~ backed_naming_table : _ ~ db_name : _ - > <nl> let ( ctx , _entry ) = <nl> let text_context_changes_classes ( ) = <nl> ( Naming_provider . get_type_canon_name ctx " \ \ FOO " ) <nl> " Old class in context should NOT be accessible by canon name " ) <nl> <nl> - let text_context_changes_typedefs ( ) = <nl> + let test_context_changes_typedefs ( ) = <nl> run_naming_table_test <nl> ( fun ~ ctx ~ unbacked_naming_table : _ ~ backed_naming_table : _ ~ db_name : _ - > <nl> let ( ctx , _entry ) = <nl> let ( ) = <nl> ( " test_local_changes " , test_local_changes ) ; <nl> ( " test_context_changes_consts " , test_context_changes_consts ) ; <nl> ( " test_context_changes_funs " , test_context_changes_funs ) ; <nl> - ( " text_context_changes_classes " , text_context_changes_classes ) ; <nl> - ( " text_context_changes_typedefs " , text_context_changes_typedefs ) ; <nl> + ( " test_context_changes_classes " , test_context_changes_classes ) ; <nl> + ( " test_context_changes_typedefs " , test_context_changes_typedefs ) ; <nl> ] <nl>
Fix typo : ` text_ * ` - > ` test_ * `
facebook/hhvm
e59ec2fb32705b9a208cceda12a829ad067f2076
2020-03-19T23:38:05Z
mmm a / tensorflow / compiler / tests / tensor_array_ops_test . py <nl> ppp b / tensorflow / compiler / tests / tensor_array_ops_test . py <nl> def testTensorArrayWriteRead ( self ) : <nl> r0 = w2 . read ( 0 ) <nl> r1 = w2 . read ( 1 ) <nl> r2 = w2 . read ( 2 ) <nl> + flow = w2 . flow <nl> <nl> - d0 , d1 , d2 = session . run ( [ r0 , r1 , r2 ] ) <nl> + d0 , d1 , d2 , flow_val = session . run ( [ r0 , r1 , r2 , flow ] ) <nl> self . assertAllEqual ( [ [ 4 . 0 , 5 . 0 ] ] , d0 ) <nl> self . assertAllEqual ( [ [ 1 . 0 , 3 . 0 ] ] , d1 ) <nl> self . assertAllEqual ( [ [ 7 . 0 , - 8 . 5 ] ] , d2 ) <nl> + self . assertAllEqual ( [ ] , flow_val . shape ) <nl> <nl> def _testTensorArrayWritePack ( self , tf_dtype ) : <nl> with self . test_session ( ) , self . test_scope ( ) : <nl> mmm a / tensorflow / compiler / tf2xla / kernels / tensor_array_ops . cc <nl> ppp b / tensorflow / compiler / tf2xla / kernels / tensor_array_ops . cc <nl> class TensorArrayOp : public XlaOpKernel { <nl> dtype_ , value , & var ) ) ; <nl> var - > tensor_array_size = size ; <nl> ctx - > SetResourceOutput ( 0 , var ) ; <nl> - ctx - > SetConstantOutput ( 1 , Tensor ( DT_FLOAT ) ) ; <nl> + <nl> + Tensor flow ( DT_FLOAT , TensorShape ( { } ) ) ; <nl> + flow . scalar < float > ( ) ( ) = 0 . 0f ; <nl> + ctx - > SetConstantOutput ( 1 , flow ) ; <nl> } <nl> <nl> private : <nl> class TensorArrayWriteOp : public XlaOpKernel { <nl> xla : : ComputationDataHandle ta = resource - > value ; <nl> xla : : ComputationDataHandle index = ctx - > Input ( 1 ) ; <nl> xla : : ComputationDataHandle value = ctx - > Input ( 2 ) ; <nl> + xla : : ComputationDataHandle flow = ctx - > Input ( 3 ) ; <nl> <nl> / / start_indices of the DynamicUpdateSlice are [ index , 0 , 0 , . . . , 0 ] . <nl> auto start_indices = XlaHelpers : : PadWithZeros ( b , index , elem_shape . dims ( ) ) ; <nl> class TensorArrayWriteOp : public XlaOpKernel { <nl> DynamicAddSlice ( b , ta , update , slice_shape . dim_sizes ( ) , start_indices ) ; <nl> <nl> resource - > value = written ; <nl> - ctx - > SetConstantOutput ( 0 , Tensor ( DT_FLOAT ) ) ; <nl> + ctx - > SetOutput ( 0 , flow ) ; <nl> } <nl> <nl> private : <nl> class TensorArrayScatterOp : public XlaOpKernel { <nl> <nl> xla : : ComputationDataHandle ta = resource - > value ; <nl> const xla : : ComputationDataHandle value = ctx - > Input ( 2 ) ; <nl> + const xla : : ComputationDataHandle flow = ctx - > Input ( 3 ) ; <nl> <nl> auto slice_dims = value_shape . dim_sizes ( ) ; <nl> slice_dims [ 0 ] = 1LL ; <nl> class TensorArrayScatterOp : public XlaOpKernel { <nl> } <nl> <nl> resource - > value = ta ; <nl> - ctx - > SetConstantOutput ( 0 , Tensor ( DT_FLOAT ) ) ; <nl> + ctx - > SetOutput ( 0 , flow ) ; <nl> } <nl> <nl> private : <nl> class TensorArraySplitOp : public XlaOpKernel { <nl> lengths . size ( ) , " vs . " , resource - > tensor_array_size , " ) " ) ) ; <nl> <nl> const xla : : ComputationDataHandle value = ctx - > Input ( 1 ) ; <nl> + const xla : : ComputationDataHandle flow = ctx - > Input ( 3 ) ; <nl> <nl> OP_REQUIRES ( ctx , value_shape . num_elements ( ) = = ta_shape . num_elements ( ) , <nl> errors : : InvalidArgument ( " mismatched element count " , <nl> class TensorArraySplitOp : public XlaOpKernel { <nl> <nl> resource - > value = b - > Add ( ta , b - > Reshape ( value , ta_shape . dim_sizes ( ) ) ) ; <nl> <nl> - ctx - > SetConstantOutput ( 0 , Tensor ( DT_FLOAT ) ) ; <nl> + ctx - > SetOutput ( 0 , flow ) ; <nl> } <nl> <nl> private : <nl>
[ TF : XLA ] Make the shape of a TensorArray flow value a scalar .
tensorflow/tensorflow
78cec04df0f714741f930ff3f234268102b71065
2017-07-25T16:52:05Z
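The test change above also shows the user-visible behaviour being asserted: the `flow` tensor of a `tf.TensorArray` is a scalar. A minimal sketch in TF 1.x graph mode (session-based API assumed), independent of the XLA test harness:

```python
import tensorflow as tf

ta = tf.TensorArray(dtype=tf.float32, size=2)
w0 = ta.write(0, [[4.0, 5.0]])   # write() returns a new TensorArray handle
w1 = w0.write(1, [[1.0, 3.0]])

with tf.Session() as sess:
    r0, flow_val = sess.run([w1.read(0), w1.flow])

print(r0)              # [[4. 5.]]
print(flow_val.shape)  # () -- the flow value is a scalar, as in the test
```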
mmm a / scripts / fetchDependency . ps1 <nl> ppp b / scripts / fetchDependency . ps1 <nl> function SelectProgram ( [ Parameter ( Mandatory = $ true ) ] [ string ] $ Dependency ) <nl> <nl> if ( $ Dependency - eq " cmake " ) <nl> { <nl> - $ requiredVersion = " 3 . 9 . 0 " <nl> - $ downloadVersion = " 3 . 9 . 0 " <nl> - $ url = " https : / / cmake . org / files / v3 . 9 / cmake - 3 . 9 . 0 - win32 - x86 . zip " <nl> - $ downloadPath = " $ downloadsDir \ cmake - 3 . 9 . 0 - win32 - x86 . zip " <nl> - $ expectedDownloadedFileHash = " 9d593839f64b94718a1b75b8519b56ecb959e4d37d406bf2a087e2c1f7a6b89c " <nl> - $ executableFromDownload = " $ downloadsDir \ cmake - 3 . 9 . 0 - win32 - x86 \ bin \ cmake . exe " <nl> + $ requiredVersion = " 3 . 9 . 1 " <nl> + $ downloadVersion = " 3 . 9 . 1 " <nl> + $ url = " https : / / cmake . org / files / v3 . 9 / cmake - 3 . 9 . 1 - win32 - x86 . zip " <nl> + $ downloadPath = " $ downloadsDir \ cmake - 3 . 9 . 1 - win32 - x86 . zip " <nl> + $ expectedDownloadedFileHash = " e0d9501bd34e3100e925dcb2e07f5f0ce8980bdbe5fce0ae950b21368d54c1a1 " <nl> + $ executableFromDownload = " $ downloadsDir \ cmake - 3 . 9 . 1 - win32 - x86 \ bin \ cmake . exe " <nl> $ extractionType = $ ExtractionType_ZIP <nl> $ extractionFolder = $ downloadsDir <nl> } <nl>
update cmake to 3 . 9 . 1
microsoft/vcpkg
8b09cb2efed9323133e54429a84be39c8c6c850f
2017-08-11T16:02:36Z
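For reference, `$expectedDownloadedFileHash` in the script above is a 64-hex-digit value, which suggests it is the plain SHA-256 of the downloaded archive. A sketch of reproducing such a value (Python used only for illustration; the archive name is taken from the URL in the diff, and the SHA-256 assumption is mine, not stated in the script):

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file in chunks so large archives do not load into memory.
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk_size), b''):
            digest.update(block)
    return digest.hexdigest()

print(sha256_of('cmake-3.9.1-win32-x86.zip'))
# If the SHA-256 assumption holds, this matches $expectedDownloadedFileHash.
```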
mmm a / configure . ac <nl> ppp b / configure . ac <nl> AC_CONFIG_FILES ( [ <nl> conf / Makefile <nl> ] ) <nl> AC_OUTPUT <nl> + <nl> + AC_MSG_NOTICE ( [ = = = Configuration results = = = ] ) <nl> + AC_MSG_NOTICE ( [ Version : $ PACKAGE_VERSION ] ) <nl> + AC_MSG_NOTICE ( [ c + + compiler : $ CXX ] ) <nl> + AC_MSG_NOTICE ( [ Warning CXXFLAGS : $ WARNING_CXXFLAGS ] ) <nl> + AC_MSG_NOTICE ( [ Picky CXXFLAGS : $ PICKY_CXXFLAGS ] ) <nl> + AC_MSG_NOTICE ( [ Harden CFLAGS : $ HARDEN_CFLAGS ] ) <nl> + AC_MSG_NOTICE ( [ = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ] ) <nl>
Display results at end of configure .
mobile-shell/mosh
3e3510d382466d01b4ee34a3d5cc437e631565bd
2012-10-29T19:03:13Z
deleted file mode 100644 <nl> index 630ae424ee9e . . 000000000000 <nl> mmm a / npm / path . txt <nl> ppp / dev / null <nl> @ @ - 1 + 0 , 0 @ @ <nl> - / Users / rnbwd / Projects / forks / electron - prebuilt / dist / Electron . app / Contents / MacOS / Electron <nl> \ No newline at end of file <nl>
update : remove stray npm / path . txt containing a developer - local Electron path
electron/electron
8fdb5b42d8939ca960f75651c57e095aaad2e67d
2015-05-07T00:52:51Z