diff (stringlengths 41–2.03M) | msg (stringlengths 1–1.5k, ⌀) | repo (stringlengths 5–40) | sha (stringlengths 40–40) | time (stringlengths 20–20) |
---|---|---|---|---|
mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> IF ( APPLE ) <nl> set ( CMAKE_CXX_FLAGS " $ { CMAKE_CXX_FLAGS } - stdlib = libc + + " ) <nl> set ( CMAKE_EXE_LINKER_FLAGS " $ { CMAKE_EXE_LINKER_FLAGS } - stdlib = libc + + " ) <nl> ELSEIF ( WIN32 ) <nl> - # WSAPoll and other fileutil . cpp functionalities didn ' t exist before WinNT 6 . x ( Vista and up ) <nl> + # WSAPoll and SHGetKnownFolderPath ( AppData / Roaming ) didn ' t exist before WinNT 6 . x ( Vista ) <nl> add_definitions ( - D_WIN32_WINNT = 0x0600 - DWINVER = 0x0600 ) <nl> set ( PLATFORM_LIBRARIES winmm ws2_32 ) <nl> - if ( MINGW ) <nl> + IF ( MINGW ) <nl> # PSAPI is the Process Status API <nl> - set ( PLATFORM_LIBRARIES winmm ws2_32 psapi imm32 version ) <nl> + set ( PLATFORM_LIBRARIES $ { PLATFORM_LIBRARIES } psapi imm32 version ) <nl> ENDIF ( MINGW ) <nl> ELSEIF ( CMAKE_SYSTEM_NAME MATCHES " ^ ( Linux | kFreeBSD | GNU | SunOS ) $ " ) <nl> set ( PLATFORM_LIBRARIES rt ) <nl> | Appending PLATFORM_LIBRARIES instead of redefining them | yuzu-emu/yuzu | 35e979f041aed8a5fc1da3dde254ce11e6138562 | 2016-11-30T15:28:21Z |
mmm a / generic / THTensorMath . c <nl> ppp b / generic / THTensorMath . c <nl> void THTensor_ ( mean ) ( THTensor * r_ , THTensor * t , int dimension , int keepdim ) <nl> THTensor_ ( div ) ( r_ , r_ , t - > size [ dimension ] ) ; <nl> } <nl> <nl> - void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keepdim ) <nl> + void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int biased , int keepdim ) <nl> { <nl> THLongStorage * dim ; <nl> <nl> void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keep <nl> sum2 + = z * z ; <nl> } <nl> <nl> - if ( flag ) <nl> + if ( biased ) <nl> { <nl> sum / = t_size ; <nl> sum2 / = t_size ; <nl> void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keep <nl> } <nl> } <nl> <nl> - void THTensor_ ( var ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keepdim ) <nl> + void THTensor_ ( var ) ( THTensor * r_ , THTensor * t , int dimension , int biased , int keepdim ) <nl> { <nl> THLongStorage * dim ; <nl> <nl> void THTensor_ ( var ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keep <nl> sum2 + = z * z ; <nl> } <nl> <nl> - if ( flag ) <nl> + if ( biased ) <nl> { <nl> sum / = t_size ; <nl> sum2 / = t_size ; <nl> accreal THTensor_ ( meanall ) ( THTensor * tensor ) <nl> return THTensor_ ( sumall ) ( tensor ) / THTensor_ ( nElement ) ( tensor ) ; <nl> } <nl> <nl> - accreal THTensor_ ( varall ) ( THTensor * tensor ) <nl> + accreal THTensor_ ( varall ) ( THTensor * tensor , int biased ) <nl> { <nl> accreal mean = THTensor_ ( meanall ) ( tensor ) ; <nl> accreal sum = 0 ; <nl> TH_TENSOR_APPLY ( real , tensor , sum + = ( * tensor_data - mean ) * ( * tensor_data - mean ) ; ) ; <nl> - sum / = ( THTensor_ ( nElement ) ( tensor ) - 1 ) ; <nl> + sum / = THTensor_ ( nElement ) ( tensor ) - ( biased ? 0 : 1 ) ; <nl> return sum ; <nl> } <nl> <nl> - accreal THTensor_ ( stdall ) ( THTensor * tensor ) <nl> + accreal THTensor_ ( stdall ) ( THTensor * tensor , int biased ) <nl> { <nl> - return sqrt ( THTensor_ ( varall ) ( tensor ) ) ; <nl> + return sqrt ( THTensor_ ( varall ) ( tensor , biased ) ) ; <nl> } <nl> <nl> void THTensor_ ( linspace ) ( THTensor * r_ , real a , real b , long n ) <nl> mmm a / generic / THTensorMath . h <nl> ppp b / generic / THTensorMath . 
h <nl> TH_API void THTensor_ ( frac ) ( THTensor * r_ , THTensor * t ) ; <nl> TH_API void THTensor_ ( lerp ) ( THTensor * r_ , THTensor * a , THTensor * b , real weight ) ; <nl> <nl> TH_API void THTensor_ ( mean ) ( THTensor * r_ , THTensor * t , int dimension , int keepdim ) ; <nl> - TH_API void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keepdim ) ; <nl> - TH_API void THTensor_ ( var ) ( THTensor * r_ , THTensor * t , int dimension , int flag , int keepdim ) ; <nl> + TH_API void THTensor_ ( std ) ( THTensor * r_ , THTensor * t , int dimension , int biased , int keepdim ) ; <nl> + TH_API void THTensor_ ( var ) ( THTensor * r_ , THTensor * t , int dimension , int biased , int keepdim ) ; <nl> TH_API void THTensor_ ( norm ) ( THTensor * r_ , THTensor * t , real value , int dimension , int keepdim ) ; <nl> TH_API void THTensor_ ( renorm ) ( THTensor * r_ , THTensor * t , real value , int dimension , real maxnorm ) ; <nl> TH_API accreal THTensor_ ( dist ) ( THTensor * a , THTensor * b , real value ) ; <nl> TH_API void THTensor_ ( histc ) ( THTensor * hist , THTensor * tensor , long nbins , real <nl> TH_API void THTensor_ ( bhistc ) ( THTensor * hist , THTensor * tensor , long nbins , real minvalue , real maxvalue ) ; <nl> <nl> TH_API accreal THTensor_ ( meanall ) ( THTensor * self ) ; <nl> - TH_API accreal THTensor_ ( varall ) ( THTensor * self ) ; <nl> - TH_API accreal THTensor_ ( stdall ) ( THTensor * self ) ; <nl> + TH_API accreal THTensor_ ( varall ) ( THTensor * self , int biased ) ; <nl> + TH_API accreal THTensor_ ( stdall ) ( THTensor * self , int biased ) ; <nl> TH_API accreal THTensor_ ( normall ) ( THTensor * t , real value ) ; <nl> <nl> TH_API void THTensor_ ( linspace ) ( THTensor * r_ , real a , real b , long n ) ; <nl> | Wrap unbiased flag in var , std , varall , stdall | pytorch/pytorch | f805a8388be8dc55af0e3aa165b13cd0fce484d3 | 2017-07-14T21:25:25Z |
mmm a / folly / Range . h <nl> ppp b / folly / Range . h <nl> template < class T > class Range ; <nl> * as Boyer - Moore . On the upside , it does not do any upfront <nl> * preprocessing and does not allocate memory . <nl> * / <nl> - template < class T > <nl> + template < class T , class Comp = std : : equal_to < typename Range < T > : : value_type > > <nl> inline size_t qfind ( const Range < T > & haystack , <nl> - const Range < T > & needle ) ; <nl> + const Range < T > & needle , <nl> + Comp eq = Comp ( ) ) ; <nl> <nl> / * * <nl> * Finds the first occurrence of needle in haystack . The result is the <nl> struct StringPieceHash { <nl> / * * <nl> * Finds substrings faster than brute force by borrowing from Boyer - Moore <nl> * / <nl> - template < class T , class Comp > <nl> + template < class T , class Comp = std : : equal_to < typename Range < T > : : value_type > > <nl> size_t qfind ( const Range < T > & haystack , <nl> const Range < T > & needle , <nl> - Comp eq ) { <nl> + Comp eq = Comp ( ) ) { <nl> / / Don ' t use std : : search , use a Boyer - Moore - like trick by comparing <nl> / / the last characters first <nl> auto const nsize = needle . size ( ) ; <nl> struct AsciiCaseInsensitive { <nl> extern const AsciiCaseSensitive asciiCaseSensitive ; <nl> extern const AsciiCaseInsensitive asciiCaseInsensitive ; <nl> <nl> - template < class T > <nl> - size_t qfind ( const Range < T > & haystack , <nl> - const Range < T > & needle ) { <nl> - return qfind ( haystack , needle , asciiCaseSensitive ) ; <nl> - } <nl> - <nl> template < class T > <nl> size_t qfind ( const Range < T > & haystack , <nl> const typename Range < T > : : value_type & needle ) { <nl> mmm a / folly / test / RangeTest . cpp <nl> ppp b / folly / test / RangeTest . cpp <nl> TEST ( StringPiece , InvalidRange ) { <nl> EXPECT_THROW ( a . subpiece ( 6 ) , std : : out_of_range ) ; <nl> } <nl> <nl> + TEST ( qfind , UInt32_Ranges ) { <nl> + vector < uint32_t > a ( { 1 , 2 , 3 , 260 , 5 } ) ; <nl> + vector < uint32_t > b ( { 2 , 3 , 4 } ) ; <nl> + <nl> + auto a_range = folly : : Range < const uint32_t * > ( & a [ 0 ] , a . size ( ) ) ; <nl> + auto b_range = folly : : Range < const uint32_t * > ( & b [ 0 ] , b . size ( ) ) ; <nl> + <nl> + EXPECT_EQ ( qfind ( a_range , b_range ) , string : : npos ) ; <nl> + <nl> + a [ 3 ] = 4 ; <nl> + EXPECT_EQ ( qfind ( a_range , b_range ) , 1 ) ; <nl> + } <nl> + <nl> template < typename NeedleFinder > <nl> class NeedleFinderTest : public : : testing : : Test { <nl> public : <nl> | Use std : : equal < > as qfind default comparator | facebook/folly | 9ada53a918e2e29c124f1571a609cf3ca98e377d | 2013-08-28T21:30:11Z |
mmm a / src / app / commands / cmd_new_frame . cpp <nl> ppp b / src / app / commands / cmd_new_frame . cpp <nl> <nl> # include " app / ui / editor / editor . h " <nl> # include " app / ui / main_window . h " <nl> # include " app / ui / status_bar . h " <nl> + # include " app / ui / timeline . h " <nl> # include " app / ui_context . h " <nl> # include " doc / cel . h " <nl> # include " doc / image . h " <nl> void NewFrameCommand : : onExecute ( Context * context ) <nl> case Content : : NEW_EMPTY_FRAME : <nl> api . addEmptyFrame ( sprite , writer . frame ( ) + 1 ) ; <nl> break ; <nl> - case Content : : DUPLICATE_CEL : <nl> - api . copyCel ( <nl> - static_cast < LayerImage * > ( writer . layer ( ) ) , writer . frame ( ) , <nl> - static_cast < LayerImage * > ( writer . layer ( ) ) , writer . frame ( ) + 1 ) ; <nl> - <nl> - / / TODO should we use DocumentObserver ? <nl> - if ( UIContext : : instance ( ) = = context ) { <nl> - if ( DocumentView * view = UIContext : : instance ( ) - > activeView ( ) ) <nl> - view - > getEditor ( ) - > setFrame ( writer . frame ( ) + 1 ) ; <nl> + case Content : : DUPLICATE_CEL : { <nl> + / / TODO the range of selected frames should be in doc : : Site . <nl> + Timeline * timeline = App : : instance ( ) - > getMainWindow ( ) - > getTimeline ( ) ; <nl> + Timeline : : Range range = timeline - > range ( ) ; <nl> + if ( range . enabled ( ) ) { <nl> + timeline - > prepareToMoveRange ( ) ; <nl> + <nl> + for ( LayerIndex layer = range . layerBegin ( ) ; layer < = range . layerEnd ( ) ; + + layer ) { <nl> + Layer * layerPtr = writer . sprite ( ) - > indexToLayer ( layer ) ; <nl> + if ( layerPtr - > isImage ( ) ) { <nl> + for ( frame_t frame = range . frameEnd ( ) ; frame > = range . frameBegin ( ) ; - - frame ) { <nl> + api . copyCel ( <nl> + static_cast < LayerImage * > ( layerPtr ) , frame , <nl> + static_cast < LayerImage * > ( layerPtr ) , frame + range . frames ( ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + range . displace ( 0 , range . frames ( ) ) ; <nl> + timeline - > moveRange ( range ) ; <nl> + } <nl> + else { <nl> + api . copyCel ( <nl> + static_cast < LayerImage * > ( writer . layer ( ) ) , writer . frame ( ) , <nl> + static_cast < LayerImage * > ( writer . layer ( ) ) , writer . frame ( ) + 1 ) ; <nl> + <nl> + / / TODO should we use DocumentObserver ? <nl> + if ( UIContext : : instance ( ) = = context ) { <nl> + if ( DocumentView * view = UIContext : : instance ( ) - > activeView ( ) ) <nl> + view - > getEditor ( ) - > setFrame ( writer . frame ( ) + 1 ) ; <nl> + } <nl> } <nl> break ; <nl> + } <nl> } <nl> <nl> transaction . commit ( ) ; <nl> mmm a / src / app / ui / timeline . cpp <nl> ppp b / src / app / ui / timeline . cpp <nl> void Timeline : : setFrame ( frame_t frame ) <nl> m_editor - > setFrame ( m_frame ) ; <nl> } <nl> <nl> + void Timeline : : prepareToMoveRange ( ) <nl> + { <nl> + ASSERT ( m_range . enabled ( ) ) ; <nl> + <nl> + m_moveRangeData . activeRelativeLayer = getLayerIndex ( m_layer ) - m_range . layerBegin ( ) ; <nl> + m_moveRangeData . activeRelativeFrame = m_frame - m_range . frameBegin ( ) ; <nl> + } <nl> + <nl> + void Timeline : : moveRange ( Range & range ) <nl> + { <nl> + regenerateLayers ( ) ; <nl> + <nl> + if ( range . layerBegin ( ) > = LayerIndex ( 0 ) & & <nl> + range . layerBegin ( ) + m_moveRangeData . activeRelativeLayer < int ( m_layers . size ( ) ) ) { <nl> + setLayer ( m_layers [ range . layerBegin ( ) + m_moveRangeData . activeRelativeLayer ] ) ; <nl> + } <nl> + <nl> + if ( range . 
frameBegin ( ) > = frame_t ( 0 ) ) <nl> + setFrame ( range . frameBegin ( ) + m_moveRangeData . activeRelativeFrame ) ; <nl> + <nl> + m_range = range ; <nl> + } <nl> + <nl> void Timeline : : activateClipboardRange ( ) <nl> { <nl> m_clipboard_timer . start ( ) ; <nl> void Timeline : : dropRange ( DropOp op ) <nl> break ; <nl> } <nl> <nl> - int activeRelativeLayer = getLayerIndex ( m_layer ) - m_range . layerBegin ( ) ; <nl> - frame_t activeRelativeFrame = m_frame - m_range . frameBegin ( ) ; <nl> + prepareToMoveRange ( ) ; <nl> <nl> try { <nl> if ( copy ) <nl> void Timeline : : dropRange ( DropOp op ) <nl> else <nl> newFromRange = move_range ( m_document , m_range , m_dropRange , place ) ; <nl> <nl> - regenerateLayers ( ) ; <nl> + / / If we drop a cel in the same frame ( but in another layer ) , <nl> + / / document views are not updated , so we are forcing the updating of <nl> + / / all views . <nl> + m_document - > notifyGeneralUpdate ( ) ; <nl> <nl> - m_range = newFromRange ; <nl> - if ( m_range . layerBegin ( ) > = LayerIndex ( 0 ) ) <nl> - setLayer ( m_layers [ m_range . layerBegin ( ) + activeRelativeLayer ] ) ; <nl> - if ( m_range . frameBegin ( ) > = frame_t ( 0 ) ) <nl> - setFrame ( m_range . frameBegin ( ) + activeRelativeFrame ) ; <nl> + moveRange ( newFromRange ) ; <nl> } <nl> catch ( const std : : exception & e ) { <nl> ui : : Alert : : show ( " Problem < < % s | | & OK " , e . what ( ) ) ; <nl> } <nl> - <nl> - / / If we drop a cel in the same frame ( but in another layer ) , <nl> - / / document views are not updated , so we are forcing the updating of <nl> - / / all views . <nl> - m_document - > notifyGeneralUpdate ( ) ; <nl> - <nl> - invalidate ( ) ; <nl> } <nl> <nl> void Timeline : : updateDropRange ( const gfx : : Point & pt ) <nl> mmm a / src / app / ui / timeline . h <nl> ppp b / src / app / ui / timeline . h <nl> namespace app { <nl> <nl> Range range ( ) const { return m_range ; } <nl> <nl> + void prepareToMoveRange ( ) ; <nl> + void moveRange ( Range & range ) ; <nl> + <nl> void activateClipboardRange ( ) ; <nl> <nl> / / Drag - and - drop operations . These actions are used by commands <nl> namespace app { <nl> bool m_copy ; / / True if the drag - and - drop operation is a copy . <nl> <nl> AniControls m_aniControls ; <nl> + <nl> + / / Temporal data used to move the range . <nl> + struct MoveRange { <nl> + int activeRelativeLayer ; <nl> + frame_t activeRelativeFrame ; <nl> + } m_moveRangeData ; <nl> } ; <nl> <nl> } / / namespace app <nl> | Add support to copy cel in next frame for several layers ( fix ) | aseprite/aseprite | 721c8a0a4a34b9b1dc3c01420060d6b30dfc2ca3 | 2015-08-05T12:40:11Z |
mmm a / test / mjsunit / mjsunit . status <nl> ppp b / test / mjsunit / mjsunit . status <nl> <nl> ' bitops - info ' : [ PASS , NO_VARIANTS ] , # fails on ARM hardware . <nl> ' md5 ' : [ PASS , NO_VARIANTS ] , # fails on ARM hardware . <nl> ' debug - break - inline ' : [ PASS , NO_VARIANTS ] , # very flaky . <nl> - ' debug - evaluate - closure ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - evaluate - locals - optimized ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - evaluate - locals - optimized - double ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - evaluate - recursive ' : [ PASS , NO_VARIANTS ] , # only in no - snap debug . <nl> - ' debug - setbreakpoint ' : [ PASS , NO_VARIANTS ] , # only in no - snap debug . <nl> ' debug - step ' : [ PASS , NO_VARIANTS ] , # windows only . <nl> ' debug - step - 2 ' : [ PASS , NO_VARIANTS ] , # flaky in no - snap mode . <nl> ' debug - step - 3 ' : [ PASS , NO_VARIANTS ] , # flaky in no - snap mode . <nl> - ' debug - stepframe - clearing ' : [ PASS , NO_VARIANTS ] , # only in no - snap debug . <nl> - ' debug - stepin - call - function - stub ' : [ PASS , NO_VARIANTS ] , # only in no - snap debug . <nl> - ' regress / regress - 3717 ' : [ PASS , NO_VARIANTS ] , # only in no - snap mode . <nl> ' regress / regress - 2451 ' : [ PASS , NO_VARIANTS ] , # with custom snapshot and gc - stress . <nl> ' debug - multiple - breakpoints ' : [ PASS , NO_VARIANTS ] , # with custom snapshot and gc - stress . <nl> - ' debug - listbreakpoints ' : [ PASS , NO_VARIANTS ] , # arm64 nosnap with turbofan <nl> - ' debug - enable - disable - breakpoints ' : [ PASS , NO_VARIANTS ] , # arm64 nosnap with turbofan . <nl> <nl> # TODO ( jarin / mstarzinger ) : Investigate debugger issues with TurboFan . <nl> + ' debug - evaluate - closure ' : [ PASS , NO_VARIANTS ] , <nl> ' debug - evaluate - locals ' : [ PASS , NO_VARIANTS ] , <nl> ' debug - evaluate - locals - capturing ' : [ PASS , NO_VARIANTS ] , <nl> + ' debug - evaluate - locals - optimized ' : [ PASS , NO_VARIANTS ] , <nl> + ' debug - evaluate - locals - optimized - double ' : [ PASS , NO_VARIANTS ] , <nl> + ' debug - evaluate - recursive ' : [ PASS , NO_VARIANTS ] , # only in no - snap debug . <nl> ' debug - liveedit - check - stack ' : [ PASS , NO_VARIANTS ] , # only in no - snap mode . <nl> ' debug - liveedit - double - call ' : [ PASS , NO_VARIANTS ] , <nl> ' debug - set - variable - value ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - stepout - scope - part1 ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - stepout - scope - part2 ' : [ PASS , NO_VARIANTS ] , <nl> - ' debug - stepout - scope - part3 ' : [ PASS , NO_VARIANTS ] , <nl> ' es6 / debug - evaluate - blockscopes ' : [ PASS , NO_VARIANTS ] , <nl> <nl> # TODO ( titzer ) : - - always - opt incorrectly disables CrankShaft soft deopt points <nl> | [ turbofan ] Re - enable debugger tests that no longer fail . | v8/v8 | 3660505e5f1442f45f02da37b284aac3844a0d8e | 2016-07-08T12:49:53Z |
mmm a / html5 / runtime / task - center . js <nl> ppp b / html5 / runtime / task - center . js <nl> export function init ( ) { <nl> const method = DOM_METHODS [ name ] <nl> proto [ name ] = method ? <nl> ( id , args ) = > method ( id , . . . args ) : <nl> - ( id , args ) = > fallback ( id , [ { module : ' dom ' , method : name , args } ] ) <nl> + ( id , args ) = > fallback ( id , [ { module : ' dom ' , method : name , args } ] , ' - 1 ' ) <nl> } <nl> <nl> proto . componentHandler = global . callNativeComponent | | <nl> mmm a / html5 / test / unit / vdom / listener . js <nl> ppp b / html5 / test / unit / vdom / listener . js <nl> describe ( ' dom listener details ' , ( ) = > { <nl> expect ( spy . args [ 0 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' createBody ' , <nl> args : [ { type : ' r ' , ref : ' _root ' , attr : { a : 1 } , style : { b : 2 } } ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> done ( ) <nl> } ) <nl> <nl> describe ( ' dom listener details ' , ( ) = > { <nl> expect ( spy . args [ 0 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' createBody ' , <nl> args : [ { type : ' r ' , ref : ' _root ' , attr : { a : 1 } , style : { b : 2 } } ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> done ( ) <nl> } ) <nl> <nl> describe ( ' dom listener details ' , ( ) = > { <nl> expect ( spy . args [ 0 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' createBody ' , <nl> args : [ body . toJSON ( ) ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> <nl> const el = doc . createElement ( ' a ' ) <nl> el . setAttr ( ' x ' , 1 ) <nl> describe ( ' dom listener details ' , ( ) = > { <nl> expect ( spy . args [ 1 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' addElement ' , <nl> args : [ ' _root ' , el . toJSON ( ) , - 1 ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> <nl> const el2 = doc . createElement ( ' b ' ) <nl> doc . body . insertBefore ( el2 , el ) / / [ el2 , el ] <nl> describe ( ' dom listener details ' , ( ) = > { <nl> expect ( spy . args [ 2 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' addElement ' , <nl> args : [ ' _root ' , el2 . toJSON ( ) , 0 ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> expect ( spy . args [ 3 ] ) . eql ( [ [ { <nl> module : ' dom ' , method : ' addElement ' , <nl> args : [ ' _root ' , el3 . toJSON ( ) , 2 ] <nl> - } ] ] ) <nl> + } ] , ' - 1 ' ] ) <nl> <nl> done ( ) <nl> } ) <nl> | * [ jsfm ] always return ' - 1 ' in original callNative | apache/incubator-weex | 24fbee6fdeacf5cd2834618434d866db4c55fe0b | 2016-12-30T09:06:29Z |
mmm a / tensorflow / core / common_runtime / optimization_registry . cc <nl> ppp b / tensorflow / core / common_runtime / optimization_registry . cc <nl> void OptimizationPassRegistry : : Register ( <nl> <nl> Status OptimizationPassRegistry : : RunGrouping ( <nl> Grouping grouping , const GraphOptimizationPassOptions & options ) { <nl> + LOG ( INFO ) <nl> + < < " Running all optimization passes in grouping " < < grouping <nl> + < < " . If you see this a lot , you might be extending the graph too many " <nl> + " times ( which means you modify the graph many times before " <nl> + " execution ) . Try reducing graph modifications or using SavedModel to " <nl> + " avoid any graph modification " ; <nl> auto group = groups_ . find ( grouping ) ; <nl> if ( group ! = groups_ . end ( ) ) { <nl> for ( auto & phase : group - > second ) { <nl> | Add a log message to inform user when graph optimization passes are being executed too many times and possible solution . | tensorflow/tensorflow | 4eada543abd9b7d681d8263215259b2db9f9ebbe | 2019-05-08T22:00:16Z |
mmm a / src / ic / accessor - assembler . cc <nl> ppp b / src / ic / accessor - assembler . cc <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> / / for a handler in the stub cache . <nl> TNode < DescriptorArray > descriptors = LoadMapDescriptors ( receiver_map ) ; <nl> <nl> - Label if_descriptor_found ( this ) , stub_cache ( this ) ; <nl> + Label if_descriptor_found ( this ) , try_stub_cache ( this ) ; <nl> TVARIABLE ( IntPtrT , var_name_index ) ; <nl> - Label * notfound = <nl> - use_stub_cache = = kUseStubCache ? & stub_cache : & lookup_prototype_chain ; <nl> + Label * notfound = use_stub_cache = = kUseStubCache ? & try_stub_cache <nl> + : & lookup_prototype_chain ; <nl> DescriptorLookup ( p - > name , descriptors , bitfield3 , & if_descriptor_found , <nl> & var_name_index , notfound ) ; <nl> <nl> void AccessorAssembler : : GenericPropertyLoad ( Node * receiver , Node * receiver_map , <nl> } <nl> <nl> if ( use_stub_cache = = kUseStubCache ) { <nl> + Label stub_cache ( this ) ; <nl> + BIND ( & try_stub_cache ) ; <nl> + / / When there is no feedback vector don ' t use stub cache . <nl> + GotoIfNot ( IsUndefined ( p - > vector ) , & stub_cache ) ; <nl> + / / Fall back to the slow path for private symbols . <nl> + Branch ( IsPrivateSymbol ( p - > name ) , slow , & lookup_prototype_chain ) ; <nl> + <nl> BIND ( & stub_cache ) ; <nl> Comment ( " stub cache probe for fast property load " ) ; <nl> TVARIABLE ( MaybeObject , var_handler ) ; <nl> void AccessorAssembler : : LoadIC_BytecodeHandler ( const LoadICParameters * p , <nl> / / changes in control flow and logic . We currently have no way of ensuring <nl> / / that no frame is constructed , so it ' s easy to break this optimization by <nl> / / accident . <nl> - Label stub_call ( this , Label : : kDeferred ) , miss ( this , Label : : kDeferred ) ; <nl> + Label stub_call ( this , Label : : kDeferred ) , miss ( this , Label : : kDeferred ) , <nl> + no_feedback ( this , Label : : kDeferred ) ; <nl> <nl> - GotoIf ( IsUndefined ( p - > vector ) , & miss ) ; <nl> + Node * recv_map = LoadReceiverMap ( p - > receiver ) ; <nl> + GotoIf ( IsDeprecatedMap ( recv_map ) , & miss ) ; <nl> + <nl> + GotoIf ( IsUndefined ( p - > vector ) , & no_feedback ) ; <nl> <nl> / / Inlined fast path . <nl> { <nl> Comment ( " LoadIC_BytecodeHandler_fast " ) ; <nl> <nl> - Node * recv_map = LoadReceiverMap ( p - > receiver ) ; <nl> - GotoIf ( IsDeprecatedMap ( recv_map ) , & miss ) ; <nl> - <nl> TVARIABLE ( MaybeObject , var_handler ) ; <nl> Label try_polymorphic ( this ) , if_handler ( this , & var_handler ) ; <nl> <nl> void AccessorAssembler : : LoadIC_BytecodeHandler ( const LoadICParameters * p , <nl> p - > receiver , p - > name , p - > slot , p - > vector ) ; <nl> } <nl> <nl> + BIND ( & no_feedback ) ; <nl> + { <nl> + Comment ( " LoadIC_BytecodeHandler_nofeedback " ) ; <nl> + / / Call into the stub that implements the non - inlined parts of LoadIC . <nl> + exit_point - > ReturnCallStub ( <nl> + Builtins : : CallableFor ( isolate ( ) , Builtins : : kLoadIC_Uninitialized ) , <nl> + p - > context , p - > receiver , p - > name , p - > slot , p - > vector ) ; <nl> + } <nl> + <nl> BIND ( & miss ) ; <nl> { <nl> Comment ( " LoadIC_BytecodeHandler_miss " ) ; <nl> void AccessorAssembler : : LoadIC_Noninlined ( const LoadICParameters * p , <nl> } <nl> } <nl> <nl> + / / TODO ( 8860 ) : This check is only required so we can make prototypes fast on <nl> + / / the first load . 
This is not really useful when there is no feedback vector <nl> + / / and may not be important when lazily allocating feedback vectors . Once lazy <nl> + / / allocation of feedback vectors has landed try to eliminate this check . <nl> + void AccessorAssembler : : BranchIfPrototypeShouldbeFast ( Node * receiver_map , <nl> + Label * prototype_not_fast , <nl> + Label * prototype_fast ) { <nl> + VARIABLE ( var_map , MachineRepresentation : : kTagged ) ; <nl> + var_map . Bind ( receiver_map ) ; <nl> + Label loop_body ( this , & var_map ) ; <nl> + Goto ( & loop_body ) ; <nl> + <nl> + BIND ( & loop_body ) ; <nl> + { <nl> + Node * map = var_map . value ( ) ; <nl> + Node * prototype = LoadMapPrototype ( map ) ; <nl> + GotoIf ( IsNull ( prototype ) , prototype_fast ) ; <nl> + TNode < PrototypeInfo > proto_info = <nl> + LoadMapPrototypeInfo ( receiver_map , prototype_not_fast ) ; <nl> + GotoIf ( IsNull ( prototype ) , prototype_not_fast ) ; <nl> + TNode < Uint32T > flags = <nl> + LoadObjectField < Uint32T > ( proto_info , PrototypeInfo : : kBitFieldOffset ) ; <nl> + GotoIf ( Word32Equal ( flags , Uint32Constant ( 0 ) ) , prototype_not_fast ) ; <nl> + <nl> + Node * prototype_map = LoadMap ( prototype ) ; <nl> + var_map . Bind ( prototype_map ) ; <nl> + Goto ( & loop_body ) ; <nl> + } <nl> + } <nl> + <nl> void AccessorAssembler : : LoadIC_Uninitialized ( const LoadICParameters * p ) { <nl> - Label miss ( this , Label : : kDeferred ) ; <nl> + Label miss ( this , Label : : kDeferred ) , <nl> + check_if_fast_prototype ( this , Label : : kDeferred ) , <nl> + check_function_prototype ( this ) ; <nl> Node * receiver = p - > receiver ; <nl> GotoIf ( TaggedIsSmi ( receiver ) , & miss ) ; <nl> Node * receiver_map = LoadMap ( receiver ) ; <nl> Node * instance_type = LoadMapInstanceType ( receiver_map ) ; <nl> <nl> + GotoIf ( IsUndefined ( p - > vector ) , & check_if_fast_prototype ) ; <nl> / / Optimistically write the state transition to the vector . <nl> StoreFeedbackVectorSlot ( p - > vector , p - > slot , <nl> LoadRoot ( RootIndex : : kpremonomorphic_symbol ) , <nl> SKIP_WRITE_BARRIER , 0 , SMI_PARAMETERS ) ; <nl> StoreWeakReferenceInFeedbackVector ( p - > vector , p - > slot , receiver_map , <nl> kTaggedSize , SMI_PARAMETERS ) ; <nl> + Goto ( & check_function_prototype ) ; <nl> <nl> + BIND ( & check_if_fast_prototype ) ; <nl> + { <nl> + BranchIfPrototypeShouldbeFast ( receiver_map , & miss , <nl> + & check_function_prototype ) ; <nl> + } <nl> + <nl> + BIND ( & check_function_prototype ) ; <nl> { <nl> / / Special case for Function . prototype load , because it ' s very common <nl> / / for ICs that are only executed once ( MyFunc . prototype . foo = . . . ) . <nl> void AccessorAssembler : : LoadIC_Uninitialized ( const LoadICParameters * p ) { <nl> <nl> BIND ( & miss ) ; <nl> { <nl> + Label call_runtime ( this , Label : : kDeferred ) ; <nl> + GotoIf ( IsUndefined ( p - > vector ) , & call_runtime ) ; <nl> / / Undo the optimistic state transition . 
<nl> StoreFeedbackVectorSlot ( p - > vector , p - > slot , <nl> LoadRoot ( RootIndex : : kuninitialized_symbol ) , <nl> SKIP_WRITE_BARRIER , 0 , SMI_PARAMETERS ) ; <nl> + Goto ( & call_runtime ) ; <nl> <nl> + BIND ( & call_runtime ) ; <nl> TailCallRuntime ( Runtime : : kLoadIC_Miss , p - > context , p - > receiver , p - > name , <nl> p - > slot , p - > vector ) ; <nl> } <nl> } <nl> <nl> - void AccessorAssembler : : LoadGlobalIC ( TNode < FeedbackVector > vector , Node * slot , <nl> + void AccessorAssembler : : LoadGlobalIC ( Node * vector , Node * slot , <nl> const LazyNode < Context > & lazy_context , <nl> const LazyNode < Name > & lazy_name , <nl> TypeofMode typeof_mode , <nl> ExitPoint * exit_point , <nl> ParameterMode slot_mode ) { <nl> Label try_handler ( this , Label : : kDeferred ) , miss ( this , Label : : kDeferred ) ; <nl> - LoadGlobalIC_TryPropertyCellCase ( vector , slot , lazy_context , exit_point , <nl> + GotoIf ( IsUndefined ( vector ) , & miss ) ; <nl> + <nl> + LoadGlobalIC_TryPropertyCellCase ( CAST ( vector ) , slot , lazy_context , exit_point , <nl> & try_handler , & miss , slot_mode ) ; <nl> <nl> BIND ( & try_handler ) ; <nl> - LoadGlobalIC_TryHandlerCase ( vector , slot , lazy_context , lazy_name , <nl> + LoadGlobalIC_TryHandlerCase ( CAST ( vector ) , slot , lazy_context , lazy_name , <nl> typeof_mode , exit_point , & miss , slot_mode ) ; <nl> <nl> BIND ( & miss ) ; <nl> void AccessorAssembler : : KeyedLoadIC ( const LoadICParameters * p ) { <nl> Label if_handler ( this , & var_handler ) , try_polymorphic ( this , Label : : kDeferred ) , <nl> try_megamorphic ( this , Label : : kDeferred ) , <nl> try_polymorphic_name ( this , Label : : kDeferred ) , <nl> - miss ( this , Label : : kDeferred ) ; <nl> + miss ( this , Label : : kDeferred ) , generic ( this , Label : : kDeferred ) ; <nl> <nl> Node * receiver_map = LoadReceiverMap ( p - > receiver ) ; <nl> GotoIf ( IsDeprecatedMap ( receiver_map ) , & miss ) ; <nl> <nl> + GotoIf ( IsUndefined ( p - > vector ) , & generic ) ; <nl> + <nl> / / Check monomorphic case . <nl> TNode < MaybeObject > feedback = <nl> TryMonomorphicCase ( p - > slot , p - > vector , receiver_map , & if_handler , <nl> void AccessorAssembler : : KeyedLoadIC ( const LoadICParameters * p ) { <nl> { <nl> / / Check megamorphic case . <nl> Comment ( " KeyedLoadIC_try_megamorphic " ) ; <nl> - GotoIfNot ( <nl> - WordEqual ( strong_feedback , LoadRoot ( RootIndex : : kmegamorphic_symbol ) ) , <nl> - & try_polymorphic_name ) ; <nl> + Branch ( WordEqual ( strong_feedback , LoadRoot ( RootIndex : : kmegamorphic_symbol ) ) , <nl> + & generic , & try_polymorphic_name ) ; <nl> + } <nl> + <nl> + BIND ( & generic ) ; <nl> + { <nl> / / TODO ( jkummerow ) : Inline this ? Or some of it ? 
<nl> TailCallBuiltin ( Builtins : : kKeyedLoadIC_Megamorphic , p - > context , p - > receiver , <nl> p - > name , p - > slot , p - > vector ) ; <nl> } <nl> + <nl> BIND ( & try_polymorphic_name ) ; <nl> { <nl> / / We might have a name in feedback , and a weak fixed array in the next <nl> void AccessorAssembler : : GenerateLoadGlobalIC ( TypeofMode typeof_mode ) { <nl> Node * context = Parameter ( Descriptor : : kContext ) ; <nl> <nl> ExitPoint direct_exit ( this ) ; <nl> - LoadGlobalIC ( CAST ( vector ) , slot , <nl> - / / lazy_context <nl> - [ = ] { return CAST ( context ) ; } , <nl> - / / lazy_name <nl> - [ = ] { return CAST ( name ) ; } , typeof_mode , & direct_exit ) ; <nl> + LoadGlobalIC ( <nl> + vector , slot , <nl> + / / lazy_context <nl> + [ = ] { return CAST ( context ) ; } , <nl> + / / lazy_name <nl> + [ = ] { return CAST ( name ) ; } , typeof_mode , & direct_exit ) ; <nl> } <nl> <nl> void AccessorAssembler : : GenerateLoadGlobalICTrampoline ( TypeofMode typeof_mode ) { <nl> mmm a / src / ic / accessor - assembler . h <nl> ppp b / src / ic / accessor - assembler . h <nl> class AccessorAssembler : public CodeStubAssembler { <nl> Node * holder ; <nl> } ; <nl> <nl> - void LoadGlobalIC ( TNode < FeedbackVector > vector , Node * slot , <nl> + void LoadGlobalIC ( Node * vector , Node * slot , <nl> const LazyNode < Context > & lazy_context , <nl> const LazyNode < Name > & lazy_name , TypeofMode typeof_mode , <nl> ExitPoint * exit_point , <nl> class AccessorAssembler : public CodeStubAssembler { <nl> Representation representation , Node * value , <nl> Label * bailout ) ; <nl> <nl> + void BranchIfPrototypeShouldbeFast ( Node * receiver_map , <nl> + Label * prototype_not_fast , <nl> + Label * prototype_fast ) ; <nl> + <nl> / / Extends properties backing store by JSObject : : kFieldsAdded elements , <nl> / / returns updated properties backing store . <nl> Node * ExtendPropertiesBackingStore ( Node * object , Node * index ) ; <nl> mmm a / src / interpreter / interpreter - generator . cc <nl> ppp b / src / interpreter / interpreter - generator . cc <nl> class InterpreterLoadGlobalAssembler : public InterpreterAssembler { <nl> return CAST ( name ) ; <nl> } ; <nl> <nl> - Label miss ( this , Label : : kDeferred ) ; <nl> ParameterMode slot_mode = CodeStubAssembler : : INTPTR_PARAMETERS ; <nl> - GotoIf ( IsUndefined ( maybe_feedback_vector ) , & miss ) ; <nl> - accessor_asm . LoadGlobalIC ( CAST ( maybe_feedback_vector ) , feedback_slot , <nl> + accessor_asm . LoadGlobalIC ( maybe_feedback_vector , feedback_slot , <nl> lazy_context , lazy_name , typeof_mode , & exit_point , <nl> slot_mode ) ; <nl> - <nl> - BIND ( & miss ) ; <nl> - { <nl> - exit_point . ReturnCallRuntime ( <nl> - Runtime : : kLoadGlobalIC_Miss , lazy_context ( ) , lazy_name ( ) , <nl> - ParameterToTagged ( feedback_slot , slot_mode ) , maybe_feedback_vector , <nl> - SmiConstant ( typeof_mode ) ) ; <nl> - } <nl> } <nl> } ; <nl> <nl> IGNITION_HANDLER ( LdaKeyedProperty , InterpreterAssembler ) { <nl> Node * feedback_vector = LoadFeedbackVectorUnchecked ( ) ; <nl> Node * context = GetContext ( ) ; <nl> <nl> - Label no_feedback ( this , Label : : kDeferred ) , end ( this ) ; <nl> VARIABLE ( var_result , MachineRepresentation : : kTagged ) ; <nl> - GotoIf ( IsUndefined ( feedback_vector ) , & no_feedback ) ; <nl> var_result . 
Bind ( CallBuiltin ( Builtins : : kKeyedLoadIC , context , object , name , <nl> smi_slot , feedback_vector ) ) ; <nl> - Goto ( & end ) ; <nl> - <nl> - BIND ( & no_feedback ) ; <nl> - { <nl> - Comment ( " KeyedLoadIC_no_feedback " ) ; <nl> - var_result . Bind ( CallRuntime ( Runtime : : kKeyedLoadIC_Miss , context , object , <nl> - name , smi_slot , feedback_vector ) ) ; <nl> - Goto ( & end ) ; <nl> - } <nl> - <nl> - BIND ( & end ) ; <nl> SetAccumulator ( var_result . value ( ) ) ; <nl> Dispatch ( ) ; <nl> } <nl> | [ ic ] Update LoadIC builtins to handle no feedback cases | v8/v8 | 90e5e244897191a0afc7060129490e3575c9cf44 | 2019-02-21T11:20:20Z |
mmm a / lib / AST / ASTDumper . cpp <nl> ppp b / lib / AST / ASTDumper . cpp <nl> namespace { <nl> <nl> void visitTypeAliasDecl ( TypeAliasDecl * TAD ) { <nl> printCommon ( TAD , " typealias " ) ; <nl> - PrintWithColorRAII ( OS , TypeColor ) < < " type = ' " ; <nl> + PrintWithColorRAII ( OS , TypeColor ) < < " type = " ; <nl> if ( auto underlying = TAD - > getCachedUnderlyingType ( ) ) { <nl> PrintWithColorRAII ( OS , TypeColor ) <nl> - < < underlying . getString ( ) ; <nl> + < < " ' " < < underlying . getString ( ) < < " ' " ; <nl> } else { <nl> PrintWithColorRAII ( OS , TypeColor ) < < " < < < unresolved > > > " ; <nl> } <nl> - printInherited ( TAD - > getInherited ( ) ) ; <nl> - OS < < " ' ) " ; <nl> + PrintWithColorRAII ( OS , ParenthesisColor ) < < ' ) ' ; <nl> } <nl> - <nl> + <nl> void visitOpaqueTypeDecl ( OpaqueTypeDecl * OTD ) { <nl> printCommon ( OTD , " opaque_type " ) ; <nl> OS < < " naming_decl = " ; <nl> namespace { <nl> void visitTypeAliasType ( TypeAliasType * T , StringRef label ) { <nl> printCommon ( label , " type_alias_type " ) ; <nl> printField ( " decl " , T - > getDecl ( ) - > printRef ( ) ) ; <nl> - PrintWithColorRAII ( OS , TypeColor ) < < " underlying = ' " ; <nl> + PrintWithColorRAII ( OS , TypeColor ) < < " underlying = " ; <nl> if ( auto underlying = T - > getSinglyDesugaredType ( ) ) { <nl> - PrintWithColorRAII ( OS , TypeColor ) < < underlying - > getString ( ) ; <nl> + PrintWithColorRAII ( OS , TypeColor ) <nl> + < < " ' " < < underlying - > getString ( ) < < " ' " ; <nl> } else { <nl> PrintWithColorRAII ( OS , TypeColor ) < < " < < < unresolved > > > " ; <nl> } <nl> | Merge pull request from AnthonyLatsis / typealias - dump | apple/swift | fab35767d91b47367c14c523f8e6220d980f86fc | 2020-04-22T18:00:31Z |
mmm a / src / factory . cc <nl> ppp b / src / factory . cc <nl> Handle < FixedArray > Factory : : NewFixedArray ( int size , PretenureFlag pretenure ) { <nl> } <nl> <nl> <nl> + Handle < FixedArray > Factory : : NewFixedArrayWithHoles ( int size ) { <nl> + ASSERT ( 0 < = size ) ; <nl> + CALL_HEAP_FUNCTION ( Heap : : AllocateFixedArrayWithHoles ( size ) , FixedArray ) ; <nl> + } <nl> + <nl> + <nl> Handle < Dictionary > Factory : : NewDictionary ( int at_least_space_for ) { <nl> ASSERT ( 0 < = at_least_space_for ) ; <nl> CALL_HEAP_FUNCTION ( Dictionary : : Allocate ( at_least_space_for ) , Dictionary ) ; <nl> mmm a / src / factory . h <nl> ppp b / src / factory . h <nl> namespace v8 { namespace internal { <nl> <nl> class Factory : public AllStatic { <nl> public : <nl> - / / Allocate a new fixed array . <nl> + / / Allocate a new fixed array with undefined entries . <nl> static Handle < FixedArray > NewFixedArray ( <nl> int size , <nl> PretenureFlag pretenure = NOT_TENURED ) ; <nl> + <nl> + / / Allocate a new fixed array with non - existing entries ( the hole ) . <nl> + static Handle < FixedArray > NewFixedArrayWithHoles ( int size ) ; <nl> + <nl> static Handle < Dictionary > NewDictionary ( int at_least_space_for ) ; <nl> <nl> static Handle < DescriptorArray > NewDescriptorArray ( int number_of_descriptors ) ; <nl> mmm a / src / objects . h <nl> ppp b / src / objects . h <nl> class Dictionary : public DictionaryBase { <nl> / / Fill in details for properties into storage . <nl> void CopyKeysTo ( FixedArray * storage ) ; <nl> <nl> - / / Returns the value at entry . <nl> - static int ValueIndexFor ( int entry ) { return EntryToIndex ( entry ) + 1 ; } <nl> - <nl> / / For transforming properties of a JSObject . <nl> Object * TransformPropertiesToFastFor ( JSObject * obj , <nl> int unused_property_fields ) ; <nl> mmm a / src / runtime . cc <nl> ppp b / src / runtime . cc <nl> static uint32_t IterateArrayAndPrototypeElements ( Handle < JSArray > array , <nl> / * * <nl> * A helper function of Runtime_ArrayConcat . <nl> * <nl> - * The first argument is an Array of Arrays and objects . It is the same as <nl> - * the arguments array of Array : : concat JS function . <nl> + * The first argument is an Array of arrays and objects . It is the <nl> + * same as the arguments array of Array : : concat JS function . <nl> * <nl> - * If an argument is an Array object , the function visits array elements . <nl> - * If an argument is not an Array object , the function visits the object <nl> - * as if it is an one - element array . <nl> + * If an argument is an Array object , the function visits array <nl> + * elements . If an argument is not an Array object , the function <nl> + * visits the object as if it is an one - element array . <nl> * <nl> - * If the result array index overflows 32 - bit integer , the rounded non - negative <nl> - * number is used as new length . For example , if one array length is 2 ^ 32 - 1 , <nl> - * second array length is 1 , the concatenated array length is 0 . <nl> + * If the result array index overflows 32 - bit integer , the rounded <nl> + * non - negative number is used as new length . For example , if one <nl> + * array length is 2 ^ 32 - 1 , second array length is 1 , the <nl> + * concatenated array length is 0 . 
<nl> * / <nl> static uint32_t IterateArguments ( Handle < JSArray > arguments , <nl> ArrayConcatVisitor * visitor ) { <nl> static Object * Runtime_ArrayConcat ( Arguments args ) { <nl> Handle < JSArray > result = Factory : : NewJSArray ( 0 ) ; <nl> <nl> uint32_t estimate_nof_elements = IterateArguments ( arguments , NULL ) ; <nl> - / / If estimated number of elements is more than half of length , <nl> - / / A fixed array ( fast case ) is more time & space - efficient than a dictionary . <nl> + / / If estimated number of elements is more than half of length , a <nl> + / / fixed array ( fast case ) is more time and space - efficient than a <nl> + / / dictionary . <nl> bool fast_case = ( estimate_nof_elements * 2 ) > = result_length ; <nl> <nl> Handle < FixedArray > storage ; <nl> if ( fast_case ) { <nl> - storage = Factory : : NewFixedArray ( result_length ) ; <nl> + / / The backing storage array must have non - existing elements to <nl> + / / preserve holes across concat operations . <nl> + storage = Factory : : NewFixedArrayWithHoles ( result_length ) ; <nl> <nl> } else { <nl> / / TODO ( 126 ) : move 25 % pre - allocation logic into Dictionary : : Allocate <nl> mmm a / test / mjsunit / array - concat . js <nl> ppp b / test / mjsunit / array - concat . js <nl> c = a . concat ( ' Hello ' ) ; <nl> assertEquals ( 1 , c . length ) ; <nl> assertEquals ( " Hello " , c [ 0 ] ) ; <nl> assertEquals ( " Hello " , c . toString ( ) ) ; <nl> + <nl> + / / Check that concat preserves holes . <nl> + var holey = [ void 0 , ' a ' , , ' c ' ] . concat ( [ ' d ' , , ' f ' , [ 0 , , 2 ] , void 0 ] ) <nl> + assertEquals ( 9 , holey . length ) ; / / hole in embedded array is ignored <nl> + for ( var i = 0 ; i < holey . length ; i + + ) { <nl> + if ( i = = 2 | | i = = 5 ) { <nl> + assertFalse ( i in holey ) ; <nl> + } else { <nl> + assertTrue ( i in holey ) ; <nl> + } <nl> + } <nl> | Fix issue with Array . concat not preserving holes in the | v8/v8 | b5fe75f9502a3fb582029ca6afdf38550531524f | 2008-10-29T10:02:09Z |
similarity index 100 % <nl> rename from code / computational_geometry / area_of_triangle . cpp <nl> rename to code / computational_geometry / area_of_triangle / area_of_triangle . cpp <nl> | created sub - folder | OpenGenus/cosmos | a4aadfa364d72a0f162830ed2ac532ab12fa5169 | 2017-10-04T19:48:16Z |
mmm a / docs / whats - changed . md <nl> ppp b / docs / whats - changed . md <nl> This page has been added following quite a large ( hopefully the last such ) merge <nl> * New command line parser . Under the hood it is a complete rewrite - now powered by a command line library that will soon be spun out as a separate project : Clara . The options themselves are largely the same but there are some notable differences ( as already discussed ) . <nl> * Completely overhauled output from the textual reporter ( now the Console reporter ) . This now features a much clearer , cleaner format , including good use of indentation . <nl> <nl> + More information can be found in [ this blog post ] ( http : / / www . levelofindirection . com / journal / 2013 / 6 / 28 / catch - 10 . html ) . <nl> If you find any issues please raise issue tickets on the [ issue tracker on GitHub ] ( https : / / github . com / philsquared / Catch / issues ) as before . For general questions , comments and suggestions , though , please use the [ new forums on Google Groups ] ( https : / / groups . google . com / forum / ? fromgroups # ! forum / catch - forum ) . <nl> <nl> mmm <nl> <nl> - [ Home ] ( . . / README . md ) <nl> \ No newline at end of file <nl> + [ Home ] ( . . / README . md ) <nl> | Update whats - changed . md | catchorg/Catch2 | 7bd033c829df396d49391b2137ca15e458164e54 | 2013-06-28T17:18:46Z |
similarity index 91 % <nl> rename from lib / FrontendTool / ParseableInterfaceGeneration . h <nl> rename to include / swift / Frontend / ParseableInterfaceGeneration . h <nl> mmm a / lib / FrontendTool / ParseableInterfaceGeneration . h <nl> ppp b / include / swift / Frontend / ParseableInterfaceGeneration . h <nl> <nl> / / <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - # ifndef SWIFT_FRONTENDTOOL_PARSEABLEINTERFACEGENERATION_H <nl> - # define SWIFT_FRONTENDTOOL_PARSEABLEINTERFACEGENERATION_H <nl> + # ifndef SWIFT_FRONTEND_PARSEABLEINTERFACEGENERATION_H <nl> + # define SWIFT_FRONTEND_PARSEABLEINTERFACEGENERATION_H <nl> <nl> # include " swift / Basic / LLVM . h " <nl> <nl> mmm a / include / swift / Option / Options . h <nl> ppp b / include / swift / Option / Options . h <nl> namespace options { <nl> ModuleWrapOption = ( 1 < < 10 ) , <nl> SwiftFormatOption = ( 1 < < 11 ) , <nl> ArgumentIsPath = ( 1 < < 12 ) , <nl> + TextualInterfaceOption = ( 1 < < 13 ) , <nl> } ; <nl> <nl> enum ID { <nl> mmm a / include / swift / Option / Options . td <nl> ppp b / include / swift / Option / Options . td <nl> def DoesNotAffectIncrementalBuild : OptionFlag ; <nl> / / current working directory . <nl> def ArgumentIsPath : OptionFlag ; <nl> <nl> + / / The option should be written into a . swiftinterface textual interface file , <nl> + / / and read / parsed from there when reconstituting a . swiftmodule from it . <nl> + def TextualInterfaceOption : OptionFlag ; <nl> + <nl> / / / / / / / / / <nl> / / Options <nl> <nl> mmm a / lib / Frontend / CMakeLists . txt <nl> ppp b / lib / Frontend / CMakeLists . txt <nl> add_swift_library ( swiftFrontend STATIC <nl> Frontend . cpp <nl> FrontendInputsAndOutputs . cpp <nl> FrontendOptions . cpp <nl> + ParseableInterfaceGeneration . cpp <nl> PrintingDiagnosticConsumer . cpp <nl> SerializedDiagnosticConsumer . cpp <nl> DEPENDS <nl> similarity index 98 % <nl> rename from lib / FrontendTool / ParseableInterfaceGeneration . cpp <nl> rename to lib / Frontend / ParseableInterfaceGeneration . cpp <nl> mmm a / lib / FrontendTool / ParseableInterfaceGeneration . cpp <nl> ppp b / lib / Frontend / ParseableInterfaceGeneration . cpp <nl> <nl> / / <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - # include " ParseableInterfaceGeneration . h " <nl> - <nl> # include " swift / AST / ASTContext . h " <nl> # include " swift / AST / Decl . h " <nl> # include " swift / AST / DiagnosticsFrontend . h " <nl> # include " swift / AST / Module . h " <nl> + # include " swift / Frontend / ParseableInterfaceGeneration . h " <nl> # include " clang / Basic / Module . h " <nl> <nl> using namespace swift ; <nl> mmm a / lib / FrontendTool / CMakeLists . txt <nl> ppp b / lib / FrontendTool / CMakeLists . txt <nl> add_swift_library ( swiftFrontendTool STATIC <nl> ImportedModules . cpp <nl> ReferenceDependencies . cpp <nl> TBD . cpp <nl> - ParseableInterfaceGeneration . cpp <nl> DEPENDS <nl> swift - syntax - generated - headers SwiftOptions <nl> LINK_LIBRARIES <nl> mmm a / lib / FrontendTool / FrontendTool . cpp <nl> ppp b / lib / FrontendTool / FrontendTool . cpp <nl> <nl> # include " ImportedModules . h " <nl> # include " ReferenceDependencies . h " <nl> # include " TBD . h " <nl> - # include " ParseableInterfaceGeneration . h " <nl> <nl> # include " swift / Subsystems . h " <nl> # include " swift / AST / ASTScope . h " <nl> <nl> # include " swift / Frontend / Frontend . 
h " <nl> # include " swift / Frontend / PrintingDiagnosticConsumer . h " <nl> # include " swift / Frontend / SerializedDiagnosticConsumer . h " <nl> + # include " swift / Frontend / ParseableInterfaceGeneration . h " <nl> # include " swift / Immediate / Immediate . h " <nl> # include " swift / Index / IndexRecord . h " <nl> # include " swift / Option / Options . h " <nl> | [ ModuleInterface ] Add new TextualInterfaceOption flag , move TextualInterfaceGeneration . { h , cpp } | apple/swift | 196fbbfc4653e6f55870eebe2d4a61ba1127817f | 2018-10-11T23:43:48Z |
mmm a / DEPS <nl> ppp b / DEPS <nl> vars = { <nl> <nl> deps = { <nl> ' build ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / build . git ' + ' @ ' + ' e7ee57c9bf9aab54522601251e913acb67272002 ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / build . git ' + ' @ ' + ' 939db3324b30178d95c90cbf1f24d47bc8131036 ' , <nl> ' third_party / depot_tools ' : <nl> Var ( ' chromium_url ' ) + ' / chromium / tools / depot_tools . git ' + ' @ ' + ' 3bd3c99b4d5c884798648198ba7b01755214fd90 ' , <nl> ' third_party / icu ' : <nl> deps = { <nl> ' dep_type ' : ' cipd ' , <nl> } , <nl> ' tools / clang ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / tools / clang . git ' + ' @ ' + ' 93f80bccaea8d47a653f23d1c139fd17e7f52968 ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / tools / clang . git ' + ' @ ' + ' 361d3671544d55aeb9aeb850ec9e6c78c619b873 ' , <nl> ' tools / luci - go ' : { <nl> ' packages ' : [ <nl> { <nl> | Update V8 DEPS . | v8/v8 | 0510c7a67a3bd2fefdd17fb6e56195dfa4fed9b3 | 2020-08-17T03:43:20Z |
mmm a / src / flag - definitions . h <nl> ppp b / src / flag - definitions . h <nl> DEFINE_bool ( trace_gc_ignore_scavenger , false , <nl> " do not print trace line after scavenger collection " ) <nl> DEFINE_bool ( print_cumulative_gc_stat , false , <nl> " print cumulative GC statistics in name = value format on exit " ) <nl> + DEFINE_bool ( print_max_heap_committed , false , <nl> + " print statistics of the maximum memory committed for the heap " <nl> + " in name = value format on exit " ) <nl> DEFINE_bool ( trace_gc_verbose , false , <nl> " print more details following each garbage collection " ) <nl> DEFINE_bool ( trace_fragmentation , false , <nl> mmm a / src / heap . cc <nl> ppp b / src / heap . cc <nl> Heap : : Heap ( ) <nl> / / ConfigureHeap ( survived_since_last_expansion_ , external_allocation_limit_ ) <nl> / / Will be 4 * reserved_semispace_size_ to ensure that young <nl> / / generation can be aligned to its size . <nl> + maximum_committed_ ( 0 ) , <nl> survived_since_last_expansion_ ( 0 ) , <nl> sweep_generation_ ( 0 ) , <nl> always_allocate_scope_depth_ ( 0 ) , <nl> intptr_t Heap : : CommittedMemoryExecutable ( ) { <nl> } <nl> <nl> <nl> + void Heap : : UpdateMaximumCommitted ( ) { <nl> + if ( ! HasBeenSetUp ( ) ) return ; <nl> + <nl> + intptr_t current_committed_memory = CommittedMemory ( ) ; <nl> + if ( current_committed_memory > maximum_committed_ ) { <nl> + maximum_committed_ = current_committed_memory ; <nl> + } <nl> + } <nl> + <nl> + <nl> intptr_t Heap : : Available ( ) { <nl> if ( ! HasBeenSetUp ( ) ) return 0 ; <nl> <nl> void Heap : : GarbageCollectionPrologue ( ) { <nl> # endif <nl> } <nl> <nl> + UpdateMaximumCommitted ( ) ; <nl> + <nl> # ifdef DEBUG <nl> ASSERT ( ! AllowHeapAllocation : : IsAllowed ( ) & & gc_state_ = = NOT_IN_GC ) ; <nl> <nl> void Heap : : GarbageCollectionEpilogue ( ) { <nl> } <nl> } <nl> <nl> + UpdateMaximumCommitted ( ) ; <nl> + <nl> isolate_ - > counters ( ) - > alive_after_last_gc ( ) - > Set ( <nl> static_cast < int > ( SizeOfObjects ( ) ) ) ; <nl> <nl> void Heap : : GarbageCollectionEpilogue ( ) { <nl> property_cell_space ( ) - > CommittedMemory ( ) / KB ) ) ; <nl> isolate_ - > counters ( ) - > heap_sample_code_space_committed ( ) - > AddSample ( <nl> static_cast < int > ( code_space ( ) - > CommittedMemory ( ) / KB ) ) ; <nl> + <nl> + isolate_ - > counters ( ) - > heap_sample_maximum_committed ( ) - > AddSample ( <nl> + static_cast < int > ( MaximumCommittedMemory ( ) / KB ) ) ; <nl> } <nl> <nl> # define UPDATE_COUNTERS_FOR_SPACE ( space ) \ <nl> void Heap : : TearDown ( ) { <nl> } <nl> # endif <nl> <nl> + UpdateMaximumCommitted ( ) ; <nl> + <nl> if ( FLAG_print_cumulative_gc_stat ) { <nl> PrintF ( " \ n " ) ; <nl> PrintF ( " gc_count = % d " , gc_count_ ) ; <nl> void Heap : : TearDown ( ) { <nl> PrintF ( " \ n \ n " ) ; <nl> } <nl> <nl> + if ( FLAG_print_max_heap_committed ) { <nl> + PrintF ( " \ n " ) ; <nl> + PrintF ( " maximum_committed_by_heap = % " V8_PTR_PREFIX " d " , <nl> + MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_new_space = % " V8_PTR_PREFIX " d " , <nl> + new_space_ . 
MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_old_pointer_space = % " V8_PTR_PREFIX " d " , <nl> + old_data_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_old_data_space = % " V8_PTR_PREFIX " d " , <nl> + old_pointer_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_old_data_space = % " V8_PTR_PREFIX " d " , <nl> + old_pointer_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_code_space = % " V8_PTR_PREFIX " d " , <nl> + code_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_map_space = % " V8_PTR_PREFIX " d " , <nl> + map_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_cell_space = % " V8_PTR_PREFIX " d " , <nl> + cell_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_property_space = % " V8_PTR_PREFIX " d " , <nl> + property_cell_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " maximum_committed_by_lo_space = % " V8_PTR_PREFIX " d " , <nl> + lo_space_ - > MaximumCommittedMemory ( ) ) ; <nl> + PrintF ( " \ n \ n " ) ; <nl> + } <nl> + <nl> TearDownArrayBuffers ( ) ; <nl> <nl> isolate_ - > global_handles ( ) - > TearDown ( ) ; <nl> mmm a / src / heap . h <nl> ppp b / src / heap . h <nl> class Heap { <nl> / / Returns the amount of phyical memory currently committed for the heap . <nl> size_t CommittedPhysicalMemory ( ) ; <nl> <nl> + / / Returns the maximum amount of memory ever committed for the heap . <nl> + intptr_t MaximumCommittedMemory ( ) { return maximum_committed_ ; } <nl> + <nl> + / / Updates the maximum committed memory for the heap . Should be called <nl> + / / whenever a space grows . <nl> + void UpdateMaximumCommitted ( ) ; <nl> + <nl> / / Returns the available bytes in space w / o growing . <nl> / / Heap doesn ' t guarantee that it can allocate an object that requires <nl> / / all available bytes . Check MaxHeapObjectSize ( ) instead . <nl> class Heap { <nl> int initial_semispace_size_ ; <nl> intptr_t max_old_generation_size_ ; <nl> intptr_t max_executable_size_ ; <nl> + intptr_t maximum_committed_ ; <nl> <nl> / / For keeping track of how much data has survived <nl> / / scavenge since last new space expansion . <nl> mmm a / src / spaces . cc <nl> ppp b / src / spaces . cc <nl> void PagedSpace : : ResetFreeListStatistics ( ) { <nl> } <nl> <nl> <nl> + void PagedSpace : : IncreaseCapacity ( int size ) { <nl> + accounting_stats_ . 
ExpandSpace ( size ) ; <nl> + } <nl> + <nl> + <nl> void PagedSpace : : ReleasePage ( Page * page , bool unlink ) { <nl> ASSERT ( page - > LiveBytes ( ) = = 0 ) ; <nl> ASSERT ( AreaSize ( ) = = page - > area_size ( ) ) ; <nl> void SemiSpace : : SetUp ( Address start , <nl> initial_capacity_ = RoundDown ( initial_capacity , Page : : kPageSize ) ; <nl> capacity_ = initial_capacity ; <nl> maximum_capacity_ = RoundDown ( maximum_capacity , Page : : kPageSize ) ; <nl> + maximum_committed_ = 0 ; <nl> committed_ = false ; <nl> start_ = start ; <nl> address_mask_ = ~ ( maximum_capacity - 1 ) ; <nl> bool SemiSpace : : Commit ( ) { <nl> current = new_page ; <nl> } <nl> <nl> + SetCapacity ( capacity_ ) ; <nl> committed_ = true ; <nl> Reset ( ) ; <nl> return true ; <nl> bool SemiSpace : : GrowTo ( int new_capacity ) { <nl> start_ + capacity_ , delta , executable ( ) ) ) { <nl> return false ; <nl> } <nl> - capacity_ = new_capacity ; <nl> + SetCapacity ( new_capacity ) ; <nl> NewSpacePage * last_page = anchor ( ) - > prev_page ( ) ; <nl> ASSERT ( last_page ! = anchor ( ) ) ; <nl> for ( int i = pages_before ; i < pages_after ; i + + ) { <nl> bool SemiSpace : : ShrinkTo ( int new_capacity ) { <nl> ASSERT ( ( current_page_ > = first_page ( ) ) & & ( current_page_ < = new_last_page ) ) ; <nl> } <nl> <nl> - capacity_ = new_capacity ; <nl> + SetCapacity ( new_capacity ) ; <nl> <nl> return true ; <nl> } <nl> void SemiSpace : : Swap ( SemiSpace * from , SemiSpace * to ) { <nl> } <nl> <nl> <nl> + void SemiSpace : : SetCapacity ( int new_capacity ) { <nl> + capacity_ = new_capacity ; <nl> + if ( capacity_ > maximum_committed_ ) { <nl> + maximum_committed_ = capacity_ ; <nl> + } <nl> + } <nl> + <nl> + <nl> void SemiSpace : : set_age_mark ( Address mark ) { <nl> ASSERT ( NewSpacePage : : FromLimit ( mark ) - > semi_space ( ) = = this ) ; <nl> age_mark_ = mark ; <nl> LargeObjectSpace : : LargeObjectSpace ( Heap * heap , <nl> bool LargeObjectSpace : : SetUp ( ) { <nl> first_page_ = NULL ; <nl> size_ = 0 ; <nl> + maximum_committed_ = 0 ; <nl> page_count_ = 0 ; <nl> objects_size_ = 0 ; <nl> chunk_map_ . Clear ( ) ; <nl> MaybeObject * LargeObjectSpace : : AllocateRaw ( int object_size , <nl> page - > set_next_page ( first_page_ ) ; <nl> first_page_ = page ; <nl> <nl> + if ( size_ > maximum_committed_ ) { <nl> + maximum_committed_ = size_ ; <nl> + } <nl> + <nl> / / Register all MemoryChunk : : kAlignment - aligned chunks covered by <nl> / / this large page in the chunk map . <nl> uintptr_t base = reinterpret_cast < uintptr_t > ( page ) / MemoryChunk : : kAlignment ; <nl> mmm a / src / spaces . h <nl> ppp b / src / spaces . h <nl> class AllocationStats BASE_EMBEDDED { <nl> / / Zero out all the allocation statistics ( i . e . , no capacity ) . <nl> void Clear ( ) { <nl> capacity_ = 0 ; <nl> + max_capacity_ = 0 ; <nl> size_ = 0 ; <nl> waste_ = 0 ; <nl> } <nl> class AllocationStats BASE_EMBEDDED { <nl> <nl> / / Accessors for the allocation statistics . 
<nl> intptr_t Capacity ( ) { return capacity_ ; } <nl> + intptr_t MaxCapacity ( ) { return max_capacity_ ; } <nl> intptr_t Size ( ) { return size_ ; } <nl> intptr_t Waste ( ) { return waste_ ; } <nl> <nl> class AllocationStats BASE_EMBEDDED { <nl> void ExpandSpace ( int size_in_bytes ) { <nl> capacity_ + = size_in_bytes ; <nl> size_ + = size_in_bytes ; <nl> + if ( capacity_ > max_capacity_ ) { <nl> + max_capacity_ = capacity_ ; <nl> + } <nl> ASSERT ( size_ > = 0 ) ; <nl> } <nl> <nl> class AllocationStats BASE_EMBEDDED { <nl> <nl> private : <nl> intptr_t capacity_ ; <nl> + intptr_t max_capacity_ ; <nl> intptr_t size_ ; <nl> intptr_t waste_ ; <nl> } ; <nl> class PagedSpace : public Space { <nl> / / spaces this equals the capacity . <nl> intptr_t CommittedMemory ( ) { return Capacity ( ) ; } <nl> <nl> + / / The maximum amount of memory ever committed for this space . <nl> + intptr_t MaximumCommittedMemory ( ) { return accounting_stats_ . MaxCapacity ( ) ; } <nl> + <nl> / / Approximate amount of physical memory committed for this space . <nl> size_t CommittedPhysicalMemory ( ) ; <nl> <nl> class PagedSpace : public Space { <nl> accounting_stats_ . AllocateBytes ( bytes ) ; <nl> } <nl> <nl> - void IncreaseCapacity ( int size ) { <nl> - accounting_stats_ . ExpandSpace ( size ) ; <nl> - } <nl> + void IncreaseCapacity ( int size ) ; <nl> <nl> / / Releases an unused page and shrinks the space . <nl> void ReleasePage ( Page * page , bool unlink ) ; <nl> class SemiSpace : public Space { <nl> <nl> static void Swap ( SemiSpace * from , SemiSpace * to ) ; <nl> <nl> + / / Returns the maximum amount of memory ever committed by the semi space . <nl> + size_t MaximumCommittedMemory ( ) { return maximum_committed_ ; } <nl> + <nl> / / Approximate amount of physical memory committed for this space . <nl> size_t CommittedPhysicalMemory ( ) ; <nl> <nl> class SemiSpace : public Space { <nl> / / Copies the flags into the masked positions on all pages in the space . <nl> void FlipPages ( intptr_t flags , intptr_t flag_mask ) ; <nl> <nl> + / / Updates Capacity and MaximumCommitted based on new capacity . <nl> + void SetCapacity ( int new_capacity ) ; <nl> + <nl> NewSpacePage * anchor ( ) { return & anchor_ ; } <nl> <nl> / / The current and maximum capacity of the space . <nl> class SemiSpace : public Space { <nl> int maximum_capacity_ ; <nl> int initial_capacity_ ; <nl> <nl> + intptr_t maximum_committed_ ; <nl> + <nl> / / The start address of the space . <nl> Address start_ ; <nl> / / Used to govern object promotion during mark - compact collection . <nl> class NewSpace : public Space { <nl> return Capacity ( ) ; <nl> } <nl> <nl> + / / Return the total amount of memory committed for new space . <nl> + intptr_t MaximumCommittedMemory ( ) { <nl> + return to_space_ . MaximumCommittedMemory ( ) + <nl> + from_space_ . MaximumCommittedMemory ( ) ; <nl> + } <nl> + <nl> / / Approximate amount of physical memory committed for this space . <nl> size_t CommittedPhysicalMemory ( ) ; <nl> <nl> class LargeObjectSpace : public Space { <nl> return objects_size_ ; <nl> } <nl> <nl> + intptr_t MaximumCommittedMemory ( ) { <nl> + return maximum_committed_ ; <nl> + } <nl> + <nl> intptr_t CommittedMemory ( ) { <nl> return Size ( ) ; <nl> } <nl> class LargeObjectSpace : public Space { <nl> <nl> private : <nl> intptr_t max_capacity_ ; <nl> + intptr_t maximum_committed_ ; <nl> / / The head of the linked list of large object chunks . <nl> LargePage * first_page_ ; <nl> intptr_t size_ ; / / allocated bytes <nl> mmm a / src / v8 - counters . 
h <nl> ppp b / src / v8 - counters . h <nl> namespace internal { <nl> V8 . MemoryHeapSamplePropertyCellSpaceCommitted ) \ <nl> HM ( heap_sample_code_space_committed , \ <nl> V8 . MemoryHeapSampleCodeSpaceCommitted ) \ <nl> + HM ( heap_sample_maximum_committed , \ <nl> + V8 . MemoryHeapSampleMaximumCommitted ) \ <nl> <nl> <nl> / / WARNING : STATS_COUNTER_LIST_ * is a very large macro that is causing MSVC <nl> | Add counters to track the maximum amount of memory committed by the heap . | v8/v8 | 0fd7c2a78ad41e97e7d72154b62b5fcb5f8ef683 | 2013-11-05T11:59:42Z |
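The v8 change above applies one small pattern in several places (SemiSpace::SetCapacity, AllocationStats::ExpandSpace, the LargeObjectSpace allocation path): whenever the committed size grows, a separate maximum is bumped and never decreased. Below is a minimal editorial sketch of that high-water-mark idea in isolation; CommittedTracker and its method names are illustrative, not v8 API.

    #include <cassert>
    #include <cstdint>

    class CommittedTracker {
     public:
      void SetCommitted(std::int64_t bytes) {
        committed_ = bytes;
        if (committed_ > maximum_committed_) {
          maximum_committed_ = committed_;   // bump the peak; it is never decreased
        }
      }
      std::int64_t Committed() const { return committed_; }
      std::int64_t MaximumCommitted() const { return maximum_committed_; }

     private:
      std::int64_t committed_ = 0;
      std::int64_t maximum_committed_ = 0;
    };

    int main() {
      CommittedTracker t;
      t.SetCommitted(4 << 20);   // grow to 4 MB
      t.SetCommitted(1 << 20);   // shrink: Committed() drops, the maximum does not
      assert(t.Committed() == (1 << 20));
      assert(t.MaximumCommitted() == (4 << 20));
    }

Recording the peak at the point of growth keeps the new MaximumCommittedMemory() style accessors a plain read, which is presumably why the commit updates it inside SetCapacity/ExpandSpace rather than recomputing anything when the V8.MemoryHeapSampleMaximumCommitted counter is sampled.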
mmm a / bazel / repository_locations . bzl <nl> ppp b / bazel / repository_locations . bzl <nl> REPOSITORY_LOCATIONS = dict ( <nl> ) , <nl> boringssl = dict ( <nl> # Use commits from branch " chromium - stable - with - bazel " <nl> - sha256 = " e11d2d62cd6c4e1b2e126500e1436a678574300f33f27974f2c7ef271be42727 " , <nl> - strip_prefix = " boringssl - debed9a4d8de5e282f672ffcd7e4a48a201ea78c " , <nl> - # chromium - 73 . 0 . 3683 . 75 <nl> - urls = [ " https : / / github . com / google / boringssl / archive / debed9a4d8de5e282f672ffcd7e4a48a201ea78c . tar . gz " ] , <nl> + sha256 = " 4825306f702fa5cb76fd86c987a88c9bbb241e75f4d86dbb3714530ca73c1fb1 " , <nl> + strip_prefix = " boringssl - 8cb07520451f0dc454654f2da5cdecf0b806f823 " , <nl> + # chromium - 74 . 0 . 3729 . 131 <nl> + urls = [ " https : / / github . com / google / boringssl / archive / 8cb07520451f0dc454654f2da5cdecf0b806f823 . tar . gz " ] , <nl> ) , <nl> boringssl_fips = dict ( <nl> sha256 = " b12ad676ee533824f698741bd127f6fbc82c46344398a6d78d25e62c6c418c73 " , <nl> | tls : update BoringSSL to 8cb07520 ( 3729 ) . ( ) | envoyproxy/envoy | e1adad79215fb5a32b5c92b6f99bdb2645074a6d | 2019-05-01T14:55:55Z |
mmm a / include / swift / SILAnalysis / ARCAnalysis . h <nl> ppp b / include / swift / SILAnalysis / ARCAnalysis . h <nl> bool canCheckRefCount ( SILInstruction * User ) ; <nl> bool canUseValue ( SILInstruction * User , SILValue Ptr , AliasAnalysis * AA ) ; <nl> <nl> / / / If \ p Op has arc uses in the instruction range [ Start , End ) , return the <nl> - / / / first such instruction . Otherwise return Nothing_t : : Nothing . We assume that <nl> + / / / first such instruction . Otherwise return Nothing . We assume that <nl> / / / Start and End are both in the same basic block . <nl> Optional < SILBasicBlock : : iterator > <nl> valueHasARCUsesInInstructionRange ( SILValue Op , <nl> valueHasARCUsesInInstructionRange ( SILValue Op , <nl> AliasAnalysis * AA ) ; <nl> <nl> / / / If \ p Op has instructions in the instruction range ( Start , End ] which may <nl> - / / / decrement it , return the first such instruction . Returns Nothing_t : : Nothing <nl> + / / / decrement it , return the first such instruction . Returns Nothing <nl> / / / if no such instruction exists . We assume that Start and End are both in the <nl> / / / same basic block . <nl> Optional < SILBasicBlock : : iterator > <nl> mmm a / lib / SILAnalysis / ARCAnalysis . cpp <nl> ppp b / lib / SILAnalysis / ARCAnalysis . cpp <nl> bool swift : : arc : : canUseValue ( SILInstruction * User , SILValue Ptr , <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> / / / If \ p Op has arc uses in the instruction range [ Start , End ) , return the <nl> - / / / first such instruction . Otherwise return Nothing_t : : Nothing . We assume that <nl> + / / / first such instruction . Otherwise return Nothing . We assume that <nl> / / / Start and End are both in the same basic block . <nl> Optional < SILBasicBlock : : iterator > <nl> swift : : arc : : <nl> valueHasARCUsesInInstructionRange ( SILValue Op , <nl> <nl> / / If Start = = End , then we have an empty range , return false . <nl> if ( Start = = End ) <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> <nl> / / Otherwise , until Start ! = End . <nl> while ( Start ! = End ) { <nl> valueHasARCUsesInInstructionRange ( SILValue Op , <nl> } <nl> <nl> / / If all such instructions can not use Op , return false . <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> / / / If \ p Op has instructions in the instruction range ( Start , End ] which may <nl> - / / / decrement it , return the first such instruction . Returns Nothing_t : : Nothing <nl> + / / / decrement it , return the first such instruction . Returns Nothing <nl> / / / if no such instruction exists . We assume that Start and End are both in the <nl> / / / same basic block . <nl> Optional < SILBasicBlock : : iterator > <nl> valueHasARCDecrementOrCheckInInstructionRange ( SILValue Op , <nl> <nl> / / If Start = = End , then we have an empty range , return nothing . <nl> if ( Start = = End ) <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> <nl> / / Otherwise , until Start ! = End . <nl> while ( Start ! = End ) { <nl> valueHasARCDecrementOrCheckInInstructionRange ( SILValue Op , <nl> } <nl> <nl> / / If all such instructions can not decrement Op , return nothing . <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> mmm a / lib / SILAnalysis / GlobalARCPairingAnalysis . cpp <nl> ppp b / lib / SILAnalysis / GlobalARCPairingAnalysis . 
cpp <nl> ARCMatchingSetBuilder : : matchIncrementsToDecrements ( ) { <nl> if ( BURefCountState = = BUMap . end ( ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Could not find state for " <nl> " increment ! \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> DEBUG ( llvm : : dbgs ( ) < < " SUCCESS ! Found state for increment . \ n " ) ; <nl> ARCMatchingSetBuilder : : matchIncrementsToDecrements ( ) { <nl> if ( TDRefCountState = = TDMap . end ( ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Could not find state for " <nl> " decrement . \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> DEBUG ( llvm : : dbgs ( ) < < " SUCCESS ! Found state for decrement . \ n " ) ; <nl> <nl> ARCMatchingSetBuilder : : matchIncrementsToDecrements ( ) { <nl> ! TDRefCountState - > second . containsInstruction ( Increment ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Not tracking instruction or " <nl> " found increment that did not match . \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> / / Add the decrement to the decrement to move set . If we don ' t insert <nl> ARCMatchingSetBuilder : : matchDecrementsToIncrements ( ) { <nl> if ( TDRefCountState = = TDMap . end ( ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Could not find state for " <nl> " increment ! \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> DEBUG ( llvm : : dbgs ( ) < < " SUCCESS ! Found state for decrement . \ n " ) ; <nl> ARCMatchingSetBuilder : : matchDecrementsToIncrements ( ) { <nl> if ( BURefCountState = = BUMap . end ( ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Could not find state for " <nl> " increment . \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> DEBUG ( llvm : : dbgs ( ) < < " SUCCESS ! Found state for increment . \ n " ) ; <nl> ARCMatchingSetBuilder : : matchDecrementsToIncrements ( ) { <nl> ! BURefCountState - > second . containsInstruction ( Decrement ) ) { <nl> DEBUG ( llvm : : dbgs ( ) < < " FAILURE ! Not tracking instruction or " <nl> " found increment that did not match . \ n " ) ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> / / Add the decrement to the decrement to move set . If we don ' t insert <nl> mmm a / lib / SILPasses / FunctionSignatureOpts . cpp <nl> ppp b / lib / SILPasses / FunctionSignatureOpts . cpp <nl> class FunctionSignatureOptCloner <nl> for ( auto & Arg : ArgDescriptors ) <nl> if ( Arg . Arg = = A ) <nl> return Arg ; <nl> - return Nothing_t : : Nothing ; <nl> + return Nothing ; <nl> } <nl> <nl> static SILFunction * initCloned ( SILFunction & Orig , <nl> | Convert Nothing_t : : Nothing = > Nothing . | apple/swift | 5561b3f5385abd5c8d8668d87e5a6bdbda27c6d1 | 2014-08-30T03:11:55Z |
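The rename above (Nothing_t::Nothing => Nothing) works because the optional's empty state is exposed as a standalone constant that converts implicitly to any Optional<T>. The sketch below shows one way to express that shape, using a tag type plus a constant in the style of std::nullopt_t / std::nullopt; it is not the llvm/Swift Optional implementation, and all names here are illustrative.

    #include <cassert>

    struct Nothing_t { };                  // empty-state tag
    constexpr Nothing_t Nothing{};         // the short spelling the commit switches to

    template <typename T>
    class Optional {
     public:
      Optional(Nothing_t) : has_value_(false), value_() {}   // implicit: accepts Nothing
      Optional(T value) : has_value_(true), value_(value) {}
      bool hasValue() const { return has_value_; }
      const T &getValue() const { return value_; }

     private:
      bool has_value_;
      T value_;
    };

    Optional<int> findEven(int x) {
      if (x % 2 == 0) return x;
      return Nothing;                      // reads like the post-commit call sites
    }

    int main() {
      assert(findEven(4).hasValue());
      assert(!findEven(3).hasValue());
    }

With an implicit converting constructor of this kind in place, writing return Nothing; at the call sites in ARCAnalysis.cpp and GlobalARCPairingAnalysis.cpp means exactly what the longer Nothing_t::Nothing spelling did.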
similarity index 100 % <nl> rename from examples / table_iterator . php <nl> rename to examples / table / iterator . php <nl> similarity index 100 % <nl> rename from examples / table . php <nl> rename to examples / table / set . php <nl> mmm a / swoole_config . h <nl> ppp b / swoole_config . h <nl> <nl> # define SW_FILE_CHUNK_SIZE 65536 <nl> <nl> # define SW_TABLE_CONFLICT_PROPORTION 0 . 2 / / 20 % <nl> - # define SW_TABLE_USE_LINKED_LIST <nl> <nl> # define SW_SSL_BUFSIZE 16384 <nl> <nl> <nl> # define SW_HTTP_SERVER_SOFTWARE " swoole - http - server " <nl> # define SW_HTTP_BAD_REQUEST " < h1 > 400 Bad Request < / h1 > \ r \ n " <nl> # define SW_HTTP_PARAM_MAX_NUM 128 <nl> + # define SW_HTTP_COOKIE_KEYLEN 128 <nl> <nl> # endif / * SWOOLE_CONFIG_H_ * / <nl> mmm a / swoole_http . c <nl> ppp b / swoole_http . c <nl> static int http_request_on_header_value ( php_http_parser * parser , const char * at , <nl> int vlen ; <nl> } kv = { 0 } ; <nl> <nl> + char keybuf [ SW_HTTP_COOKIE_KEYLEN ] ; <nl> char * _c = ( char * ) at ; <nl> int n = 1 ; <nl> kv . k = _c ; <nl> static int http_request_on_header_value ( php_http_parser * parser , const char * at , <nl> else if ( * _c = = ' ; ' ) <nl> { <nl> kv . vlen = n ; <nl> - add_assoc_stringl_ex ( cookie , kv . k , kv . klen , kv . v , kv . vlen , 1 ) ; <nl> + if ( kv . klen > = SW_HTTP_COOKIE_KEYLEN ) <nl> + { <nl> + kv . klen = SW_HTTP_COOKIE_KEYLEN - 1 ; <nl> + } <nl> + memcpy ( keybuf , kv . k , kv . klen - 1 ) ; <nl> + keybuf [ kv . klen - 1 ] = 0 ; <nl> + add_assoc_stringl_ex ( cookie , keybuf , kv . klen , kv . v , kv . vlen , 1 ) ; <nl> kv . k = _c + 2 ; <nl> n = 0 ; <nl> } <nl> static int http_request_on_header_value ( php_http_parser * parser , const char * at , <nl> _c + + ; <nl> } <nl> kv . vlen = n ; <nl> - add_assoc_stringl_ex ( cookie , kv . k , kv . klen , kv . v , kv . vlen , 1 ) ; <nl> + if ( kv . klen > = SW_HTTP_COOKIE_KEYLEN ) <nl> + { <nl> + kv . klen = SW_HTTP_COOKIE_KEYLEN - 1 ; <nl> + } <nl> + memcpy ( keybuf , kv . k , kv . klen - 1 ) ; <nl> + keybuf [ kv . klen - 1 ] = 0 ; <nl> + add_assoc_stringl_ex ( cookie , keybuf , kv . klen , kv . v , kv . vlen , 1 ) ; <nl> } <nl> else <nl> { <nl> | Fixed request - > cookie key error . | swoole/swoole-src | d2a1ac815f412b32ea3299b748baf824b81fde47 | 2014-11-04T07:51:32Z |
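The swoole fix above copies each cookie key into a fixed stack buffer of SW_HTTP_COOKIE_KEYLEN bytes before handing it to add_assoc_stringl_ex, truncating oversized keys instead of using them at full length. The sketch below shows the bounded-copy-with-NUL-termination idea on its own; copy_cookie_key and COOKIE_KEYLEN are illustrative names, not part of swoole.

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    constexpr std::size_t COOKIE_KEYLEN = 128;   // plays the role of SW_HTTP_COOKIE_KEYLEN

    static void copy_cookie_key(char (&dst)[COOKIE_KEYLEN], const char *src, std::size_t len) {
      if (len >= COOKIE_KEYLEN) {
        len = COOKIE_KEYLEN - 1;                 // truncate, leaving room for the terminator
      }
      std::memcpy(dst, src, len);
      dst[len] = '\0';
    }

    int main() {
      char keybuf[COOKIE_KEYLEN];
      copy_cookie_key(keybuf, "session_id", std::strlen("session_id"));
      std::printf("%s\n", keybuf);               // prints the (possibly truncated) key
    }

The essential property is that the write index can never reach COOKIE_KEYLEN, no matter how long the incoming cookie key is.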
mmm a / lib / Sema / CSBindings . cpp <nl> ppp b / lib / Sema / CSBindings . cpp <nl> bool ConstraintSystem : : PotentialBindings : : isViable ( <nl> return true ; <nl> } <nl> <nl> - bool ConstraintSystem : : PotentialBindings : : favoredOverDisjunction ( ) const { <nl> + bool ConstraintSystem : : PotentialBindings : : favoredOverDisjunction ( <nl> + Constraint * disjunction ) const { <nl> if ( IsHole | | FullyBound ) <nl> return false ; <nl> <nl> bool ConstraintSystem : : PotentialBindings : : favoredOverDisjunction ( ) const { <nl> / / but we still want to resolve closure body early ( instead of <nl> / / attempting any disjunction ) to gain additional contextual <nl> / / information . <nl> - if ( TypeVar - > getImpl ( ) . isClosureType ( ) ) <nl> - return true ; <nl> + if ( TypeVar - > getImpl ( ) . isClosureType ( ) ) { <nl> + auto boundType = disjunction - > getNestedConstraints ( ) [ 0 ] - > getFirstType ( ) ; <nl> + / / If disjunction is attempting to bind a type variable , let ' s <nl> + / / favor closure because it would add additional context , otherwise <nl> + / / if it ' s something like a collection ( where it has to pick <nl> + / / between a conversion and bridging conversion ) or concrete <nl> + / / type let ' s prefer the disjunction . <nl> + return boundType - > is < TypeVariableType > ( ) ; <nl> + } <nl> <nl> return ! InvolvesTypeVariables ; <nl> } <nl> ConstraintSystem : : getPotentialBindings ( TypeVariableType * typeVar ) const { <nl> / / FIXME : Recurse into these constraints to see whether this <nl> / / type variable is fully bound by any of them . <nl> result . InvolvesTypeVariables = true ; <nl> + <nl> + / / If there is additional context available via disjunction <nl> + / / associated with closure literal ( e . g . coercion to some other <nl> + / / type ) let ' s delay resolving the closure until the disjunction <nl> + / / is attempted . <nl> + if ( typeVar - > getImpl ( ) . isClosureType ( ) ) <nl> + return { typeVar } ; <nl> + <nl> break ; <nl> <nl> case ConstraintKind : : ConformsTo : <nl> mmm a / lib / Sema / CSStep . cpp <nl> ppp b / lib / Sema / CSStep . cpp <nl> StepResult ComponentStep : : take ( bool prevFailed ) { <nl> auto bestBindings = CS . determineBestBindings ( ) ; <nl> <nl> if ( bestBindings & & <nl> - ( ! disjunction | | bestBindings - > favoredOverDisjunction ( ) ) ) { <nl> + ( ! disjunction | | bestBindings - > favoredOverDisjunction ( disjunction ) ) ) { <nl> / / Produce a type variable step . <nl> return suspend ( <nl> std : : make_unique < TypeVariableStep > ( CS , * bestBindings , Solutions ) ) ; <nl> mmm a / lib / Sema / ConstraintSystem . h <nl> ppp b / lib / Sema / ConstraintSystem . h <nl> class ConstraintSystem { <nl> <nl> / / / Check if this binding is favored over a disjunction e . g . <nl> / / / if it has only concrete types or would resolve a closure . <nl> - bool favoredOverDisjunction ( ) const ; <nl> + bool favoredOverDisjunction ( Constraint * disjunction ) const ; <nl> <nl> void dump ( llvm : : raw_ostream & out , <nl> unsigned indent = 0 ) const LLVM_ATTRIBUTE_USED { <nl> | Merge remote - tracking branch ' origin / master ' into master - next | apple/swift | df204275f72846e94060e4ccca2e1a7ac73cf56f | 2020-01-22T00:37:22Z |
mmm a / tools / Linux / packaging / debian / copyright <nl> ppp b / tools / Linux / packaging / debian / copyright <nl> HIDRemote Classes used by OSXRemote Client <nl> ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE <nl> POSSIBILITY OF SUCH DAMAGE . <nl> <nl> + PS3 BD Remote Client <nl> + <nl> + Copyright ( C ) 2008 - 2010 Team XBMC <nl> + <nl> + This program is free software ; you can redistribute it and / or modify <nl> + it under the terms of the GNU General Public License as published by <nl> + the Free Software Foundation ; either version 2 of the License , or <nl> + ( at your option ) any later version . <nl> + <nl> + This program is distributed in the hope that it will be useful , <nl> + but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + GNU General Public License for more details . <nl> + <nl> + You should have received a copy of the GNU General Public License along <nl> + with this program ; if not , write to the Free Software Foundation , Inc . , <nl> + 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA . <nl> + <nl> + <nl> Licenses : <nl> <nl> License : GPL <nl> | Add copyright information for PS3 BD Remote Client . | xbmc/xbmc | 0b9b637af4a950e08f3354d018d53e0222889b25 | 2010-05-15T04:50:09Z |
mmm a / modules / imgproc / src / demosaicing . cpp <nl> ppp b / modules / imgproc / src / demosaicing . cpp <nl> static void Bayer2RGB_VNG_8u ( const Mat & srcmat , Mat & dstmat , int code ) <nl> <nl> bayer + = bstep * 2 ; <nl> <nl> - # if CV_SSE2 <nl> - bool haveSSE = cv : : checkHardwareSupport ( CV_CPU_SSE2 ) ; <nl> - # define _mm_absdiff_epu16 ( a , b ) _mm_adds_epu16 ( _mm_subs_epu16 ( a , b ) , _mm_subs_epu16 ( b , a ) ) <nl> - # endif <nl> - <nl> for ( int y = 2 ; y < size . height - 4 ; y + + ) <nl> { <nl> uchar * dstrow = dst + dststep * y + 6 ; <nl> static void Bayer2RGB_VNG_8u ( const Mat & srcmat , Mat & dstmat , int code ) <nl> <nl> i = 1 ; <nl> <nl> - # if CV_SSE2 <nl> - if ( haveSSE ) <nl> + # if CV_SIMD128 <nl> + for ( ; i < = N - 9 ; i + = 8 , srow + = 8 , brow + = 8 ) <nl> { <nl> - __m128i z = _mm_setzero_si128 ( ) ; <nl> - for ( ; i < = N - 9 ; i + = 8 , srow + = 8 , brow + = 8 ) <nl> - { <nl> - __m128i s1 , s2 , s3 , s4 , s6 , s7 , s8 , s9 ; <nl> - <nl> - s1 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow - 1 - bstep ) ) , z ) ; <nl> - s2 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow - bstep ) ) , z ) ; <nl> - s3 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow + 1 - bstep ) ) , z ) ; <nl> - <nl> - s4 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow - 1 ) ) , z ) ; <nl> - s6 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow + 1 ) ) , z ) ; <nl> - <nl> - s7 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow - 1 + bstep ) ) , z ) ; <nl> - s8 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow + bstep ) ) , z ) ; <nl> - s9 = _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( srow + 1 + bstep ) ) , z ) ; <nl> - <nl> - __m128i b0 , b1 , b2 , b3 , b4 , b5 , b6 ; <nl> - <nl> - b0 = _mm_adds_epu16 ( _mm_slli_epi16 ( _mm_absdiff_epu16 ( s2 , s8 ) , 1 ) , <nl> - _mm_adds_epu16 ( _mm_absdiff_epu16 ( s1 , s7 ) , <nl> - _mm_absdiff_epu16 ( s3 , s9 ) ) ) ; <nl> - b1 = _mm_adds_epu16 ( _mm_slli_epi16 ( _mm_absdiff_epu16 ( s4 , s6 ) , 1 ) , <nl> - _mm_adds_epu16 ( _mm_absdiff_epu16 ( s1 , s3 ) , <nl> - _mm_absdiff_epu16 ( s7 , s9 ) ) ) ; <nl> - b2 = _mm_slli_epi16 ( _mm_absdiff_epu16 ( s3 , s7 ) , 1 ) ; <nl> - b3 = _mm_slli_epi16 ( _mm_absdiff_epu16 ( s1 , s9 ) , 1 ) ; <nl> - <nl> - _mm_storeu_si128 ( ( __m128i * ) brow , b0 ) ; <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N ) , b1 ) ; <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N2 ) , b2 ) ; <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N3 ) , b3 ) ; <nl> - <nl> - b4 = _mm_adds_epu16 ( b2 , _mm_adds_epu16 ( _mm_absdiff_epu16 ( s2 , s4 ) , <nl> - _mm_absdiff_epu16 ( s6 , s8 ) ) ) ; <nl> - b5 = _mm_adds_epu16 ( b3 , _mm_adds_epu16 ( _mm_absdiff_epu16 ( s2 , s6 ) , <nl> - _mm_absdiff_epu16 ( s4 , s8 ) ) ) ; <nl> - b6 = _mm_adds_epu16 ( _mm_adds_epu16 ( s2 , s4 ) , _mm_adds_epu16 ( s6 , s8 ) ) ; <nl> - b6 = _mm_srli_epi16 ( b6 , 1 ) ; <nl> - <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N4 ) , b4 ) ; <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N5 ) , b5 ) ; <nl> - _mm_storeu_si128 ( ( __m128i * ) ( brow + N6 ) , b6 ) ; <nl> - } <nl> + v_uint16x8 s1 , s2 , s3 , s4 , s6 , s7 , s8 , s9 ; <nl> + <nl> + s1 = v_load_expand ( srow - 1 - bstep ) ; <nl> + s2 = v_load_expand ( srow - bstep ) ; <nl> + s3 = v_load_expand ( srow + 1 - bstep ) ; <nl> + <nl> + s4 = v_load_expand ( srow - 1 ) ; <nl> + s6 = v_load_expand ( srow + 1 ) ; <nl> + <nl> + s7 = v_load_expand ( srow - 1 + bstep ) ; <nl> + s8 = v_load_expand ( srow + bstep ) ; <nl> + s9 = 
v_load_expand ( srow + 1 + bstep ) ; <nl> + <nl> + v_uint16x8 b0 , b1 , b2 , b3 , b4 , b5 , b6 ; <nl> + <nl> + b0 = ( v_absdiff ( s2 , s8 ) < < 1 ) + v_absdiff ( s1 , s7 ) + v_absdiff ( s3 , s9 ) ; <nl> + b1 = ( v_absdiff ( s4 , s6 ) < < 1 ) + v_absdiff ( s1 , s3 ) + v_absdiff ( s7 , s9 ) ; <nl> + b2 = v_absdiff ( s3 , s7 ) < < 1 ; <nl> + b3 = v_absdiff ( s1 , s9 ) < < 1 ; <nl> + <nl> + v_store ( brow , b0 ) ; <nl> + v_store ( brow + N , b1 ) ; <nl> + v_store ( brow + N2 , b2 ) ; <nl> + v_store ( brow + N3 , b3 ) ; <nl> + <nl> + b4 = b2 + v_absdiff ( s2 , s4 ) + v_absdiff ( s6 , s8 ) ; <nl> + b5 = b3 + v_absdiff ( s2 , s6 ) + v_absdiff ( s4 , s8 ) ; <nl> + b6 = ( s2 + s4 + s6 + s8 ) > > 1 ; <nl> + <nl> + v_store ( brow + N4 , b4 ) ; <nl> + v_store ( brow + N5 , b5 ) ; <nl> + v_store ( brow + N6 , b6 ) ; <nl> } <nl> # endif <nl> <nl> static void Bayer2RGB_VNG_8u ( const Mat & srcmat , Mat & dstmat , int code ) <nl> bool greenCell = greenCell0 ; <nl> <nl> i = 2 ; <nl> - # if CV_SSE2 <nl> - int limit = ! haveSSE ? N - 2 : greenCell ? std : : min ( 3 , N - 2 ) : 2 ; <nl> + # if CV_SIMD128 <nl> + int limit = greenCell ? std : : min ( 3 , N - 2 ) : 2 ; <nl> # else <nl> int limit = N - 2 ; <nl> # endif <nl> static void Bayer2RGB_VNG_8u ( const Mat & srcmat , Mat & dstmat , int code ) <nl> greenCell = ! greenCell ; <nl> } <nl> <nl> - # if CV_SSE2 <nl> - if ( ! haveSSE ) <nl> - break ; <nl> - <nl> - __m128i emask = _mm_set1_epi32 ( 0x0000ffff ) , <nl> - omask = _mm_set1_epi32 ( 0xffff0000 ) , <nl> - z = _mm_setzero_si128 ( ) , <nl> - one = _mm_set1_epi16 ( 1 ) ; <nl> - __m128 _0_5 = _mm_set1_ps ( 0 . 5f ) ; <nl> + # if CV_SIMD128 <nl> + v_uint32x4 emask = v_setall_u32 ( 0x0000ffff ) , omask = v_setall_u32 ( 0xffff0000 ) ; <nl> + v_uint16x8 one = v_setall_u16 ( 1 ) , z = v_setzero_u16 ( ) ; <nl> + v_float32x4 _0_5 = v_setall_f32 ( 0 . 
5f ) ; <nl> <nl> - # define _mm_merge_epi16 ( a , b ) _mm_or_si128 ( _mm_and_si128 ( a , emask ) , _mm_and_si128 ( b , omask ) ) / / ( aA_aA_aA_aA ) * ( bB_bB_bB_bB ) = > ( bA_bA_bA_bA ) <nl> - # define _mm_cvtloepi16_ps ( a ) _mm_cvtepi32_ps ( _mm_srai_epi32 ( _mm_unpacklo_epi16 ( a , a ) , 16 ) ) / / ( 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ) = > ( 1f , 2f , 3f , 4f ) <nl> - # define _mm_cvthiepi16_ps ( a ) _mm_cvtepi32_ps ( _mm_srai_epi32 ( _mm_unpackhi_epi16 ( a , a ) , 16 ) ) / / ( 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ) = > ( 5f , 6f , 7f , 8f ) <nl> - # define _mm_loadl_u8_s16 ( ptr , offset ) _mm_unpacklo_epi8 ( _mm_loadl_epi64 ( ( __m128i * ) ( ( ptr ) + ( offset ) ) ) , z ) / / load 8 uchars to 8 shorts <nl> + # define v_merge_u16 ( a , b ) ( ( ( a ) & v_reinterpret_as_u16 ( emask ) ) | ( ( b ) & v_reinterpret_as_u16 ( omask ) ) ) / / ( aA_aA_aA_aA ) * ( bB_bB_bB_bB ) = > ( bA_bA_bA_bA ) <nl> + # define v_cvt_s16f32_lo ( a ) v_cvt_f32 ( v_expand_low ( v_reinterpret_as_s16 ( a ) ) ) / / ( 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ) = > ( 1f , 2f , 3f , 4f ) <nl> + # define v_cvt_s16f32_hi ( a ) v_cvt_f32 ( v_expand_high ( v_reinterpret_as_s16 ( a ) ) ) / / ( 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ) = > ( 5f , 6f , 7f , 8f ) <nl> <nl> / / process 8 pixels at once <nl> for ( ; i < = N - 10 ; i + = 8 , srow + = 8 , brow0 + = 8 , brow1 + = 8 , brow2 + = 8 ) <nl> { <nl> / / int gradN = brow0 [ 0 ] + brow1 [ 0 ] ; <nl> - __m128i gradN = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) brow0 ) , _mm_loadu_si128 ( ( __m128i * ) brow1 ) ) ; <nl> + v_uint16x8 gradN = v_load ( brow0 ) + v_load ( brow1 ) ; <nl> <nl> / / int gradS = brow1 [ 0 ] + brow2 [ 0 ] ; <nl> - __m128i gradS = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) brow1 ) , _mm_loadu_si128 ( ( __m128i * ) brow2 ) ) ; <nl> + v_uint16x8 gradS = v_load ( brow1 ) + v_load ( brow2 ) ; <nl> <nl> / / int gradW = brow1 [ N - 1 ] + brow1 [ N ] ; <nl> - __m128i gradW = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N - 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N ) ) ) ; <nl> + v_uint16x8 gradW = v_load ( brow1 + N - 1 ) + v_load ( brow1 + N ) ; <nl> <nl> / / int gradE = brow1 [ N + 1 ] + brow1 [ N ] ; <nl> - __m128i gradE = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N + 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N ) ) ) ; <nl> + v_uint16x8 gradE = v_load ( brow1 + N + 1 ) + v_load ( brow1 + N ) ; <nl> <nl> / / int minGrad = std : : min ( std : : min ( std : : min ( gradN , gradS ) , gradW ) , gradE ) ; <nl> / / int maxGrad = std : : max ( std : : max ( std : : max ( gradN , gradS ) , gradW ) , gradE ) ; <nl> - __m128i minGrad = _mm_min_epi16 ( _mm_min_epi16 ( gradN , gradS ) , _mm_min_epi16 ( gradW , gradE ) ) ; <nl> - __m128i maxGrad = _mm_max_epi16 ( _mm_max_epi16 ( gradN , gradS ) , _mm_max_epi16 ( gradW , gradE ) ) ; <nl> + v_uint16x8 minGrad = v_min ( v_min ( gradN , gradS ) , v_min ( gradW , gradE ) ) ; <nl> + v_uint16x8 maxGrad = v_max ( v_max ( gradN , gradS ) , v_max ( gradW , gradE ) ) ; <nl> <nl> - __m128i grad0 , grad1 ; <nl> + v_uint16x8 grad0 , grad1 ; <nl> <nl> / / int gradNE = brow0 [ N4 + 1 ] + brow1 [ N4 ] ; <nl> / / int gradNE = brow0 [ N2 ] + brow0 [ N2 + 1 ] + brow1 [ N2 ] + brow1 [ N2 + 1 ] ; <nl> - grad0 = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N4 + 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N4 ) ) ) ; <nl> - grad1 = _mm_adds_epi16 ( _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N2 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N2 + 1 ) ) ) , <nl> - _mm_adds_epi16 ( 
_mm_loadu_si128 ( ( __m128i * ) ( brow1 + N2 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N2 + 1 ) ) ) ) ; <nl> - __m128i gradNE = _mm_merge_epi16 ( grad0 , grad1 ) ; <nl> + grad0 = v_load ( brow0 + N4 + 1 ) + v_load ( brow1 + N4 ) ; <nl> + grad1 = v_load ( brow0 + N2 ) + v_load ( brow0 + N2 + 1 ) + v_load ( brow1 + N2 ) + v_load ( brow1 + N2 + 1 ) ; <nl> + v_uint16x8 gradNE = v_merge_u16 ( grad0 , grad1 ) ; <nl> <nl> / / int gradSW = brow1 [ N4 ] + brow2 [ N4 - 1 ] ; <nl> / / int gradSW = brow1 [ N2 ] + brow1 [ N2 - 1 ] + brow2 [ N2 ] + brow2 [ N2 - 1 ] ; <nl> - grad0 = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N4 - 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N4 ) ) ) ; <nl> - grad1 = _mm_adds_epi16 ( _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N2 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N2 - 1 ) ) ) , <nl> - _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N2 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N2 - 1 ) ) ) ) ; <nl> - __m128i gradSW = _mm_merge_epi16 ( grad0 , grad1 ) ; <nl> + grad0 = v_load ( brow2 + N4 - 1 ) + v_load ( brow1 + N4 ) ; <nl> + grad1 = v_load ( brow2 + N2 ) + v_load ( brow2 + N2 - 1 ) + v_load ( brow1 + N2 ) + v_load ( brow1 + N2 - 1 ) ; <nl> + v_uint16x8 gradSW = v_merge_u16 ( grad0 , grad1 ) ; <nl> <nl> - minGrad = _mm_min_epi16 ( _mm_min_epi16 ( minGrad , gradNE ) , gradSW ) ; <nl> - maxGrad = _mm_max_epi16 ( _mm_max_epi16 ( maxGrad , gradNE ) , gradSW ) ; <nl> + minGrad = v_min ( v_min ( minGrad , gradNE ) , gradSW ) ; <nl> + maxGrad = v_max ( v_max ( maxGrad , gradNE ) , gradSW ) ; <nl> <nl> / / int gradNW = brow0 [ N5 - 1 ] + brow1 [ N5 ] ; <nl> / / int gradNW = brow0 [ N3 ] + brow0 [ N3 - 1 ] + brow1 [ N3 ] + brow1 [ N3 - 1 ] ; <nl> - grad0 = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N5 - 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N5 ) ) ) ; <nl> - grad1 = _mm_adds_epi16 ( _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N3 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N3 - 1 ) ) ) , <nl> - _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N3 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N3 - 1 ) ) ) ) ; <nl> - __m128i gradNW = _mm_merge_epi16 ( grad0 , grad1 ) ; <nl> + grad0 = v_load ( brow0 + N5 - 1 ) + v_load ( brow1 + N5 ) ; <nl> + grad1 = v_load ( brow0 + N3 ) + v_load ( brow0 + N3 - 1 ) + v_load ( brow1 + N3 ) + v_load ( brow1 + N3 - 1 ) ; <nl> + v_uint16x8 gradNW = v_merge_u16 ( grad0 , grad1 ) ; <nl> <nl> / / int gradSE = brow1 [ N5 ] + brow2 [ N5 + 1 ] ; <nl> / / int gradSE = brow1 [ N3 ] + brow1 [ N3 + 1 ] + brow2 [ N3 ] + brow2 [ N3 + 1 ] ; <nl> - grad0 = _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N5 + 1 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N5 ) ) ) ; <nl> - grad1 = _mm_adds_epi16 ( _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N3 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N3 + 1 ) ) ) , <nl> - _mm_adds_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N3 ) ) , _mm_loadu_si128 ( ( __m128i * ) ( brow1 + N3 + 1 ) ) ) ) ; <nl> - __m128i gradSE = _mm_merge_epi16 ( grad0 , grad1 ) ; <nl> + grad0 = v_load ( brow2 + N5 + 1 ) + v_load ( brow1 + N5 ) ; <nl> + grad1 = v_load ( brow2 + N3 ) + v_load ( brow2 + N3 + 1 ) + v_load ( brow1 + N3 ) + v_load ( brow1 + N3 + 1 ) ; <nl> + v_uint16x8 gradSE = v_merge_u16 ( grad0 , grad1 ) ; <nl> <nl> - minGrad = _mm_min_epi16 ( _mm_min_epi16 ( minGrad , gradNW ) , gradSE ) ; <nl> - maxGrad = _mm_max_epi16 ( _mm_max_epi16 ( maxGrad , gradNW ) , gradSE ) ; <nl> + minGrad 
= v_min ( v_min ( minGrad , gradNW ) , gradSE ) ; <nl> + maxGrad = v_max ( v_max ( maxGrad , gradNW ) , gradSE ) ; <nl> <nl> / / int T = minGrad + maxGrad / 2 ; <nl> - __m128i T = _mm_adds_epi16 ( _mm_max_epi16 ( _mm_srli_epi16 ( maxGrad , 1 ) , one ) , minGrad ) ; <nl> - <nl> - __m128i RGs = z , GRs = z , Bs = z , ng = z ; <nl> - <nl> - __m128i x0 = _mm_loadl_u8_s16 ( srow , + 0 ) ; <nl> - __m128i x1 = _mm_loadl_u8_s16 ( srow , - 1 - bstep ) ; <nl> - __m128i x2 = _mm_loadl_u8_s16 ( srow , - 1 - bstep * 2 ) ; <nl> - __m128i x3 = _mm_loadl_u8_s16 ( srow , - bstep ) ; <nl> - __m128i x4 = _mm_loadl_u8_s16 ( srow , + 1 - bstep * 2 ) ; <nl> - __m128i x5 = _mm_loadl_u8_s16 ( srow , + 1 - bstep ) ; <nl> - __m128i x6 = _mm_loadl_u8_s16 ( srow , + 2 - bstep ) ; <nl> - __m128i x7 = _mm_loadl_u8_s16 ( srow , + 1 ) ; <nl> - __m128i x8 = _mm_loadl_u8_s16 ( srow , + 2 + bstep ) ; <nl> - __m128i x9 = _mm_loadl_u8_s16 ( srow , + 1 + bstep ) ; <nl> - __m128i x10 = _mm_loadl_u8_s16 ( srow , + 1 + bstep * 2 ) ; <nl> - __m128i x11 = _mm_loadl_u8_s16 ( srow , + bstep ) ; <nl> - __m128i x12 = _mm_loadl_u8_s16 ( srow , - 1 + bstep * 2 ) ; <nl> - __m128i x13 = _mm_loadl_u8_s16 ( srow , - 1 + bstep ) ; <nl> - __m128i x14 = _mm_loadl_u8_s16 ( srow , - 2 + bstep ) ; <nl> - __m128i x15 = _mm_loadl_u8_s16 ( srow , - 1 ) ; <nl> - __m128i x16 = _mm_loadl_u8_s16 ( srow , - 2 - bstep ) ; <nl> - <nl> - __m128i t0 , t1 , mask ; <nl> + v_uint16x8 T = v_max ( ( maxGrad > > 1 ) , one ) + minGrad ; <nl> + <nl> + v_uint16x8 RGs = z , GRs = z , Bs = z , ng = z ; <nl> + <nl> + v_uint16x8 x0 = v_load_expand ( srow + 0 ) ; <nl> + v_uint16x8 x1 = v_load_expand ( srow - 1 - bstep ) ; <nl> + v_uint16x8 x2 = v_load_expand ( srow - 1 - bstep * 2 ) ; <nl> + v_uint16x8 x3 = v_load_expand ( srow - bstep ) ; <nl> + v_uint16x8 x4 = v_load_expand ( srow + 1 - bstep * 2 ) ; <nl> + v_uint16x8 x5 = v_load_expand ( srow + 1 - bstep ) ; <nl> + v_uint16x8 x6 = v_load_expand ( srow + 2 - bstep ) ; <nl> + v_uint16x8 x7 = v_load_expand ( srow + 1 ) ; <nl> + v_uint16x8 x8 = v_load_expand ( srow + 2 + bstep ) ; <nl> + v_uint16x8 x9 = v_load_expand ( srow + 1 + bstep ) ; <nl> + v_uint16x8 x10 = v_load_expand ( srow + 1 + bstep * 2 ) ; <nl> + v_uint16x8 x11 = v_load_expand ( srow + bstep ) ; <nl> + v_uint16x8 x12 = v_load_expand ( srow - 1 + bstep * 2 ) ; <nl> + v_uint16x8 x13 = v_load_expand ( srow - 1 + bstep ) ; <nl> + v_uint16x8 x14 = v_load_expand ( srow - 2 + bstep ) ; <nl> + v_uint16x8 x15 = v_load_expand ( srow - 1 ) ; <nl> + v_uint16x8 x16 = v_load_expand ( srow - 2 - bstep ) ; <nl> + <nl> + v_uint16x8 t0 , t1 , mask ; <nl> <nl> / / gradN * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradN ) ; / / mask = T > gradN <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradN ) <nl> + mask = ( T > gradN ) ; / / mask = T > gradN <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradN ) <nl> <nl> - t0 = _mm_slli_epi16 ( x3 , 1 ) ; / / srow [ - bstep ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , - bstep * 2 ) , x0 ) ; / / srow [ - bstep * 2 ] + srow [ 0 ] <nl> + t0 = ( x3 < < 1 ) ; / / srow [ - bstep ] * 2 <nl> + t1 = v_load_expand ( srow - bstep * 2 ) + x0 ; / / srow [ - bstep * 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = ( srow [ - bstep * 2 ] + srow [ 0 ] ) * ( T > gradN ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( t1 , mask ) ) ; <nl> + RGs + = ( t1 & mask ) ; <nl> / / GRs + = { 
srow [ - bstep ] * 2 ; ( srow [ - bstep * 2 - 1 ] + srow [ - bstep * 2 + 1 ] ) } * ( T > gradN ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( t0 , _mm_adds_epi16 ( x2 , x4 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( t0 , x2 + x4 ) & mask ) ; <nl> / / Bs + = { ( srow [ - bstep - 1 ] + srow [ - bstep + 1 ] ) ; srow [ - bstep ] * 2 } * ( T > gradN ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_adds_epi16 ( x1 , x5 ) , t0 ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( x1 + x5 , t0 ) & mask ) ; <nl> <nl> / / gradNE * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradNE ) ; / / mask = T > gradNE <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradNE ) <nl> + mask = ( T > gradNE ) ; / / mask = T > gradNE <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradNE ) <nl> <nl> - t0 = _mm_slli_epi16 ( x5 , 1 ) ; / / srow [ - bstep + 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , - bstep * 2 + 2 ) , x0 ) ; / / srow [ - bstep * 2 + 2 ] + srow [ 0 ] <nl> + t0 = ( x5 < < 1 ) ; / / srow [ - bstep + 1 ] * 2 <nl> + t1 = v_load_expand ( srow - bstep * 2 + 2 ) + x0 ; / / srow [ - bstep * 2 + 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = { ( srow [ - bstep * 2 + 2 ] + srow [ 0 ] ) ; srow [ - bstep + 1 ] * 2 } * ( T > gradNE ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( _mm_merge_epi16 ( t1 , t0 ) , mask ) ) ; <nl> + RGs + = ( v_merge_u16 ( t1 , t0 ) & mask ) ; <nl> / / GRs + = { brow0 [ N6 + 1 ] ; ( srow [ - bstep * 2 + 1 ] + srow [ 1 ] ) } * ( T > gradNE ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N6 + 1 ) ) , _mm_adds_epi16 ( x4 , x7 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( v_load ( brow0 + N6 + 1 ) , x4 + x7 ) & mask ) ; <nl> / / Bs + = { srow [ - bstep + 1 ] * 2 ; ( srow [ - bstep ] + srow [ - bstep + 2 ] ) } * ( T > gradNE ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( t0 , _mm_adds_epi16 ( x3 , x6 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( t0 , x3 + x6 ) & mask ) ; <nl> <nl> / / gradE * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradE ) ; / / mask = T > gradE <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradE ) <nl> + mask = ( T > gradE ) ; / / mask = T > gradE <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradE ) <nl> <nl> - t0 = _mm_slli_epi16 ( x7 , 1 ) ; / / srow [ 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , 2 ) , x0 ) ; / / srow [ 2 ] + srow [ 0 ] <nl> + t0 = ( x7 < < 1 ) ; / / srow [ 1 ] * 2 <nl> + t1 = v_load_expand ( srow + 2 ) + x0 ; / / srow [ 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = ( srow [ 2 ] + srow [ 0 ] ) * ( T > gradE ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( t1 , mask ) ) ; <nl> + RGs + = ( t1 & mask ) ; <nl> / / GRs + = ( srow [ 1 ] * 2 ) * ( T > gradE ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( t0 , mask ) ) ; <nl> + GRs + = ( t0 & mask ) ; <nl> / / Bs + = { ( srow [ - bstep + 1 ] + srow [ bstep + 1 ] ) ; ( srow [ - bstep + 2 ] + srow [ bstep + 2 ] ) } * ( T > gradE ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_adds_epi16 ( x5 , x9 ) , _mm_adds_epi16 ( x6 , x8 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( x5 + x9 , x6 + x8 ) & 
mask ) ; <nl> <nl> / / gradSE * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradSE ) ; / / mask = T > gradSE <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradSE ) <nl> + mask = ( T > gradSE ) ; / / mask = T > gradSE <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradSE ) <nl> <nl> - t0 = _mm_slli_epi16 ( x9 , 1 ) ; / / srow [ bstep + 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , bstep * 2 + 2 ) , x0 ) ; / / srow [ bstep * 2 + 2 ] + srow [ 0 ] <nl> + t0 = ( x9 < < 1 ) ; / / srow [ bstep + 1 ] * 2 <nl> + t1 = v_load_expand ( srow + bstep * 2 + 2 ) + x0 ; / / srow [ bstep * 2 + 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = { ( srow [ bstep * 2 + 2 ] + srow [ 0 ] ) ; srow [ bstep + 1 ] * 2 } * ( T > gradSE ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( _mm_merge_epi16 ( t1 , t0 ) , mask ) ) ; <nl> + RGs + = ( v_merge_u16 ( t1 , t0 ) & mask ) ; <nl> / / GRs + = { brow2 [ N6 + 1 ] ; ( srow [ 1 ] + srow [ bstep * 2 + 1 ] ) } * ( T > gradSE ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N6 + 1 ) ) , _mm_adds_epi16 ( x7 , x10 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( v_load ( brow2 + N6 + 1 ) , x7 + x10 ) & mask ) ; <nl> / / Bs + = { srow [ bstep + 1 ] * 2 ; ( srow [ bstep + 2 ] + srow [ bstep ] ) } * ( T > gradSE ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_slli_epi16 ( x9 , 1 ) , _mm_adds_epi16 ( x8 , x11 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( ( x9 < < 1 ) , x8 + x11 ) & mask ) ; <nl> <nl> / / gradS * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradS ) ; / / mask = T > gradS <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradS ) <nl> + mask = ( T > gradS ) ; / / mask = T > gradS <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradS ) <nl> <nl> - t0 = _mm_slli_epi16 ( x11 , 1 ) ; / / srow [ bstep ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , bstep * 2 ) , x0 ) ; / / srow [ bstep * 2 ] + srow [ 0 ] <nl> + t0 = ( x11 < < 1 ) ; / / srow [ bstep ] * 2 <nl> + t1 = v_load_expand ( srow + bstep * 2 ) + x0 ; / / srow [ bstep * 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = ( srow [ bstep * 2 ] + srow [ 0 ] ) * ( T > gradS ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( t1 , mask ) ) ; <nl> + RGs + = ( t1 & mask ) ; <nl> / / GRs + = { srow [ bstep ] * 2 ; ( srow [ bstep * 2 + 1 ] + srow [ bstep * 2 - 1 ] ) } * ( T > gradS ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( t0 , _mm_adds_epi16 ( x10 , x12 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( t0 , x10 + x12 ) & mask ) ; <nl> / / Bs + = { ( srow [ bstep + 1 ] + srow [ bstep - 1 ] ) ; srow [ bstep ] * 2 } * ( T > gradS ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_adds_epi16 ( x9 , x13 ) , t0 ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( x9 + x13 , t0 ) & mask ) ; <nl> <nl> / / gradSW * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradSW ) ; / / mask = T > gradSW <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradSW ) <nl> + mask = ( T > gradSW ) ; / / mask = T > gradSW <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - 
v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradSW ) <nl> <nl> - t0 = _mm_slli_epi16 ( x13 , 1 ) ; / / srow [ bstep - 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , bstep * 2 - 2 ) , x0 ) ; / / srow [ bstep * 2 - 2 ] + srow [ 0 ] <nl> + t0 = ( x13 < < 1 ) ; / / srow [ bstep - 1 ] * 2 <nl> + t1 = v_load_expand ( srow + bstep * 2 - 2 ) + x0 ; / / srow [ bstep * 2 - 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = { ( srow [ bstep * 2 - 2 ] + srow [ 0 ] ) ; srow [ bstep - 1 ] * 2 } * ( T > gradSW ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( _mm_merge_epi16 ( t1 , t0 ) , mask ) ) ; <nl> + RGs + = ( v_merge_u16 ( t1 , t0 ) & mask ) ; <nl> / / GRs + = { brow2 [ N6 - 1 ] ; ( srow [ bstep * 2 - 1 ] + srow [ - 1 ] ) } * ( T > gradSW ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow2 + N6 - 1 ) ) , _mm_adds_epi16 ( x12 , x15 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( v_load ( brow2 + N6 - 1 ) , x12 + x15 ) & mask ) ; <nl> / / Bs + = { srow [ bstep - 1 ] * 2 ; ( srow [ bstep ] + srow [ bstep - 2 ] ) } * ( T > gradSW ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( t0 , _mm_adds_epi16 ( x11 , x14 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( t0 , x11 + x14 ) & mask ) ; <nl> <nl> / / gradW * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradW ) ; / / mask = T > gradW <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradW ) <nl> + mask = ( T > gradW ) ; / / mask = T > gradW <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradW ) <nl> <nl> - t0 = _mm_slli_epi16 ( x15 , 1 ) ; / / srow [ - 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , - 2 ) , x0 ) ; / / srow [ - 2 ] + srow [ 0 ] <nl> + t0 = ( x15 < < 1 ) ; / / srow [ - 1 ] * 2 <nl> + t1 = v_load_expand ( srow - 2 ) + x0 ; / / srow [ - 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = ( srow [ - 2 ] + srow [ 0 ] ) * ( T > gradW ) <nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( t1 , mask ) ) ; <nl> + RGs + = ( t1 & mask ) ; <nl> / / GRs + = ( srow [ - 1 ] * 2 ) * ( T > gradW ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( t0 , mask ) ) ; <nl> + GRs + = ( t0 & mask ) ; <nl> / / Bs + = { ( srow [ - bstep - 1 ] + srow [ bstep - 1 ] ) ; ( srow [ bstep - 2 ] + srow [ - bstep - 2 ] ) } * ( T > gradW ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_adds_epi16 ( x1 , x13 ) , _mm_adds_epi16 ( x14 , x16 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( x1 + x13 , x14 + x16 ) & mask ) ; <nl> <nl> / / gradNW * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> - mask = _mm_cmpgt_epi16 ( T , gradNW ) ; / / mask = T > gradNW <nl> - ng = _mm_sub_epi16 ( ng , mask ) ; / / ng + = ( T > gradNW ) <nl> + mask = ( T > gradNW ) ; / / mask = T > gradNW <nl> + ng = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( ng ) - v_reinterpret_as_s16 ( mask ) ) ; / / ng + = ( T > gradNW ) <nl> <nl> - t0 = _mm_slli_epi16 ( x1 , 1 ) ; / / srow [ - bstep - 1 ] * 2 <nl> - t1 = _mm_adds_epi16 ( _mm_loadl_u8_s16 ( srow , - bstep * 2 - 2 ) , x0 ) ; / / srow [ - bstep * 2 - 2 ] + srow [ 0 ] <nl> + t0 = ( x1 < < 1 ) ; / / srow [ - bstep - 1 ] * 2 <nl> + t1 = v_load_expand ( srow - bstep * 2 - 2 ) + x0 ; / / srow [ - bstep * 2 - 2 ] + srow [ 0 ] <nl> <nl> / / RGs + = { ( srow [ - bstep * 2 - 2 ] + srow [ 0 ] ) ; srow [ - bstep - 1 ] * 2 } * ( T > gradNW ) 
<nl> - RGs = _mm_adds_epi16 ( RGs , _mm_and_si128 ( _mm_merge_epi16 ( t1 , t0 ) , mask ) ) ; <nl> + RGs + = ( v_merge_u16 ( t1 , t0 ) & mask ) ; <nl> / / GRs + = { brow0 [ N6 - 1 ] ; ( srow [ - bstep * 2 - 1 ] + srow [ - 1 ] ) } * ( T > gradNW ) <nl> - GRs = _mm_adds_epi16 ( GRs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_loadu_si128 ( ( __m128i * ) ( brow0 + N6 - 1 ) ) , _mm_adds_epi16 ( x2 , x15 ) ) , mask ) ) ; <nl> + GRs + = ( v_merge_u16 ( v_load ( brow0 + N6 - 1 ) , x2 + x15 ) & mask ) ; <nl> / / Bs + = { srow [ - bstep - 1 ] * 2 ; ( srow [ - bstep ] + srow [ - bstep - 2 ] ) } * ( T > gradNW ) <nl> - Bs = _mm_adds_epi16 ( Bs , _mm_and_si128 ( _mm_merge_epi16 ( _mm_slli_epi16 ( x1 , 1 ) , _mm_adds_epi16 ( x3 , x16 ) ) , mask ) ) ; <nl> + Bs + = ( v_merge_u16 ( ( x1 < < 1 ) , x3 + x16 ) & mask ) ; <nl> <nl> - __m128 ngf0 = _mm_div_ps ( _0_5 , _mm_cvtloepi16_ps ( ng ) ) ; <nl> - __m128 ngf1 = _mm_div_ps ( _0_5 , _mm_cvthiepi16_ps ( ng ) ) ; <nl> + v_float32x4 ngf0 = _0_5 / v_cvt_s16f32_lo ( ng ) ; <nl> + v_float32x4 ngf1 = _0_5 / v_cvt_s16f32_hi ( ng ) ; <nl> <nl> / / now interpolate r , g & b <nl> - t0 = _mm_subs_epi16 ( GRs , RGs ) ; <nl> - t1 = _mm_subs_epi16 ( Bs , RGs ) ; <nl> + t0 = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( GRs ) - v_reinterpret_as_s16 ( RGs ) ) ; <nl> + t1 = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( Bs ) - v_reinterpret_as_s16 ( RGs ) ) ; <nl> <nl> - t0 = _mm_add_epi16 ( x0 , _mm_packs_epi32 ( <nl> - _mm_cvtps_epi32 ( _mm_mul_ps ( _mm_cvtloepi16_ps ( t0 ) , ngf0 ) ) , <nl> - _mm_cvtps_epi32 ( _mm_mul_ps ( _mm_cvthiepi16_ps ( t0 ) , ngf1 ) ) ) ) ; <nl> + t0 = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( x0 ) + <nl> + v_pack ( <nl> + v_round ( v_cvt_s16f32_lo ( t0 ) * ngf0 ) , <nl> + v_round ( v_cvt_s16f32_hi ( t0 ) * ngf1 ) ) ) ; <nl> <nl> - t1 = _mm_add_epi16 ( x0 , _mm_packs_epi32 ( <nl> - _mm_cvtps_epi32 ( _mm_mul_ps ( _mm_cvtloepi16_ps ( t1 ) , ngf0 ) ) , <nl> - _mm_cvtps_epi32 ( _mm_mul_ps ( _mm_cvthiepi16_ps ( t1 ) , ngf1 ) ) ) ) ; <nl> + t1 = v_reinterpret_as_u16 ( v_reinterpret_as_s16 ( x0 ) + <nl> + v_pack ( <nl> + v_round ( v_cvt_s16f32_lo ( t1 ) * ngf0 ) , <nl> + v_round ( v_cvt_s16f32_hi ( t1 ) * ngf1 ) ) ) ; <nl> <nl> - x1 = _mm_merge_epi16 ( x0 , t0 ) ; <nl> - x2 = _mm_merge_epi16 ( t0 , x0 ) ; <nl> + x1 = v_merge_u16 ( x0 , t0 ) ; <nl> + x2 = v_merge_u16 ( t0 , x0 ) ; <nl> <nl> uchar R [ 8 ] , G [ 8 ] , B [ 8 ] ; <nl> <nl> - _mm_storel_epi64 ( blueIdx ? ( __m128i * ) B : ( __m128i * ) R , _mm_packus_epi16 ( x1 , z ) ) ; <nl> - _mm_storel_epi64 ( ( __m128i * ) G , _mm_packus_epi16 ( x2 , z ) ) ; <nl> - _mm_storel_epi64 ( blueIdx ? ( __m128i * ) R : ( __m128i * ) B , _mm_packus_epi16 ( t1 , z ) ) ; <nl> + v_store_low ( blueIdx ? B : R , v_pack_u ( v_reinterpret_as_s16 ( x1 ) , v_reinterpret_as_s16 ( z ) ) ) ; <nl> + v_store_low ( G , v_pack_u ( v_reinterpret_as_s16 ( x2 ) , v_reinterpret_as_s16 ( z ) ) ) ; <nl> + v_store_low ( blueIdx ? R : B , v_pack_u ( v_reinterpret_as_s16 ( t1 ) , v_reinterpret_as_s16 ( z ) ) ) ; <nl> <nl> for ( int j = 0 ; j < 8 ; j + + , dstrow + = 3 ) <nl> { <nl> | Merge pull request from ChipKerchner : demosaicingToHal2 | opencv/opencv | 686ea5c1a6c7c2c170a147e00910028dba0a3a98 | 2019-11-16T19:45:37Z |
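The opencv commit above swaps hand-written SSE2 intrinsics for the universal intrinsics in opencv2/core/hal/intrin.hpp, so the same demosaicing loop can compile to SSE2, NEON or plain scalar code. The fragment below is an editorial sketch of the flavour of that conversion for one operation (unsigned 16-bit absolute difference); it assumes a build where CV_SIMD128 is enabled and is not a drop-in piece of the Bayer2RGB_VNG_8u kernel.

    #include <opencv2/core/hal/intrin.hpp>

    #if CV_SIMD128
    // Writes |a[i]-b[i]| for 8 widened uchar lanes, the way the patched loop does.
    static void absdiff8_u16(const uchar* a, const uchar* b, ushort* out) {
      cv::v_uint16x8 va = cv::v_load_expand(a);   // 8 uchars -> 8 ushorts
      cv::v_uint16x8 vb = cv::v_load_expand(b);
      cv::v_store(out, cv::v_absdiff(va, vb));
    }
    #endif

    int main() {
    #if CV_SIMD128
      uchar a[8] = {10, 0, 5, 200, 7, 7, 1, 255};
      uchar b[8] = { 3, 9, 5,  20, 8, 7, 0,   0};
      ushort out[8] = {};
      absdiff8_u16(a, b, out);
    #endif
      return 0;
    }

Here v_absdiff plays the role of the _mm_absdiff_epu16 macro the removed code defined by hand, with the backend choosing the matching instruction sequence for the target architecture.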
mmm a / CMakeLists . txt <nl> ppp b / CMakeLists . txt <nl> endif ( POLICY CMP0048 ) <nl> project ( googletest - distribution ) <nl> set ( GOOGLETEST_VERSION 1 . 10 . 0 ) <nl> <nl> - if ( CMAKE_VERSION VERSION_LESS " 3 . 1 " ) <nl> - add_definitions ( - std = c + + 11 ) <nl> - else ( ) <nl> - set ( CMAKE_CXX_STANDARD 11 ) <nl> - set ( CMAKE_CXX_STANDARD_REQUIRED ON ) <nl> + if ( CMAKE_VERSION VERSION_GREATER_EQUAL " 3 . 1 " ) <nl> if ( NOT CYGWIN AND NOT MSYS ) <nl> set ( CMAKE_CXX_EXTENSIONS OFF ) <nl> endif ( ) <nl> | Googletest export | google/googletest | 23b2a3b1cf803999fb38175f6e9e038a4495c8a5 | 2020-02-13T18:52:14Z |
mmm a / src / core / lib / debug / stats_data . c <nl> ppp b / src / core / lib / debug / stats_data . c <nl> const char * grpc_stats_counter_name [ GRPC_STATS_COUNTER_COUNT ] = { <nl> " executor_wakeup_initiated " , <nl> " executor_queue_drained " , <nl> " executor_push_retries " , <nl> + " server_requested_calls " , <nl> + " server_slowpath_requests_queued " , <nl> } ; <nl> const char * grpc_stats_counter_doc [ GRPC_STATS_COUNTER_COUNT ] = { <nl> " Number of client side calls created by this process " , <nl> const char * grpc_stats_counter_doc [ GRPC_STATS_COUNTER_COUNT ] = { <nl> " Number of times an executor queue was drained " , <nl> " Number of times we raced and were forced to retry pushing a closure to " <nl> " the executor " , <nl> + " How many calls were requested ( not necessarily received ) by the server " , <nl> + " How many times was the server slow path taken ( indicates too few " <nl> + " outstanding requests ) " , <nl> } ; <nl> const char * grpc_stats_histogram_name [ GRPC_STATS_HISTOGRAM_COUNT ] = { <nl> " tcp_write_size " , <nl> const char * grpc_stats_histogram_name [ GRPC_STATS_HISTOGRAM_COUNT ] = { <nl> " http2_send_message_per_write " , <nl> " http2_send_trailing_metadata_per_write " , <nl> " http2_send_flowctl_per_write " , <nl> + " server_cqs_checked " , <nl> } ; <nl> const char * grpc_stats_histogram_doc [ GRPC_STATS_HISTOGRAM_COUNT ] = { <nl> " Number of bytes offered to each syscall_write " , <nl> const char * grpc_stats_histogram_doc [ GRPC_STATS_HISTOGRAM_COUNT ] = { <nl> " Number of streams whose payload was written per TCP write " , <nl> " Number of streams terminated per TCP write " , <nl> " Number of flow control updates written per TCP write " , <nl> + " How many completion queues were checked looking for a CQ that had " <nl> + " requested the incoming call " , <nl> } ; <nl> const int grpc_stats_table_0 [ 65 ] = { <nl> 0 , 1 , 2 , 3 , 4 , 6 , 8 , 11 , <nl> const uint8_t grpc_stats_table_3 [ 102 ] = { <nl> 23 , 24 , 24 , 24 , 25 , 26 , 27 , 27 , 28 , 28 , 29 , 29 , 30 , 30 , 31 , 31 , 32 , <nl> 32 , 33 , 33 , 34 , 35 , 35 , 36 , 37 , 37 , 38 , 38 , 39 , 39 , 40 , 40 , 41 , 41 , <nl> 42 , 42 , 43 , 44 , 44 , 45 , 46 , 46 , 47 , 48 , 48 , 49 , 49 , 50 , 50 , 51 , 51 } ; <nl> + const int grpc_stats_table_4 [ 9 ] = { 0 , 1 , 2 , 4 , 7 , 13 , 23 , 39 , 64 } ; <nl> + const uint8_t grpc_stats_table_5 [ 9 ] = { 0 , 0 , 1 , 2 , 2 , 3 , 4 , 4 , 5 } ; <nl> void grpc_stats_inc_tcp_write_size ( grpc_exec_ctx * exec_ctx , int value ) { <nl> value = GPR_CLAMP ( value , 0 , 16777216 ) ; <nl> if ( value < 5 ) { <nl> void grpc_stats_inc_http2_send_flowctl_per_write ( grpc_exec_ctx * exec_ctx , <nl> grpc_stats_histo_find_bucket_slow ( <nl> ( exec_ctx ) , value , grpc_stats_table_2 , 64 ) ) ; <nl> } <nl> - const int grpc_stats_histo_buckets [ 10 ] = { 64 , 64 , 64 , 64 , 64 , <nl> - 64 , 64 , 64 , 64 , 64 } ; <nl> - const int grpc_stats_histo_start [ 10 ] = { 0 , 64 , 128 , 192 , 256 , <nl> - 320 , 384 , 448 , 512 , 576 } ; <nl> - const int * const grpc_stats_histo_bucket_boundaries [ 10 ] = { <nl> + void grpc_stats_inc_server_cqs_checked ( grpc_exec_ctx * exec_ctx , int value ) { <nl> + value = GPR_CLAMP ( value , 0 , 64 ) ; <nl> + if ( value < 3 ) { <nl> + GRPC_STATS_INC_HISTOGRAM ( ( exec_ctx ) , <nl> + GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED , value ) ; <nl> + return ; <nl> + } <nl> + union { <nl> + double dbl ; <nl> + uint64_t uint ; <nl> + } _val , _bkt ; <nl> + _val . dbl = value ; <nl> + if ( _val . 
uint < 4625196817309499392ull ) { <nl> + int bucket = <nl> + grpc_stats_table_5 [ ( ( _val . uint - 4613937818241073152ull ) > > 51 ) ] + 3 ; <nl> + _bkt . dbl = grpc_stats_table_4 [ bucket ] ; <nl> + bucket - = ( _val . uint < _bkt . uint ) ; <nl> + GRPC_STATS_INC_HISTOGRAM ( ( exec_ctx ) , <nl> + GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED , bucket ) ; <nl> + return ; <nl> + } <nl> + GRPC_STATS_INC_HISTOGRAM ( ( exec_ctx ) , GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED , <nl> + grpc_stats_histo_find_bucket_slow ( <nl> + ( exec_ctx ) , value , grpc_stats_table_4 , 8 ) ) ; <nl> + } <nl> + const int grpc_stats_histo_buckets [ 11 ] = { 64 , 64 , 64 , 64 , 64 , 64 , <nl> + 64 , 64 , 64 , 64 , 8 } ; <nl> + const int grpc_stats_histo_start [ 11 ] = { 0 , 64 , 128 , 192 , 256 , 320 , <nl> + 384 , 448 , 512 , 576 , 640 } ; <nl> + const int * const grpc_stats_histo_bucket_boundaries [ 11 ] = { <nl> grpc_stats_table_0 , grpc_stats_table_2 , grpc_stats_table_0 , <nl> grpc_stats_table_0 , grpc_stats_table_2 , grpc_stats_table_0 , <nl> grpc_stats_table_2 , grpc_stats_table_2 , grpc_stats_table_2 , <nl> - grpc_stats_table_2 } ; <nl> - void ( * const grpc_stats_inc_histogram [ 10 ] ) ( grpc_exec_ctx * exec_ctx , int x ) = { <nl> + grpc_stats_table_2 , grpc_stats_table_4 } ; <nl> + void ( * const grpc_stats_inc_histogram [ 11 ] ) ( grpc_exec_ctx * exec_ctx , int x ) = { <nl> grpc_stats_inc_tcp_write_size , <nl> grpc_stats_inc_tcp_write_iov_size , <nl> grpc_stats_inc_tcp_read_size , <nl> void ( * const grpc_stats_inc_histogram [ 10 ] ) ( grpc_exec_ctx * exec_ctx , int x ) = { <nl> grpc_stats_inc_http2_send_initial_metadata_per_write , <nl> grpc_stats_inc_http2_send_message_per_write , <nl> grpc_stats_inc_http2_send_trailing_metadata_per_write , <nl> - grpc_stats_inc_http2_send_flowctl_per_write } ; <nl> + grpc_stats_inc_http2_send_flowctl_per_write , <nl> + grpc_stats_inc_server_cqs_checked } ; <nl> mmm a / src / core / lib / debug / stats_data . h <nl> ppp b / src / core / lib / debug / stats_data . 
h <nl> typedef enum { <nl> GRPC_STATS_COUNTER_EXECUTOR_WAKEUP_INITIATED , <nl> GRPC_STATS_COUNTER_EXECUTOR_QUEUE_DRAINED , <nl> GRPC_STATS_COUNTER_EXECUTOR_PUSH_RETRIES , <nl> + GRPC_STATS_COUNTER_SERVER_REQUESTED_CALLS , <nl> + GRPC_STATS_COUNTER_SERVER_SLOWPATH_REQUESTS_QUEUED , <nl> GRPC_STATS_COUNTER_COUNT <nl> } grpc_stats_counters ; <nl> extern const char * grpc_stats_counter_name [ GRPC_STATS_COUNTER_COUNT ] ; <nl> typedef enum { <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_MESSAGE_PER_WRITE , <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_TRAILING_METADATA_PER_WRITE , <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_FLOWCTL_PER_WRITE , <nl> + GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED , <nl> GRPC_STATS_HISTOGRAM_COUNT <nl> } grpc_stats_histograms ; <nl> extern const char * grpc_stats_histogram_name [ GRPC_STATS_HISTOGRAM_COUNT ] ; <nl> typedef enum { <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_TRAILING_METADATA_PER_WRITE_BUCKETS = 64 , <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_FLOWCTL_PER_WRITE_FIRST_SLOT = 576 , <nl> GRPC_STATS_HISTOGRAM_HTTP2_SEND_FLOWCTL_PER_WRITE_BUCKETS = 64 , <nl> - GRPC_STATS_HISTOGRAM_BUCKETS = 640 <nl> + GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED_FIRST_SLOT = 640 , <nl> + GRPC_STATS_HISTOGRAM_SERVER_CQS_CHECKED_BUCKETS = 8 , <nl> + GRPC_STATS_HISTOGRAM_BUCKETS = 648 <nl> } grpc_stats_histogram_constants ; <nl> # define GRPC_STATS_INC_CLIENT_CALLS_CREATED ( exec_ctx ) \ <nl> GRPC_STATS_INC_COUNTER ( ( exec_ctx ) , GRPC_STATS_COUNTER_CLIENT_CALLS_CREATED ) <nl> typedef enum { <nl> GRPC_STATS_INC_COUNTER ( ( exec_ctx ) , GRPC_STATS_COUNTER_EXECUTOR_QUEUE_DRAINED ) <nl> # define GRPC_STATS_INC_EXECUTOR_PUSH_RETRIES ( exec_ctx ) \ <nl> GRPC_STATS_INC_COUNTER ( ( exec_ctx ) , GRPC_STATS_COUNTER_EXECUTOR_PUSH_RETRIES ) <nl> + # define GRPC_STATS_INC_SERVER_REQUESTED_CALLS ( exec_ctx ) \ <nl> + GRPC_STATS_INC_COUNTER ( ( exec_ctx ) , GRPC_STATS_COUNTER_SERVER_REQUESTED_CALLS ) <nl> + # define GRPC_STATS_INC_SERVER_SLOWPATH_REQUESTS_QUEUED ( exec_ctx ) \ <nl> + GRPC_STATS_INC_COUNTER ( ( exec_ctx ) , \ <nl> + GRPC_STATS_COUNTER_SERVER_SLOWPATH_REQUESTS_QUEUED ) <nl> # define GRPC_STATS_INC_TCP_WRITE_SIZE ( exec_ctx , value ) \ <nl> grpc_stats_inc_tcp_write_size ( ( exec_ctx ) , ( int ) ( value ) ) <nl> void grpc_stats_inc_tcp_write_size ( grpc_exec_ctx * exec_ctx , int x ) ; <nl> void grpc_stats_inc_http2_send_trailing_metadata_per_write ( <nl> grpc_stats_inc_http2_send_flowctl_per_write ( ( exec_ctx ) , ( int ) ( value ) ) <nl> void grpc_stats_inc_http2_send_flowctl_per_write ( grpc_exec_ctx * exec_ctx , <nl> int x ) ; <nl> - extern const int grpc_stats_histo_buckets [ 10 ] ; <nl> - extern const int grpc_stats_histo_start [ 10 ] ; <nl> - extern const int * const grpc_stats_histo_bucket_boundaries [ 10 ] ; <nl> - extern void ( * const grpc_stats_inc_histogram [ 10 ] ) ( grpc_exec_ctx * exec_ctx , <nl> + # define GRPC_STATS_INC_SERVER_CQS_CHECKED ( exec_ctx , value ) \ <nl> + grpc_stats_inc_server_cqs_checked ( ( exec_ctx ) , ( int ) ( value ) ) <nl> + void grpc_stats_inc_server_cqs_checked ( grpc_exec_ctx * exec_ctx , int x ) ; <nl> + extern const int grpc_stats_histo_buckets [ 11 ] ; <nl> + extern const int grpc_stats_histo_start [ 11 ] ; <nl> + extern const int * const grpc_stats_histo_bucket_boundaries [ 11 ] ; <nl> + extern void ( * const grpc_stats_inc_histogram [ 11 ] ) ( grpc_exec_ctx * exec_ctx , <nl> int x ) ; <nl> <nl> # endif / * GRPC_CORE_LIB_DEBUG_STATS_DATA_H * / <nl> mmm a / src / core / lib / debug / stats_data . yaml <nl> ppp b / src / core / lib / debug / stats_data . 
yaml <nl> <nl> - counter : executor_push_retries <nl> doc : Number of times we raced and were forced to retry pushing a closure to <nl> the executor <nl> + # server <nl> + - counter : server_requested_calls <nl> + doc : How many calls were requested ( not necessarily received ) by the server <nl> + - histogram : server_cqs_checked <nl> + buckets : 8 <nl> + max : 64 <nl> + doc : How many completion queues were checked looking for a CQ that had <nl> + requested the incoming call <nl> + - counter : server_slowpath_requests_queued <nl> + doc : How many times was the server slow path taken ( indicates too few <nl> + outstanding requests ) <nl> mmm a / src / core / lib / surface / server . c <nl> ppp b / src / core / lib / surface / server . c <nl> <nl> <nl> # include " src / core / lib / channel / channel_args . h " <nl> # include " src / core / lib / channel / connected_channel . h " <nl> + # include " src / core / lib / debug / stats . h " <nl> # include " src / core / lib / iomgr / executor . h " <nl> # include " src / core / lib / iomgr / iomgr . h " <nl> # include " src / core / lib / slice / slice_internal . h " <nl> static void publish_new_rpc ( grpc_exec_ctx * exec_ctx , void * arg , <nl> if ( request_id = = - 1 ) { <nl> continue ; <nl> } else { <nl> + GRPC_STATS_INC_SERVER_CQS_CHECKED ( exec_ctx , i ) ; <nl> gpr_mu_lock ( & calld - > mu_state ) ; <nl> calld - > state = ACTIVATED ; <nl> gpr_mu_unlock ( & calld - > mu_state ) ; <nl> static void publish_new_rpc ( grpc_exec_ctx * exec_ctx , void * arg , <nl> } <nl> <nl> / * no cq to take the request found : queue it on the slow list * / <nl> + GRPC_STATS_INC_SERVER_SLOWPATH_REQUESTS_QUEUED ( exec_ctx ) ; <nl> gpr_mu_lock ( & server - > mu_call ) ; <nl> gpr_mu_lock ( & calld - > mu_state ) ; <nl> calld - > state = PENDING ; <nl> grpc_call_error grpc_server_request_call ( <nl> grpc_call_error error ; <nl> grpc_exec_ctx exec_ctx = GRPC_EXEC_CTX_INIT ; <nl> requested_call * rc = ( requested_call * ) gpr_malloc ( sizeof ( * rc ) ) ; <nl> + GRPC_STATS_INC_SERVER_REQUESTED_CALLS ( & exec_ctx ) ; <nl> GRPC_API_TRACE ( <nl> " grpc_server_request_call ( " <nl> " server = % p , call = % p , details = % p , initial_metadata = % p , " <nl> grpc_call_error grpc_server_request_registered_call ( <nl> grpc_exec_ctx exec_ctx = GRPC_EXEC_CTX_INIT ; <nl> requested_call * rc = ( requested_call * ) gpr_malloc ( sizeof ( * rc ) ) ; <nl> registered_method * rm = ( registered_method * ) rmp ; <nl> + GRPC_STATS_INC_SERVER_REQUESTED_CALLS ( & exec_ctx ) ; <nl> GRPC_API_TRACE ( <nl> " grpc_server_request_registered_call ( " <nl> " server = % p , rmp = % p , call = % p , deadline = % p , initial_metadata = % p , " <nl> | Merge pull request from ctiller / server_stats | grpc/grpc | af57a57a5aae3249e829500068f93fb8d011f37c | 2017-09-12T02:06:09Z |
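One detail of the generated grpc stats code above worth unpacking: grpc_stats_inc_server_cqs_checked picks a histogram bucket by storing the sample into a union of double and uint64_t and shifting the bit pattern, so the bucket index is derived from the IEEE-754 exponent rather than a linear scan over boundaries. The sketch below demonstrates that exponent trick in isolation with simple power-of-two buckets; the shift amount and tables in the real generated file differ, so treat the constants here as illustrative only.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static int exponent_of(double d) {
      std::uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);                    // the generated C file uses a union instead
      return static_cast<int>((bits >> 52) & 0x7ff) - 1023;   // unbiased IEEE-754 exponent
    }

    // Maps a non-negative sample to a power-of-two bucket: 0, [1,2), [2,4), [4,8), ...
    static int bucket_for(int value) {
      if (value < 1) return 0;
      return exponent_of(static_cast<double>(value)) + 1;
    }

    int main() {
      const int samples[] = {0, 1, 3, 7, 13, 64};
      for (int v : samples) {
        std::printf("value %2d -> bucket %d\n", v, bucket_for(v));
      }
    }

The generated code additionally falls back to grpc_stats_histo_find_bucket_slow for samples past the table range, which this sketch omits.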
mmm a / xbmc / guilib / GUIFontTTFDX . cpp <nl> ppp b / xbmc / guilib / GUIFontTTFDX . cpp <nl> void CGUIFontTTFDX : : LastEnd ( ) <nl> <nl> / / Store current GPU transform <nl> XMMATRIX view = pGUIShader - > GetView ( ) ; <nl> + / / Store currect scissor <nl> + CRect scissor = g_graphicsContext . StereoCorrection ( g_graphicsContext . GetScissors ( ) ) ; <nl> <nl> for ( size_t i = 0 ; i < m_vertexTrans . size ( ) ; i + + ) <nl> { <nl> void CGUIFontTTFDX : : LastEnd ( ) <nl> <nl> / / Apply the clip rectangle <nl> CRect clip = g_Windowing . ClipRectToScissorRect ( m_vertexTrans [ i ] . clip ) ; <nl> + / / Intersect with current scissors <nl> + clip . Intersect ( scissor ) ; <nl> + <nl> + / / skip empty clip , a little improvement to not render invisible text <nl> + if ( clip . IsEmpty ( ) ) <nl> + continue ; <nl> + <nl> g_Windowing . SetScissors ( clip ) ; <nl> <nl> / / Apply the translation to the model view matrix <nl> void CGUIFontTTFDX : : LastEnd ( ) <nl> } <nl> } <nl> <nl> - g_Windowing . ResetScissors ( ) ; <nl> + / / restore scissor <nl> + g_Windowing . SetScissors ( scissor ) ; <nl> + <nl> / / Restore the original transform <nl> pGUIShader - > SetView ( view ) ; <nl> } <nl> mmm a / xbmc / rendering / dx / RenderSystemDX . cpp <nl> ppp b / xbmc / rendering / dx / RenderSystemDX . cpp <nl> <nl> # include " cores / VideoRenderers / RenderManager . h " <nl> # include " guilib / D3DResource . h " <nl> # include " guilib / GUIShaderDX . h " <nl> + # include " guilib / GUITextureD3D . h " <nl> # include " guilib / GUIWindowManager . h " <nl> # include " settings / AdvancedSettings . h " <nl> # include " threads / SingleLock . h " <nl> CRenderSystemDX : : CRenderSystemDX ( ) : CRenderSystemBase ( ) <nl> m_bHWStereoEnabled = false ; <nl> ZeroMemory ( & m_cachedMode , sizeof ( m_cachedMode ) ) ; <nl> ZeroMemory ( & m_viewPort , sizeof ( m_viewPort ) ) ; <nl> + ZeroMemory ( & m_scissor , sizeof ( CRect ) ) ; <nl> } <nl> <nl> CRenderSystemDX : : ~ CRenderSystemDX ( ) <nl> bool CRenderSystemDX : : PresentRenderImpl ( const CDirtyRegionList & dirty ) <nl> <nl> FinishCommandList ( ) ; <nl> <nl> - if ( m_pSwapChain1 ) <nl> + if ( m_pSwapChain1 ) <nl> { <nl> / / will use optimized present with dirty regions . <nl> DXGI_PRESENT_PARAMETERS presentParams = { } ; <nl> bool CRenderSystemDX : : PresentRenderImpl ( const CDirtyRegionList & dirty ) <nl> { <nl> m_bResizeRequred = true ; <nl> if ( CreateWindowSizeDependentResources ( ) ) <nl> - return true ; <nl> + hr = S_OK ; <nl> } <nl> <nl> if ( FAILED ( hr ) ) <nl> bool CRenderSystemDX : : PresentRenderImpl ( const CDirtyRegionList & dirty ) <nl> return false ; <nl> } <nl> <nl> + / / after present swapchain unbinds RT view from immediate context , need to restore it because it can be used by something else <nl> + if ( m_pContext = = m_pImdContext ) <nl> + m_pContext - > OMSetRenderTargets ( 1 , & m_pRenderTargetView , m_depthStencilView ) ; <nl> + <nl> return true ; <nl> } <nl> <nl> bool CRenderSystemDX : : ClearBuffers ( color_t color ) <nl> <nl> float fColor [ 4 ] ; <nl> CD3DHelper : : XMStoreColor ( fColor , color ) ; <nl> + ID3D11RenderTargetView * pRTView = m_pRenderTargetView ; <nl> <nl> - / / Unlike Direct3D 9 , the full extent of the resource view is always cleared . Viewport and scissor settings are not applied . <nl> if ( m_stereoMode ! = RENDER_STEREO_MODE_OFF <nl> & & m_stereoMode ! = RENDER_STEREO_MODE_MONO ) <nl> { <nl> bool CRenderSystemDX : : ClearBuffers ( color_t color ) <nl> & & m_stereoMode ! 
= RENDER_STEREO_MODE_SPLIT_VERTICAL ) <nl> FinishCommandList ( ) ; <nl> <nl> + / / do not clear RT for anaglyph modes <nl> + if ( m_stereoMode = = RENDER_STEREO_MODE_ANAGLYPH_GREEN_MAGENTA <nl> + | | m_stereoMode = = RENDER_STEREO_MODE_ANAGLYPH_RED_CYAN <nl> + | | m_stereoMode = = RENDER_STEREO_MODE_ANAGLYPH_YELLOW_BLUE ) <nl> + { <nl> + pRTView = nullptr ; <nl> + } <nl> / / for interlaced / checkerboard / hw clear right view <nl> - if ( m_pRenderTargetViewRight ) <nl> - m_pContext - > ClearRenderTargetView ( m_pRenderTargetViewRight , fColor ) ; <nl> + else if ( m_pRenderTargetViewRight ) <nl> + pRTView = m_pRenderTargetViewRight ; <nl> <nl> / / for hw stereo clear depth view also <nl> if ( m_stereoMode = = RENDER_STEREO_MODE_HARDWAREBASED ) <nl> m_pContext - > ClearDepthStencilView ( m_depthStencilView , D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL , 1 . 0 , 0 ) ; <nl> - <nl> - return true ; <nl> } <nl> } <nl> <nl> - m_pContext - > ClearRenderTargetView ( m_pRenderTargetView , fColor ) ; <nl> + if ( pRTView = = nullptr ) <nl> + return true ; <nl> + <nl> + CRect clRect ( 0 , 0 , m_nBackBufferWidth , m_nBackBufferHeight ) ; <nl> + <nl> + / / Unlike Direct3D 9 , D3D11 ClearRenderTargetView always clears full extent of the resource view . <nl> + / / Viewport and scissor settings are not applied . So clear RT by drawing full sized rect with clear color <nl> + if ( m_ScissorsEnabled & & m_scissor ! = clRect ) <nl> + { <nl> + bool alphaEnabled = m_BlendEnabled ; <nl> + if ( alphaEnabled ) <nl> + SetAlphaBlendEnable ( false ) ; <nl> + <nl> + CGUITextureD3D : : DrawQuad ( clRect , color ) ; <nl> + <nl> + if ( alphaEnabled ) <nl> + SetAlphaBlendEnable ( true ) ; <nl> + } <nl> + else <nl> + m_pContext - > ClearRenderTargetView ( pRTView , fColor ) ; <nl> + <nl> m_pContext - > ClearDepthStencilView ( m_depthStencilView , D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL , 1 . 0 , 0 ) ; <nl> return true ; <nl> } <nl> void CRenderSystemDX : : SetScissors ( const CRect & rect ) <nl> if ( ! m_bRenderCreated ) <nl> return ; <nl> <nl> - D3D11_RECT scissor ; <nl> - scissor . left = MathUtils : : round_int ( rect . x1 ) ; <nl> - scissor . top = MathUtils : : round_int ( rect . y1 ) ; <nl> - scissor . right = MathUtils : : round_int ( rect . x2 ) ; <nl> - scissor . bottom = MathUtils : : round_int ( rect . y2 ) ; <nl> + m_scissor = rect ; <nl> + CD3D11_RECT scissor ( MathUtils : : round_int ( rect . x1 ) <nl> + , MathUtils : : round_int ( rect . y1 ) <nl> + , MathUtils : : round_int ( rect . x2 ) <nl> + , MathUtils : : round_int ( rect . y2 ) ) ; <nl> <nl> + m_pContext - > RSSetScissorRects ( 1 , & scissor ) ; <nl> m_pContext - > RSSetState ( m_RSScissorEnable ) ; <nl> m_ScissorsEnabled = true ; <nl> - m_pContext - > RSSetScissorRects ( 1 , & scissor ) ; <nl> } <nl> <nl> void CRenderSystemDX : : ResetScissors ( ) <nl> void CRenderSystemDX : : ResetScissors ( ) <nl> if ( ! m_bRenderCreated ) <nl> return ; <nl> <nl> - D3D11_RECT scissor ; <nl> - scissor . left = 0 ; <nl> - scissor . top = 0 ; <nl> - scissor . right = m_nBackBufferWidth ; <nl> - scissor . bottom = m_nBackBufferHeight ; <nl> + m_scissor . SetRect ( 0 , 0 , m_nBackBufferWidth , m_nBackBufferHeight ) ; <nl> <nl> - m_pContext - > RSSetScissorRects ( 1 , & scissor ) ; <nl> m_pContext - > RSSetState ( m_RSScissorDisable ) ; <nl> m_ScissorsEnabled = false ; <nl> } <nl> mmm a / xbmc / rendering / dx / RenderSystemDX . h <nl> ppp b / xbmc / rendering / dx / RenderSystemDX . 
h <nl> class CRenderSystemDX : public CRenderSystemBase <nl> ID3D11RenderTargetView * m_pRenderTargetView ; <nl> ID3D11DepthStencilState * m_depthStencilState ; <nl> ID3D11DepthStencilView * m_depthStencilView ; <nl> - D3D11_VIEWPORT m_viewPort ; <nl> + D3D11_VIEWPORT m_viewPort ; <nl> + CRect m_scissor ; <nl> <nl> CGUIShaderDX * m_pGUIShader ; <nl> ID3D11BlendState * m_BlendEnableState ; <nl> | [ rendering ] Fix rendering with dirty region algorithm 1 and 2 | xbmc/xbmc | 799007c9d62ae00bd264613e1180a47255424751 | 2015-07-04T19:30:33Z |
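The row above addresses two Direct3D 11 quirks in Kodi's renderer: ClearRenderTargetView ignores the viewport and scissor (so a full-sized quad in the clear colour is drawn instead when a scissor is active), and text glyphs must be clipped against both their own clip rect and the currently active scissor, skipping the draw entirely when nothing would be visible. Below is a minimal, self-contained C++ sketch of that second part, as done in CGUIFontTTFDX::LastEnd above; the Rect type and function names here are illustrative stand-ins, not Kodi's actual CRect / ClipRectToScissorRect API:

    #include <algorithm>

    struct Rect {
      float x1, y1, x2, y2;
      bool IsEmpty() const { return x2 <= x1 || y2 <= y1; }
      void Intersect(const Rect& r) {
        x1 = std::max(x1, r.x1);
        y1 = std::max(y1, r.y1);
        x2 = std::min(x2, r.x2);
        y2 = std::min(y2, r.y2);
      }
    };

    // Returns true when any part of the glyph's clip rect survives the scissor,
    // i.e. when drawing the text can actually produce visible pixels.
    bool ShouldRenderText(Rect clip, const Rect& scissor) {
      clip.Intersect(scissor);  // stay inside the currently active scissor
      return !clip.IsEmpty();   // invisible text can be skipped entirely
    }

Skipping the empty-intersection case is the "little improvement to not render invisible text" mentioned in the commit's comment, and the saved scissor is restored afterwards rather than reset to the full back buffer.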
mmm a / hphp / hack / . ocamlformat <nl> ppp b / hphp / hack / . ocamlformat <nl> break - sequences = true <nl> break - string - literals = never <nl> cases - exp - indent = 2 <nl> disambiguate - non - breaking - match = true <nl> + doc - comments = before <nl> exp - grouping = preserve <nl> field - space = tight - decl <nl> if - then - else = k - r <nl> mmm a / hphp / hack / src / annotated_ast / aast . ml <nl> ppp b / hphp / hack / src / annotated_ast / aast . ml <nl> and ( ' ex , ' fb , ' en , ' hi ) fun_ = { <nl> f_static : bool ; <nl> } <nl> <nl> - and ( ' ex , ' fb , ' en , ' hi ) func_body = { <nl> - fb_ast : ( ' ex , ' fb , ' en , ' hi ) block ; <nl> - fb_annotation : ' fb ; <nl> - } <nl> ( * * <nl> * Naming has two phases and the annotation helps to indicate the phase . <nl> * In the first pass , it will perform naming on everything except for function <nl> and ( ' ex , ' fb , ' en , ' hi ) func_body = { <nl> * have named and unnamed variants of the annotation . <nl> * See BodyNamingAnnotation in nast . ml and the comment in naming . ml <nl> * ) <nl> + and ( ' ex , ' fb , ' en , ' hi ) func_body = { <nl> + fb_ast : ( ' ex , ' fb , ' en , ' hi ) block ; <nl> + fb_annotation : ' fb ; <nl> + } <nl> <nl> ( * A type annotation is two things : <nl> - the localized hint , or if the hint is missing , the inferred type <nl> mmm a / hphp / hack / src / annotated_ast / ast_to_aast . ml <nl> ppp b / hphp / hack / src / annotated_ast / ast_to_aast . ml <nl> <nl> - open Ast <nl> - ( * * <nl> + ( * <nl> * Copyright ( c ) 2015 , Facebook , Inc . <nl> * All rights reserved . <nl> * <nl> open Ast <nl> * <nl> * ) <nl> <nl> + open Ast <nl> open Core_kernel <nl> module SN = Naming_special_names <nl> <nl> mmm a / hphp / hack / src / client / clientLsp . ml <nl> ppp b / hphp / hack / src / client / clientLsp . ml <nl> type hh_server_state = <nl> | Hh_server_stolen <nl> | Hh_server_forgot <nl> <nl> - type server_message = { <nl> - push : ServerCommandTypes . push ; <nl> - has_updated_server_state : bool ; <nl> - } <nl> ( * * A push message from the server might come while we ' re waiting for a server - rpc <nl> response , or while we ' re free . The current architecture allows us to have <nl> arbitrary responses to push messages while we ' re free , but only a limited set <nl> type server_message = { <nl> notion of the server_state , or send a message to the client , but we can ' t <nl> update our own state monad . The has_ * fields are ad - hoc push - specific indicators <nl> of whether we ' ve done some part of the response during the rpc . * ) <nl> + type server_message = { <nl> + push : ServerCommandTypes . push ; <nl> + has_updated_server_state : bool ; <nl> + } <nl> <nl> type server_conn = { <nl> ic : Timeout . in_channel ; <nl> mmm a / hphp / hack / src / client / ide_service / clientIdeIncremental . mli <nl> ppp b / hphp / hack / src / client / ide_service / clientIdeIncremental . mli <nl> <nl> * <nl> * ) <nl> <nl> - val process_changed_file : ServerEnv . env - > Path . t - > ServerEnv . env Lwt . t <nl> ( * * Update the forward and reverse naming tables by parsing the file at the <nl> given path and installing their declarations . If the file could not be read , <nl> it ' s assumed to be deleted . <nl> <nl> Returns an updated [ ServerEnv . env ] , but also <nl> modifies the global naming table state in [ NamingGlobal ] . * ) <nl> + val process_changed_file : ServerEnv . env - > Path . t - > ServerEnv . env Lwt . t <nl> mmm a / hphp / hack / src / client / ide_service / clientIdeService . 
mli <nl> ppp b / hphp / hack / src / client / ide_service / clientIdeService . mli <nl> <nl> * <nl> * ) <nl> <nl> - type t <nl> ( * * Provides IDE services in the client , without an instance of hh_server <nl> running . <nl> <nl> information to provide IDE services for just the files you ' re looking at . When <nl> we need to look up declarations to service an IDE query , we parse and typecheck <nl> the files containing those declarations on - demand , then answer your IDE query . <nl> * ) <nl> + type t <nl> <nl> module Status : sig <nl> type t = <nl> module Status : sig <nl> | Crashed of string ( * * The IDE services are not available . * ) <nl> end <nl> <nl> - val make : unit - > t <nl> ( * * Create an uninitialized IDE service . All queries made to this service will <nl> fail immediately , unless otherwise requested in the initialization procedure . * ) <nl> + val make : unit - > t <nl> <nl> + ( * * Request that the IDE service initialize from the saved state . Queries made <nl> + to the service will fail until it is done initializing , unless <nl> + [ wait_for_initialization ] is [ true ] , in which case queries made to the service <nl> + will block until the initializing is complete . * ) <nl> val initialize_from_saved_state : <nl> t - > <nl> root : Path . t - > <nl> naming_table_saved_state_path : Path . t option - > <nl> wait_for_initialization : bool - > <nl> ( unit , string ) Lwt_result . t <nl> - ( * * Request that the IDE service initialize from the saved state . Queries made <nl> - to the service will fail until it is done initializing , unless <nl> - [ wait_for_initialization ] is [ true ] , in which case queries made to the service <nl> - will block until the initializing is complete . * ) <nl> <nl> - val serve : t - > unit Lwt . t <nl> ( * * Pump the message loop for the IDE service . Exits once the IDE service has <nl> been [ destroy ] ed . * ) <nl> + val serve : t - > unit Lwt . t <nl> <nl> - val destroy : t - > unit Lwt . t <nl> ( * * Clean up any resources held by the IDE service ( such as the message loop and <nl> background processes ) . * ) <nl> + val destroy : t - > unit Lwt . t <nl> <nl> - val notify_file_changed : t - > Path . t - > unit <nl> ( * * The caller is expected to call this function to notify the IDE service <nl> whenever a Hack file changes on disk , so that it can update its indexes <nl> appropriately . * ) <nl> + val notify_file_changed : t - > Path . t - > unit <nl> <nl> - val rpc : t - > ' response ClientIdeMessage . t - > ( ' response , string ) Lwt_result . t <nl> ( * * Make an RPC call to the IDE service . * ) <nl> + val rpc : t - > ' response ClientIdeMessage . t - > ( ' response , string ) Lwt_result . t <nl> <nl> - val get_notifications : t - > ClientIdeMessage . notification Lwt_message_queue . t <nl> ( * * Get a handle to the stream of notifications sent by the IDE service . These <nl> notifications may be sent even during RPC requests , and so should be processed <nl> asynchronously . * ) <nl> + val get_notifications : t - > ClientIdeMessage . notification Lwt_message_queue . t <nl> <nl> - val get_status : t - > Status . t <nl> ( * * Get the status of the IDE services , based on the internal state and any <nl> notifications that the IDE service process has sent to us . * ) <nl> + val get_status : t - > Status . t <nl> mmm a / hphp / hack / src / decl / affectedDeps . mli <nl> ppp b / hphp / hack / src / decl / affectedDeps . mli <nl> <nl> <nl> open Typing_deps <nl> <nl> + ( * * AffectedDeps . 
t represents the " fanout " of a change , representing the cached <nl> + information we must invalidate and the files we must re - typecheck if we want <nl> + to produce a correct list of all errors in the repository which reflects <nl> + those changes . * ) <nl> type t = { <nl> changed : DepSet . t ; <nl> ( * * The subset of classes in changed files whose decls changed ( excluding <nl> type t = { <nl> rechecking . Instead , Typing_deps maintains a mapping from symbol hash <nl> to filename ( exposed via Typing_deps . get_files ) . * ) <nl> } <nl> - ( * * AffectedDeps . t represents the " fanout " of a change , representing the cached <nl> - information we must invalidate and the files we must re - typecheck if we want <nl> - to produce a correct list of all errors in the repository which reflects <nl> - those changes . * ) <nl> <nl> val empty : t <nl> <nl> mmm a / hphp / hack / src / decl / decl_redecl_service . mli <nl> ppp b / hphp / hack / src / decl / decl_redecl_service . mli <nl> val redo_type_decl : <nl> Naming_table . fast - > <nl> Errors . t * DepSet . t * DepSet . t * DepSet . t <nl> <nl> - val get_dependent_classes : <nl> - MultiWorker . worker list option - > <nl> - bucket_size : int - > <nl> - ( Relative_path . t - > SSet . t ) - > <nl> - SSet . t - > <nl> - SSet . t <nl> ( * * <nl> * Exposed for tests only ! <nl> * For a set of classes , return all the declared classes that share their class <nl> val get_dependent_classes : <nl> * Not for general use case since it doesn ' t use lazy decl and makes sense only <nl> * in a very particular use case of invalidate_type_decl . <nl> * ) <nl> + val get_dependent_classes : <nl> + MultiWorker . worker list option - > <nl> + bucket_size : int - > <nl> + ( Relative_path . t - > SSet . t ) - > <nl> + SSet . t - > <nl> + SSet . t <nl> <nl> val oldify_type_decl : <nl> ? collect_garbage : bool - > <nl> mmm a / hphp / hack / src / decl / decl_to_typing . ml <nl> ppp b / hphp / hack / src / decl / decl_to_typing . ml <nl> open Shallow_decl_defs <nl> open Typing_defs <nl> module Reason = Typing_reason <nl> <nl> - type tagged_elt = { <nl> - id : string ; <nl> - inherit_when_private : bool ; <nl> - elt : class_elt ; <nl> - } <nl> ( * * [ tagged_elt ] is a representation internal to Decl_inheritance which is used <nl> for both methods and properties ( members represented using <nl> { ! Typing_defs . class_elt } ) . Tagging these members with [ inherit_when_private ] <nl> allows us to assign private trait members to the class which used the trait <nl> and to filter out other private members . * ) <nl> + type tagged_elt = { <nl> + id : string ; <nl> + inherit_when_private : bool ; <nl> + elt : class_elt ; <nl> + } <nl> <nl> let method_redeclaration_to_shallow_method smr = <nl> let { <nl> mmm a / hphp / hack / src / decl / shallow_classes_heap . mli <nl> ppp b / hphp / hack / src / decl / shallow_classes_heap . mli <nl> <nl> <nl> open Shallow_decl_defs <nl> <nl> - val get : string - > shallow_class option <nl> ( * * Return the shallow declaration of the class with the given name if it is <nl> present in the cache . Otherwise , compute it , store it in the cache , and <nl> return it . <nl> <nl> Raises [ Failure ] if [ shallow_class_decl ] is not enabled . * ) <nl> + val get : string - > shallow_class option <nl> <nl> - val class_naming_and_decl : Nast . class_ - > shallow_class <nl> ( * * Convert the given class AST to a shallow class declaration and return it . * ) <nl> + val class_naming_and_decl : Nast . 
class_ - > shallow_class <nl> <nl> - val class_decl_if_missing : Nast . class_ - > shallow_class <nl> ( * * If a shallow declaration for the class with the given name is present in the <nl> cache , return it . Otherwise , convert the given class AST to a shallow class <nl> declaration , store it in the cache , and return it . <nl> <nl> Raises [ Failure ] if [ shallow_class_decl ] is not enabled . * ) <nl> + val class_decl_if_missing : Nast . class_ - > shallow_class <nl> <nl> val push_local_changes : unit - > unit <nl> <nl> mmm a / hphp / hack / src / hh_oxidize / oxidized_module . ml <nl> ppp b / hphp / hack / src / hh_oxidize / oxidized_module . ml <nl> <nl> <nl> open Reordered_argument_collections <nl> <nl> + ( * * This type is mostly strings for the sake of making conversion easy , but we <nl> + retain some structure for the postprocessing and formatting we do in <nl> + { ! Stringify } . * ) <nl> type t = { <nl> extern_uses : SSet . t ; <nl> ( * names of types ( or derive macros ) to import from other Rust crates * ) <nl> type t = { <nl> decls : ( string * string ) list ; <nl> ( * ( name , rust_syntax_for_entire_declaration ) * ) <nl> } <nl> - ( * * This type is mostly strings for the sake of making conversion easy , but we <nl> - retain some structure for the postprocessing and formatting we do in <nl> - { ! Stringify } . * ) <nl> <nl> let empty = <nl> { <nl> mmm a / hphp / hack / src / hh_oxidize / state . mli <nl> ppp b / hphp / hack / src / hh_oxidize / state . mli <nl> <nl> * <nl> * ) <nl> <nl> - val curr_module_name : unit - > string <nl> ( * * The name of the module currently being converted . * ) <nl> + val curr_module_name : unit - > string <nl> <nl> - val with_module_name : string - > ( unit - > ' a ) - > ' a <nl> ( * * Run the given function in a context where { ! curr_module_name } will return <nl> the given module name . Not re - entrant . * ) <nl> + val with_module_name : string - > ( unit - > ' a ) - > ' a <nl> <nl> - val self : unit - > string <nl> ( * * The name of the type currently being converted . * ) <nl> + val self : unit - > string <nl> <nl> - val with_self : string - > ( unit - > ' a ) - > ' a <nl> ( * * Run the given function in a context where { ! self } will return the given type <nl> name . Not re - entrant . * ) <nl> + val with_self : string - > ( unit - > ' a ) - > ' a <nl> mmm a / hphp / hack / src / hh_oxidize / utils . ml <nl> ppp b / hphp / hack / src / hh_oxidize / utils . ml <nl> <nl> open Core_kernel <nl> open Reordered_argument_collections <nl> <nl> - exception Skip_type_decl of string <nl> ( * * HACK : Raised when we encounter a construct in a type declaration which we <nl> have chosen not to handle ( because it occurs in a type declaration which we <nl> do not need to convert at this time ) . * ) <nl> + exception Skip_type_decl of string <nl> <nl> let log_indent = ref 0 <nl> <nl> mmm a / hphp / hack / src / hh_single_compile . ml <nl> ppp b / hphp / hack / src / hh_single_compile . ml <nl> let handle_conversion_errors errors = <nl> ( * Ignore these errors to match legacy AST behavior * ) <nl> | 2086 <nl> ( * Naming . MethodNeedsVisibility * ) <nl> - <nl> + <nl> | 2102 <nl> ( * Naming . UnsupportedTraitUseAs * ) <nl> - <nl> + <nl> | 2103 ( * Naming . UnsupportedInsteadOf * ) - > <nl> false <nl> | _ ( * Emit fatal parse otherwise * ) - > true ) <nl> mmm a / hphp / hack / src / hhbc / hhbc_hhas . mli <nl> ppp b / hphp / hack / src / hhbc / hhbc_hhas . mli <nl> val to_segments : <nl> Hhas_program . 
t - > <nl> string list <nl> <nl> - val to_string : <nl> - ? path : Relative_path . t - > ? dump_symbol_refs : bool - > Hhas_program . t - > string <nl> ( * * <nl> * Materializing the hhbc as a single string may introduce additional runtime <nl> * memory usage . Prefer to_hhbc_accumulator . <nl> * ) <nl> + val to_string : <nl> + ? path : Relative_path . t - > ? dump_symbol_refs : bool - > Hhas_program . t - > string <nl> <nl> val string_of_instruction : Hhbc_ast . instruct - > string <nl> <nl> mmm a / hphp / hack / src / hhbc / hhbc_string_utils . ml <nl> ppp b / hphp / hack / src / hhbc / hhbc_string_utils . ml <nl> module Integer = struct <nl> | ' b ' <nl> | ' B ' <nl> ( * Hex * ) <nl> - <nl> + <nl> | ' x ' <nl> | ' X ' - > <nl> s <nl> mmm a / hphp / hack / src / libancillary / libancillary . mli <nl> ppp b / hphp / hack / src / libancillary / libancillary . mli <nl> <nl> <nl> exception Receiving_Fd_Exception <nl> <nl> + ( * * Returns 0 for success , - 1 on failure . * ) <nl> val ancil_send_fd : <nl> Unix . file_descr ( * * The fd of the socket to send the payload over * ) - > <nl> Unix . file_descr ( * * The file descriptor you want to send * ) - > <nl> int <nl> - ( * * Returns 0 for success , - 1 on failure . * ) <nl> <nl> + ( * * The fd received * ) <nl> val ancil_recv_fd : <nl> Unix . file_descr ( * * The fd of the socket to receive the payload over * ) - > <nl> Unix . file_descr <nl> - ( * * The fd received * ) <nl> mmm a / hphp / hack / src / monitor / serverMonitor . mli <nl> ppp b / hphp / hack / src / monitor / serverMonitor . mli <nl> module Make_monitor <nl> ( Informant : Informant_sig . S ) : sig <nl> type t <nl> <nl> + ( * * Start a monitor without running the check loop . Useful for testing . * ) <nl> val start_monitor : <nl> current_version : Config_file . version - > <nl> waiting_client : Unix . file_descr option - > <nl> module Make_monitor <nl> Informant . init_env - > <nl> ServerMonitorUtils . monitor_config - > <nl> t <nl> - ( * * Start a monitor without running the check loop . Useful for testing . * ) <nl> <nl> - val check_and_run_loop_once : t - > t <nl> ( * * Run the check loop once . Useful for testing . * ) <nl> + val check_and_run_loop_once : t - > t <nl> <nl> + ( * * Start the monitor and repeatedly run the check and run loop . <nl> + * Does not return . * ) <nl> val start_monitoring : <nl> current_version : Config_file . version - > <nl> waiting_client : Unix . file_descr option - > <nl> module Make_monitor <nl> Informant . init_env - > <nl> ServerMonitorUtils . monitor_config - > <nl> ' a <nl> - ( * * Start the monitor and repeatedly run the check and run loop . <nl> - * Does not return . * ) <nl> end <nl> mmm a / hphp / hack / src / naming / globalNamingOptions . mli <nl> ppp b / hphp / hack / src / naming / globalNamingOptions . mli <nl> <nl> * <nl> * ) <nl> <nl> - val get : unit - > TypecheckerOptions . t <nl> ( * * Get the global [ TypecheckerOptions . t ] to be used in Naming and Decl . <nl> <nl> Raises [ Failure ] if [ set ] has not yet been invoked . * ) <nl> + val get : unit - > TypecheckerOptions . t <nl> <nl> - val set : TypecheckerOptions . t - > unit <nl> ( * * Set the global [ TypecheckerOptions . t ] to be used in Naming and Decl for the <nl> entire lifetime of the server . <nl> <nl> Has no effect if [ set ] has already been invoked . * ) <nl> + val set : TypecheckerOptions . t - > unit <nl> mmm a / hphp / hack / src / naming / naming . ml <nl> ppp b / hphp / hack / src / naming / naming . 
ml <nl> <nl> * <nl> * ) <nl> <nl> - open Core_kernel <nl> ( * * Module " naming " a program . <nl> * <nl> * The naming phase consists in several things <nl> open Core_kernel <nl> * 2 - transform all the local names into a unique identifier <nl> * ) <nl> <nl> + open Core_kernel <nl> open Common <nl> open Utils <nl> open String_utils <nl> mmm a / hphp / hack / src / naming / namingGlobal . ml <nl> ppp b / hphp / hack / src / naming / namingGlobal . ml <nl> <nl> * <nl> * ) <nl> <nl> - open Core_kernel <nl> ( * * Module " naming " a program . <nl> * <nl> * The naming phase consists in several things <nl> open Core_kernel <nl> * 2 - transform all the local names into a unique identifier <nl> * ) <nl> <nl> + open Core_kernel <nl> open Utils <nl> module SN = Naming_special_names <nl> <nl> mmm a / hphp / hack / src / naming / naming_table . mli <nl> ppp b / hphp / hack / src / naming / naming_table . mli <nl> val combine : t - > t - > t <nl> <nl> val empty : t <nl> <nl> - val filter : t - > f : ( Relative_path . t - > FileInfo . t - > bool ) - > t <nl> ( * * [ filter ] is implemented using tombstones on SQLite - backed naming tables , so <nl> * if your naming table is backed by SQLite you should try to avoid removing <nl> * more than half the table by filtering ( otherwise it would be best to just <nl> * make a new empty one and add elements to it ) . On non - SQLite backed tables <nl> * we remove entries , so it ' s no more or less efficient depending on how many <nl> * are removed . * ) <nl> + val filter : t - > f : ( Relative_path . t - > FileInfo . t - > bool ) - > t <nl> <nl> val fold : t - > init : ' b - > f : ( Relative_path . t - > FileInfo . t - > ' b - > ' b ) - > ' b <nl> <nl> mmm a / hphp / hack / src / options / buildOptions . mli <nl> ppp b / hphp / hack / src / options / buildOptions . mli <nl> <nl> * <nl> * ) <nl> <nl> - val system_config_path : string <nl> ( * * <nl> * Where we look for system - wide configuration files . <nl> * <nl> val system_config_path : string <nl> * - " / etc " : most linux systems <nl> * - " / usr / local / etc " : MacOS homebrew <nl> * ) <nl> + val system_config_path : string <nl> <nl> - val default_hackfmt_path : string <nl> ( * * <nl> * Where to look for hackfmt . <nl> * <nl> val default_hackfmt_path : string <nl> * - " / usr / bin / hackfmt " : most linux binary builds <nl> * - " / usr / local / Cellar / hhvm / VERSION / bin / hackfmt " : MacOS homebrew <nl> * ) <nl> + val default_hackfmt_path : string <nl> mmm a / hphp / hack / src / parser / coroutine / coroutine_lowerer . mli <nl> ppp b / hphp / hack / src / parser / coroutine / coroutine_lowerer . mli <nl> <nl> <nl> module Syntax = Full_fidelity_editable_positioned_syntax <nl> <nl> - val lower_coroutines : Syntax . t - > Syntax . t <nl> ( * * <nl> * Transforms a full - fidelity syntax tree to generate the required constructs <nl> * for coroutines . The resulting full - fidelity syntax tree should contain the <nl> val lower_coroutines : Syntax . t - > Syntax . t <nl> * Kotlin . You can read more about Kotlin ' s design and implementation details at <nl> * https : / / github . com / Kotlin / kotlin - coroutines / blob / master / kotlin - coroutines - informal . md . <nl> * ) <nl> + val lower_coroutines : Syntax . t - > Syntax . t <nl> mmm a / hphp / hack / src / parser / docblock_finder . mli <nl> ppp b / hphp / hack / src / parser / docblock_finder . mli <nl> val make_docblock_finder : ( Pos . t * Prim_defs . 
comment ) list - > finder <nl> <nl> val find_docblock : finder - > int - > int - > string option <nl> <nl> - val find_inline_comment : finder - > int - > string option <nl> ( * * Find the last comment on ` line ` if it exists . * ) <nl> + val find_inline_comment : finder - > int - > string option <nl> <nl> - val get_docblock : Full_fidelity_positioned_syntax . t - > string option <nl> ( * * Returns the docblock for the passed in syntax node . * ) <nl> + val get_docblock : Full_fidelity_positioned_syntax . t - > string option <nl> mmm a / hphp / hack / src / parser / docblock_parser . mli <nl> ppp b / hphp / hack / src / parser / docblock_parser . mli <nl> <nl> <nl> open Core_kernel <nl> <nl> - val get_param_docs : docblock : string - > string String . Map . t <nl> ( * * Takes a docblock with asterisks and leading / ending slashes removed . <nl> Returns the parameters mentioned in the docblock ( with @ param ) and their <nl> descriptions with newlines removed . <nl> Parameters can be mentioned with the leading ' $ ' or not : they will be <nl> indexed in the map with the ' $ ' regardless . <nl> * ) <nl> + val get_param_docs : docblock : string - > string String . Map . t <nl> mmm a / hphp / hack / src / parser / full_fidelity_ast . ml <nl> ppp b / hphp / hack / src / parser / full_fidelity_ast . ml <nl> if there already is one , since that one will likely be better than this one . * ) <nl> | ( _ , Some TK . HexadecimalLiteral ) <nl> ( * We allow underscores while lexing the integer literals . This gets rid of them before <nl> * the literal is created . * ) <nl> - <nl> + <nl> | ( _ , Some TK . BinaryLiteral ) - > <nl> Int ( Str . global_replace underscore " " s ) <nl> | ( _ , Some TK . FloatingLiteral ) - > Float s <nl> if there already is one , since that one will likely be better than this one . * ) <nl> let rec aux env acc = function <nl> | [ ] <nl> ( * EOF happens only as the last token in the list . * ) <nl> - <nl> + <nl> | [ { syntax = EndOfFile _ ; _ } ] - > <nl> List . concat ( List . rev acc ) <nl> ( * HaltCompiler stops processing the list in PHP but can be disabled in Hack * ) <nl> mmm a / hphp / hack / src / parser / full_fidelity_ast . mli <nl> ppp b / hphp / hack / src / parser / full_fidelity_ast . mli <nl> end <nl> <nl> type lifted_awaits [ @ @ deriving show ] <nl> <nl> - type env [ @ @ deriving show ] <nl> ( * * <nl> * The ` env ` of the lowerer is " full request . " It provides all the settings the <nl> * lowerer needs to produce an AST . <nl> * ) <nl> + type env [ @ @ deriving show ] <nl> <nl> val make_env ( * Optional parts * ) : <nl> ? codegen : bool - > <nl> mmm a / hphp / hack / src / parser / full_fidelity_editable_positioned_original_source_data . ml <nl> ppp b / hphp / hack / src / parser / full_fidelity_editable_positioned_original_source_data . ml <nl> module Syntax = Full_fidelity_positioned_syntax <nl> module Token = Full_fidelity_positioned_token <nl> module Trivia = Full_fidelity_positioned_trivia <nl> <nl> + ( * * <nl> + * Data about the token with respect to the original source text . <nl> + * ) <nl> type t = { <nl> source_text : SourceText . t ; <nl> offset : int ; <nl> type t = { <nl> trailing : Trivia . t list ; <nl> } <nl> [ @ @ deriving show ] <nl> - ( * * <nl> - * Data about the token with respect to the original source text . <nl> - * ) <nl> <nl> let empty = <nl> { <nl> mmm a / hphp / hack / src / parser / full_fidelity_editable_positioned_token . ml <nl> ppp b / hphp / hack / src / parser / full_fidelity_editable_positioned_token . 
ml <nl> module SourceData = Full_fidelity_editable_positioned_original_source_data <nl> module TokenKind = Full_fidelity_token_kind <nl> module Trivia = Full_fidelity_positioned_trivia <nl> <nl> - type synthetic_token_data = { text : string } [ @ @ deriving show ] <nl> ( * * <nl> * Data about the token with respect to the original source text . <nl> * ) <nl> + type synthetic_token_data = { text : string } [ @ @ deriving show ] <nl> <nl> type token_data = <nl> | Original of SourceData . t <nl> type token_data = <nl> | Synthetic of synthetic_token_data <nl> [ @ @ deriving show ] <nl> <nl> + ( * * <nl> + * Data common to all EditablePositionedTokens . <nl> + * ) <nl> type t = { <nl> kind : TokenKind . t ; <nl> leading_text : string ; <nl> type t = { <nl> token_data : token_data ; <nl> } <nl> [ @ @ deriving show ] <nl> - ( * * <nl> - * Data common to all EditablePositionedTokens . <nl> - * ) <nl> <nl> let from_positioned_token positioned_token = <nl> { <nl> mmm a / hphp / hack / src / parser / full_fidelity_positioned_token . ml <nl> ppp b / hphp / hack / src / parser / full_fidelity_positioned_token . ml <nl> module LazyTrivia : sig <nl> int - > <nl> Trivia . t list <nl> end = struct <nl> - type t = Obj . t <nl> ( * * This looks horrifying , but allow me to explain . For most trivia , we really <nl> don ' t care what it is , and even if we do , we can find out what it is by <nl> running the lexer over the trivia range again . To optimize for this case , <nl> end = struct <nl> store whether a trivia of the [ n ] th [ TriviaKind ] is present . <nl> Special case : [ trivia ] is [ ( Trivia . t list * Trivia . t list ) ] , corresponding <nl> to the leading and trailing trivia . * ) <nl> + type t = Obj . t <nl> <nl> ( * * Internal representation used for printing and pattern matching . * ) <nl> type internal_t = <nl> mmm a / hphp / hack / src / parser / full_fidelity_source_text . mli <nl> ppp b / hphp / hack / src / parser / full_fidelity_source_text . mli <nl> type t = { <nl> <nl> type pos = t * int <nl> <nl> - val make : Relative_path . t - > string - > t <nl> ( * * create a new source_text . t with a path and contents * ) <nl> + val make : Relative_path . t - > string - > t <nl> <nl> - val empty : t <nl> ( * * empty source_text . t located nowhere * ) <nl> + val empty : t <nl> <nl> - val from_file : Relative_path . t - > t <nl> ( * * read a relative path into a source_text . t with the contents at that path * ) <nl> + val from_file : Relative_path . t - > t <nl> <nl> - val file_path : t - > Relative_path . t <nl> ( * * get the relative path * ) <nl> + val file_path : t - > Relative_path . 
t <nl> <nl> - val length : t - > int <nl> ( * * get the length of the contents * ) <nl> + val length : t - > int <nl> <nl> - val get : t - > int - > char <nl> ( * * get the ith character * ) <nl> + val get : t - > int - > char <nl> <nl> - val text : t - > string <nl> ( * * get the contents as a string * ) <nl> + val text : t - > string <nl> <nl> - val line_text : t - > int - > string <nl> ( * * get just one line as a string * ) <nl> + val line_text : t - > int - > string <nl> <nl> - val sub : t - > int - > int - > string <nl> ( * * get a substring start at the ith char and continuing for length * ) <nl> + val sub : t - > int - > int - > string <nl> <nl> - val offset_to_position : t - > int - > int * int <nl> ( * * convert an absolute offset into a ( line number , column ) pair * ) <nl> + val offset_to_position : t - > int - > int * int <nl> <nl> - val position_to_offset : t - > int * int - > int <nl> ( * * convert a ( line number , column ) pair into an absolute offset * ) <nl> + val position_to_offset : t - > int * int - > int <nl> <nl> - val relative_pos : Relative_path . t - > t - > int - > int - > Pos . t <nl> ( * * construct a relative position associated with the source_text . t virtual file * ) <nl> + val relative_pos : Relative_path . t - > t - > int - > int - > Pos . t <nl> mmm a / hphp / hack / src / parser / ppl / ppl_class_rewriter . mli <nl> ppp b / hphp / hack / src / parser / ppl / ppl_class_rewriter . mli <nl> <nl> <nl> module Syntax = Full_fidelity_editable_positioned_syntax <nl> <nl> - val rewrite_ppl_classes : Syntax . t - > Syntax . t <nl> ( * * <nl> * Takes a script and rewrites all classes with the user attribute < < __PPL > > <nl> * ) <nl> + val rewrite_ppl_classes : Syntax . t - > Syntax . t <nl> mmm a / hphp / hack / src / procs / bucket . mli <nl> ppp b / hphp / hack / src / procs / bucket . mli <nl> val set_max_bucket_size : int - > unit <nl> <nl> val max_size : unit - > int <nl> <nl> - val calculate_bucket_size : <nl> - num_jobs : int - > num_workers : int - > max_size : int - > int <nl> ( * * Given a number of jobs , number of workers , and a maximum bucket size , will <nl> calculate the optimal bucket size to get the work done as quickly as <nl> possible . <nl> val calculate_bucket_size : <nl> Specifically , if the number of jobs is less than the number of workers times <nl> the maximum bucket size , smaller bucket sizes will be returned in order to <nl> utilize as many workers as possible . * ) <nl> + val calculate_bucket_size : <nl> + num_jobs : int - > num_workers : int - > max_size : int - > int <nl> <nl> ( * Makes a bucket out of a list , without regard for number of workers or the <nl> size of the list . * ) <nl> type ' a of_n = { <nl> total : int ; <nl> } <nl> <nl> - val make_n_buckets : buckets : int - > split : ( bucket : int - > ' a ) - > ' a of_n next <nl> ( * * <nl> * Make n buckets ( where n = " buckets " ) . <nl> * <nl> * The " split " function provides the workload for the k ' th bucket . <nl> * ) <nl> + val make_n_buckets : buckets : int - > split : ( bucket : int - > ' a ) - > ' a of_n next <nl> <nl> ( * Specialized version to split into lists only . * ) <nl> val make_list : <nl> mmm a / hphp / hack / src / procs / multiThreadedCall . mli <nl> ppp b / hphp / hack / src / procs / multiThreadedCall . mli <nl> <nl> * <nl> * ) <nl> <nl> - exception Coalesced_failures of WorkerController . worker_failure list <nl> ( * * If a worker process fails , this is raised . 
<nl> * <nl> * Note : When one worker process fails , the remaining in - progress workers are checked <nl> exception Coalesced_failures of WorkerController . worker_failure list <nl> * No further buckets are distributed to workers . <nl> * <nl> * Still - in - progress workers are left to their own accord . * ) <nl> + exception Coalesced_failures of WorkerController . worker_failure list <nl> <nl> val coalesced_failures_to_string : <nl> WorkerController . worker_failure list - > string <nl> type worker_id = int <nl> <nl> val no_interrupt : ' a - > ' a interrupt_config <nl> <nl> + ( * * Can raise Coalesced_failures exception . * ) <nl> val call : <nl> WorkerController . worker list - > <nl> ( ' c - > ' a - > ' b ) - > <nl> val call : <nl> ' c - > <nl> ' a Bucket . next - > <nl> ' c <nl> - ( * * Can raise Coalesced_failures exception . * ) <nl> <nl> + ( * * Invokes merge with a unique worker id . <nl> + Can raise Coalesced_failures exception . * ) <nl> val call_with_worker_id : <nl> WorkerController . worker list - > <nl> ( worker_id * ' c - > ' a - > ' b ) - > <nl> val call_with_worker_id : <nl> ' c - > <nl> ' a Bucket . next - > <nl> ' c <nl> - ( * * Invokes merge with a unique worker id . <nl> - Can raise Coalesced_failures exception . * ) <nl> <nl> val call_with_interrupt : <nl> WorkerController . worker list - > <nl> mmm a / hphp / hack / src / providers / memory_bounded_lru_cache . mli <nl> ppp b / hphp / hack / src / providers / memory_bounded_lru_cache . mli <nl> <nl> * <nl> * ) <nl> <nl> - type ( ' k , ' v ) t <nl> ( * * An LRU cache that ' s bounded in memory . When the size of all the elements in <nl> the cache exceeds its maximum size , the cache evicts values until the size <nl> falls below the maximum again . <nl> where the pointed - to data is shared with other structures . <nl> Only the sizes of values are tracked . The sizes of keys are not tracked , so they <nl> don ' t count toward eviction . <nl> * ) <nl> + type ( ' k , ' v ) t <nl> <nl> - val make : max_size_in_words : int - > ( ' k , ' v ) t <nl> ( * * Construct a new cache which can store up to [ max_size_in_words ] words of <nl> values . * ) <nl> + val make : max_size_in_words : int - > ( ' k , ' v ) t <nl> <nl> - val clear : ( ' k , ' v ) t - > unit <nl> ( * * Remove all entries from the cache . * ) <nl> + val clear : ( ' k , ' v ) t - > unit <nl> <nl> - val add : ( ' k , ' v ) t - > key : ' k - > value : ' v - > unit <nl> ( * * Add a [ key ] - [ value ] pair to the cache . <nl> <nl> The cache is always resized to fit under the memory limit after any addition <nl> operation . Under some circumstances , this could mean that the given [ value ] is <nl> immediately evicted . ( For example , if the [ value ] is greater than the maximum <nl> size of the cache , then it must be evicted . ) * ) <nl> + val add : ( ' k , ' v ) t - > key : ' k - > value : ' v - > unit <nl> <nl> - val find_or_add : ( ' k , ' v ) t - > key : ' k - > default : ( unit - > ' v ) - > ' v <nl> ( * * Find the element with the given [ key ] in the cache and return the <nl> corresponding value . If the [ key ] is not present , calls [ default ] to calculate <nl> its value , then [ add ] s it to the cache and returns that value . <nl> its value , then [ add ] s it to the cache and returns that value . <nl> The value is always guaranteed to be returned ( whether by lookup or <nl> calculation ) , although it may be evicted immediately from the cache ( see note on <nl> [ add ] ) . 
* ) <nl> + val find_or_add : ( ' k , ' v ) t - > key : ' k - > default : ( unit - > ' v ) - > ' v <nl> <nl> - val remove : ( ' k , ' v ) t - > key : ' k - > unit <nl> ( * * Remove the entry with the given key from the cache . If the key is not <nl> present , does nothing . * ) <nl> + val remove : ( ' k , ' v ) t - > key : ' k - > unit <nl> mmm a / hphp / hack / src / providers / parser_options_provider . mli <nl> ppp b / hphp / hack / src / providers / parser_options_provider . mli <nl> <nl> * <nl> * ) <nl> <nl> - val get : unit - > ParserOptions . t <nl> ( * * Get the global [ ParserOptions . t ] . <nl> <nl> Raises [ Failure ] if [ set ] has not yet been invoked . * ) <nl> + val get : unit - > ParserOptions . t <nl> <nl> - val set : ParserOptions . t - > unit <nl> ( * * Set the global [ ParserOptions . t ] to be used in parsing for the entire <nl> lifetime of the server . <nl> <nl> Has no effect if [ set ] has already been invoked . * ) <nl> + val set : ParserOptions . t - > unit <nl> mmm a / hphp / hack / src / providers / provider_config . mli <nl> ppp b / hphp / hack / src / providers / provider_config . mli <nl> type decl_cache_key = <nl> | Typedef_decl of string <nl> | Gconst_decl of string <nl> <nl> - type decl_cache = ( decl_cache_key , Obj . t ) Memory_bounded_lru_cache . t <nl> ( * * Maps decl names to types . * ) <nl> + type decl_cache = ( decl_cache_key , Obj . t ) Memory_bounded_lru_cache . t <nl> <nl> type backend = private <nl> | Lru_shared_memory <nl> mmm a / hphp / hack / src / providers / provider_context . mli <nl> ppp b / hphp / hack / src / providers / provider_context . mli <nl> <nl> * <nl> * ) <nl> <nl> + ( * * The information associated with a given file . * ) <nl> type entry = { <nl> file_input : ServerCommandTypes . file_input ; <nl> path : Relative_path . t ; <nl> ast : Nast . program ; <nl> } <nl> [ @ @ deriving show ] <nl> - ( * * The information associated with a given file . * ) <nl> <nl> - type t = { <nl> - tcopt : TypecheckerOptions . t ; <nl> - entries : entry Relative_path . Map . t ; <nl> - } <nl> ( * * A context mapping from file to the [ entry ] for that file . <nl> <nl> This acts as an " overlay " or " delta " on the state of the world , relative to the <nl> files that exist in the repo on disk . <nl> <nl> To load this state of the world for use in a given operation , use <nl> [ ServerIdeUtils . with_context ] . * ) <nl> + type t = { <nl> + tcopt : TypecheckerOptions . t ; <nl> + entries : entry Relative_path . Map . t ; <nl> + } <nl> <nl> - val empty : tcopt : TypecheckerOptions . t - > t <nl> ( * * The empty context , denoting no delta from the current state of the world . * ) <nl> + val empty : tcopt : TypecheckerOptions . t - > t <nl> <nl> - val get_file_input : <nl> - ctx : t - > path : Relative_path . t - > ServerCommandTypes . file_input <nl> ( * * Returns a [ ServerCommandTypes . file_input ] corresponding to the given [ path ] . <nl> <nl> If the [ path ] is in the context , returns its associated <nl> [ ServerCommandTypes . FileContent ] . Otherwise returns the <nl> [ ServerCommandTypes . FileName ] corresponding to that file on disk . * ) <nl> + val get_file_input : <nl> + ctx : t - > path : Relative_path . t - > ServerCommandTypes . file_input <nl> <nl> - val get_fileinfo : entry : entry - > FileInfo . t <nl> ( * * Get the [ FileInfo . t ] associated with the given [ entry ] . * ) <nl> + val get_fileinfo : entry : entry - > FileInfo . 
t <nl> <nl> - val get_global_context : unit - > t option <nl> ( * * Get the current global context ( which is set with <nl> [ ServerIdeUtils . with_context ] ) , if any . Only one global context can be set at a <nl> time . * ) <nl> + val get_global_context : unit - > t option <nl> <nl> ( * * Internal functions * * ) <nl> <nl> - val set_global_context_internal : t - > unit <nl> ( * * Set the current global context . Should not be used directly ; use <nl> [ ServerIdeUtils . with_context ] instead . * ) <nl> + val set_global_context_internal : t - > unit <nl> <nl> - val unset_global_context_internal : unit - > unit <nl> ( * * Unset the current global context . Should not be used directly ; use <nl> [ ServerIdeUtils . with_context ] instead . * ) <nl> + val unset_global_context_internal : unit - > unit <nl> mmm a / hphp / hack / src / providers / provider_utils . mli <nl> ppp b / hphp / hack / src / providers / provider_utils . mli <nl> <nl> * <nl> * ) <nl> <nl> - val compute_tast : <nl> - ctx : Provider_context . t - > entry : Provider_context . entry - > Tast . program <nl> ( * * Compute the TAST by doing typechecking / type inference for the given entry in <nl> the given context . * ) <nl> + val compute_tast : <nl> + ctx : Provider_context . t - > entry : Provider_context . entry - > Tast . program <nl> <nl> + ( * * Compute the given AST for the given path , and return an updated [ t ] <nl> + containing that entry . * ) <nl> val update_context : <nl> ctx : Provider_context . t - > <nl> path : Relative_path . t - > <nl> file_input : ServerCommandTypes . file_input - > <nl> Provider_context . t * Provider_context . entry <nl> - ( * * Compute the given AST for the given path , and return an updated [ t ] <nl> - containing that entry . * ) <nl> <nl> - val with_context : ctx : Provider_context . t - > f : ( unit - > ' a ) - > ' a <nl> ( * * Load the declarations of [ t ] and call [ f ] , then unload those declarations . <nl> * ) <nl> + val with_context : ctx : Provider_context . t - > f : ( unit - > ' a ) - > ' a <nl> mmm a / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / args . mli <nl> ppp b / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / args . mli <nl> <nl> * <nl> * ) <nl> <nl> - val only : string - > string - > unit <nl> ( * * e . g . Args . parse options ( only " foo " ) , to show that the only anonymous <nl> argument allowed is " foo " ( an anonymous argument is one without - ) , and that <nl> " foo " may only be provided at most once . <nl> Specifically : only , given a string , returns a function which may only be invoked once , and only with that string * ) <nl> + val only : string - > string - > unit <nl> <nl> - val root : string ref - > string * Arg . spec * string <nl> ( * * e . g . Args . parse [ root my_ref ] anon , to show that " - - root " argument can be used . <nl> If present , and if it fails to specify a directory containing . hhconfig , <nl> then Args . parse will throw an exception . * ) <nl> + val root : string ref - > string * Arg . spec * string <nl> <nl> - val prototype_lock_file : string - > string <nl> ( * * Minor helper function for the ' - - root ' argument , to get the filename of the <nl> corresponding / tmp / hh_server / [ root ] . prototype . lock file . <nl> e . g . " Lock . grab ( prototype_lock_file root ) " <nl> val prototype_lock_file : string - > string <nl> except that the OS automatically releases the lock when the process <nl> terminates . 
If the attempt to grab fails , then prototype knows that another <nl> instance is still alive , and so gives up . * ) <nl> + val prototype_lock_file : string - > string <nl> <nl> - val prototype_sock_file : string - > string <nl> ( * * Minor helper function for the ' - - root ' argument , to get the filename of the <nl> corresponding / tmp / hh_server / [ root ] . prototype . sock file . <nl> e . g . " Unix . ADDR_UNIX ( Args . prototype_sock_file root ) " <nl> val prototype_sock_file : string - > string <nl> saying what kind of worker it wants ( in JSON ) . In response the prototype <nl> will fork a worker , and the orchestrator can communicate with the worker <nl> over the socket fd . * ) <nl> + val prototype_sock_file : string - > string <nl> mmm a / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / dispatch . mli <nl> ppp b / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / dispatch . mli <nl> type kind = <nl> | Prototype <nl> | Typecheck <nl> <nl> + ( * * For each kind , says its string name , plus how to invoke its orchestrator / worker * ) <nl> type info = { <nl> name : string ; <nl> ( * * String name is used ( 1 ) at CLI , ( 2 ) for initial orchestrator - > worker json handshake * ) <nl> type info = { <nl> run_worker : Unix . file_descr - > unit ; <nl> ( * * run_worker is how prototype dispatches the worker * ) <nl> } <nl> - ( * * For each kind , says its string name , plus how to invoke its orchestrator / worker * ) <nl> <nl> - val register : info list - > unit <nl> ( * * register must be called exactly once , prior to find_by_kind or find_by_name , <nl> to provide the full list of modes . The provided list must cover every single <nl> Dispatch . kind ( this invariant isn ' t checked at moment of register , but will throw <nl> an exception down the line ) . The invariant that it ' s called no more than once <nl> is verified at moment of calling ; the invariant that it ' s called at least <nl> once is verified by the find_by_xyz functions . * ) <nl> + val register : info list - > unit <nl> <nl> - val find_by_name : string - > info option <nl> ( * * given a name , finds the Dispatch . info registered for it . Returns None if no <nl> info was associated with this name , e . g . in case of a typo . Throws exception <nl> if register hasn ' t been called . * ) <nl> + val find_by_name : string - > info option <nl> <nl> - val find_by_kind : kind - > info <nl> ( * * given a kind , finds the Dispatch . info registered for it . Throws a Not_found <nl> exception if registration failed to cover this kind . Throws exception if <nl> register hasn ' t yet been called . * ) <nl> + val find_by_kind : kind - > info <nl> mmm a / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / prototype . mli <nl> ppp b / hphp / hack / src / rearchitecture_proposal_1 / hh_mapreduce / prototype . 
mli <nl> <nl> * <nl> * ) <nl> <nl> - val run : unit - > unit <nl> ( * * This is the main entrypoint when user invokes " hh_mapreduce prototype < args > " * ) <nl> + val run : unit - > unit <nl> <nl> ( * * Errors that might occur during RPC communication with prototype * ) <nl> type rpc_error = <nl> type rpc_error = <nl> | Malformed of string <nl> ( * * The packet we received wasn ' t a valid format , maybe because the other party is a wrong version * ) <nl> <nl> - val rpc_error_to_verbose_string : rpc_error - > string <nl> ( * * Turns an rpc_error into a detailed string suitable for debugging , maybe including stack trace * ) <nl> + val rpc_error_to_verbose_string : rpc_error - > string <nl> <nl> - val rpc_write : Unix . file_descr - > ' a - > ( unit , rpc_error ) result <nl> ( * * Synchronously send over the orchestrator < - > worker fd . * ) <nl> + val rpc_write : Unix . file_descr - > ' a - > ( unit , rpc_error ) result <nl> <nl> - val rpc_read : Unix . file_descr - > ( ' a , rpc_error ) result <nl> ( * * Synchronously receive over the orchestrator < - > worker fd . * ) <nl> + val rpc_read : Unix . file_descr - > ( ' a , rpc_error ) result <nl> <nl> - val rpc_close_no_err : Unix . file_descr - > unit <nl> ( * * Synchronously requests gently shutdown , then closes the fd . * ) <nl> + val rpc_close_no_err : Unix . file_descr - > unit <nl> <nl> - val rpc_request_new_worker : <nl> - string - > Dispatch . kind - > ( Unix . file_descr , rpc_error ) result <nl> ( * * Orchestrator uses this to synchronously request a new worker . It sends a message to the <nl> prototype requesting it to fork a new process which will then invoke the <nl> appropriate Dispatch . kind run_worker method . * ) <nl> + val rpc_request_new_worker : <nl> + string - > Dispatch . kind - > ( Unix . file_descr , rpc_error ) result <nl> mmm a / hphp / hack / src / server / cstSearchService . mli <nl> ppp b / hphp / hack / src / server / cstSearchService . mli <nl> type pattern <nl> <nl> type result <nl> <nl> - val compile_pattern : Hh_json . json - > ( pattern , string ) Result . t <nl> ( * * Compile JSON input into a pattern that can be searched for . * ) <nl> + val compile_pattern : Hh_json . json - > ( pattern , string ) Result . t <nl> <nl> - val result_to_json : sort_results : bool - > result option - > Hh_json . json <nl> ( * * Convert the result of a search into JSON output that can be sent back to the <nl> user . * ) <nl> + val result_to_json : sort_results : bool - > result option - > Hh_json . json <nl> <nl> + ( * * Search for the given pattern across the given set of files . * ) <nl> val go : <nl> ServerEnv . genv - > <nl> ServerEnv . env - > <nl> val go : <nl> files_to_search : string list option - > <nl> Hh_json . json - > <nl> ( Hh_json . json , string ) Result . t <nl> - ( * * Search for the given pattern across the given set of files . * ) <nl> <nl> + ( * * Execute a search on a single syntax tree . This is most useful in debugging <nl> + utilities like ` hh_single_type_check ` . * ) <nl> val search : <nl> TypecheckerOptions . t - > <nl> Relative_path . t - > <nl> FileInfo . t - > <nl> pattern - > <nl> result option <nl> - ( * * Execute a search on a single syntax tree . This is most useful in debugging <nl> - utilities like ` hh_single_type_check ` . * ) <nl> mmm a / hphp / hack / src / server / hhServerMonitor . mli <nl> ppp b / hphp / hack / src / server / hhServerMonitor . mli <nl> <nl> <nl> val start : unit - > unit <nl> <nl> - val start_daemon : <nl> - ServerArgs . 
options - > proc_stack : string list - > Exit_status . t <nl> ( * * Start a server daemon with these options and exits . <nl> * Warning : use carefully . options . should_detach must be set to true <nl> * for this to work properly . * ) <nl> + val start_daemon : <nl> + ServerArgs . options - > proc_stack : string list - > Exit_status . t <nl> mmm a / hphp / hack / src / server / serverCommandTypes . ml <nl> ppp b / hphp / hack / src / server / serverCommandTypes . ml <nl> type ' a message_type = <nl> * sending RPC response . * ) <nl> | Ping <nl> <nl> - exception Read_command_timeout <nl> ( * * Timeout on reading the command from the client - client probably frozen . * ) <nl> + exception Read_command_timeout <nl> <nl> ( * This data is marshalled by the server to a < pid > . fin file in certain cases * ) <nl> ( * of a controlled exit , so the client can know about it . * ) <nl> mmm a / hphp / hack / src / server / serverCoverageMetricTypes . ml <nl> ppp b / hphp / hack / src / server / serverCoverageMetricTypes . ml <nl> <nl> * <nl> * ) <nl> <nl> - type result = <nl> - Coverage_level_defs . level_stats SMap . t Coverage_level_defs . trie option <nl> ( * * <nl> * The type result is an optional trie . <nl> * The trie leaves are maps from strings of filenames to level_stats for those <nl> type result = <nl> * to another trie . <nl> * <nl> * ) <nl> + type result = <nl> + Coverage_level_defs . level_stats SMap . t Coverage_level_defs . trie option <nl> mmm a / hphp / hack / src / server / serverDocblockAt . mli <nl> ppp b / hphp / hack / src / server / serverDocblockAt . mli <nl> <nl> * <nl> * ) <nl> <nl> + ( * * Returns the documentation comments for the given symbol or expression . * ) <nl> val go_comments_for_symbol : <nl> def : ' a SymbolDefinition . t - > <nl> base_class_name : string option - > <nl> file : ServerCommandTypes . file_input - > <nl> string option <nl> - ( * * Returns the documentation comments for the given symbol or expression . * ) <nl> <nl> + ( * * Returns the docblock most appropriate to this position * ) <nl> val go_docblock_at : <nl> filename : string - > <nl> line : int - > <nl> column : int - > <nl> kind : SearchUtils . si_kind - > <nl> DocblockService . result <nl> - ( * * Returns the docblock most appropriate to this position * ) <nl> <nl> + ( * * Returns the docblock from these file contents * ) <nl> val go_docblock_at_contents : <nl> filename : string - > <nl> contents : string - > <nl> val go_docblock_at_contents : <nl> column : int - > <nl> kind : SearchUtils . si_kind - > <nl> DocblockService . result <nl> - ( * * Returns the docblock from these file contents * ) <nl> <nl> + ( * * Returns the location of a symbol , which can be used to call go_docblock_at * ) <nl> val go_locate_symbol : <nl> env : ServerEnv . env - > <nl> symbol : string - > <nl> kind : SearchUtils . si_kind - > <nl> DocblockService . dbs_symbol_location_result <nl> - ( * * Returns the location of a symbol , which can be used to call go_docblock_at * ) <nl> <nl> + ( * * Simplified one - step symbol / docblock * ) <nl> val go_docblock_for_symbol : <nl> env : ServerEnv . env - > <nl> symbol : string - > <nl> kind : SearchUtils . si_kind - > <nl> DocblockService . result <nl> - ( * * Simplified one - step symbol / docblock * ) <nl> mmm a / hphp / hack / src / server / serverFindLocals . ml <nl> ppp b / hphp / hack / src / server / serverFindLocals . ml <nl> type result = Pos . absolute list <nl> module PosSet = Caml . Set . 
Make ( Pos ) <nl> <nl> module LocalPositions = struct <nl> - type t = PosSet . t IMap . t <nl> ( * * <nl> * Local positions is a map from an identifier - - a unique integer that <nl> * identifies a local " symbol " - - to a set of all known positions where <nl> module LocalPositions = struct <nl> * Identifiers that identify no known local produce an empty set of <nl> * positions . <nl> * ) <nl> + type t = PosSet . t IMap . t <nl> <nl> let empty = IMap . empty <nl> <nl> end <nl> ( * End of module LocalPositions * ) <nl> <nl> module ScopeChain = struct <nl> - type scope = Ident . t SMap . t <nl> ( * * <nl> * A scope maps from a string ( the text of the use of a local ) to an <nl> * ident ( a unique integer associated with this local symbol ) . <nl> module ScopeChain = struct <nl> * of the stack , that match shadows any matches in the tail . Otherwise , <nl> * the tail is checked . <nl> * ) <nl> + type scope = Ident . t SMap . t <nl> <nl> type t = scope list <nl> <nl> end <nl> ( * End of module ScopeChains * ) <nl> <nl> module LocalMap = struct <nl> - type t = { <nl> - scopechains : ScopeChains . t ; <nl> - locals : LocalPositions . t ; <nl> - target_line : int ; <nl> - target_char : int ; <nl> - target_ident : Ident . t option ; <nl> - } <nl> ( * * <nl> * A " local map " is a scope chain stack and a local positions map . <nl> * When a usage of a local is encountered , there are several possibilities <nl> module LocalMap = struct <nl> * When we encounter a local at that position , we ' ll make a note of the <nl> * ident so that we can look up the associated positions later . <nl> * ) <nl> + type t = { <nl> + scopechains : ScopeChains . t ; <nl> + locals : LocalPositions . t ; <nl> + target_line : int ; <nl> + target_char : int ; <nl> + target_ident : Ident . t option ; <nl> + } <nl> <nl> let make target_line target_char = <nl> { <nl> mmm a / hphp / hack / src / server / serverGoToDefinition . mli <nl> ppp b / hphp / hack / src / server / serverGoToDefinition . mli <nl> <nl> * <nl> * ) <nl> <nl> - val go_ctx : <nl> - ctx : Provider_context . t - > <nl> - entry : Provider_context . entry - > <nl> - line : int - > <nl> - column : int - > <nl> - ServerCommandTypes . Go_to_definition . result <nl> ( * * Returns the definition of the symbol at the given position in the document . <nl> <nl> This function is for interactive use only , as it may return multiple definitions <nl> for the user ' s convenience . For example , when hovering over a constructor call , <nl> it may return both the definition for the class being constructed , and the <nl> ` __construct ` method of the class . Tooling should use <nl> [ ServerCommandTypes . IDENTIFY_FUNCTION ] instead . * ) <nl> + val go_ctx : <nl> + ctx : Provider_context . t - > <nl> + entry : Provider_context . entry - > <nl> + line : int - > <nl> + column : int - > <nl> + ServerCommandTypes . Go_to_definition . result <nl> mmm a / hphp / hack / src / server / serverHover . mli <nl> ppp b / hphp / hack / src / server / serverHover . mli <nl> <nl> * <nl> * ) <nl> <nl> + ( * * Returns detailed information about the symbol or expression at the given <nl> + location . * ) <nl> val go_ctx : <nl> ctx : Provider_context . t - > <nl> entry : Provider_context . entry - > <nl> line : int - > <nl> column : int - > <nl> HoverService . result <nl> - ( * * Returns detailed information about the symbol or expression at the given <nl> - location . * ) <nl> mmm a / hphp / hack / src / server / serverIdeUtils . mli <nl> ppp b / hphp / hack / src / server / serverIdeUtils . 
mli <nl> val revert_local_changes : unit - > unit <nl> * this is the path that will be assigned to it * ) <nl> val path : Relative_path . t <nl> <nl> + ( * * Runs the declaration , naming , and typecheck phases on a single file . * ) <nl> val check_file_input : <nl> TypecheckerOptions . t - > <nl> ( * What are the definitions in each file . * ) <nl> val check_file_input : <nl> * that . The declarations will be removed from shared memory afterwards . * ) <nl> ServerCommandTypes . file_input - > <nl> Relative_path . t * Tast . program <nl> - ( * * Runs the declaration , naming , and typecheck phases on a single file . * ) <nl> <nl> val check_fileinfo : <nl> TypecheckerOptions . t - > Relative_path . t - > FileInfo . t - > Tast . program <nl> <nl> - val check_ast : TypecheckerOptions . t - > Nast . program - > Tast . program <nl> ( * * Runs the declaration , naming , and typecheck phases on an already - parsed <nl> AST . * ) <nl> + val check_ast : TypecheckerOptions . t - > Nast . program - > Tast . program <nl> <nl> ( * Parses , names , declares and typechecks the content buffer , then run f <nl> * while the declared definitions are still available in shared memory . <nl> mmm a / hphp / hack / src / server / serverRevisionTracker . ml <nl> ppp b / hphp / hack / src / server / serverRevisionTracker . ml <nl> <nl> * LICENSE file in the " hack " directory of this source tree . <nl> * ) <nl> <nl> - open Core_kernel <nl> ( * * Note : the tracking in this module is best effort only ; <nl> * it ' s not guaranteed to always reflect accurate merge base transitions : <nl> * - in some init types , initial merge base is not known so we will only notice <nl> open Core_kernel <nl> * - we only record " new " mergebases as we see them , not detecting transitions <nl> * between already visited revisions <nl> * * ) <nl> + open Core_kernel <nl> <nl> ( * This will be None after init in case of canaries and Precomputed loads * ) <nl> let current_mergebase : Hg . global_rev option ref = ref None <nl> mmm a / hphp / hack / src / server / serverSignatureHelp . mli <nl> ppp b / hphp / hack / src / server / serverSignatureHelp . mli <nl> <nl> * <nl> * ) <nl> <nl> + ( * * Returns signature help for the given location . * ) <nl> val go : <nl> env : ServerEnv . env - > <nl> file : ServerCommandTypes . file_input - > <nl> line : int - > <nl> column : int - > <nl> Lsp . SignatureHelp . result <nl> - ( * * Returns signature help for the given location . * ) <nl> mmm a / hphp / hack / src / stubs / saved_state_loader . ml <nl> ppp b / hphp / hack / src / stubs / saved_state_loader . ml <nl> type _ saved_state_type = <nl> | Naming_table : Naming_table_saved_state_info . t saved_state_type <nl> | Symbol_index : Symbol_index_saved_state_info . t saved_state_type <nl> <nl> - type changed_files = Path . t list <nl> ( * * List of files changed since the saved - state ' s commit . This list of files may <nl> include files other than Hack files , so the caller should filter the given list <nl> as necessary . * ) <nl> + type changed_files = Path . t list <nl> <nl> type load_error <nl> <nl> mmm a / hphp / hack / src / third - party / core / core_result . mli <nl> ppp b / hphp / hack / src / third - party / core / core_result . mli <nl> include Monad . S2 with type ( ' a , ' err ) t : = ( ' a , ' err ) t <nl> <nl> val fail : ' err - > ( _ , ' err ) t <nl> <nl> - val failf : ( ' a , unit , string , ( _ , string ) t ) format4 - > ' a <nl> ( * * e . g . [ failf " Couldn ' t find bloogle % s " ( Bloogle . 
to_string b ) ] * ) <nl> + val failf : ( ' a , unit , string , ( _ , string ) t ) format4 - > ' a <nl> <nl> val is_ok : ( _ , _ ) t - > bool <nl> <nl> val combine : <nl> err : ( ' err - > ' err - > ' err ) - > <nl> ( ' ok3 , ' err ) t <nl> <nl> - val ok_fst : ( ' ok , ' err ) t - > [ ` Fst of ' ok | ` Snd of ' err ] <nl> ( * * [ ok_fst ] is useful with [ List . partition_map ] . Continuing the above example : <nl> { [ <nl> let rics , errors = List . partition_map ~ f : Core_result . ok_fst <nl> ( List . map ~ f : ric_of_ticker [ " AA " ; " F " ; " CSCO " ; " AAPL " ] ) ] } * ) <nl> + val ok_fst : ( ' ok , ' err ) t - > [ ` Fst of ' ok | ` Snd of ' err ] <nl> <nl> ( * [ ok_if_true ] returns [ Ok ( ) ] if [ bool ] is true , and [ Error error ] if it is false * ) <nl> val ok_if_true : bool - > error : ' err - > ( unit , ' err ) t <nl> <nl> val try_with : ( unit - > ' a ) - > ( ' a , exn ) t <nl> <nl> - val ok_exn : ( ' ok , exn ) t - > ' ok <nl> ( * * [ ok_exn t ] returns [ x ] if [ t = Ok x ] , and raises [ exn ] if [ t = Error exn ] * ) <nl> + val ok_exn : ( ' ok , exn ) t - > ' ok <nl> <nl> ( * raises Failure in the Error case * ) <nl> val ok_or_failwith : ( ' ok , string ) t - > ' ok <nl> <nl> - val ok_unit : ( unit , _ ) t <nl> ( * * [ ok_unit = Ok ( ) ] , used to avoid allocation as a performance hack * ) <nl> + val ok_unit : ( unit , _ ) t <nl> <nl> module Export : sig <nl> type ( ' ok , ' err ) _result = ( ' ok , ' err ) t = <nl> mmm a / hphp / hack / src / third - party / core / monad . ml <nl> ppp b / hphp / hack / src / third - party / core / monad . ml <nl> end <nl> module type Infix = sig <nl> type ' a t <nl> <nl> - val ( > > = ) : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> ( * * [ t > > = f ] returns a computation that sequences the computations represented by two <nl> monad elements . The resulting computation first does [ t ] to yield a value [ v ] , and <nl> then runs the computation returned by [ f v ] . * ) <nl> + val ( > > = ) : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> <nl> - val ( > > | ) : ' a t - > ( ' a - > ' b ) - > ' b t <nl> ( * * [ t > > | f ] is [ t > > = ( fun a - > return ( f a ) ) ] . * ) <nl> + val ( > > | ) : ' a t - > ( ' a - > ' b ) - > ' b t <nl> end <nl> <nl> module type S = sig <nl> - include <nl> - Infix <nl> ( * * A monad is an abstraction of the concept of sequencing of computations . A value of <nl> type ' a monad represents a computation that returns a value of type ' a . * ) <nl> + include <nl> + Infix <nl> <nl> module Monad_infix : Infix with type ' a t : = ' a t <nl> <nl> - val bind : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> ( * * [ bind t f ] = [ t > > = f ] * ) <nl> + val bind : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> <nl> - val return : ' a - > ' a t <nl> ( * * [ return v ] returns the ( trivial ) computation that returns v . * ) <nl> + val return : ' a - > ' a t <nl> <nl> - val map : ' a t - > f : ( ' a - > ' b ) - > ' b t <nl> ( * * [ map t ~ f ] is t > > | f . * ) <nl> + val map : ' a t - > f : ( ' a - > ' b ) - > ' b t <nl> <nl> - val join : ' a t t - > ' a t <nl> ( * * [ join t ] is [ t > > = ( fun t ' - > t ' ) ] . * ) <nl> + val join : ' a t t - > ' a t <nl> <nl> - val ignore : ' a t - > unit t <nl> ( * * [ ignore t ] = map t ~ f : ( fun _ - > ( ) ) . * ) <nl> + val ignore : ' a t - > unit t <nl> <nl> val all : ' a t list - > ' a list t <nl> <nl> mmm a / hphp / hack / src / third - party / core / polymorphic_compare . mli <nl> ppp b / hphp / hack / src / third - party / core / polymorphic_compare . 
mli <nl> <nl> <nl> val compare : ' a - > ' a - > int <nl> <nl> - val ascending : ' a - > ' a - > int <nl> ( * * [ ascending ] is identical to [ compare ] . [ descending x y = ascending y x ] . These are <nl> intended to be mnemonic when used like [ List . sort ~ cmp : ascending ] and [ List . sort <nl> ~ cmp : descending ] , since they cause the list to be sorted in ascending or descending <nl> order , respectively . * ) <nl> + val ascending : ' a - > ' a - > int <nl> <nl> val descending : ' a - > ' a - > int <nl> <nl> mmm a / hphp / hack / src / third - party / inotify / inotify . mli <nl> ppp b / hphp / hack / src / third - party / inotify / inotify . mli <nl> type event_kind = <nl> | Q_overflow <nl> | Unmount <nl> <nl> - type watch <nl> ( * * Type of watch descriptors . * ) <nl> + type watch <nl> <nl> - type event = watch * event_kind list * int32 * string option <nl> ( * * Type of received events , corresponding to [ struct inotify_event ] . <nl> In event [ wd , kinds , cookie , path ] , [ wd ] corresponds to [ inotify_event . wd ] , <nl> [ kinds ] corresponds to the bits set in [ inotify_event . mask ] , [ cookie ] <nl> corresponds to [ inotify_event . cookie ] , [ path ] is [ Some filename ] if <nl> [ inotify_event . len > 0 ] and [ None ] otherwise . * ) <nl> + type event = watch * event_kind list * int32 * string option <nl> <nl> - val int_of_watch : watch - > int <nl> ( * * [ int_of_watch wd ] returns the underlying integer representation of <nl> watch descriptor [ wd ] . * ) <nl> + val int_of_watch : watch - > int <nl> <nl> ( * * / * * ) <nl> <nl> val watch_of_int : int - > watch <nl> <nl> ( * * / * * ) <nl> <nl> - val string_of_event_kind : event_kind - > string <nl> ( * * [ string_of_event_kind ek ] returns the string representation of event kind [ ek ] , <nl> e . g . [ string_of_event_kind Move_self ] ≡ [ " MOVE_SELF " ] . * ) <nl> + val string_of_event_kind : event_kind - > string <nl> <nl> - val string_of_event : event - > string <nl> ( * * [ string_of_event event ] returns the string representation of event [ ev ] , <nl> e . g . [ string_of_event ] * ) <nl> + val string_of_event : event - > string <nl> <nl> - val create : unit - > Unix . file_descr <nl> ( * * [ create ( ) ] returns a fresh inotify file descriptor or raises <nl> [ Unix . Unix_error ( errno , " inotify_init " , " " ) ] . * ) <nl> + val create : unit - > Unix . file_descr <nl> <nl> - val add_watch : Unix . file_descr - > string - > selector list - > watch <nl> ( * * [ add_watch fd path events ] starts observing events from [ events ] for path [ path ] <nl> at inotify file descriptor [ fd ] and returns a fresh watch descriptor , or raises <nl> [ Unix . Unix_error ( errno , " inotify_add_watch " , path ) ] . * ) <nl> + val add_watch : Unix . file_descr - > string - > selector list - > watch <nl> <nl> - val rm_watch : Unix . file_descr - > watch - > unit <nl> ( * * [ rm_watch fd watch ] stops observing events corresponding to watch descriptor [ watch ] <nl> at inotify file descriptor [ fd ] , or raises <nl> [ Unix . Unix_error ( errno , " inotify_rm_watch " , path ) ] . * ) <nl> + val rm_watch : Unix . file_descr - > watch - > unit <nl> <nl> - val read : Unix . file_descr - > event list <nl> ( * * [ read fd ] requests a list of events for inotify file descriptor [ fd ] . Each event <nl> will include the watch descriptor , which can be used to determine the path that <nl> caused it , and [ Moved_to ] and [ Moved_from ] events will include a cookie that allows <nl> val read : Unix . 
file_descr - > event list <nl> If { ! read } is not called often enough , the kernel event buffer may overflow , in which <nl> case the event kind list will consist of [ [ Q_overflow ] ] . Such an event would be <nl> associated with a watch descriptor [ - 1 ] , never returned from { ! add_watch } . * ) <nl> + val read : Unix . file_descr - > event list <nl> mmm a / hphp / hack / src / typing / delta . ml <nl> ppp b / hphp / hack / src / typing / delta . ml <nl> module Env = Typing_env <nl> module LEnv = Typing_lenv <nl> module Reason = Typing_reason <nl> <nl> - type gamma = Typing_per_cont_env . per_cont_entry <nl> ( * * <nl> * This type represents the structure refered to using the greek alphabet <nl> * letter ' gamma ' in the type system specification . <nl> type gamma = Typing_per_cont_env . per_cont_entry <nl> * It is essentially a map from local ids to types . <nl> * For now , we reuse Typing_env . local_id_map but this may change . <nl> * ) <nl> + type gamma = Typing_per_cont_env . per_cont_entry <nl> <nl> - type delta = gamma Typing_continuations . Map . t <nl> ( * * <nl> * This type represents the structure refered to using the greek alphabet <nl> * letter ' delta ' in the type system specification . <nl> * <nl> * It is a map from continuations to gammas <nl> * ) <nl> + type delta = gamma Typing_continuations . Map . t <nl> <nl> let empty_gamma : gamma = Typing_per_cont_env . empty_entry <nl> <nl> mmm a / hphp / hack / src / typing / tast_env . mli <nl> ppp b / hphp / hack / src / typing / tast_env . mli <nl> type t = env [ @ @ deriving show ] <nl> <nl> exception Not_in_class <nl> <nl> - val print_ty : env - > Typing_defs . locl_ty - > string <nl> ( * * Return a string representation of the given type using Hack - like syntax . * ) <nl> + val print_ty : env - > Typing_defs . locl_ty - > string <nl> <nl> val print_decl_ty : env - > Typing_defs . decl_ty - > string <nl> <nl> val print_error_ty : <nl> ? ignore_dynamic : bool - > env - > Typing_defs . locl_ty - > string <nl> <nl> + ( * * Return a string representation of the given type using Hack - like syntax , <nl> + formatted with limited width and line breaks , including additional <nl> + information from the { ! SymbolOccurrence . t } and ( if provided ) <nl> + { ! SymbolDefinition . t } . * ) <nl> val print_ty_with_identity : <nl> env - > <nl> Typing_defs . phase_ty - > <nl> ' b SymbolOccurrence . t - > <nl> ' b SymbolDefinition . t option - > <nl> string <nl> - ( * * Return a string representation of the given type using Hack - like syntax , <nl> - formatted with limited width and line breaks , including additional <nl> - information from the { ! SymbolOccurrence . t } and ( if provided ) <nl> - { ! SymbolDefinition . t } . * ) <nl> <nl> - val ty_to_json : env - > Typing_defs . locl_ty - > Hh_json . json <nl> ( * * Return a JSON representation of the given type . * ) <nl> + val ty_to_json : env - > Typing_defs . locl_ty - > Hh_json . json <nl> <nl> + ( * * Convert a JSON representation of a type back into a locl - phase type . * ) <nl> val json_to_locl_ty : <nl> ? keytrace : Hh_json . Access . keytrace - > <nl> Hh_json . json - > <nl> ( Typing_defs . locl_ty , Typing_defs . deserialization_error ) result <nl> - ( * * Convert a JSON representation of a type back into a locl - phase type . * ) <nl> <nl> - val get_self_id_exn : env - > string <nl> ( * * Return the name of the enclosing class definition . <nl> When not in a class definition , raise { ! Not_in_class } . 
* ) <nl> + val get_self_id_exn : env - > string <nl> <nl> - val get_self_id : env - > string option <nl> ( * * Return the name of the enclosing class definition . <nl> When not in a class definition , return { ! None } . * ) <nl> + val get_self_id : env - > string option <nl> <nl> - val get_self_exn : env - > Tast . ty <nl> ( * * Return the type of the enclosing class definition . <nl> When not in a class definition , raise { ! Not_in_class } . * ) <nl> + val get_self_exn : env - > Tast . ty <nl> <nl> - val get_self : env - > Tast . ty option <nl> ( * * Return the type of the enclosing class definition . <nl> When not in a class definition , return { ! None } . * ) <nl> + val get_self : env - > Tast . ty option <nl> <nl> - val fresh_type : env - > Pos . t - > env * Tast . ty <nl> ( * * Return a type consisting of a fresh type variable * ) <nl> + val fresh_type : env - > Pos . t - > env * Tast . ty <nl> <nl> val open_tyvars : env - > Pos . t - > env <nl> <nl> val close_tyvars_and_solve : env - > Errors . typing_error_callback - > env <nl> <nl> val set_tyvar_variance : env - > Tast . ty - > env <nl> <nl> + ( * * Return the info of the given class from the typing heap . * ) <nl> val get_class : <nl> env - > Decl_provider . class_key - > Decl_provider . class_decl option <nl> - ( * * Return the info of the given class from the typing heap . * ) <nl> <nl> - val is_static : env - > bool <nl> ( * * Return { true } when in the definition of a static property or method . * ) <nl> + val is_static : env - > bool <nl> <nl> - val is_strict : env - > bool <nl> ( * * Return { true } if the containing file was checked in strict mode . * ) <nl> + val is_strict : env - > bool <nl> <nl> - val get_mode : env - > FileInfo . mode <nl> ( * * Return the mode of the containing file * ) <nl> + val get_mode : env - > FileInfo . mode <nl> <nl> - val get_tcopt : env - > TypecheckerOptions . t <nl> ( * * Return the { ! TypecheckerOptions . t } with which this TAST was checked . * ) <nl> + val get_tcopt : env - > TypecheckerOptions . t <nl> <nl> val get_file : env - > Relative_path . t <nl> <nl> ( * Return the { ! Relative_path . t } of the file the env is from * ) <nl> <nl> - val expand_type : env - > Tast . ty - > env * Tast . ty <nl> ( * * Expand a type variable ( { ! Typing_defs . Tvar } ) to the type it refers to . * ) <nl> + val expand_type : env - > Tast . ty - > env * Tast . ty <nl> <nl> - val fully_expand : env - > Tast . ty - > Tast . ty <nl> ( * * Eliminate type variables ( { ! Typing_defs . Tvar } ) in the given type by <nl> recursively replacing them with the type they refer to . * ) <nl> + val fully_expand : env - > Tast . ty - > Tast . ty <nl> <nl> - val get_class_ids : env - > Tast . ty - > string list <nl> ( * * Given some class type or unresolved union of class types , return the <nl> identifiers of all classes the type may represent . * ) <nl> + val get_class_ids : env - > Tast . ty - > string list <nl> <nl> - val flatten_unresolved : env - > Tast . ty - > Tast . ty list - > env * Tast . ty list <nl> ( * * Flatten nested unresolved unions , turning ( ( A | B ) | C ) to ( A | B | C ) . * ) <nl> + val flatten_unresolved : env - > Tast . ty - > Tast . ty list - > env * Tast . ty list <nl> <nl> - val non_null : env - > Pos . t - > Tast . ty - > env * Tast . ty <nl> ( * * Strip away all Toptions that we possibly can in a type , expanding type <nl> variables along the way , turning ? T - > T . * ) <nl> + val non_null : env - > Pos . t - > Tast . ty - > env * Tast . 
ty <nl> <nl> - val get_concrete_supertypes : env - > Tast . ty - > env * Tast . ty list <nl> ( * * Get the " as " constraints from an abstract type or generic parameter , or <nl> return the type itself if there is no " as " constraint . In the case of a <nl> generic parameter whose " as " constraint is another generic parameter , repeat <nl> the process until a type is reached that is not a generic parameter . Don ' t <nl> loop on cycles . ( For example , function foo < Tu as Tv , Tv as Tu > ( . . . ) ) * ) <nl> + val get_concrete_supertypes : env - > Tast . ty - > env * Tast . ty list <nl> <nl> + ( * * Return { true } if the given { Decl_provider . class_decl } ( referred to by the given <nl> + { class_id_ } , if provided ) allows the current class ( the one returned by <nl> + { ! get_self } ) to access its members with the given { visibility } . * ) <nl> val is_visible : <nl> env - > <nl> Typing_defs . visibility * bool - > <nl> Nast . class_id_ option - > <nl> Decl_provider . class_decl - > <nl> bool <nl> - ( * * Return { true } if the given { Decl_provider . class_decl } ( referred to by the given <nl> - { class_id_ } , if provided ) allows the current class ( the one returned by <nl> - { ! get_self } ) to access its members with the given { visibility } . * ) <nl> <nl> - val assert_nontrivial : <nl> - Pos . t - > Ast_defs . bop - > env - > Tast . ty - > Tast . ty - > unit <nl> ( * * Assert that the types of values involved in a strict ( non - ) equality <nl> comparison are compatible ; e . g . , that the types are not statically <nl> known to be disjoint , in which case the comparison will always return <nl> true or false . * ) <nl> + val assert_nontrivial : <nl> + Pos . t - > Ast_defs . bop - > env - > Tast . ty - > Tast . ty - > unit <nl> <nl> - val assert_nullable : Pos . t - > Ast_defs . bop - > env - > Tast . ty - > unit <nl> ( * * Assert that the type of a value involved in a strict ( non - ) equality <nl> comparsion to null is nullable ( otherwise it is known to always <nl> return true or false ) . * ) <nl> + val assert_nullable : Pos . t - > Ast_defs . bop - > env - > Tast . ty - > unit <nl> <nl> - val hint_to_ty : env - > Aast . hint - > Typing_defs . decl_ty <nl> ( * * Return the declaration - phase type the given hint represents . * ) <nl> + val hint_to_ty : env - > Aast . hint - > Typing_defs . decl_ty <nl> <nl> val localize : <nl> env - > Typing_defs . expand_env - > Typing_defs . decl_ty - > env * Tast . ty <nl> <nl> - val localize_with_self : env - > Typing_defs . decl_ty - > env * Tast . ty <nl> ( * * Transforms a declaration phase type ( { ! Typing_defs . decl_ty } ) <nl> into a localized type ( { ! Typing_defs . locl_ty } = { ! Tast . ty } ) . <nl> Performs no substitutions of generics and initializes the late static bound <nl> val localize_with_self : env - > Typing_defs . decl_ty - > env * Tast . ty <nl> This is mostly provided as legacy support for { ! AutocompleteService } , and <nl> should not be considered a general mechanism for transforming a { decl_ty } to <nl> a { ! Tast . ty } . * ) <nl> + val localize_with_self : env - > Typing_defs . decl_ty - > env * Tast . ty <nl> <nl> - val get_upper_bounds : env - > string - > Type_parameter_env . tparam_bounds <nl> ( * * Get the upper bounds of the type parameter with the given name . * ) <nl> + val get_upper_bounds : env - > string - > Type_parameter_env . tparam_bounds <nl> <nl> - val get_reified : env - > string - > Aast . reify_kind <nl> ( * * Get the reification of the type parameter with the given name . 
* ) <nl> + val get_reified : env - > string - > Aast . reify_kind <nl> <nl> - val get_enforceable : env - > string - > bool <nl> ( * * Get whether the type parameter supports testing with is / as . * ) <nl> + val get_enforceable : env - > string - > bool <nl> <nl> - val get_newable : env - > string - > bool <nl> ( * * Indicates whether the type parameter with the given name is < < __Newable > > . * ) <nl> + val get_newable : env - > string - > bool <nl> <nl> - val is_fresh_generic_parameter : string - > bool <nl> ( * * Return whether the type parameter with the given name was implicity created <nl> as part of an ` instanceof ` , ` is ` , or ` as ` expression ( instead of being <nl> explicitly declared in code by the user ) . * ) <nl> + val is_fresh_generic_parameter : string - > bool <nl> <nl> + ( * * Assert that one type is a subtype of another , resolving unbound type <nl> + variables in both types ( if any ) , with { ! env } reflecting the new state of <nl> + these type variables . Produce an error if they cannot be subtypes . * ) <nl> val assert_subtype : <nl> Pos . t - > <nl> Typing_reason . ureason - > <nl> val assert_subtype : <nl> Tast . ty - > <nl> Errors . typing_error_callback - > <nl> env <nl> - ( * * Assert that one type is a subtype of another , resolving unbound type <nl> - variables in both types ( if any ) , with { ! env } reflecting the new state of <nl> - these type variables . Produce an error if they cannot be subtypes . * ) <nl> <nl> - val is_sub_type : env - > Tast . ty - > Tast . ty - > bool <nl> ( * * Return { true } when the first type is a subtype of the second type <nl> regardless of the values of unbound type variables in both types ( if any ) . * ) <nl> + val is_sub_type : env - > Tast . ty - > Tast . ty - > bool <nl> <nl> - val can_subtype : env - > Tast . ty - > Tast . ty - > bool <nl> ( * * Return { true } when the first type can be considered a subtype of the second <nl> type after resolving unbound type variables in both types ( if any ) . * ) <nl> + val can_subtype : env - > Tast . ty - > Tast . ty - > bool <nl> <nl> - val is_sub_type_for_union : env - > Tast . ty - > Tast . ty - > bool <nl> ( * * Return { true } when the first type is a subtype of the second type . There is <nl> no type T such that for all T ' , T < : T ' and T ' < : T ( which is the case for Tany <nl> and Terr in ` can_subtype ` ) * ) <nl> + val is_sub_type_for_union : env - > Tast . ty - > Tast . ty - > bool <nl> <nl> - val simplify_unions : env - > Tast . ty - > env * Tast . ty <nl> ( * * Simplify unions in a type . * ) <nl> + val simplify_unions : env - > Tast . ty - > env * Tast . ty <nl> <nl> - val referenced_typeconsts : <nl> - env - > Aast . hint - > Aast . sid list - > ( string * string * Pos . t ) list <nl> ( * * Returns ( class_name , tconst_name , tconst_reference_position ) for each type <nl> constant referenced in the type access path . * ) <nl> + val referenced_typeconsts : <nl> + env - > Aast . hint - > Aast . sid list - > ( string * string * Pos . t ) list <nl> <nl> - val set_static : env - > env <nl> ( * * Return an { ! env } for which { ! is_static } will return { true } . <nl> If you are using { ! Tast_visitor } , you should have no need of this . * ) <nl> + val set_static : env - > env <nl> <nl> - val set_val_kind : env - > Typing_defs . val_kind - > env <nl> ( * * Return an { ! env } for which { ! val_kind } is set to the second argument . * ) <nl> + val set_val_kind : env - > Typing_defs . val_kind - > env <nl> <nl> - val get_val_kind : env - > Typing_defs . 
val_kind <nl> ( * * Returns the val_kind of the typing environment * ) <nl> + val get_val_kind : env - > Typing_defs . val_kind <nl> <nl> - val set_inside_constructor : env - > env <nl> ( * * Returns an { ! env } for which { ! inside_constructor } is set to { true } . <nl> If you are using { ! Tast_visitor } , you should have no need of this . * ) <nl> + val set_inside_constructor : env - > env <nl> <nl> - val get_inside_constructor : env - > bool <nl> ( * * Returns whether or not the typing environment is inside the <nl> constructor of a class * ) <nl> + val get_inside_constructor : env - > bool <nl> <nl> - val get_decl_env : env - > Decl_env . env <nl> ( * * Returns a { ! Decl_env . env } * ) <nl> + val get_decl_env : env - > Decl_env . env <nl> <nl> - val get_inside_ppl_class : env - > bool <nl> ( * * Returns whether or not the typing environment is <nl> inside a < < __PPL > > annotated class . * ) <nl> + val get_inside_ppl_class : env - > bool <nl> <nl> - val empty : TypecheckerOptions . t - > env <nl> ( * * Construct an empty { ! env } . Unlikely to be the best choice ; prefer using <nl> { ! Tast_visitor } or constructing an { ! env } from a { ! Tast . def } . * ) <nl> + val empty : TypecheckerOptions . t - > env <nl> <nl> - val def_env : Tast . def - > env <nl> ( * * Construct an { ! env } from a toplevel definition . * ) <nl> + val def_env : Tast . def - > env <nl> <nl> - val restore_method_env : env - > Tast . method_ - > env <nl> ( * * Construct an { ! env } from a method definition and the { ! env } of the context <nl> it appears in . * ) <nl> + val restore_method_env : env - > Tast . method_ - > env <nl> <nl> - val restore_fun_env : env - > Tast . fun_ - > env <nl> ( * * Construct an { ! env } from a lambda definition and the { ! env } of the context <nl> it appears in . * ) <nl> + val restore_fun_env : env - > Tast . fun_ - > env <nl> <nl> - val set_ppl_lambda : env - > env <nl> ( * * Construct an { ! env } where inside_ppl_class is { false } . Due to rewriting <nl> limitations , we are unable to rewrite lambdas inside < < __PPL > > classes . <nl> If you are using { ! Tast_visitor } , you should have no need of this . * ) <nl> + val set_ppl_lambda : env - > env <nl> <nl> val get_anonymous_lambda_types : env - > int - > Tast . ty list <nl> <nl> val typing_env_as_tast_env : Typing_env_types . env - > env <nl> <nl> val tast_env_as_typing_env : env - > Typing_env_types . env <nl> <nl> - val is_xhp_child : env - > Pos . t - > Tast . ty - > bool <nl> ( * * Verify that an XHP body expression is legal . * ) <nl> + val is_xhp_child : env - > Pos . t - > Tast . ty - > bool <nl> <nl> val get_enum : env - > string - > Decl_provider . class_decl option <nl> <nl> mmm a / hphp / hack / src / typing / tast_typecheck . ml <nl> ppp b / hphp / hack / src / typing / tast_typecheck . ml <nl> module Env = Typing_env <nl> module Phase = Typing_phase <nl> module Partial = Partial_provider <nl> <nl> - exception Cant_check <nl> ( * * This happens , for example , when there are gotos * ) <nl> + exception Cant_check <nl> <nl> exception Not_implemented <nl> <nl> mmm a / hphp / hack / src / typing / type_parameter_env_ops . mli <nl> ppp b / hphp / hack / src / typing / type_parameter_env_ops . mli <nl> module Env = Typing_env <nl> open Typing_defs <nl> open Typing_env_types <nl> <nl> - val simplify_tpenv : <nl> - env - > <nl> - ( ( ' a tparam * string ) option * locl_ty ) list - > <nl> - Typing_reason . t - > <nl> - env * locl_ty SMap . 
t <nl> ( * * Given a list of type parameter names , attempt to simplify away those <nl> type parameters by looking for a type to which they are equal in the tpenv . <nl> If such a type exists , remove the type parameter from the tpenv . <nl> Returns a set of substitutions mapping each type parameter name to the type <nl> to which it is equal if found , otherwise to itself . * ) <nl> + val simplify_tpenv : <nl> + env - > <nl> + ( ( ' a tparam * string ) option * locl_ty ) list - > <nl> + Typing_reason . t - > <nl> + env * locl_ty SMap . t <nl> <nl> val join : <nl> env - > <nl> mmm a / hphp / hack / src / typing / typing_classes_heap . mli <nl> ppp b / hphp / hack / src / typing / typing_classes_heap . mli <nl> module Api : sig <nl> <nl> val ppl : t - > bool <nl> <nl> - val deferred_init_members : t - > SSet . t <nl> ( * * To be used only when { ! ServerLocalConfig . shallow_class_decl } is not enabled . <nl> Raises [ Failure ] if used when shallow_class_decl is enabled . * ) <nl> + val deferred_init_members : t - > SSet . t <nl> <nl> val kind : t - > Ast_defs . class_kind <nl> <nl> module Api : sig <nl> <nl> val smethods : t - > ( string * class_elt ) Sequence . t <nl> <nl> - val all_inherited_methods : t - > string - > class_elt list <nl> ( * * The following functions return _all_ class member declarations defined in or <nl> inherited by this class with the given member name , including ones which <nl> were overridden , for purposes such as override checking . The list is ordered <nl> module Api : sig <nl> <nl> To be used only when { ! ServerLocalConfig . shallow_class_decl } is enabled . <nl> Raises [ Failure ] if used when shallow_class_decl is not enabled . * ) <nl> + val all_inherited_methods : t - > string - > class_elt list <nl> <nl> val all_inherited_smethods : t - > string - > class_elt list <nl> <nl> - val shallow_decl : t - > Shallow_decl_defs . shallow_class <nl> ( * * Return the shallow declaration for the given class . <nl> <nl> To be used only when { ! ServerLocalConfig . shallow_class_decl } is enabled . <nl> Raises [ Failure ] if used when shallow_class_decl is not enabled . * ) <nl> + val shallow_decl : t - > Shallow_decl_defs . shallow_class <nl> end <nl> <nl> - val compute_class_decl_no_cache : string - > Classes . t option <nl> ( * * Implementation detail , do not use . For use in [ Decl_provider ] only . * ) <nl> + val compute_class_decl_no_cache : string - > Classes . t option <nl> mmm a / hphp / hack / src / typing / typing_memoize . mli <nl> ppp b / hphp / hack / src / typing / typing_memoize . mli <nl> <nl> * <nl> * ) <nl> <nl> - val check_function : Typing_env_types . env - > Nast . fun_ - > unit <nl> ( * * <nl> * Checks if a function / method can be memoized . If the function cannot be <nl> * memoized this will add an error to the gloabl error list <nl> * ) <nl> + val check_function : Typing_env_types . env - > Nast . fun_ - > unit <nl> <nl> val check_method : Typing_env_types . env - > Nast . method_ - > unit <nl> mmm a / hphp / hack / src / typing / typing_per_cont_env . mli <nl> ppp b / hphp / hack / src / typing / typing_per_cont_env . mli <nl> val empty_entry : per_cont_entry <nl> ( * Get a continuation wrapped in Some , or None if not found * ) <nl> val get_cont_option : C . t - > t - > per_cont_entry option <nl> <nl> - val all_continuations : t - > C . t list <nl> ( * * Get all continuations present in an environment * ) <nl> + val all_continuations : t - > C . 
t list <nl> <nl> ( * Add the key , value pair to the continuation named ' name ' <nl> * If the continuation doesn ' t exist , create it * ) <nl> mmm a / hphp / hack / src / typing / typing_print . mli <nl> ppp b / hphp / hack / src / typing / typing_print . mli <nl> val full_decl : TypecheckerOptions . t - > Typing_defs . decl_ty - > string <nl> <nl> val fun_type : TypecheckerOptions . t - > Typing_defs . decl_fun_type - > string <nl> <nl> + ( * * Pretty print a type and all of its associated declaration information . * ) <nl> val full_with_identity : <nl> env - > <nl> Typing_defs . locl_ty - > <nl> ' b SymbolOccurrence . t - > <nl> ' b SymbolDefinition . t option - > <nl> string <nl> - ( * * Pretty print a type and all of its associated declaration information . * ) <nl> <nl> val debug : env - > Typing_defs . locl_ty - > string <nl> <nl> mmm a / hphp / hack / src / typing / typing_return . mli <nl> ppp b / hphp / hack / src / typing / typing_return . mli <nl> val implicit_return : <nl> actual : Typing_defs . locl_ty - > <nl> env <nl> <nl> + ( * * For async functions , wrap Awaitable < _ > around the return type * ) <nl> val wrap_awaitable : <nl> env - > Ast_defs . pos - > Typing_defs . locl_ty - > Typing_defs . locl_ty <nl> - ( * * For async functions , wrap Awaitable < _ > around the return type * ) <nl> <nl> val make_return_type : <nl> ( env - > Typing_defs . decl_ty - > env * Typing_defs . locl_ty ) - > <nl> val make_return_type : <nl> Typing_defs . decl_ty - > <nl> env * Typing_defs . locl_ty <nl> <nl> + ( * * For async functions , strip Awaitable < _ > from the return type * ) <nl> val strip_awaitable : <nl> Ast_defs . fun_kind - > <nl> env - > <nl> Typing_defs . locl_possibly_enforced_ty - > <nl> Typing_defs . locl_possibly_enforced_ty <nl> - ( * * For async functions , strip Awaitable < _ > from the return type * ) <nl> <nl> val force_awaitable : <nl> env - > Ast_defs . pos - > Typing_defs . locl_ty - > env * Typing_defs . locl_ty <nl> <nl> + ( * * If there is no return type annotation on method , assume ` void ` for the <nl> + special functions ` __construct ` , otherwise we can either <nl> + introduce a new fresh variable when infer missing is on or assume type Tany * ) <nl> val make_default_return : <nl> is_method : bool - > <nl> is_infer_missing_on : bool - > <nl> env - > <nl> Ast_defs . pos * string - > <nl> env * ( Typing_reason . t * Typing_defs . locl_phase Typing_defs . ty_ ) <nl> - ( * * If there is no return type annotation on method , assume ` void ` for the <nl> - special functions ` __construct ` , otherwise we can either <nl> - introduce a new fresh variable when infer missing is on or assume type Tany * ) <nl> mmm a / hphp / hack / src / typing / typing_subtype . mli <nl> ppp b / hphp / hack / src / typing / typing_subtype . mli <nl> end <nl> <nl> val is_sub_type_LEGACY_DEPRECATED : env - > locl_ty - > locl_ty - > bool <nl> <nl> - val is_sub_type : env - > locl_ty - > locl_ty - > bool <nl> ( * * Non - side - effecting test for subtypes . <nl> result = true implies ty1 < : ty2 <nl> result = false implies NOT ty1 < : ty2 OR we don ' t know <nl> * ) <nl> + val is_sub_type : env - > locl_ty - > locl_ty - > bool <nl> <nl> val is_sub_type_ignore_generic_params : env - > locl_ty - > locl_ty - > bool <nl> <nl> val is_sub_type_for_union : env - > locl_ty - > locl_ty - > bool <nl> <nl> val can_sub_type : env - > locl_ty - > locl_ty - > bool <nl> <nl> - val sub_type : env - > locl_ty - > locl_ty - > Errors . 
typing_error_callback - > env <nl> ( * * <nl> Checks that ty_sub is a subtype of ty_super , and returns an env . <nl> <nl> val sub_type : env - > locl_ty - > locl_ty - > Errors . typing_error_callback - > env <nl> sub_type env ? int alpha = > env where alpha = = ? int <nl> sub_type env int string = > error <nl> * ) <nl> + val sub_type : env - > locl_ty - > locl_ty - > Errors . typing_error_callback - > env <nl> <nl> val sub_type_with_dynamic_as_bottom : <nl> env - > locl_ty - > locl_ty - > Errors . typing_error_callback - > env <nl> val sub_type_with_dynamic_as_bottom : <nl> val sub_type_i : <nl> env - > internal_type - > internal_type - > Errors . typing_error_callback - > env <nl> <nl> + ( * * Check that the method with signature ft_sub can be used to override <nl> + ( is a subtype of ) method with signature ft_super . * ) <nl> val subtype_method : <nl> check_return : bool - > <nl> extra_info : reactivity_extra_info - > <nl> val subtype_method : <nl> decl_fun_type - > <nl> Errors . typing_error_callback - > <nl> env <nl> - ( * * Check that the method with signature ft_sub can be used to override <nl> - ( is a subtype of ) method with signature ft_super . * ) <nl> <nl> val subtype_reactivity : <nl> ? extra_info : reactivity_extra_info - > <nl> mmm a / hphp / hack / src / typing / typing_subtype_tconst . mli <nl> ppp b / hphp / hack / src / typing / typing_subtype_tconst . mli <nl> open Typing_defs <nl> open Typing_env_types <nl> module Env = Typing_env <nl> <nl> + ( * * For all type constant T of type variable , make its type equal to ` ty ` : : T * ) <nl> val make_all_type_consts_equal : <nl> env - > Ident . t - > internal_type - > as_tyvar_with_cnstr : bool - > env <nl> - ( * * For all type constant T of type variable , make its type equal to ` ty ` : : T * ) <nl> <nl> - val get_tyvar_type_const : env - > Ident . t - > Aast . sid - > env * locl_ty <nl> ( * * Get the type of a type constant of a type variable by looking it up in the <nl> environment . <nl> If that type constant is not present , make a fresh invariant <nl> type variable and add it as the type of the type constant in the environment . <nl> * ) <nl> + val get_tyvar_type_const : env - > Ident . t - > Aast . sid - > env * locl_ty <nl> mmm a / hphp / hack / src / typing / typing_union . mli <nl> ppp b / hphp / hack / src / typing / typing_union . mli <nl> open Typing_defs <nl> open Typing_env_types <nl> module Env = Typing_env <nl> <nl> - val union : env - > locl_ty - > locl_ty - > env * locl_ty <nl> ( * * Performs the union of two types . <nl> The union is the least upper bound of the subtyping relation . <nl> <nl> then <nl> This approximation is necessary to avoid type growing exponentially in size . <nl> We have seen cases where it would otherwise generate unions involving all <nl> the subsets of a set of types . * ) <nl> + val union : env - > locl_ty - > locl_ty - > env * locl_ty <nl> <nl> - val union_list : env - > Reason . t - > locl_ty list - > env * locl_ty <nl> ( * * Computes the union of a list of types by union types two by two . <nl> This is quadratic , so if this requires more than 20 two by two unions , <nl> fall back to simply flatten the unions , bubble up the option and remove <nl> duplicates . * ) <nl> + val union_list : env - > Reason . t - > locl_ty list - > env * locl_ty <nl> <nl> val simplify_unions : <nl> env - > <nl> mmm a / hphp / hack / src / typing / typing_xhp . mli <nl> ppp b / hphp / hack / src / typing / typing_xhp . 
mli <nl> <nl> <nl> open Typing_env_types <nl> <nl> - val get_spread_attributes : <nl> - env - > <nl> - Pos . t - > <nl> - Decl_provider . class_decl - > <nl> - Typing_defs . locl_ty - > <nl> - env * ( Aast . pstring * ( Pos . t * Typing_defs . locl_ty ) ) list <nl> ( * * <nl> * This is used in computing all possible attributes for XHP spreads . <nl> * <nl> val get_spread_attributes : <nl> * XHP attributes and their localized types to verify compatibility with the <nl> * XHP onto which we are spreading . <nl> * ) <nl> + val get_spread_attributes : <nl> + env - > <nl> + Pos . t - > <nl> + Decl_provider . class_decl - > <nl> + Typing_defs . locl_ty - > <nl> + env * ( Aast . pstring * ( Pos . t * Typing_defs . locl_ty ) ) list <nl> <nl> - val is_xhp_child : env - > Pos . t - > Typing_defs . locl_ty - > bool <nl> ( * * <nl> * Verify that an XHP body expression is legal . <nl> * ) <nl> + val is_xhp_child : env - > Pos . t - > Typing_defs . locl_ty - > bool <nl> mmm a / hphp / hack / src / utils / buffered_line_reader / buffered_line_reader_sig . ml <nl> ppp b / hphp / hack / src / utils / buffered_line_reader / buffered_line_reader_sig . ml <nl> module type S = sig <nl> <nl> val has_buffered_content : t - > bool <nl> <nl> - val is_readable : t - > bool <nl> ( * * <nl> * Returns true if and only if there is content to be read ( does not know if <nl> * the incoming content is newline - terminated . So we can ' t actually know <nl> * if get_next_line will be non - blocking . <nl> * ) <nl> + val is_readable : t - > bool <nl> <nl> val get_fd : t - > fd <nl> <nl> mmm a / hphp / hack / src / utils / collections / lazy_string_table . mli <nl> ppp b / hphp / hack / src / utils / collections / lazy_string_table . mli <nl> <nl> <nl> open Core_kernel <nl> <nl> - type ' a t <nl> ( * * [ Lazy_string_table . t ] provides a memoization cache for any <nl> [ ( string * ' a ) Sequence . t ] where : <nl> <nl> type ' a t <nl> Originally written for caches of class members , where we want to lazily <nl> parse ancestor classes only as necessary , and our implementation of [ merge ] <nl> provides the logic for member overriding . * ) <nl> + type ' a t <nl> <nl> - val make : <nl> - is_canonical : ( ' a - > bool ) - > <nl> - merge : ( earlier : ' a - > later : ' a - > ' a ) - > <nl> - ( string * ' a ) Sequence . t - > <nl> - ' a t <nl> ( * * Create a new [ Lazy_string_table . t ] memoizing the given sequence . <nl> <nl> A good implementation of [ merge ] is necessary for correctness , since [ merge ] <nl> val make : <nl> implementation [ fun _ - > false ] for [ is_canonical ] is always correct , but <nl> will always force the cache to traverse the entire sequence on the first <nl> lookup ) . * ) <nl> + val make : <nl> + is_canonical : ( ' a - > bool ) - > <nl> + merge : ( earlier : ' a - > later : ' a - > ' a ) - > <nl> + ( string * ' a ) Sequence . t - > <nl> + ' a t <nl> <nl> - val get : ' a t - > string - > ' a option <nl> ( * * Return the value associated with the given key . If the value is canonical <nl> and was already emitted by the input sequence , or if the input sequence has <nl> been exhausted , this function is guaranteed to complete in constant time . <nl> val get : ' a t - > string - > ' a option <nl> <nl> Guaranteed not to advance the input sequence if the sequence has previously <nl> emitted a canonical value for the given key . * ) <nl> + val get : ' a t - > string - > ' a option <nl> <nl> - val mem : ' a t - > string - > bool <nl> ( * * Return [ true ] if a value associated with the given key exists . 
If a value <nl> associated with this key was already emitted by the input sequence , or if <nl> the input sequence has been exhausted , this function is guaranteed to <nl> val mem : ' a t - > string - > bool <nl> <nl> Guaranteed not to advance the input sequence if the sequence has previously <nl> emitted any value for the given key . * ) <nl> + val mem : ' a t - > string - > bool <nl> <nl> - val to_seq : ' a t - > ( string * ' a ) Sequence . t <nl> ( * * Eagerly exhaust the input sequence , then return a sequence iterating over <nl> all values stored in the cache , in undefined order . * ) <nl> + val to_seq : ' a t - > ( string * ' a ) Sequence . t <nl> mmm a / hphp / hack / src / utils / core / local_id . mli <nl> ppp b / hphp / hack / src / utils / core / local_id . mli <nl> val to_int : t - > int <nl> <nl> val get_name : t - > string <nl> <nl> - val make_scoped : string - > t <nl> ( * * Make an id for a scoped variable . Return a fresh id every time . <nl> This is used to enforce that two locals with the same name but with <nl> different scopes have different ids . * ) <nl> + val make_scoped : string - > t <nl> <nl> - val make_unscoped : string - > t <nl> ( * * Make an id for an unscoped variable . Two calls with the same input <nl> * string will return the same id . * ) <nl> + val make_unscoped : string - > t <nl> <nl> val make : int - > string - > t <nl> <nl> mmm a / hphp / hack / src / utils / core / utils . mli <nl> ppp b / hphp / hack / src / utils / core / utils . mli <nl> val strip_both_ns : string - > string <nl> ( * Strip All removes all backslash - based namespaces , but does nothing to XHP * ) <nl> val strip_all_ns : string - > string <nl> <nl> - val add_ns : string - > string <nl> ( * * A \ B \ C - > \ A \ B \ C * ) <nl> + val add_ns : string - > string <nl> <nl> - val add_xhp_ns : string - > string <nl> ( * * A : B : C - > : A : B : C * ) <nl> + val add_xhp_ns : string - > string <nl> <nl> val split_ns_from_name : string - > string * string <nl> <nl> mmm a / hphp / hack / src / utils / file_pos_small . ml <nl> ppp b / hphp / hack / src / utils / file_pos_small . ml <nl> <nl> * <nl> * ) <nl> <nl> - type t = int [ @ @ deriving eq ] <nl> ( * * <nl> * Three values packed into one 64 - bit integer : <nl> * <nl> type t = int [ @ @ deriving eq ] <nl> * <nl> * <nl> * ) <nl> + type t = int [ @ @ deriving eq ] <nl> <nl> let column_bits = 9 <nl> <nl> mmm a / hphp / hack / src / utils / hg / hg_sig . ml <nl> ppp b / hphp / hack / src / utils / hg / hg_sig . ml <nl> module type S = sig <nl> ( * bool indicates if there are working copy changes . * ) <nl> ( hg_rev * bool ) Future . t <nl> <nl> - val current_working_copy_base_rev : string - > global_rev Future . t <nl> ( * * Get the global base revision of the current working copy in the given <nl> * repo dir . * ) <nl> + val current_working_copy_base_rev : string - > global_rev Future . t <nl> <nl> val get_closest_global_ancestor : hg_rev - > string - > global_rev Future . t <nl> <nl> module type S = sig <nl> string - > <nl> string list Future . t <nl> <nl> - val update_to_rev : rev - > string - > unit Future . t <nl> ( * * hg update to the base global revision . * ) <nl> + val update_to_rev : rev - > string - > unit Future . t <nl> <nl> module Mocking : sig <nl> val current_working_copy_hg_rev_returns : ( hg_rev * bool ) Future . t - > unit <nl> mmm a / hphp / hack / src / utils / lwt_message_queue . mli <nl> ppp b / hphp / hack / src / utils / lwt_message_queue . 
mli <nl> <nl> * <nl> * ) <nl> <nl> - type ' a t <nl> ( * * A mutable queue containing messages of type [ ' a ] . * ) <nl> + type ' a t <nl> <nl> - val create : unit - > ' a t <nl> ( * * Create a new [ Lwt_message_queue . t ] . * ) <nl> + val create : unit - > ' a t <nl> <nl> - val push : ' a t - > ' a - > bool <nl> ( * * Push a message into the queue . Wakes up the task waiting to [ pop ] from it , <nl> if any . * ) <nl> + val push : ' a t - > ' a - > bool <nl> <nl> - val pop : ' a t - > ' a option Lwt . t <nl> ( * * Get and remove the next message in the queue . If there are currently no <nl> messages in the queue , wait until one becomes available . If the queue is or <nl> becomes closed , returns [ None ] ; otherwise returns [ Some message ] . <nl> https : / / github . com / ocsigen / lwt / issues / 250 ) . Only one task should [ pop ] at a <nl> time . The message queue is therefore mostly useful for code organization <nl> purposes , making it possible to split the code for the producer and consumer of <nl> the message queue in a principled way . * ) <nl> + val pop : ' a t - > ' a option Lwt . t <nl> <nl> - val close : ' a t - > unit <nl> ( * * Close the message queue for further reads and writes . All messages currently <nl> in the queue will be dropped . Future calls to [ push ] will return [ false ] , and <nl> future calls to [ pop ] will return [ None ] . <nl> <nl> Either the producer or consumer end of the queue may close it . * ) <nl> + val close : ' a t - > unit <nl> <nl> - val is_empty : ' a t - > bool <nl> ( * * Whether or not the queue has any pending messages at this moment . * ) <nl> + val is_empty : ' a t - > bool <nl> <nl> - val length : ' a t - > int <nl> ( * * Returns the number of messages currently in the queue . If the queue is <nl> closed , returns [ 0 ] . * ) <nl> + val length : ' a t - > int <nl> <nl> - val exists : ' a t - > f : ( ' a - > bool ) - > bool <nl> ( * * Returns whether or not a message satisfying predicate [ f ] exists in the <nl> current queue . * ) <nl> + val exists : ' a t - > f : ( ' a - > bool ) - > bool <nl> mmm a / hphp / hack / src / utils / lwt_utils . mli <nl> ppp b / hphp / hack / src / utils / lwt_utils . mli <nl> <nl> - val select : <nl> - Unix . file_descr list - > <nl> - Unix . file_descr list - > <nl> - Unix . file_descr list - > <nl> - float - > <nl> - ( Unix . file_descr list * Unix . file_descr list * Unix . file_descr list ) Lwt . t <nl> ( * * Drop - in replacement for [ Unix . select ] that works even when the Lwt main loop <nl> is running ( i . e . your function has [ Lwt_main . run ] somewhere higher up in the <nl> call stack ) . <nl> The Lwt main loop is an event loop pumped by [ Unix . select ] , and so regular <nl> function does not use [ Unix . select ] at all , but Lwt primitives that accomplish <nl> the same thing . <nl> * ) <nl> + val select : <nl> + Unix . file_descr list - > <nl> + Unix . file_descr list - > <nl> + Unix . file_descr list - > <nl> + float - > <nl> + ( Unix . file_descr list * Unix . file_descr list * Unix . file_descr list ) Lwt . t <nl> <nl> module Process_success : sig <nl> type t = { <nl> module Process_failure : sig <nl> val to_string : t - > string <nl> end <nl> <nl> - val exec_checked : <nl> - ? input : string - > <nl> - ? env : string array - > <nl> - string - > <nl> - string array - > <nl> - ( Process_success . t , Process_failure . t ) Lwt_result . t <nl> ( * * Run a command with a given input and return the output . 
If the command exits <nl> with an exit status other than zero , raises [ Process_failure ] instead . <nl> <nl> tried to implement it , but after killing the process , both [ Lwt_io . close ] and <nl> [ Lwt_io . abort ] would hang when trying to close the process ' s <nl> stdin / stdout / stderr . ) <nl> * ) <nl> + val exec_checked : <nl> + ? input : string - > <nl> + ? env : string array - > <nl> + string - > <nl> + string array - > <nl> + ( Process_success . t , Process_failure . t ) Lwt_result . t <nl> <nl> - val try_finally : <nl> - f : ( unit - > ' a Lwt . t ) - > finally : ( unit - > unit Lwt . t ) - > ' a Lwt . t <nl> ( * * Asynchronous version of [ Utils . try_finally ] . Run and wait for [ f ] to <nl> complete , and be sure to invoke [ finally ] asynchronously afterward , even if [ f ] <nl> raises an exception . * ) <nl> + val try_finally : <nl> + f : ( unit - > ' a Lwt . t ) - > finally : ( unit - > unit Lwt . t ) - > ' a Lwt . t <nl> <nl> - val read_all : string - > ( string , string ) Lwt_result . t <nl> ( * * Reads all the contents from the given file on disk , or returns an error <nl> message if unable to do so . * ) <nl> + val read_all : string - > ( string , string ) Lwt_result . t <nl> <nl> module Promise : Promise . S with type ' a t = ' a Lwt . t <nl> mmm a / hphp / hack / src / utils / pos_embedded . mli <nl> ppp b / hphp / hack / src / utils / pos_embedded . mli <nl> <nl> * character * after * the last character of the relevant lexeme . ) * ) <nl> type ' a pos [ @ @ deriving eq ] <nl> <nl> - type b = Pos_source . t <nl> ( * * The underlying type used to construct Pos instances . <nl> * <nl> * See " val make : ' a - > b - > ' a pos " * ) <nl> + type b = Pos_source . t <nl> <nl> type t = Relative_path . t pos [ @ @ deriving eq ] <nl> <nl> mmm a / hphp / hack / src / utils / process / future . ml <nl> ppp b / hphp / hack / src / utils / process / future . ml <nl> type ' a promise = <nl> | Bound : ( ' a t * ( ( ' a , error ) result - > ' b t ) ) - > ' b promise <nl> | Incomplete of Process_types . t * ( string - > ' a ) <nl> <nl> - and ' a t = ' a promise ref * float <nl> ( * * float is the time the Future was constructed . * ) <nl> + and ' a t = ' a promise ref * float <nl> <nl> let make process transformer = <nl> ( ref ( Incomplete ( process , transformer ) ) , Unix . gettimeofday ( ) ) <nl> mmm a / hphp / hack / src / utils / process / future_sig . ml <nl> ppp b / hphp / hack / src / utils / process / future_sig . ml <nl> <nl> * <nl> * ) <nl> <nl> - type ' a deserializer = string - > ' a <nl> ( * * Deserializes the byte sequence . * ) <nl> + type ' a deserializer = string - > ' a <nl> <nl> module Types = struct <nl> type error_mode = <nl> mmm a / hphp / hack / src / utils / process / process . mli <nl> ppp b / hphp / hack / src / utils / process / process . mli <nl> module Entry : sig <nl> val register : string - > ( ' param - > unit ) - > ' param t <nl> end <nl> <nl> - val exec : <nl> - string - > <nl> - ? input : string - > <nl> - ? env : Process_types . environment - > <nl> - string list - > <nl> - Process_types . t <nl> ( * * <nl> * Shells out the program with the given args . <nl> * Sends input to stdin of spawned process if given . <nl> * ) <nl> - <nl> - val exec_with_working_directory : <nl> - dir : string - > <nl> + val exec : <nl> string - > <nl> ? input : string - > <nl> ? env : Process_types . environment - > <nl> string list - > <nl> Process_types . t <nl> + <nl> ( * * <nl> * Shells out the program with the given args . 
<nl> * Sets the working directory to the one specified before executing . <nl> val exec_with_working_directory : <nl> * Specify the desired environment if you want a different behavior . <nl> * Sends input to stdin of spawned process if given . <nl> * ) <nl> + val exec_with_working_directory : <nl> + dir : string - > <nl> + string - > <nl> + ? input : string - > <nl> + ? env : Process_types . environment - > <nl> + string list - > <nl> + Process_types . t <nl> <nl> val register_entry_point : string - > ( ' param - > unit ) - > ' param Entry . t <nl> <nl> - val run_entry : ? input : string - > ' a Entry . t - > ' a - > Process_types . t <nl> ( * * Wraps a entry point inside a Process , so we get Process ' s <nl> * goodness for free ( read_and_wait_pid and is_ready ) . The entry will be <nl> * spawned into a separate process . * ) <nl> + val run_entry : ? input : string - > ' a Entry . t - > ' a - > Process_types . t <nl> <nl> - val read_and_wait_pid : timeout : int - > Process_types . t - > process_result <nl> ( * * <nl> * Read data from stdout and stderr until EOF is reached . Waits for <nl> * process to terminate returns the stderr and stdout <nl> val read_and_wait_pid : timeout : int - > Process_types . t - > process_result <nl> * If process exits with something other than ( Unix . WEXITED 0 ) , will return a <nl> * Error <nl> * ) <nl> + val read_and_wait_pid : timeout : int - > Process_types . t - > process_result <nl> <nl> val failure_msg : failure - > string <nl> <nl> val status_to_string : Unix . process_status - > string <nl> <nl> - val is_ready : Process_types . t - > bool <nl> ( * * Returns true if read_and_close_pid would be nonblocking . * ) <nl> + val is_ready : Process_types . t - > bool <nl> mmm a / hphp / hack / src / utils / promise . ml <nl> ppp b / hphp / hack / src / utils / promise . ml <nl> <nl> module type S = sig <nl> type ' a t <nl> <nl> - val return : ' a - > ' a t <nl> ( * * Creates a promise that returns the given value immediately . * ) <nl> + val return : ' a - > ' a t <nl> <nl> - val map : ' a t - > ( ' a - > ' b ) - > ' b t <nl> ( * * Returns a new promise that will map the result of the given one . * ) <nl> + val map : ' a t - > ( ' a - > ' b ) - > ' b t <nl> <nl> - val bind : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> ( * * Returns a new promise generated from the results of the given one . * ) <nl> + val bind : ' a t - > ( ' a - > ' b t ) - > ' b t <nl> end <nl> mmm a / hphp / hack / src / watchman / watchman_sig . ml <nl> ppp b / hphp / hack / src / watchman / watchman_sig . ml <nl> module Types = struct <nl> subscription_prefix : string ; <nl> } <nl> <nl> - type clock = string <nl> ( * * The message ' s clock . * ) <nl> + type clock = string <nl> <nl> type pushed_changes = <nl> ( * <nl> mmm a / hphp / hack / src / watchman_event_watcher / watchmanEventWatcherClient_sig . ml <nl> ppp b / hphp / hack / src / watchman_event_watcher / watchmanEventWatcherClient_sig . ml <nl> end <nl> module type S = sig <nl> include module type of Abstract_types <nl> <nl> - val init : Path . t - > t <nl> ( * * <nl> * Initiates a client . <nl> * <nl> module type S = sig <nl> * <nl> * If a connection cannot be made to a Watcher , returns None . <nl> * ) <nl> + val init : Path . t - > t <nl> <nl> - val get_status : t - > WatchmanEventWatcherConfig . Responses . t option <nl> ( * * <nl> * Non - blocking poll on the connection - returns true if the Watcher reports <nl> * settled state , or we have previously already read the settled message . 
<nl> * <nl> * If Watchman Event Watcher connection fails , None is returned . <nl> * ) <nl> + val get_status : t - > WatchmanEventWatcherConfig . Responses . t option <nl> <nl> module Mocking : sig <nl> val get_status_returns : <nl> mmm a / hphp / hack / test / integration_ml / ide / test_added_parent_ide . ml <nl> ppp b / hphp / hack / test / integration_ml / ide / test_added_parent_ide . ml <nl> <nl> ( * <nl> - open Integration_test_base_types <nl> - * ) <nl> - module Test = Integration_test_base <nl> - ( * * <nl> * Copyright ( c ) 2016 , Facebook , Inc . <nl> * All rights reserved . <nl> * <nl> module Test = Integration_test_base <nl> * <nl> * ) <nl> <nl> + module Test = Integration_test_base <nl> + <nl> let foo_name = " foo . php " <nl> <nl> let foo_contents = <nl> mmm a / hphp / hack / test / integration_ml / ide / test_ide_check . ml <nl> ppp b / hphp / hack / test / integration_ml / ide / test_ide_check . ml <nl> <nl> - open Integration_test_base_types <nl> ( * * <nl> * Copyright ( c ) 2016 , Facebook , Inc . <nl> * All rights reserved . <nl> open Integration_test_base_types <nl> * <nl> * <nl> * ) <nl> + open Integration_test_base_types <nl> <nl> module Test = Integration_test_base <nl> <nl> mmm a / hphp / hack / test / integration_ml / ide / test_status_single . ml <nl> ppp b / hphp / hack / test / integration_ml / ide / test_status_single . ml <nl> <nl> - open Integration_test_base_types <nl> - ( * * <nl> + ( * <nl> * Copyright ( c ) 2016 , Facebook , Inc . <nl> * All rights reserved . <nl> * <nl> open Integration_test_base_types <nl> * <nl> * ) <nl> <nl> + open Integration_test_base_types <nl> module Test = Integration_test_base <nl> <nl> let foo_name = " foo . php " <nl> mmm a / hphp / hack / test / unit / informant / informant_server_lifetime_test . ml <nl> ppp b / hphp / hack / test / unit / informant / informant_server_lifetime_test . ml <nl> module Target_saved_state_opt_comparator = <nl> Asserter . Make_option_comparator ( Target_saved_state_comparator ) <nl> <nl> module Start_server_args_comparator = struct <nl> - type t = ServerMonitorUtils . target_saved_state option <nl> ( * * We only care about this arg and drop the rest . * ) <nl> + type t = ServerMonitorUtils . target_saved_state option <nl> <nl> let to_string state = <nl> let state_string = Target_saved_state_opt_comparator . to_string state in <nl> mmm a / hphp / hack / test / unit / utils / errors_test . ml <nl> ppp b / hphp / hack / test / unit / utils / errors_test . ml <nl> <nl> - open Hh_core <nl> ( * * <nl> * Tests documenting various invariants about the order of things coming out <nl> * of Errors module . Some of them are __probably__ not necessary for <nl> open Hh_core <nl> * significant or not . <nl> * * ) <nl> <nl> + open Hh_core <nl> + <nl> let error_list_to_string_buffer buf x = <nl> List . iter x ~ f : ( fun error - > <nl> Printf . bprintf buf " % s \ n " Errors . ( error | > to_absolute | > to_string ) ) <nl> mmm a / hphp / hack / test / utils / asserter / asserter . ml <nl> ppp b / hphp / hack / test / utils / asserter / asserter . ml <nl> module Relative_path_comparator = struct <nl> end <nl> <nl> module type Pattern_substitutions = sig <nl> - val substitutions : ( string * string ) list <nl> ( * * List of key - value pairs . We perform these key to value <nl> * substitutions in - order . <nl> * <nl> module type Pattern_substitutions = sig <nl> * Note : in actuality , the keys and values aren ' t treated as string literals <nl> * but as a pattern for regex and a template for replacement . 
<nl> * ) <nl> + val substitutions : ( string * string ) list <nl> end <nl> <nl> ( * * Comparison between an expected pattern and an actual string . * ) <nl> | Prefer doc comments before definitions | facebook/hhvm | 50664c7a1e9227f94afef9bfe5a1133909b1e0e4 | 2019-10-09T16:30:22Z |
mmm a / xbmc / network / linux / NetworkLinux . cpp <nl> ppp b / xbmc / network / linux / NetworkLinux . cpp <nl> <nl> # ifdef TARGET_ANDROID <nl> # include " android / bionic_supplement / bionic_supplement . h " <nl> # include " sys / system_properties . h " <nl> + # include < sys / wait . h > <nl> # endif <nl> # include < errno . h > <nl> # include < resolv . h > <nl> | droid : fix build . This header may be needed elsewhere as well ? | xbmc/xbmc | 8303cf64862202f538e54befd4a02939a7813c2f | 2013-05-10T23:29:52Z |
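A common reason an Android-only build needs this include, offered here as an assumption rather than a statement about NetworkLinux.cpp itself: bionic only declares waitpid() and the W* exit-status macros in <sys/wait.h>, while glibc tends to make them visible through other headers, so code along the lines of the self-contained sketch below compiles on desktop Linux but breaks on Android until the header is added.

// Minimal illustration (not an excerpt from NetworkLinux.cpp): this kind of
// code often builds on glibc without an explicit <sys/wait.h> include, but
// fails on Android's bionic, where waitpid() and the W* status macros are
// only declared in <sys/wait.h>.
#include <sys/types.h>
#include <sys/wait.h>   // the include the patch adds under TARGET_ANDROID
#include <unistd.h>
#include <cstdio>

// Run a child process and return its exit status, or -1 on failure.
static int runChild(const char* path, char* const argv[])
{
    pid_t pid = fork();
    if (pid < 0)
        return -1;                     // fork failed
    if (pid == 0)
    {
        execv(path, argv);             // child: replace the process image
        _exit(127);                    // exec failed
    }
    int status = 0;
    if (waitpid(pid, &status, 0) < 0)  // declared in <sys/wait.h>
        return -1;
    return WIFEXITED(status) ? WEXITSTATUS(status) : -1;
}

int main()
{
    char* const argv[] = { const_cast<char*>("/bin/true"), nullptr };
    std::printf("exit status: %d\n", runChild("/bin/true", argv));
    return 0;
}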
mmm a / lib / Sema / ConstraintSystem . h <nl> ppp b / lib / Sema / ConstraintSystem . h <nl> class ConstraintSystem { <nl> } <nl> } <nl> <nl> - void dump ( TypeVariableType * typeVar , llvm : : raw_ostream & out , <nl> - unsigned indent ) const { <nl> + void dump ( llvm : : raw_ostream & out , unsigned indent = 0 ) const { <nl> out . indent ( indent ) ; <nl> - out < < " ( " ; <nl> - if ( typeVar ) <nl> - out < < " $ T " < < typeVar - > getImpl ( ) . getID ( ) ; <nl> if ( FullyBound ) <nl> - out < < " fully_bound " ; <nl> + out < < " fully_bound " ; <nl> if ( SubtypeOfExistentialType ) <nl> - out < < " subtype_of_existential " ; <nl> + out < < " subtype_of_existential " ; <nl> if ( LiteralBinding ! = LiteralBindingKind : : None ) <nl> - out < < " literal = " < < static_cast < int > ( LiteralBinding ) ; <nl> + out < < " literal = " < < static_cast < int > ( LiteralBinding ) < < " " ; <nl> if ( InvolvesTypeVariables ) <nl> - out < < " involves_type_vars " ; <nl> + out < < " involves_type_vars " ; <nl> if ( NumDefaultableBindings > 0 ) <nl> - out < < " defaultable_bindings = " < < NumDefaultableBindings ; <nl> - out < < " bindings = " ; <nl> - interleave ( Bindings , <nl> - [ & ] ( const PotentialBinding & binding ) { <nl> - auto type = binding . BindingType ; <nl> - auto & ctx = type - > getASTContext ( ) ; <nl> - llvm : : SaveAndRestore < bool > debugConstraints ( <nl> - ctx . LangOpts . DebugConstraintSolver , true ) ; <nl> - switch ( binding . Kind ) { <nl> - case AllowedBindingKind : : Exact : <nl> - break ; <nl> - <nl> - case AllowedBindingKind : : Subtypes : <nl> - out < < " ( subtypes of ) " ; <nl> - break ; <nl> - <nl> - case AllowedBindingKind : : Supertypes : <nl> - out < < " ( supertypes of ) " ; <nl> - break ; <nl> - } <nl> - if ( binding . DefaultedProtocol ) <nl> - out < < " ( default from " <nl> - < < ( * binding . DefaultedProtocol ) - > getName ( ) < < " ) " ; <nl> - out < < type . getString ( ) ; <nl> - } , <nl> - [ & ] ( ) { out < < " " ; } ) ; <nl> + out < < " # defaultable_bindings = " < < NumDefaultableBindings < < " " ; <nl> + <nl> + out < < " bindings = " ; <nl> + if ( ! Bindings . empty ( ) ) { <nl> + interleave ( Bindings , <nl> + [ & ] ( const PotentialBinding & binding ) { <nl> + auto type = binding . BindingType ; <nl> + auto & ctx = type - > getASTContext ( ) ; <nl> + llvm : : SaveAndRestore < bool > debugConstraints ( <nl> + ctx . LangOpts . DebugConstraintSolver , true ) ; <nl> + switch ( binding . Kind ) { <nl> + case AllowedBindingKind : : Exact : <nl> + break ; <nl> + <nl> + case AllowedBindingKind : : Subtypes : <nl> + out < < " ( subtypes of ) " ; <nl> + break ; <nl> + <nl> + case AllowedBindingKind : : Supertypes : <nl> + out < < " ( supertypes of ) " ; <nl> + break ; <nl> + } <nl> + if ( binding . DefaultedProtocol ) <nl> + out < < " ( default from " <nl> + < < ( * binding . DefaultedProtocol ) - > getName ( ) < < " ) " ; <nl> + out < < type . getString ( ) ; <nl> + } , <nl> + [ & ] ( ) { out < < " " ; } ) ; <nl> + } else { <nl> + out < < " { } " ; <nl> + } <nl> + } <nl> + <nl> + void dump ( TypeVariableType * typeVar , llvm : : raw_ostream & out , <nl> + unsigned indent = 0 ) const { <nl> + out . indent ( indent ) ; <nl> + out < < " ( " ; <nl> + if ( typeVar ) <nl> + out < < " $ T " < < typeVar - > getImpl ( ) . getID ( ) ; <nl> + dump ( out ) ; <nl> out < < " ) \ n " ; <nl> } <nl> } ; <nl> mmm a / lib / Sema / TypeCheckConstraints . cpp <nl> ppp b / lib / Sema / TypeCheckConstraints . 
cpp <nl> void ConstraintSystem : : print ( raw_ostream & out ) { <nl> if ( auto fixed = getFixedType ( tv ) ) { <nl> out < < " as " ; <nl> fixed - > print ( out ) ; <nl> + } else { <nl> + getPotentialBindings ( tv ) . dump ( out , 1 ) ; <nl> } <nl> } else { <nl> out < < " equivalent to " ; <nl> | Merge remote - tracking branch ' origin / master ' into master - next | apple/swift | 429375c45093ea89c88b597aa90263fb2b1674f0 | 2017-09-07T19:49:02Z |
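The ConstraintSystem.h hunk splits one dump routine into an inner overload that prints only the binding flags and types and a thin wrapper that adds the "$T<id>" header, which is what lets the TypeCheckConstraints.cpp printer reuse the inner overload for type variables that have no fixed type. The sketch below mirrors that shape with plain std::ostream and invented names (Binding, PotentialBindings, printSolverState); it illustrates the refactoring pattern, not the Swift compiler's actual types.

// Standalone sketch of the "split dump() for reuse" pattern shown above.
#include <iostream>
#include <string>
#include <vector>

struct Binding {
    std::string type;
};

struct PotentialBindings {
    bool fullyBound = false;
    std::vector<Binding> bindings;

    // Inner overload: prints only the flags and bindings, no header,
    // so other printers can embed it in their own output.
    void dump(std::ostream &out, unsigned indent = 0) const {
        out << std::string(indent, ' ');
        if (fullyBound)
            out << "fully_bound ";
        out << "bindings=";
        if (bindings.empty()) {
            out << "{}";
        } else {
            for (size_t i = 0; i < bindings.size(); ++i) {
                if (i) out << ' ';
                out << bindings[i].type;
            }
        }
    }

    // Wrapper overload: adds the per-type-variable header and newline.
    void dump(unsigned typeVarID, std::ostream &out, unsigned indent = 0) const {
        out << std::string(indent, ' ') << "($T" << typeVarID << ' ';
        dump(out);   // reuse the inner overload
        out << ")\n";
    }
};

// A second printer can now reuse the inner overload directly, which is the
// shape of the TypeCheckConstraints.cpp change for unresolved variables.
void printSolverState(std::ostream &out, unsigned typeVarID,
                      const PotentialBindings &pb, bool hasFixedType) {
    out << "$T" << typeVarID;
    if (hasFixedType) {
        out << " as <fixed type>\n";
    } else {
        out << '\n';
        pb.dump(out, 2);
        out << '\n';
    }
}

int main() {
    PotentialBindings pb;
    pb.bindings = {{"Int"}, {"Double"}};
    pb.dump(7, std::cout);                   // prints "($T7 bindings=Int Double)"
    printSolverState(std::cout, 7, pb, false);
    return 0;
}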
mmm a / contracts / eosiolib / multi_index . hpp <nl> ppp b / contracts / eosiolib / multi_index . hpp <nl> class multi_index <nl> template < typename Lambda > <nl> void modify ( const T & obj , uint64_t payer , Lambda & & updater ) { <nl> const auto & objitem = static_cast < const item & > ( obj ) ; <nl> + eosio_assert ( objitem . __idx = = this , " object passed to modify is not in multi_index " ) ; <nl> auto & mutableitem = const_cast < item & > ( objitem ) ; <nl> <nl> auto secondary_keys = boost : : hana : : transform ( _indices , [ & ] ( auto & & idx ) { <nl> | add check to see if object belongs to this multi_index in modify | EOSIO/eos | 891d89ed62aac5ae6a7e373a59f8c2cec7852bbd | 2018-04-05T14:01:11Z |
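The single added line makes modify() reject objects that were obtained from a different multi_index instance by comparing the cached item's back-pointer (__idx) against this. The sketch below shows the same ownership check in a self-contained form, with invented Table/Item names and a plain assert standing in for eosio_assert; it is not the eosiolib implementation.

// Minimal sketch of the ownership check added above: each cached item
// remembers which table created it, and modify() refuses items that
// belong to another table instance.
#include <cassert>
#include <cstdint>
#include <memory>
#include <vector>

class Table {
public:
    struct Item {
        const Table *owner;   // back-pointer, analogous to item::__idx
        uint64_t     key;
        int          value;
    };

    const Item &emplace(uint64_t key, int value) {
        rows_.push_back(std::make_unique<Item>(Item{this, key, value}));
        return *rows_.back();
    }

    template <typename Updater>
    void modify(const Item &obj, Updater &&updater) {
        // The guard the patch introduces: the object must have been
        // produced by *this* table instance, not some other one.
        assert(obj.owner == this &&
               "object passed to modify is not in this table");
        updater(const_cast<Item &>(obj));
    }

private:
    std::vector<std::unique_ptr<Item>> rows_;
};

int main() {
    Table a, b;
    const Table::Item &row = a.emplace(1, 10);
    a.modify(row, [](Table::Item &r) { r.value += 5; });   // ok: row is in a
    // b.modify(row, [](Table::Item &r) { r.value = 0; }); // would trip the assert
    (void)b;
    return 0;
}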
mmm a / stdlib / private / StdlibUnittest / LoggingWrappers . swift <nl> ppp b / stdlib / private / StdlibUnittest / LoggingWrappers . swift <nl> public func expectCustomizable < <nl> collectMoreInfo : ( ( ) - > String ) ? = nil <nl> ) { <nl> expectNotEqual ( <nl> - 0 , counters [ T . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . self ] , <nl> + collectMoreInfo ? ( ) ? ? " " , <nl> + file : file , <nl> + line : line , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) ) <nl> <nl> expectEqual ( <nl> - counters [ T . self ] , counters [ T . Base . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + counters [ T . self ] , counters [ T . Base . self ] , <nl> + collectMoreInfo ? ( ) ? ? " " , <nl> + file : file , <nl> + line : line , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) ) <nl> } <nl> <nl> public func expectNotCustomizable < <nl> public func expectNotCustomizable < <nl> collectMoreInfo : ( ( ) - > String ) ? = nil <nl> ) { <nl> expectNotEqual ( <nl> - 0 , counters [ T . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . self ] , <nl> + collectMoreInfo ? ( ) ? ? " " , <nl> + file : file , <nl> + line : line , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) ) <nl> <nl> expectEqual ( <nl> - 0 , counters [ T . Base . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . Base . self ] , <nl> + collectMoreInfo ? ( ) ? ? " " , <nl> + file : file , <nl> + line : line , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) ) <nl> } <nl> mmm a / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> ppp b / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> public func expectEqual < T : Equatable , U : Equatable > ( <nl> expectEqual ( expected . 1 , actual . 1 , $ { trace } , showFrame : false ) { $ 0 = = $ 1 } <nl> } <nl> <nl> + public func expectationFailure ( <nl> + reason : String , <nl> + trace message : String , <nl> + stackTrace : SourceLocStack ) { <nl> + _anyExpectFailed = true <nl> + stackTrace . print ( ) <nl> + print ( reason , appendNewline : reason ! = " " ) <nl> + print ( message , appendNewline : message ! = " " ) <nl> + } <nl> + <nl> public func expectEqual < T > ( <nl> expected : T , _ actual : T , $ { TRACE } , sameValue equal : ( T , T ) - > Bool <nl> ) { <nl> if ! equal ( expected , actual ) { <nl> - _anyExpectFailed = true <nl> - stackTrace . pushIf ( showFrame , file : file , line : line ) . print ( ) <nl> - print ( " expected : \ " \ ( expected ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( expected ) ) ) " ) <nl> - print ( " actual : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( expected ) ) ) " ) <nl> - print ( message ( ) + " \ n " ) <nl> + expectationFailure ( <nl> + " expected : \ " \ ( expected ) \ " ( of type \ ( expected . dynamicType ) ) \ n " <nl> + + " actual : \ " \ ( actual ) \ " ( of type \ ( expected . dynamicType ) ) " , <nl> + trace : $ { trace } <nl> + ) <nl> } <nl> } <nl> <nl> - public func expectNotEqual < T : Equatable > ( <nl> - expected : T , _ actual : T , <nl> - stackTrace : SourceLocStack ? = nil , <nl> - file : String = __FILE__ , line : UWord = __LINE__ , <nl> - collectMoreInfo : ( ( ) - > String ) ? 
= nil <nl> - ) { <nl> + public func expectNotEqual < T : Equatable > ( expected : T , _ actual : T , $ { TRACE } ) { <nl> if expected = = actual { <nl> - _anyExpectFailed = true <nl> - print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> - _printStackTrace ( stackTrace ) <nl> - print ( " unexpected value : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> - if collectMoreInfo ! = nil { print ( collectMoreInfo ! ( ) ) } <nl> - print ( " " ) <nl> + expectationFailure ( <nl> + " unexpected value : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " , <nl> + trace : $ { trace } <nl> + ) <nl> } <nl> } <nl> <nl> / / Can not write a sane set of overloads using generics because of : <nl> / / < rdar : / / problem / 17015923 > Array - > NSArray implicit conversion insanity <nl> public func expectOptionalEqual < T : Equatable > ( <nl> - expected : T , _ actual : T ? , <nl> - stackTrace : SourceLocStack ? = nil , <nl> - file : String = __FILE__ , line : UWord = __LINE__ <nl> + expected : T , _ actual : T ? , $ { TRACE } <nl> ) { <nl> if ( actual = = nil ) | | expected ! = actual ! { <nl> - _anyExpectFailed = true <nl> - print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> - _printStackTrace ( stackTrace ) <nl> - print ( " expected : \ " \ ( expected ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( expected ) ) ) " ) <nl> - print ( " actual : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> - print ( " " ) <nl> + expectationFailure ( <nl> + " expected : \ " \ ( expected ) \ " ( of type \ ( expected . dynamicType ) ) \ n " <nl> + + " actual : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " , <nl> + trace : $ { trace } ) <nl> } <nl> } <nl> <nl> - public func expectEqual < T : Equatable > ( <nl> - expected : T ? , _ actual : T ? , <nl> - stackTrace : SourceLocStack ? = nil , <nl> - file : String = __FILE__ , line : UWord = __LINE__ , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> - ) { <nl> + public func expectEqual < T : Equatable > ( expected : T ? , _ actual : T ? , $ { TRACE } ) { <nl> if ( actual = = nil ) ! = ( expected = = nil ) <nl> | | actual ! = nil & & expected ! ! = actual ! { <nl> - _anyExpectFailed = true <nl> - print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> - _printStackTrace ( stackTrace ) <nl> - print ( " expected : \ " \ ( expected ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( expected ) ) ) " ) <nl> - print ( " actual : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> - if collectMoreInfo ! = nil { print ( collectMoreInfo ! ( ) ) } <nl> - print ( " " ) <nl> + expectationFailure ( <nl> + " expected : \ " \ ( expected ) \ " ( of type \ ( expected . dynamicType ) ) \ n " <nl> + + " actual : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " , <nl> + trace : $ { trace } ) <nl> } <nl> } <nl> <nl> func _expectNotEqual $ { Generic } ( <nl> if expected = = actual { <nl> _anyExpectFailed = true <nl> print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> - print ( " unexpected value : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> + print ( " unexpected value : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " ) <nl> if collectMoreInfo ! = nil { print ( collectMoreInfo ! ( ) ) } <nl> print ( " " ) <nl> } <nl> public func expectOptionalEqual $ { Generic } ( <nl> if ( actual = = nil ) | | expected ! = actual ! 
{ <nl> _anyExpectFailed = true <nl> print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> - print ( " expected : \ " \ ( expected ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( expected ) ) ) " ) <nl> - print ( " actual : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> + print ( " expected : \ " \ ( expected ) \ " ( of type \ ( expected . dynamicType ) ) " ) <nl> + print ( " actual : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " ) <nl> print ( " " ) <nl> } <nl> } <nl> public enum TestRunPredicate : CustomStringConvertible { <nl> / / <nl> <nl> public func checkEquatable < T : Equatable > ( <nl> - expectedEqual : Bool , _ lhs : T , _ rhs : T , _ stackTrace : SourceLocStack , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> + expectedEqual : Bool , _ lhs : T , _ rhs : T , $ { TRACE } <nl> ) { <nl> / / Test operator ' = = ' that is found through witness tables . <nl> - expectEqual ( <nl> - expectedEqual , lhs = = rhs , stackTrace : stackTrace , <nl> - collectMoreInfo : collectMoreInfo ) <nl> - expectEqual ( <nl> - ! expectedEqual , lhs ! = rhs , stackTrace : stackTrace , <nl> - collectMoreInfo : collectMoreInfo ) <nl> - } <nl> - <nl> - public func checkEquatable < T : Equatable > ( <nl> - expectedEqual : Bool , _ lhs : T , _ rhs : T , <nl> - file : String = __FILE__ , line : UWord = __LINE__ , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> - ) { <nl> - checkEquatable ( <nl> - expectedEqual , lhs , rhs , SourceLocStack ( ) . with ( SourceLoc ( file , line ) ) ) <nl> + expectEqual ( expectedEqual , lhs = = rhs , $ { trace } ) <nl> + expectEqual ( ! expectedEqual , lhs ! = rhs , $ { trace } ) <nl> } <nl> <nl> public func checkHashable < T : Hashable > ( <nl> - expectedEqual : Bool , _ lhs : T , _ rhs : T , _ stackTrace : SourceLocStack , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> + expectedEqual : Bool , _ lhs : T , _ rhs : T , $ { TRACE } <nl> ) { <nl> - / / Test operator ' = = ' that is found through witness tables . <nl> - expectEqual ( <nl> - expectedEqual , lhs = = rhs , stackTrace : stackTrace , <nl> - collectMoreInfo : collectMoreInfo ) <nl> - expectEqual ( <nl> - ! expectedEqual , lhs ! = rhs , stackTrace : stackTrace , <nl> - collectMoreInfo : collectMoreInfo ) <nl> + checkEquatable ( expectedEqual , lhs , rhs , $ { trace } ) <nl> <nl> / / Test ' hashValue ' . <nl> / / <nl> / / If objects are not equal , then the hash value can be different or it can <nl> / / collide . <nl> if expectedEqual { <nl> - expectEqual ( lhs . hashValue , rhs . hashValue , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + expectEqual ( lhs . hashValue , rhs . hashValue , $ { trace } ) <nl> } <nl> } <nl> <nl> - public func checkHashable < T : Hashable > ( <nl> - expectedEqual : Bool , _ lhs : T , _ rhs : T , <nl> - file : String = __FILE__ , line : UWord = __LINE__ , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> - ) { <nl> - checkHashable ( <nl> - expectedEqual , lhs , rhs , SourceLocStack ( SourceLoc ( file , line ) ) , <nl> - collectMoreInfo : collectMoreInfo ) <nl> - } <nl> - <nl> public enum ExpectedComparisonResult { <nl> case LT , EQ , GT <nl> <nl> extension ExpectedComparisonResult : CustomStringConvertible { <nl> } <nl> <nl> public func checkComparable < T : Comparable > ( <nl> - expected : ExpectedComparisonResult , <nl> - _ lhs : T , _ rhs : T , _ stackTrace : SourceLocStack , <nl> - collectMoreInfo : ( ( ) - > String ) ? 
= nil <nl> + expected : ExpectedComparisonResult , _ lhs : T , _ rhs : T , $ { TRACE } <nl> ) { <nl> - expectEqual ( expected . isLT ( ) , lhs < rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> - expectEqual ( expected . isLE ( ) , lhs < = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> - expectEqual ( expected . isGE ( ) , lhs > = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> - expectEqual ( expected . isGT ( ) , lhs > rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> - } <nl> - <nl> - public func checkComparable < T : Comparable > ( <nl> - expected : ExpectedComparisonResult , <nl> - _ lhs : T , _ rhs : T , <nl> - file : String = __FILE__ , line : UWord = __LINE__ , <nl> - collectMoreInfo : ( ( ) - > String ) ? = nil <nl> - ) { <nl> - checkComparable ( expected , lhs , rhs , SourceLocStack ( SourceLoc ( file , line ) ) , <nl> - collectMoreInfo : collectMoreInfo ) <nl> + expectEqual ( expected . isLT ( ) , lhs < rhs , $ { trace } ) <nl> + expectEqual ( expected . isLE ( ) , lhs < = rhs , $ { trace } ) <nl> + expectEqual ( expected . isGE ( ) , lhs > = rhs , $ { trace } ) <nl> + expectEqual ( expected . isGT ( ) , lhs > rhs , $ { trace } ) <nl> } <nl> <nl> public struct CollectionMisuseResiliencyChecks { <nl> public func expectEqualSequence < <nl> print ( " check failed at \ ( file ) , line \ ( line ) " ) <nl> _printStackTrace ( stackTrace ) <nl> print ( " expected elements : \ " \ ( expected ) \ " " ) <nl> - print ( " actual : \ " \ ( actual ) \ " ( of type \ ( _stdlib_getDemangledTypeName ( actual ) ) ) " ) <nl> + print ( " actual : \ " \ ( actual ) \ " ( of type \ ( actual . dynamicType ) ) " ) <nl> if collectMoreInfo ! = nil { print ( collectMoreInfo ! ( ) ) } <nl> print ( " " ) <nl> } <nl> public func expectEqualFunctionsForDomain < ArgumentType , Result : Equatable > ( <nl> for a in arguments { <nl> let expected = function1 ( a ) <nl> let actual = function2 ( a ) <nl> - expectEqual ( expected , actual ) { <nl> - " where the argument is : \ ( a ) " <nl> - } <nl> + expectEqual ( expected , actual , " where the argument is : \ ( a ) " ) <nl> } <nl> } <nl> <nl> public func expectEqualMethodsForDomain < <nl> for a in arguments { <nl> let expected = function1 ( s ) ( a ) <nl> let actual = function2 ( s ) ( a ) <nl> - expectEqual ( expected , actual ) { <nl> + expectEqual ( <nl> + expected , actual , <nl> " where the first argument is : \ ( s ) \ nand the second argument is : \ ( a ) " <nl> - } <nl> + ) <nl> } <nl> } <nl> } <nl> public func expectSameBuffer ( <nl> file : String = __FILE__ , line : UWord = __LINE__ , <nl> collectMoreInfo : ( ( ) - > String ) ? = nil <nl> ) { <nl> - expectEqual ( expected ? . buffer . _id , actual ? . buffer . _id ) { <nl> - " lhs and rhs point to different buffers " <nl> - } <nl> + expectEqual ( <nl> + expected ? . buffer . _id , actual ? . buffer . _id , <nl> + " lhs and rhs point to different buffers " ) <nl> + <nl> if expected ? . buffer . _id = = actual ? . buffer . _id { <nl> - expectEqual ( expected ? . mutationEpoch , actual ? . mutationEpoch ) { <nl> + expectEqual ( <nl> + expected ? . mutationEpoch , actual ? . mutationEpoch , <nl> " lhs and rhs are from different mutation epochs " <nl> - } <nl> + ) <nl> } <nl> } <nl> <nl> mmm a / test / 1_stdlib / Character . swift <nl> ppp b / test / 1_stdlib / Character . swift <nl> CharacterTests . test ( " Hashable " ) { <nl> for j in characters . 
indices { <nl> var ci = Character ( characters [ i ] ) <nl> var cj = Character ( characters [ j ] ) <nl> - checkHashable ( i = = j , ci , cj , SourceLocStack ( ) . withCurrentLoc ( ) ) { <nl> - " i = \ ( i ) , j = \ ( j ) " <nl> - } <nl> + checkHashable ( i = = j , ci , cj , " i = \ ( i ) , j = \ ( j ) " ) <nl> } <nl> } <nl> } <nl> func checkRepresentation ( s : String ) { <nl> let expectSmall = s . utf8 . count < = 8 <nl> let isSmall = isSmallRepresentation ( s ) <nl> <nl> - expectEqual ( expectSmall , isSmall ) { <nl> - let expectedSize = expectSmall ? " small " : " large " <nl> - return " expected \ " \ ( s ) \ " to use the \ ( expectedSize ) representation " <nl> - } <nl> + let expectedSize = expectSmall ? " small " : " large " <nl> + expectEqual ( <nl> + expectSmall , isSmall , <nl> + " expected \ " \ ( s ) \ " to use the \ ( expectedSize ) representation " ) <nl> } <nl> <nl> CharacterTests . test ( " RoundTripping " ) { <nl> mmm a / test / 1_stdlib / FloatingPoint . swift . gyb <nl> ppp b / test / 1_stdlib / FloatingPoint . swift . gyb <nl> FloatingPoint . test ( " Float80 / HashValueZero " ) { <nl> <nl> func getPositiveSubnormal_Float32 ( ) - > Float32 { <nl> var result : Float32 = 1 . 0 <nl> - for i in 0 . . < 127 { <nl> + for _ in 0 . . < 127 { <nl> result / = 2 . 0 as Float32 <nl> } <nl> expectTrue ( result . isSubnormal ) <nl> func getPositiveSubnormal_Float32 ( ) - > Float32 { <nl> <nl> func getPositiveSubnormal_Float64 ( ) - > Float64 { <nl> var result : Float64 = 1 . 0 <nl> - for i in 0 . . < 1023 { <nl> + for _ in 0 . . < 1023 { <nl> result / = 2 . 0 as Float64 <nl> } <nl> expectTrue ( result . isSubnormal ) <nl> func checkFloatingPointComparison_ $ { FloatSelf } ( <nl> " expected : lhs = \ ( lhs ) \ ( expected ) rhs = \ ( rhs ) " <nl> } <nl> expectEqual ( expected . isEQ ( ) , lhs = = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> expectEqual ( expected . isNE ( ) , lhs ! = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> checkHashable ( expected . isEQ ( ) , lhs , rhs , <nl> - stackTrace . withCurrentLoc ( ) , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> <nl> expectEqual ( expected . isLT ( ) , lhs < rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> expectEqual ( expected . isLE ( ) , lhs < = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> expectEqual ( expected . isGE ( ) , lhs > = rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> expectEqual ( expected . isGT ( ) , lhs > rhs , <nl> - stackTrace : stackTrace , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> checkComparable ( expected , lhs , rhs , <nl> - stackTrace . withCurrentLoc ( ) , collectMoreInfo : collectMoreInfo ) <nl> + collectMoreInfo ( ) , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> } <nl> <nl> FloatingPoint . 
test ( " $ { FloatSelf } / { Comparable , Hashable , Equatable } " ) { <nl> mmm a / test / 1_stdlib / Inputs / DictionaryKeyValueTypes . swift <nl> ppp b / test / 1_stdlib / Inputs / DictionaryKeyValueTypes . swift <nl> func resetLeaksOfDictionaryKeysValues ( ) { <nl> } <nl> <nl> func expectNoLeaksOfDictionaryKeysValues ( ) { <nl> - expectEqual ( 0 , TestKeyTy . objectCount ) { " TestKeyTy leak " } <nl> - expectEqual ( 0 , TestValueTy . objectCount ) { " TestValueTy leak " } <nl> - expectEqual ( 0 , TestEquatableValueTy . objectCount ) { <nl> - " TestEquatableValueTy leak " <nl> - } <nl> - <nl> - expectEqual ( 0 , TestObjCKeyTy . objectCount ) { " TestObjCKeyTy leak " } <nl> - expectEqual ( 0 , TestObjCValueTy . objectCount ) { " TestObjCValueTy leak " } <nl> - expectEqual ( 0 , TestObjCEquatableValueTy . objectCount ) { <nl> - " TestObjCEquatableValueTy leak " <nl> - } <nl> + expectEqual ( 0 , TestKeyTy . objectCount , " TestKeyTy leak " ) <nl> + expectEqual ( 0 , TestValueTy . objectCount , " TestValueTy leak " ) <nl> + expectEqual ( 0 , TestEquatableValueTy . objectCount , " TestEquatableValueTy leak " ) <nl> + <nl> + expectEqual ( 0 , TestObjCKeyTy . objectCount , " TestObjCKeyTy leak " ) <nl> + expectEqual ( 0 , TestObjCValueTy . objectCount , " TestObjCValueTy leak " ) <nl> + expectEqual ( <nl> + 0 , TestObjCEquatableValueTy . objectCount , " TestObjCEquatableValueTy leak " ) <nl> } <nl> <nl> func getBridgedNSDictionaryOfRefTypesBridgedVerbatim ( ) - > NSDictionary { <nl> mmm a / test / 1_stdlib / Lazy . swift . gyb <nl> ppp b / test / 1_stdlib / Lazy . swift . gyb <nl> extension Repeat where Element : TestProtocol1 { <nl> LazyTestSuite . test ( " Repeat " ) { <nl> checkRandomAccessCollection ( <nl> [ ] as Array < OpaqueValue < Int > > , <nl> - Repeat ( count : 0 , repeatedValue : OpaqueValue ( 42 ) ) , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + Repeat ( count : 0 , repeatedValue : OpaqueValue ( 42 ) ) ) <nl> + { $ 0 . value = = $ 1 . value } <nl> <nl> checkRandomAccessCollection ( <nl> [ OpaqueValue ( 42 ) ] as Array < OpaqueValue < Int > > , <nl> - Repeat ( count : 1 , repeatedValue : OpaqueValue ( 42 ) ) , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + Repeat ( count : 1 , repeatedValue : OpaqueValue ( 42 ) ) ) <nl> + { $ 0 . value = = $ 1 . value } <nl> <nl> checkRandomAccessCollection ( <nl> [ OpaqueValue ( 42 ) , OpaqueValue ( 42 ) , OpaqueValue ( 42 ) ] as Array < OpaqueValue < Int > > , <nl> - Repeat ( count : 3 , repeatedValue : OpaqueValue ( 42 ) ) , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + Repeat ( count : 3 , repeatedValue : OpaqueValue ( 42 ) ) ) <nl> + { $ 0 . value = = $ 1 . value } <nl> } <nl> <nl> / / FIXME : trap tests . <nl> extension CollectionOfOne where Element : TestProtocol1 { <nl> LazyTestSuite . test ( " CollectionOfOne " ) { <nl> checkRandomAccessCollection ( <nl> [ OpaqueValue ( 42 ) ] , <nl> - CollectionOfOne ( OpaqueValue ( 42 ) ) , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + CollectionOfOne ( OpaqueValue ( 42 ) ) ) { $ 0 . value = = $ 1 . value } <nl> } <nl> <nl> / / FIXME : trap tests . <nl> extension EmptyCollection where Element : TestProtocol1 { <nl> LazyTestSuite . test ( " EmptyCollection " ) { <nl> checkRandomAccessCollection ( <nl> [ ] , <nl> - EmptyCollection < OpaqueValue < Int > > ( ) , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + EmptyCollection < OpaqueValue < Int > > ( ) ) { $ 0 . value = = $ 1 . value } <nl> } <nl> <nl> / / FIXME : trap tests . <nl> LazyTestSuite . test ( " Lazy $ { traversal } Collection . 
reverse ( ) " ) { <nl> <nl> check $ { traversal } Collection ( <nl> [ 90 , 10 , 30 , 0 ] . map { OpaqueValue ( $ 0 ) } as [ OpaqueValue < Int > ] , <nl> - reversed , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + reversed ) { $ 0 . value = = $ 1 . value } <nl> <nl> var reversedTwice = reversed . reverse ( ) <nl> expectType ( <nl> LazyTestSuite . test ( " Lazy $ { traversal } Collection . reverse ( ) " ) { <nl> <nl> check $ { traversal } Collection ( <nl> [ 0 , 30 , 10 , 90 ] . map { OpaqueValue ( $ 0 ) } as [ OpaqueValue < Int > ] , <nl> - reversedTwice , <nl> - { $ 0 . value = = $ 1 . value } ) <nl> + reversedTwice ) { $ 0 . value = = $ 1 . value } <nl> } <nl> % end <nl> <nl> mmm a / test / 1_stdlib / Mirror . swift <nl> ppp b / test / 1_stdlib / Mirror . swift <nl> mirrors . test ( " Legacy " ) { <nl> let mb = Mirror ( reflecting : B ( ) ) <nl> <nl> func expectBMirror ( <nl> - mb : Mirror , stackTrace : SourceLocStack ? = nil , <nl> + mb : Mirror , stackTrace : SourceLocStack = SourceLocStack ( ) , <nl> file : String = __FILE__ , line : UWord = __LINE__ <nl> ) { <nl> expectTrue ( mb . subjectType = = B . self , <nl> mmm a / test / 1_stdlib / NSStringAPI . swift <nl> ppp b / test / 1_stdlib / NSStringAPI . swift <nl> func checkStringComparison ( <nl> / / String / String <nl> expectEqual ( expected . isEQ ( ) , lhs = = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isNE ( ) , lhs ! = rhs , stackTrace : stackTrace ) <nl> - checkHashable ( expected . isEQ ( ) , lhs , rhs , stackTrace . withCurrentLoc ( ) ) <nl> + checkHashable ( <nl> + expected . isEQ ( ) , lhs , rhs , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> <nl> expectEqual ( expected . isLT ( ) , lhs < rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isLE ( ) , lhs < = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isGE ( ) , lhs > = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isGT ( ) , lhs > rhs , stackTrace : stackTrace ) <nl> - checkComparable ( expected , lhs , rhs , stackTrace . withCurrentLoc ( ) ) <nl> + checkComparable ( expected , lhs , rhs , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> <nl> / / NSString / NSString <nl> let lhsNSString = lhs as NSString <nl> func checkStringComparison ( <nl> stackTrace : stackTrace ) <nl> checkHashable ( <nl> expectedEqualUnicodeScalars , lhsNSString , rhsNSString , <nl> - stackTrace . withCurrentLoc ( ) ) <nl> + stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> } <nl> <nl> NSStringAPIs . test ( " String . { Equatable , Hashable , Comparable } " ) { <nl> func checkCharacterComparison ( <nl> / / Character / Character <nl> expectEqual ( expected . isEQ ( ) , lhs = = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isNE ( ) , lhs ! = rhs , stackTrace : stackTrace ) <nl> - checkHashable ( expected . isEQ ( ) , lhs , rhs , stackTrace . withCurrentLoc ( ) ) <nl> + checkHashable ( <nl> + expected . isEQ ( ) , lhs , rhs , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> <nl> expectEqual ( expected . isLT ( ) , lhs < rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isLE ( ) , lhs < = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isGE ( ) , lhs > = rhs , stackTrace : stackTrace ) <nl> expectEqual ( expected . isGT ( ) , lhs > rhs , stackTrace : stackTrace ) <nl> - checkComparable ( expected , lhs , rhs , stackTrace . withCurrentLoc ( ) ) <nl> + checkComparable ( expected , lhs , rhs , stackTrace : stackTrace . withCurrentLoc ( ) ) <nl> } <nl> <nl> NSStringAPIs . 
test ( " Character . { Equatable , Hashable , Comparable } " ) { <nl> func getNonASCIICString ( ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) { <nl> return ( UnsafeMutablePointer ( up ) , { up . dealloc ( 100 ) } ) <nl> } <nl> <nl> - func getIllFormedUTF8String1 ( ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) { <nl> + func getIllFormedUTF8String1 ( <nl> + ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) { <nl> let up = UnsafeMutablePointer < UInt8 > . alloc ( 100 ) <nl> up [ 0 ] = 0x41 <nl> up [ 1 ] = 0xed <nl> func getIllFormedUTF8String1 ( ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) <nl> return ( UnsafeMutablePointer ( up ) , { up . dealloc ( 100 ) } ) <nl> } <nl> <nl> - func getIllFormedUTF8String2 ( ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) { <nl> + func getIllFormedUTF8String2 ( <nl> + ) - > ( UnsafeMutablePointer < CChar > , dealloc : ( ) - > ( ) ) { <nl> let up = UnsafeMutablePointer < UInt8 > . alloc ( 100 ) <nl> up [ 0 ] = 0x41 <nl> up [ 1 ] = 0xed <nl> mmm a / test / 1_stdlib / UnsafePointer . swift . gyb <nl> ppp b / test / 1_stdlib / UnsafePointer . swift . gyb <nl> $ { SelfName } TestSuite . test ( " Hashable " ) { <nl> for j in ptrs . indices { <nl> var pi = ptrs [ i ] <nl> var pj = ptrs [ j ] <nl> - checkHashable ( i = = j , pi , pj , SourceLocStack ( ) . withCurrentLoc ( ) ) { <nl> - " i = \ ( i ) , j = \ ( j ) " <nl> - } <nl> + checkHashable ( i = = j , pi , pj , " i = \ ( i ) , j = \ ( j ) " ) <nl> } <nl> } <nl> } <nl> mmm a / validation - test / compiler_crashers_2_fixed / 0022 - rdar21625478 . swift <nl> ppp b / validation - test / compiler_crashers_2_fixed / 0022 - rdar21625478 . swift <nl> public func expectCustomizable < <nl> collectMoreInfo : ( ( ) - > String ) ? = nil <nl> ) { <nl> expectNotEqual ( <nl> - 0 , counters [ T . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . self ] , collectMoreInfo ? ( ) ? ? " " , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) , file : file , line : line ) <nl> <nl> expectEqual ( <nl> - counters [ T . self ] , counters [ T . Base . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + counters [ T . self ] , counters [ T . Base . self ] , collectMoreInfo ? ( ) ? ? " " , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) , file : file , line : line ) <nl> } <nl> <nl> public func expectNotCustomizable < <nl> public func expectNotCustomizable < <nl> collectMoreInfo : ( ( ) - > String ) ? = nil <nl> ) { <nl> expectNotEqual ( <nl> - 0 , counters [ T . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . self ] , collectMoreInfo ? ( ) ? ? " " , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) , file : file , line : line ) <nl> <nl> expectEqual ( <nl> - 0 , counters [ T . Base . self ] , stackTrace : stackTrace , <nl> - file : file , line : line , collectMoreInfo : collectMoreInfo ) <nl> + 0 , counters [ T . Base . self ] , collectMoreInfo ? ( ) ? ? " " , <nl> + stackTrace : stackTrace ? ? SourceLocStack ( ) , file : file , line : line ) <nl> } <nl> mmm a / validation - test / stdlib / Algorithm . swift . gyb <nl> ppp b / validation - test / stdlib / Algorithm . swift . gyb <nl> SequenceTypeAlgorithms . test ( " $ { algorithmKind } Element / Predicate " ) { <nl> stackTrace : test . loc . 
withCurrentLoc ( ) ) <nl> } <nl> expectEqual ( [ ] , s . map { $ 0 . value } , " sequence should be consumed " ) <nl> - expectEqual ( max ( 0 , test . sequence . count - 1 ) , timesClosureWasCalled ) { <nl> - " maxElement ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( <nl> + max ( 0 , test . sequence . count - 1 ) , timesClosureWasCalled , <nl> + " maxElement ( ) should be eager and should only call its predicate " <nl> + + " once per element " ) <nl> } <nl> } <nl> <nl> SequenceTypeAlgorithms . test ( " reduce " ) { <nl> } <nl> expectEqual ( test . sequence , result . value ) <nl> expectEqual ( [ ] , s . map { $ 0 . value } , " sequence should be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " reduce ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( <nl> + test . sequence . count , timesClosureWasCalled , <nl> + " reduce ( ) should be eager and should only call its predicate " <nl> + + " once per element " ) <nl> } <nl> } <nl> <nl> SequenceTypeAlgorithms . test ( " filter / SequenceType / Semantics " ) { <nl> expectType ( [ OpaqueValue < Int > ] . self , & result ) <nl> expectEqual ( test . expected , result . map { $ 0 . value } ) <nl> expectEqual ( [ ] , s . map { $ 0 . value } , " sequence should be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " filter ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( <nl> + test . sequence . count , timesClosureWasCalled , <nl> + " filter ( ) should be eager and should only call its predicate " <nl> + + " once per element " ) <nl> expectGE ( 2 * result . count , result . capacity ) { <nl> - " filter ( ) should not reserve capacity ( it does not know how much the predicate will filter out ) " <nl> + " filter ( ) should not reserve capacity ( it does not know how much the " <nl> + + " predicate will filter out ) " <nl> } <nl> } <nl> } <nl> func callGenericSequenceMap < S : SequenceType , T > ( <nl> <nl> % for dispatch in [ ' Static ' , ' Generic ' ] : <nl> <nl> - SequenceTypeAlgorithms . test ( " map / SequenceType / $ { Implementation } Implementation / $ { dispatch } " ) { <nl> + SequenceTypeAlgorithms . test ( <nl> + " map / SequenceType / $ { Implementation } Implementation / $ { dispatch } " <nl> + ) { <nl> for test in mapTests { <nl> for underestimateCountBehavior in [ <nl> UnderestimateCountBehavior . Precise , <nl> SequenceTypeAlgorithms . test ( " map / SequenceType / $ { Implementation } Implementation / $ { <nl> expectType ( [ OpaqueValue < Int32 > ] . self , & result ) <nl> expectEqual ( test . expected , result . map { $ 0 . value } ) <nl> % if Implementation = = ' Custom ' : <nl> - expectEqual ( 1 , MinimalSequenceWithCustomMap < OpaqueValue < Int > > . timesMapWasCalled ) <nl> + expectEqual ( <nl> + 1 , MinimalSequenceWithCustomMap < OpaqueValue < Int > > . timesMapWasCalled ) <nl> % end <nl> expectEqual ( [ ] , s . map { $ 0 . value } , " sequence should be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " map ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( <nl> + test . sequence . 
count , timesClosureWasCalled , <nl> + " map ( ) should be eager and should only call its predicate " <nl> + + " once per element " ) <nl> } <nl> } <nl> } <nl> func callGenericCollectionMap < C : CollectionType , T > ( <nl> <nl> % for dispatch in [ ' Static ' , ' Generic ' ] : <nl> <nl> - SequenceTypeAlgorithms . test ( " map / CollectionType / $ { Implementation } Implementation / $ { dispatch } " ) { <nl> + SequenceTypeAlgorithms . test ( <nl> + " map / CollectionType / $ { Implementation } Implementation / $ { dispatch } " <nl> + ) { <nl> for test in mapTests { <nl> for underestimateCountBehavior in [ <nl> UnderestimateCountBehavior . Precise , <nl> UnderestimateCountBehavior . Half , <nl> UnderestimateCountBehavior . Value ( 0 ) <nl> ] { <nl> - let s = MinimalForwardCollectionWith $ { Implementation } Map < OpaqueValue < Int > > ( <nl> + let s = MinimalForwardCollectionWith $ { Implementation } Map < <nl> + OpaqueValue < Int > <nl> + > ( <nl> test . sequence . map { OpaqueValue ( $ 0 ) } , <nl> underestimatedCount : underestimateCountBehavior ) <nl> let closureLifetimeTracker = LifetimeTracked ( 0 ) <nl> expectEqual ( 1 , LifetimeTracked . instances ) <nl> var timesClosureWasCalled = 0 <nl> % if Implementation = = ' Custom ' : <nl> - MinimalForwardCollectionWithCustomMap < OpaqueValue < Int > > . timesMapWasCalled = 0 <nl> + MinimalForwardCollectionWithCustomMap < <nl> + OpaqueValue < Int > <nl> + > . timesMapWasCalled = 0 <nl> % end <nl> var result = call $ { dispatch } CollectionMap ( s ) { <nl> ( element : OpaqueValue < Int > ) - > OpaqueValue < Int32 > in <nl> SequenceTypeAlgorithms . test ( " map / CollectionType / $ { Implementation } Implementation / <nl> expectType ( [ OpaqueValue < Int32 > ] . self , & result ) <nl> expectEqual ( test . expected , result . map { $ 0 . value } ) <nl> % if Implementation = = ' Custom ' : <nl> - expectEqual ( 1 , MinimalForwardCollectionWithCustomMap < OpaqueValue < Int > > . timesMapWasCalled ) <nl> + expectEqual ( <nl> + 1 , MinimalForwardCollectionWithCustomMap < <nl> + OpaqueValue < Int > <nl> + > . timesMapWasCalled ) <nl> % end <nl> expectEqual ( test . sequence , s . map { $ 0 . value } , <nl> " collection should not be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " map ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( test . sequence . count , timesClosureWasCalled , <nl> + " map ( ) should be eager and should only call its predicate " <nl> + + " once per element " ) <nl> } <nl> } <nl> } <nl> SequenceTypeAlgorithms . test ( " flatMap / SequenceType " ) { <nl> test . expected , result . map { $ 0 . value } , <nl> stackTrace : test . loc . withCurrentLoc ( ) ) <nl> expectEqual ( [ ] , s . map { $ 0 . value } , " sequence should be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " map ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( test . sequence . count , timesClosureWasCalled , " map ( ) should be eager and should only call its predicate once per element " ) <nl> expectGE ( 2 * result . count , result . capacity ) { <nl> " flatMap ( ) should not reserve capacity " <nl> } <nl> SequenceTypeAlgorithms . test ( " flatMap / SequenceType / TransformProducesOptional " ) { <nl> test . expected , result . map { $ 0 . value } , <nl> stackTrace : test . loc . withCurrentLoc ( ) ) <nl> expectEqual ( [ ] , s . map { $ 0 . 
value } , " sequence should be consumed " ) <nl> - expectEqual ( test . sequence . count , timesClosureWasCalled ) { <nl> - " flatMap ( ) should be eager and should only call its predicate once per element " <nl> - } <nl> + expectEqual ( test . sequence . count , timesClosureWasCalled , " flatMap ( ) should be eager and should only call its predicate once per element " ) <nl> expectGE ( 2 * result . count , result . capacity ) { <nl> " flatMap ( ) should not reserve capacity " <nl> } <nl> mmm a / validation - test / stdlib / ObjectiveC . swift <nl> ppp b / validation - test / stdlib / ObjectiveC . swift <nl> ObjectiveCTests . test ( " NSObject / Hashable " ) { <nl> object1 . _value = = object2 . _value , <nl> object1 , <nl> object2 , <nl> - SourceLocStack ( ) . withCurrentLoc ( ) ) { <nl> - " i = \ ( i ) , j = \ ( j ) " <nl> - } <nl> + " i = \ ( i ) , j = \ ( j ) " ) <nl> } <nl> } <nl> } <nl> mmm a / validation - test / stdlib / Unicode . swift <nl> ppp b / validation - test / stdlib / Unicode . swift <nl> var UTF8Decoder = TestSuite ( " UTF8Decoder " ) <nl> <nl> UTF8Decoder . test ( " Internal / _numTrailingBytes " ) { <nl> for i in UInt8 ( 0x00 ) . . . UInt8 ( 0x7f ) { <nl> - expectEqual ( 0 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 0 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> for i in UInt8 ( 0x80 ) . . . UInt8 ( 0xc1 ) { <nl> - expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> for i in UInt8 ( 0xc2 ) . . . UInt8 ( 0xdf ) { <nl> - expectEqual ( 1 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 1 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> for i in UInt8 ( 0xe0 ) . . . UInt8 ( 0xef ) { <nl> - expectEqual ( 2 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 2 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> for i in UInt8 ( 0xf0 ) . . . UInt8 ( 0xf4 ) { <nl> - expectEqual ( 3 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 3 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> for i in UInt8 ( 0xf5 ) . . . UInt8 ( 0xfe ) { <nl> - expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> / / Separate test for 0xff because of : <nl> / / < rdar : / / problem / 17376512 > Range UInt8 ( 0x00 ) . . . UInt8 ( 0xff ) invokes a <nl> / / runtime trap <nl> var i = UInt8 ( 0xff ) <nl> - expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) ) { " i = \ ( i ) " } <nl> + expectEqual ( 4 , UTF8 . _numTrailingBytes ( i ) , " i = \ ( i ) " ) <nl> } <nl> <nl> UTF8Decoder . test ( " Empty " ) { <nl> mmm a / validation - test / stdlib / UnicodeTrie . swift . gyb <nl> ppp b / validation - test / stdlib / UnicodeTrie . swift . gyb <nl> UnicodeTrie . test ( " _UnicodeGraphemeClusterBreakPropertyTrie " ) { <nl> if cp % 0x10000 = = 0 { <nl> print ( " \ ( cp ) . . . " ) <nl> } <nl> - expectEqual ( expected [ Int ( cp ) ] , trie . getPropertyValue ( cp ) ) { <nl> - " code point \ ( cp ) " <nl> - } <nl> + expectEqual ( <nl> + expected [ Int ( cp ) ] , trie . 
getPropertyValue ( cp ) , " code point \ ( cp ) " ) <nl> } <nl> } <nl> <nl> % { <nl> <nl> grapheme_cluster_break_tests = \ <nl> - get_grapheme_cluster_break_tests_as_unicode_scalars ( unicodeGraphemeBreakTestFile ) <nl> + get_grapheme_cluster_break_tests_as_unicode_scalars ( <nl> + unicodeGraphemeBreakTestFile ) <nl> <nl> } % <nl> <nl> mmm a / validation - test / stdlib / UnicodeUTFEncoders . swift <nl> ppp b / validation - test / stdlib / UnicodeUTFEncoders . swift <nl> class CodecTest < Codec : TestableUnicodeCodec > { <nl> default : <nl> fatalError ( " decoding failed " ) <nl> } <nl> - expectEqual ( scalar , decoded ) { <nl> + expectEqual ( <nl> + scalar , decoded , <nl> " Decoding failed : \ ( asHex ( scalar . value ) ) = > " + <nl> - " \ ( asHex ( nsEncoded ) ) = > \ ( asHex ( decoded . value ) ) " <nl> - } <nl> + " \ ( asHex ( nsEncoded ) ) = > \ ( asHex ( decoded . value ) ) " <nl> + ) <nl> <nl> encodeIndex = encodeBuffer . startIndex <nl> Codec . encode ( scalar , output : encodeOutput ) <nl> | [ stdlibunittest ] More de - boilerplating WIP | apple/swift | 70ee2adc84af04578c2798febefaf4d1d9c8ac6d | 2015-07-07T04:54:03Z |
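The StdlibUnittest hunks above collapse the per-overload stackTrace:/collectMoreInfo: boilerplate into a shared ${TRACE} parameter pack and route every failure through one expectationFailure(reason:trace:stackTrace:) helper. As a rough analogue only, written in C++ like the other sketches here rather than Swift, the same shape can be expressed as a single failure reporter that takes a reason, an optional user message, and the call site captured by default; the names below are invented, this is not the StdlibUnittest API, and it requires C++20 for std::source_location.

// Sketch of the refactoring shape: every expectation funnels its failure
// text through one expectationFailure() helper instead of each overload
// carrying its own stack-trace and extra-info plumbing.
#include <iostream>
#include <source_location>
#include <string>

static bool anyExpectFailed = false;

void expectationFailure(const std::string &reason,
                        const std::string &message,
                        const std::source_location &loc) {
    anyExpectFailed = true;
    std::cerr << loc.file_name() << ':' << loc.line() << ": check failed\n";
    if (!reason.empty())  std::cerr << reason << '\n';
    if (!message.empty()) std::cerr << message << '\n';
}

template <typename T>
void expectEqual(const T &expected, const T &actual,
                 const std::string &message = "",
                 const std::source_location &loc =
                     std::source_location::current()) {
    if (!(expected == actual))
        expectationFailure("expected: " + std::to_string(expected) +
                           "  actual: " + std::to_string(actual),
                           message, loc);
}

// Higher-level checks reuse the same plumbing, passing the caller's
// location through rather than re-declaring their own reporting.
template <typename T>
void checkComparable(bool expectLess, const T &lhs, const T &rhs,
                     const std::string &message = "",
                     const std::source_location &loc =
                         std::source_location::current()) {
    expectEqual(expectLess, lhs < rhs, message, loc);
    expectEqual(!expectLess, lhs >= rhs, message, loc);
}

int main() {
    expectEqual(4, 2 + 2);
    checkComparable(true, 1, 2, "1 should sort before 2");
    return anyExpectFailed ? 1 : 0;
}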
mmm a / contrib / gitian - descriptors / boost - win . yml <nl> ppp b / contrib / gitian - descriptors / boost - win . yml <nl> files : <nl> - " boost - mingw - gas - cross - compile - 2013 - 03 - 03 . patch " <nl> script : | <nl> # Defines <nl> + export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> + export FAKETIME = $ REFERENCE_DATETIME <nl> export TZ = UTC <nl> INDIR = $ HOME / build <nl> + TEMPDIR = $ HOME / tmp <nl> # Input Integrity Check <nl> echo " fff00023dd79486d444c8e29922f4072e1d451fc5a4d2b6075852ead7f2b7b52 boost_1_55_0 . tar . bz2 " | shasum - c <nl> echo " d2b7f6a1d7051faef3c9cf41a92fa3671d905ef1e1da920d07651a43299f6268 boost - mingw - gas - cross - compile - 2013 - 03 - 03 . patch " | shasum - c <nl> script : | <nl> mkdir - p $ INSTALLPREFIX $ BUILDDIR <nl> cd $ BUILDDIR <nl> # <nl> - tar xjf $ INDIR / boost_1_55_0 . tar . bz2 <nl> + tar - - warning = no - timestamp - xjf $ INDIR / boost_1_55_0 . tar . bz2 <nl> cd boost_1_55_0 <nl> GCCVERSION = $ ( $ HOST - g + + - E - dM $ ( mktemp - - suffix = . h ) | grep __VERSION__ | cut - d ' ' - f 3 | cut - d ' " ' - f 2 ) <nl> echo " using gcc : $ GCCVERSION : $ HOST - g + + <nl> script : | <nl> # http : / / statmt . org / ~ s0565741 / software / boost_1_52_0 / libs / context / doc / html / context / requirements . html <nl> # " For cross - compiling the lib you must specify certain additional properties at bjam command line : target - os , abi , binary - format , architecture and address - model . " <nl> . / bjam toolset = gcc binary - format = pe target - os = windows threadapi = win32 address - model = $ BITS threading = multi variant = release link = static runtime - link = static - - user - config = user - config . jam - - without - mpi - - without - python - sNO_BZIP2 = 1 - sNO_ZLIB = 1 - - layout = tagged - - build - type = complete - - prefix = " $ INSTALLPREFIX " $ MAKEOPTS install <nl> + # post - process all generated libraries to be deterministic <nl> + # extract them to a temporary directory then re - build them deterministically <nl> + for LIB in $ ( find $ INSTALLPREFIX - name \ * . a ) ; do <nl> + rm - rf $ TEMPDIR & & mkdir $ TEMPDIR & & cd $ TEMPDIR <nl> + $ HOST - ar xv $ LIB | cut - b5 - > / tmp / list . txt <nl> + rm $ LIB <nl> + $ HOST - ar crsD $ LIB $ ( cat / tmp / list . txt ) <nl> + done <nl> # <nl> cd " $ INSTALLPREFIX " <nl> - export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> - export FAKETIME = $ REFERENCE_DATETIME <nl> - zip - r $ OUTDIR / boost - win $ BITS - 1 . 55 . 0 - gitian - r6 . zip * <nl> - unset LD_PRELOAD <nl> - unset FAKETIME <nl> + find | sort | zip - X @ $ OUTDIR / boost - win $ BITS - 1 . 55 . 0 - gitian - r6 . zip <nl> done # for BITS in <nl> <nl> mmm a / contrib / gitian - descriptors / deps - win . yml <nl> ppp b / contrib / gitian - descriptors / deps - win . yml <nl> script : | <nl> export FAKETIME = $ REFERENCE_DATETIME <nl> export TZ = UTC <nl> INDIR = $ HOME / build <nl> + TEMPDIR = $ HOME / tmp <nl> # Input Integrity Check <nl> echo " f74f15e8c8ff11aa3d5bb5f276d202ec18d7246e95f961db76054199c69c1ae3 openssl - 1 . 0 . 1e . tar . gz " | sha256sum - c <nl> echo " 12edc0df75bf9abd7f82f821795bcee50f42cb2e5f76a6a281b85732798364ef db - 4 . 8 . 30 . NC . tar . gz " | sha256sum - c <nl> script : | <nl> # <nl> tar xjf $ INDIR / qrencode - 3 . 4 . 3 . tar . bz2 <nl> cd qrencode - 3 . 4 . 3 <nl> - png_CFLAGS = " - I $ INSTALLPREFIX / include " png_LIBS = " - L $ INSTALLPREFIX / lib - lpng " . 
/ configure - - prefix = $ INSTALLPREFIX - - host = $ HOST - - enable - static - - disable - shared - - without - tools - - disable - maintainer - mode - - disable - dependency - tracking <nl> + png_CFLAGS = " - I $ INSTALLPREFIX / include " png_LIBS = " - L $ INSTALLPREFIX / lib - lpng " . / configure - - prefix = $ INSTALLPREFIX - - host = $ HOST - - enable - static - - disable - shared - - without - tools - - disable - dependency - tracking <nl> + # Workaround to prevent re - configuring by make ( resulting in missing m4 error ) ; make all files have a date in the past <nl> + find . - print0 | xargs - r0 touch - t 200001010000 <nl> make <nl> make install <nl> cd . . <nl> + # post - process all generated libraries to be deterministic <nl> + # extract them to a temporary directory then re - build them deterministically <nl> + for LIB in $ ( find $ INSTALLPREFIX - name \ * . a ) ; do <nl> + rm - rf $ TEMPDIR & & mkdir $ TEMPDIR & & cd $ TEMPDIR <nl> + $ HOST - ar xv $ LIB | cut - b5 - > / tmp / list . txt <nl> + rm $ LIB <nl> + $ HOST - ar crsD $ LIB $ ( cat / tmp / list . txt ) <nl> + done <nl> # <nl> cd $ INSTALLPREFIX <nl> - zip - r $ OUTDIR / bitcoin - deps - win $ BITS - gitian - r10 . zip include lib <nl> + find include lib | sort | zip - X @ $ OUTDIR / bitcoin - deps - win $ BITS - gitian - r10 . zip <nl> done # for BITS in <nl> mmm a / contrib / gitian - descriptors / gitian - win . yml <nl> ppp b / contrib / gitian - descriptors / gitian - win . yml <nl> script : | <nl> export TZ = UTC <nl> INDIR = $ HOME / build <nl> OPTFLAGS = ' - O2 ' <nl> + TEMPDIR = " $ HOME / tempdir " <nl> NEEDDIST = 1 <nl> # Qt : workaround for determinism in resource ordering <nl> # Qt5 ' s rcc uses a QHash to store the files for the resource . <nl> script : | <nl> else <nl> HOST = x86_64 - w64 - mingw32 <nl> fi <nl> + export PATH = $ STAGING / host / bin : $ PATH <nl> mkdir - p $ STAGING $ BUILDDIR $ BINDIR <nl> # <nl> cd $ STAGING <nl> script : | <nl> unzip $ INDIR / protobuf - win $ { BITS } - 2 . 5 . 0 - gitian - r4 . zip <nl> if [ " $ NEEDDIST " = = " 1 " ] ; then <nl> # Make source code archive which is architecture independent so it only needs to be done once <nl> - cd $ HOME / build / <nl> - cd bitcoin <nl> - export PATH = $ STAGING / host / bin : $ PATH <nl> - export TAR_OPTIONS = - - mtime = ` echo $ REFERENCE_DATETIME | awk ' { print $ 1 } ' ` <nl> + cd $ HOME / build / bitcoin <nl> . / autogen . sh <nl> . / configure - - bindir = $ OUTDIR - - prefix = $ STAGING - - host = $ HOST - - with - qt - plugindir = $ STAGING / plugins - - with - qt - incdir = $ STAGING / include - - with - qt - bindir = $ STAGING / host / bin - - with - boost = $ STAGING - - disable - maintainer - mode - - with - protoc - bindir = $ STAGING / host / bin - - disable - dependency - tracking CPPFLAGS = " - I $ STAGING / include $ { OPTFLAGS } " LDFLAGS = " - L $ STAGING / lib $ { OPTFLAGS } " CXXFLAGS = " - frandom - seed = bitcoin $ { OPTFLAGS } " <nl> make dist <nl> - mkdir - p $ OUTDIR / src <nl> - cp - f bitcoin - * . tar . * $ OUTDIR / src <nl> + DISTNAME = ` echo bitcoin - * . tar . gz ` <nl> NEEDDIST = 0 <nl> fi <nl> # Build platform - dependent executables from source archive <nl> cd $ BUILDDIR <nl> mkdir - p distsrc <nl> cd distsrc <nl> - tar - - strip - components = 1 - xf $ HOME / build / bitcoin / bitcoin - * . tar . * <nl> + tar - - strip - components = 1 - xf $ HOME / build / bitcoin / $ DISTNAME <nl> . 
/ configure - - bindir = $ BINDIR - - prefix = $ STAGING - - host = $ HOST - - with - qt - plugindir = $ STAGING / plugins - - with - qt - incdir = $ STAGING / include - - with - qt - bindir = $ STAGING / host / bin - - with - boost = $ STAGING - - disable - maintainer - mode - - with - protoc - bindir = $ STAGING / host / bin - - disable - dependency - tracking CPPFLAGS = " - I $ STAGING / include $ { OPTFLAGS } " LDFLAGS = " - L $ STAGING / lib $ { OPTFLAGS } " CXXFLAGS = " - frandom - seed = bitcoin $ { OPTFLAGS } " <nl> export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> export FAKETIME = $ REFERENCE_DATETIME <nl> script : | <nl> unset FAKETIME <nl> done # for BITS in <nl> <nl> + # sort distribution tar file and normalize user / group / mtime information for deterministic output <nl> + mkdir - p $ OUTDIR / src <nl> + rm - rf $ TEMPDIR <nl> + mkdir - p $ TEMPDIR <nl> + cd $ TEMPDIR <nl> + tar - xvf $ HOME / build / bitcoin / $ DISTNAME | sort | tar - - no - recursion - cT / dev / stdin - - mode = ' u + rw , go + r - w , a + X ' - - owner = 0 - - group = 0 - - mtime = " $ REFERENCE_DATETIME " | gzip - n > $ OUTDIR / src / $ DISTNAME <nl> + <nl> mmm a / contrib / gitian - descriptors / protobuf - win . yml <nl> ppp b / contrib / gitian - descriptors / protobuf - win . yml <nl> script : | <nl> # <nl> export TZ = UTC <nl> INDIR = $ HOME / build <nl> + TEMPDIR = $ HOME / tmp <nl> OPTFLAGS = " - O2 " <nl> # Integrity Check <nl> echo " 13bfc5ae543cf3aa180ac2485c0bc89495e3ae711fc6fab4f8ffe90dfb4bb677 protobuf - 2 . 5 . 0 . tar . bz2 " | sha256sum - c <nl> script : | <nl> # Now recompile with the mingw cross - compiler : <nl> make distclean <nl> . / configure - - prefix = $ INSTALLPREFIX - - enable - shared = no - - disable - dependency - tracking - - with - protoc = $ INSTALLPREFIX / host / bin / protoc - - host = $ HOST CXXFLAGS = " - frandom - seed = 11 $ { OPTFLAGS } " <nl> + export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> + export FAKETIME = $ REFERENCE_DATETIME <nl> make <nl> make install <nl> + # post - process all generated libraries to be deterministic <nl> + # extract them to a temporary directory then re - build them deterministically <nl> + for LIB in $ ( find $ INSTALLPREFIX - name \ * . a ) ; do <nl> + rm - rf $ TEMPDIR & & mkdir $ TEMPDIR & & cd $ TEMPDIR <nl> + $ HOST - ar xv $ LIB | cut - b5 - > / tmp / list . txt <nl> + rm $ LIB <nl> + $ HOST - ar crsD $ LIB $ ( cat / tmp / list . txt ) <nl> + done <nl> # <nl> cd $ INSTALLPREFIX <nl> - export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> - export FAKETIME = $ REFERENCE_DATETIME <nl> - zip - r $ OUTDIR / protobuf - win $ BITS - 2 . 5 . 0 - gitian - r4 . zip include lib host <nl> + find include lib host | sort | zip - X @ $ OUTDIR / protobuf - win $ BITS - 2 . 5 . 0 - gitian - r4 . zip <nl> unset LD_PRELOAD <nl> unset FAKETIME <nl> done # for BITS in <nl> mmm a / contrib / gitian - descriptors / qt - win . yml <nl> ppp b / contrib / gitian - descriptors / qt - win . yml <nl> script : | <nl> # Defines <nl> export TZ = UTC <nl> INDIR = $ HOME / build <nl> + TEMPDIR = $ HOME / tmp <nl> # Qt : workaround for determinism in resource ordering <nl> # Qt5 ' s rcc uses a QHash to store the files for the resource . <nl> # A security fix in QHash makes the ordering of keys to be different on every run <nl> script : | <nl> cd qt - everywhere - opensource - src - 5 . 2 . 0 <nl> SPECNAME = " win32 - g + + " <nl> SPECFILE = " qtbase / mkspecs / $ { SPECNAME } / qmake . 
conf " <nl> - sed ' s / $ TODAY / 2011 - 01 - 30 / ' - i configure <nl> + sed ' s / qt_instdate = ` date + % Y - % m - % d ` / qt_instdate = 2011 - 01 - 30 / ' - i qtbase / configure <nl> sed - - posix " s | QMAKE_CFLAGS = - pipe - fno - keep - inline - dllexport | QMAKE_CFLAGS \ t \ t = - pipe - fno - keep - inline - dllexport - isystem / usr / $ HOST / include / - frandom - seed = qtbuild - I $ DEPSDIR / include | " - i $ { SPECFILE } <nl> sed - - posix " s | QMAKE_LFLAGS = | QMAKE_LFLAGS \ t \ t = - L $ DEPSDIR / lib | " - i $ { SPECFILE } <nl> - # ar adds timestamps to every object file included in the static library <nl> - # providing - D as ar argument is supposed to solve it , but doesn ' t work as qmake strips off the arguments and adds - M to pass a script . . . <nl> - # which somehow cannot be combined with other flags . <nl> - # use faketime only for ar , as it confuses make / qmake into hanging sometimes <nl> - sed - - posix " s | QMAKE_LIB = \ \ \ $ \ \ \ $ { CROSS_COMPILE } ar - ru | QMAKE_LIB \ t \ t = $ HOME / ar - Dr | " - i $ { SPECFILE } <nl> - echo ' # ! / bin / bash ' > $ HOME / ar <nl> - echo ' export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 ' > > $ HOME / ar <nl> - echo " $ HOST - ar \ " \ $ @ \ " " > > $ HOME / ar <nl> - chmod + x $ HOME / ar <nl> + # Before we tried to pass arguments to ar ( static linking ) in using QMAKE_LIB , however <nl> + # qt removes the arguments for ar and provides a script which makes it impossible to pass the determinism flag - <nl> + # so rather than try to replace ar , post - process all libraries and plugins at the end . <nl> + # <nl> # Don ' t load faketime while compiling Qt , qmake will get stuck in nearly infinite loops <nl> # export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> - export FAKETIME = $ REFERENCE_DATETIME <nl> + # export FAKETIME = $ REFERENCE_DATETIME <nl> # <nl> # Compile static libraries , and use statically linked openssl ( - openssl - linked ) : <nl> OPENSSL_LIBS = " - L $ DEPSDIR / lib - lssl - lcrypto - lgdi32 " . / configure - prefix $ INSTALLPREFIX - bindir $ INSTALLPREFIX / host / bin - confirm - license - release - opensource - static - xplatform $ SPECNAME - device - option CROSS_COMPILE = " $ HOST - " - no - audio - backend - no - javascript - jit - no - sql - sqlite - no - sql - odbc - no - nis - no - cups - no - iconv - no - dbus - no - gif - no - opengl - no - compile - examples - no - feature - style - windowsce - no - feature - style - windowsmobile - no - qml - debug - openssl - linked - skip qtsvg - skip qtwebkit - skip qtwebkit - examples - skip qtserialport - skip qtdeclarative - skip qtmultimedia - skip qtimageformats - skip qtlocation - skip qtsensors - skip qtquick1 - skip qtquickcontrols - skip qtactiveqt - skip qtconnectivity - skip qtwinextras - skip qtxmlpatterns - skip qtscript - skip qtdoc - system - libpng - system - zlib <nl> make $ MAKEOPTS install <nl> + # post - process all generated libraries and plugins to be deterministic <nl> + # extract them to a temporary directory then re - build them deterministically <nl> + for LIB in $ ( find $ INSTALLPREFIX - name * . a ) ; do <nl> + rm - rf $ TEMPDIR & & mkdir $ TEMPDIR & & cd $ TEMPDIR <nl> + $ HOST - ar xv $ LIB | cut - b5 - > / tmp / list . txt <nl> + rm $ LIB <nl> + $ HOST - ar crsD $ LIB $ ( cat / tmp / list . txt ) <nl> + done <nl> # <nl> cd $ INSTALLPREFIX <nl> - <nl> + # Remove unused non - deterministic stuff <nl> + rm host / bin / qtpaths . exe lib / libQt5Bootstrap . 
a lib / libQt5Bootstrap . la <nl> # as zip stores file timestamps , use faketime to intercept stat calls to set dates for all files to reference date <nl> export LD_PRELOAD = / usr / lib / faketime / libfaketime . so . 1 <nl> - zip - r $ OUTDIR / qt - win $ { BITS } - 5 . 2 . 0 - gitian - r2 . zip * <nl> + export FAKETIME = $ REFERENCE_DATETIME <nl> + find - print0 | xargs - r0 touch # fix up timestamps before packaging <nl> + find | sort | zip - X @ $ OUTDIR / qt - win $ { BITS } - 5 . 2 . 0 - gitian - r2 . zip <nl> unset LD_PRELOAD <nl> unset FAKETIME <nl> done # for BITS in <nl> | Merge pull request | bitcoin/bitcoin | 3e3c25dd6334f7e8172239f4154c931e5f79b99b | 2014-02-10T17:23:21Z |
mmm a / CHANGELOG . txt <nl> ppp b / CHANGELOG . txt <nl> Breaking Changes : <nl> - TreeNodeEx ( ) : The helper ImGuiTreeNodeFlags_CollapsingHeader flag now include ImGuiTreeNodeFlags_NoTreePushOnOpen . The flag was already set by CollapsingHeader ( ) . <nl> The only difference is if you were using TreeNodeEx ( ) manually with ImGuiTreeNodeFlags_CollapsingHeader and without ImGuiTreeNodeFlags_NoTreePushOnOpen . In which case <nl> you can remove the ImGuiTreeNodeFlags_NoTreePushOnOpen flag from your call ( ImGuiTreeNodeFlags_CollapsingHeader & ~ ImGuiTreeNodeFlags_NoTreePushOnOpen ) . ( # 1864 ) <nl> + - ImFontAtlas : Renamed GetGlyphRangesChinese ( ) to GetGlyphRangesChineseFull ( ) to distinguish other variants and discourage using the full set . ( # 1859 ) <nl> <nl> Other Changes : <nl> <nl> Other Changes : <nl> - BeginCombo ( ) , BeginMainMenuBar ( ) , BeginChildFrame ( ) : Temporary style modification are restored at the end of BeginXXX instead of EndXXX , to not affect tooltips and child windows . <nl> - InputTextMultiline ( ) : Fixed double navigation highlight when scrollbar is active . ( # 787 ) <nl> - InputText ( ) : Fixed Undo after pasting large amount of text ( Redo will still fail when undo buffers are exhausted , but text won ' t be corrupted ) . <nl> + - ImFontAtlas : Added GetGlyphRangesChineseSimplifiedCommon ( ) helper that returns a list of ~ 2500 most common Simplified Chinese characters . ( # 1859 ) [ @ JX - Master , @ ocornut ] <nl> - Examples : GLFW : Made it possible to Shutdown / Init the backend again ( by reseting the time storage properly ) . ( # 1827 ) [ @ ice1000 ] <nl> - Misc : Updated stb_textedit from 1 . 09 + patches to 1 . 12 + minor patches . <nl> - Internals : PushItemFlag ( ) flags are inherited by BeginChild ( ) . <nl> mmm a / imgui . cpp <nl> ppp b / imgui . cpp <nl> <nl> When you are not sure about a old symbol or function name , try using the Search / Find function of your IDE to look for comments or references in all imgui files . <nl> You can read releases logs https : / / github . com / ocornut / imgui / releases for more details . <nl> <nl> - - 2018 / 05 / 03 ( 1 . 61 ) - DragInt ( ) : The default compile - time format string has been changed from " % . 0f " to " % d " , as we are not using integers internally any more . <nl> + - 2018 / 06 / 06 ( 1 . 62 ) - renamed GetGlyphRangesChinese ( ) to GetGlyphRangesChineseFull ( ) to distinguish other variants and discourage using the full set . <nl> + - 2018 / 06 / 06 ( 1 . 62 ) - TreeNodeEx ( ) : the ImGuiTreeNodeFlags_CollapsingHeader helper now include the ImGuiTreeNodeFlags_NoTreePushOnOpen flag . See Changelog for details . <nl> + - 2018 / 05 / 03 ( 1 . 61 ) - DragInt ( ) : the default compile - time format string has been changed from " % . 0f " to " % d " , as we are not using integers internally any more . <nl> If you used DragInt ( ) with custom format strings , make sure you change them to use % d or an integer - compatible format . <nl> To honor backward - compatibility , the DragInt ( ) code will currently parse and modify format strings to replace % * f with % d , giving time to users to upgrade their code . <nl> If you have IMGUI_DISABLE_OBSOLETE_FUNCTIONS enabled , the code will instead assert ! You may run a reg - exp search on your codebase for e . g . " DragInt . * % f " to help you find them . <nl> mmm a / imgui . h <nl> ppp b / imgui . 
h <nl> struct ImFontAtlas <nl> <nl> / / Helpers to retrieve list of common Unicode ranges ( 2 value per range , values are inclusive , zero - terminated list ) <nl> / / NB : Make sure that your string are UTF - 8 and NOT in your local code page . In C + + 11 , you can create UTF - 8 string literal using the u8 " Hello world " syntax . See FAQ for details . <nl> - IMGUI_API const ImWchar * GetGlyphRangesDefault ( ) ; / / Basic Latin , Extended Latin <nl> - IMGUI_API const ImWchar * GetGlyphRangesKorean ( ) ; / / Default + Korean characters <nl> - IMGUI_API const ImWchar * GetGlyphRangesJapanese ( ) ; / / Default + Hiragana , Katakana , Half - Width , Selection of 1946 Ideographs <nl> - IMGUI_API const ImWchar * GetGlyphRangesChinese ( ) ; / / Default + Japanese + full set of about 21000 CJK Unified Ideographs <nl> - IMGUI_API const ImWchar * GetGlyphRangesCyrillic ( ) ; / / Default + about 400 Cyrillic characters <nl> - IMGUI_API const ImWchar * GetGlyphRangesThai ( ) ; / / Default + Thai characters <nl> + / / NB : Consider using GlyphRangesBuilder to build glyph ranges from textual data . <nl> + IMGUI_API const ImWchar * GetGlyphRangesDefault ( ) ; / / Basic Latin , Extended Latin <nl> + IMGUI_API const ImWchar * GetGlyphRangesKorean ( ) ; / / Default + Korean characters <nl> + IMGUI_API const ImWchar * GetGlyphRangesJapanese ( ) ; / / Default + Hiragana , Katakana , Half - Width , Selection of 1946 Ideographs <nl> + IMGUI_API const ImWchar * GetGlyphRangesChineseFull ( ) ; / / Default + Half - Width + Japanese Hiragana / Katakana + full set of about 21000 CJK Unified Ideographs <nl> + IMGUI_API const ImWchar * GetGlyphRangesChineseSimplifiedCommon ( ) ; / / Default + Half - Width + Japanese Hiragana / Katakana + set of 2500 CJK Unified Ideographs for common simplified Chinese <nl> + IMGUI_API const ImWchar * GetGlyphRangesCyrillic ( ) ; / / Default + about 400 Cyrillic characters <nl> + IMGUI_API const ImWchar * GetGlyphRangesThai ( ) ; / / Default + Thai characters <nl> <nl> / / Helpers to build glyph ranges from text data . Feed your application strings / characters to it then call BuildRanges ( ) . <nl> struct GlyphRangesBuilder <nl> { <nl> ImVector < unsigned char > UsedChars ; / / Store 1 - bit per Unicode code point ( 0 = unused , 1 = used ) <nl> GlyphRangesBuilder ( ) { UsedChars . resize ( 0x10000 / 8 ) ; memset ( UsedChars . Data , 0 , 0x10000 / 8 ) ; } <nl> - bool GetBit ( int n ) { return ( UsedChars [ n > > 3 ] & ( 1 < < ( n & 7 ) ) ) ! = 0 ; } <nl> + bool GetBit ( int n ) const { return ( UsedChars [ n > > 3 ] & ( 1 < < ( n & 7 ) ) ) ! = 0 ; } <nl> void SetBit ( int n ) { UsedChars [ n > > 3 ] | = 1 < < ( n & 7 ) ; } / / Set bit ' c ' in the array <nl> void AddChar ( ImWchar c ) { SetBit ( c ) ; } / / Add character <nl> IMGUI_API void AddText ( const char * text , const char * text_end = NULL ) ; / / Add string ( each character of the UTF - 8 string are added ) <nl> mmm a / imgui_draw . cpp <nl> ppp b / imgui_draw . 
cpp <nl> const ImWchar * ImFontAtlas : : GetGlyphRangesKorean ( ) <nl> return & ranges [ 0 ] ; <nl> } <nl> <nl> - const ImWchar * ImFontAtlas : : GetGlyphRangesChinese ( ) <nl> + const ImWchar * ImFontAtlas : : GetGlyphRangesChineseFull ( ) <nl> { <nl> static const ImWchar ranges [ ] = <nl> { <nl> const ImWchar * ImFontAtlas : : GetGlyphRangesChinese ( ) <nl> return & ranges [ 0 ] ; <nl> } <nl> <nl> + static void UnpackAccumulativeOffsetsIntoRanges ( int base_codepoint , const short * accumulative_offsets , int accumulative_offsets_count , ImWchar * out_ranges ) <nl> + { <nl> + for ( int n = 0 ; n < accumulative_offsets_count ; n + + , out_ranges + = 2 ) <nl> + { <nl> + out_ranges [ 0 ] = out_ranges [ 1 ] = ( ImWchar ) ( base_codepoint + accumulative_offsets [ n ] ) ; <nl> + base_codepoint + = accumulative_offsets [ n ] ; <nl> + } <nl> + out_ranges [ 0 ] = 0 ; <nl> + } <nl> + <nl> + const ImWchar * ImFontAtlas : : GetGlyphRangesChineseSimplifiedCommon ( ) <nl> + { <nl> + / / Store 2500 regularly used characters for Simplified Chinese . <nl> + / / Sourced from https : / / zh . wiktionary . org / wiki / % E9 % 99 % 84 % E5 % BD % 95 : % E7 % 8E % B0 % E4 % BB % A3 % E6 % B1 % 89 % E8 % AF % AD % E5 % B8 % B8 % E7 % 94 % A8 % E5 % AD % 97 % E8 % A1 % A8 <nl> + / / This table covers 97 . 97 % of all characters used during the month in July , 1987 . <nl> + / / You can use ImFontAtlas : : GlyphRangesBuilder to create your own ranges derived from this , by merging existing ranges or adding new characters . <nl> + / / ( Stored as accumulative offsets from the initial unicode codepoint 0x4E00 . This encoding is designed to helps us compact the source code size . ) <nl> + static const short accumulative_offsets_from_0x4E00 [ ] = <nl> + { <nl> + 0 , 1 , 2 , 4 , 1 , 1 , 1 , 1 , 2 , 1 , 3 , 2 , 1 , 2 , 2 , 1 , 1 , 1 , 1 , 1 , 5 , 2 , 1 , 2 , 3 , 3 , 3 , 2 , 2 , 4 , 1 , 1 , 1 , 2 , 1 , 5 , 2 , 3 , 1 , 2 , 1 , 2 , 1 , 1 , 2 , 1 , 1 , 2 , 2 , 1 , 4 , 1 , 1 , 1 , 1 , 5 , 10 , 1 , 2 , 19 , 2 , 1 , 2 , 1 , 2 , 1 , 2 , 1 , 2 , <nl> + 1 , 5 , 1 , 6 , 3 , 2 , 1 , 2 , 2 , 1 , 1 , 1 , 4 , 8 , 5 , 1 , 1 , 4 , 1 , 1 , 3 , 1 , 2 , 1 , 5 , 1 , 2 , 1 , 1 , 1 , 10 , 1 , 1 , 5 , 2 , 4 , 6 , 1 , 4 , 2 , 2 , 2 , 12 , 2 , 1 , 1 , 6 , 1 , 1 , 1 , 4 , 1 , 1 , 4 , 6 , 5 , 1 , 4 , 2 , 2 , 4 , 10 , 7 , 1 , 1 , 4 , 2 , 4 , <nl> + 2 , 1 , 4 , 3 , 6 , 10 , 12 , 5 , 7 , 2 , 14 , 2 , 9 , 1 , 1 , 6 , 7 , 10 , 4 , 7 , 13 , 1 , 5 , 4 , 8 , 4 , 1 , 1 , 2 , 28 , 5 , 6 , 1 , 1 , 5 , 2 , 5 , 20 , 2 , 2 , 9 , 8 , 11 , 2 , 9 , 17 , 1 , 8 , 6 , 8 , 27 , 4 , 6 , 9 , 20 , 11 , 27 , 6 , 68 , 2 , 2 , 1 , 1 , <nl> + 1 , 2 , 1 , 2 , 2 , 7 , 6 , 11 , 3 , 3 , 1 , 1 , 3 , 1 , 2 , 1 , 1 , 1 , 1 , 1 , 3 , 1 , 1 , 8 , 3 , 4 , 1 , 5 , 7 , 2 , 1 , 4 , 4 , 8 , 4 , 2 , 1 , 2 , 1 , 1 , 4 , 5 , 6 , 3 , 6 , 2 , 12 , 3 , 1 , 3 , 9 , 2 , 4 , 3 , 4 , 1 , 5 , 3 , 3 , 1 , 3 , 7 , 1 , 5 , 1 , 1 , 1 , 1 , 2 , <nl> + 3 , 4 , 5 , 2 , 3 , 2 , 6 , 1 , 1 , 2 , 1 , 7 , 1 , 7 , 3 , 4 , 5 , 15 , 2 , 2 , 1 , 5 , 3 , 22 , 19 , 2 , 1 , 1 , 1 , 1 , 2 , 5 , 1 , 1 , 1 , 6 , 1 , 1 , 12 , 8 , 2 , 9 , 18 , 22 , 4 , 1 , 1 , 5 , 1 , 16 , 1 , 2 , 7 , 10 , 15 , 1 , 1 , 6 , 2 , 4 , 1 , 2 , 4 , 1 , 6 , <nl> + 1 , 1 , 3 , 2 , 4 , 1 , 6 , 4 , 5 , 1 , 2 , 1 , 1 , 2 , 1 , 10 , 3 , 1 , 3 , 2 , 1 , 9 , 3 , 2 , 5 , 7 , 2 , 19 , 4 , 3 , 6 , 1 , 1 , 1 , 1 , 1 , 4 , 3 , 2 , 1 , 1 , 1 , 2 , 5 , 3 , 1 , 1 , 1 , 2 , 2 , 1 , 1 , 2 , 1 , 1 , 2 , 1 , 3 , 1 , 1 , 1 , 3 , 7 , 1 , 4 , 1 , 1 , 2 , 1 , <nl> + 1 , 2 , 1 , 2 , 4 , 4 , 3 , 8 , 1 , 1 , 1 , 2 , 1 , 3 , 5 , 1 , 3 , 1 , 3 , 4 , 6 , 2 , 2 , 14 , 4 , 6 
, 6 , 11 , 9 , 1 , 15 , 3 , 1 , 28 , 5 , 2 , 5 , 5 , 3 , 1 , 3 , 4 , 5 , 4 , 6 , 14 , 3 , 2 , 3 , 5 , 21 , 2 , 7 , 20 , 10 , 1 , 2 , 19 , 2 , 4 , 28 , 28 , 2 , 3 , <nl> + 2 , 1 , 14 , 4 , 1 , 26 , 28 , 42 , 12 , 40 , 3 , 52 , 79 , 5 , 14 , 17 , 3 , 2 , 2 , 11 , 3 , 4 , 6 , 3 , 1 , 8 , 2 , 23 , 4 , 5 , 8 , 10 , 4 , 2 , 7 , 3 , 5 , 1 , 1 , 6 , 3 , 1 , 2 , 2 , 2 , 5 , 28 , 1 , 1 , 7 , 7 , 20 , 5 , 3 , 29 , 3 , 17 , 26 , 1 , 8 , 4 , <nl> + 27 , 3 , 6 , 11 , 23 , 5 , 3 , 4 , 6 , 13 , 24 , 16 , 6 , 5 , 10 , 25 , 35 , 7 , 3 , 2 , 3 , 3 , 14 , 3 , 6 , 2 , 6 , 1 , 4 , 2 , 3 , 8 , 2 , 1 , 1 , 3 , 3 , 3 , 4 , 1 , 1 , 13 , 2 , 2 , 4 , 5 , 2 , 1 , 14 , 14 , 1 , 2 , 2 , 1 , 4 , 5 , 2 , 3 , 1 , 14 , 3 , 12 , <nl> + 3 , 17 , 2 , 16 , 5 , 1 , 2 , 1 , 8 , 9 , 3 , 19 , 4 , 2 , 2 , 4 , 17 , 25 , 21 , 20 , 28 , 75 , 1 , 10 , 29 , 103 , 4 , 1 , 2 , 1 , 1 , 4 , 2 , 4 , 1 , 2 , 3 , 24 , 2 , 2 , 2 , 1 , 1 , 2 , 1 , 3 , 8 , 1 , 1 , 1 , 2 , 1 , 1 , 3 , 1 , 1 , 1 , 6 , 1 , 5 , 3 , 1 , 1 , <nl> + 1 , 3 , 4 , 1 , 1 , 5 , 2 , 1 , 5 , 6 , 13 , 9 , 16 , 1 , 1 , 1 , 1 , 3 , 2 , 3 , 2 , 4 , 5 , 2 , 5 , 2 , 2 , 3 , 7 , 13 , 7 , 2 , 2 , 1 , 1 , 1 , 1 , 2 , 3 , 3 , 2 , 1 , 6 , 4 , 9 , 2 , 1 , 14 , 2 , 14 , 2 , 1 , 18 , 3 , 4 , 14 , 4 , 11 , 41 , 15 , 23 , 15 , 23 , <nl> + 176 , 1 , 3 , 4 , 1 , 1 , 1 , 1 , 5 , 3 , 1 , 2 , 3 , 7 , 3 , 1 , 1 , 2 , 1 , 2 , 4 , 4 , 6 , 2 , 4 , 1 , 9 , 7 , 1 , 10 , 5 , 8 , 16 , 29 , 1 , 1 , 2 , 2 , 3 , 1 , 3 , 5 , 2 , 4 , 5 , 4 , 1 , 1 , 2 , 2 , 3 , 3 , 7 , 1 , 6 , 10 , 1 , 17 , 1 , 44 , 4 , 6 , 2 , 1 , 1 , 6 , <nl> + 5 , 4 , 2 , 10 , 1 , 6 , 9 , 2 , 8 , 1 , 24 , 1 , 2 , 13 , 7 , 8 , 8 , 2 , 1 , 4 , 1 , 3 , 1 , 3 , 3 , 5 , 2 , 5 , 10 , 9 , 4 , 9 , 12 , 2 , 1 , 6 , 1 , 10 , 1 , 1 , 7 , 7 , 4 , 10 , 8 , 3 , 1 , 13 , 4 , 3 , 1 , 6 , 1 , 3 , 5 , 2 , 1 , 2 , 17 , 16 , 5 , 2 , 16 , 6 , <nl> + 1 , 4 , 2 , 1 , 3 , 3 , 6 , 8 , 5 , 11 , 11 , 1 , 3 , 3 , 2 , 4 , 6 , 10 , 9 , 5 , 7 , 4 , 7 , 4 , 7 , 1 , 1 , 4 , 2 , 1 , 3 , 6 , 8 , 7 , 1 , 6 , 11 , 5 , 5 , 3 , 24 , 9 , 4 , 2 , 7 , 13 , 5 , 1 , 8 , 82 , 16 , 61 , 1 , 1 , 1 , 4 , 2 , 2 , 16 , 10 , 3 , 8 , 1 , 1 , <nl> + 6 , 4 , 2 , 1 , 3 , 1 , 1 , 1 , 4 , 3 , 8 , 4 , 2 , 2 , 1 , 1 , 1 , 1 , 1 , 6 , 3 , 5 , 1 , 1 , 4 , 6 , 9 , 2 , 1 , 1 , 1 , 2 , 1 , 7 , 2 , 1 , 6 , 1 , 5 , 4 , 4 , 3 , 1 , 8 , 1 , 3 , 3 , 1 , 3 , 2 , 2 , 2 , 2 , 3 , 1 , 6 , 1 , 2 , 1 , 2 , 1 , 3 , 7 , 1 , 8 , 2 , 1 , 2 , 1 , 5 , <nl> + 2 , 5 , 3 , 5 , 10 , 1 , 2 , 1 , 1 , 3 , 2 , 5 , 11 , 3 , 9 , 3 , 5 , 1 , 1 , 5 , 9 , 1 , 2 , 1 , 5 , 7 , 9 , 9 , 8 , 1 , 3 , 3 , 3 , 6 , 8 , 2 , 3 , 2 , 1 , 1 , 32 , 6 , 1 , 2 , 15 , 9 , 3 , 7 , 13 , 1 , 3 , 10 , 13 , 2 , 14 , 1 , 13 , 10 , 2 , 1 , 3 , 10 , 4 , 15 , <nl> + 2 , 15 , 15 , 10 , 1 , 3 , 9 , 6 , 9 , 32 , 25 , 26 , 47 , 7 , 3 , 2 , 3 , 1 , 6 , 3 , 4 , 3 , 2 , 8 , 5 , 4 , 1 , 9 , 4 , 2 , 2 , 19 , 10 , 6 , 2 , 3 , 8 , 1 , 2 , 2 , 4 , 2 , 1 , 9 , 4 , 4 , 4 , 6 , 4 , 8 , 9 , 2 , 3 , 1 , 1 , 1 , 1 , 3 , 5 , 5 , 1 , 3 , 8 , 4 , 6 , <nl> + 2 , 1 , 4 , 12 , 1 , 5 , 3 , 7 , 13 , 2 , 5 , 8 , 1 , 6 , 1 , 2 , 5 , 14 , 6 , 1 , 5 , 2 , 4 , 8 , 15 , 5 , 1 , 23 , 6 , 62 , 2 , 10 , 1 , 1 , 8 , 1 , 2 , 2 , 10 , 4 , 2 , 2 , 9 , 2 , 1 , 1 , 3 , 2 , 3 , 1 , 5 , 3 , 3 , 2 , 1 , 3 , 8 , 1 , 1 , 1 , 11 , 3 , 1 , 1 , 4 , <nl> + 3 , 7 , 1 , 14 , 1 , 2 , 3 , 12 , 5 , 2 , 5 , 1 , 6 , 7 , 5 , 7 , 14 , 11 , 1 , 3 , 1 , 8 , 9 , 12 , 2 , 1 , 11 , 8 , 4 , 4 , 2 , 6 , 10 , 9 , 13 , 1 , 1 , 3 , 1 , 5 , 1 , 3 , 2 , 4 , 4 , 1 , 18 , 2 , 3 , 14 , 11 , 4 , 29 , 4 , 2 , 7 , 1 , 3 , 13 , 9 , 2 , 2 , 5 , <nl> + 3 , 5 , 20 , 7 , 16 , 8 , 5 , 72 , 34 , 6 , 4 , 22 , 12 , 12 , 28 , 45 , 36 , 9 , 7 , 
39 , 9 , 191 , 1 , 1 , 1 , 4 , 11 , 8 , 4 , 9 , 2 , 3 , 22 , 1 , 1 , 1 , 1 , 4 , 17 , 1 , 7 , 7 , 1 , 11 , 31 , 10 , 2 , 4 , 8 , 2 , 3 , 2 , 1 , 4 , 2 , 16 , 4 , 32 , 2 , <nl> + 3 , 19 , 13 , 4 , 9 , 1 , 5 , 2 , 14 , 8 , 1 , 1 , 3 , 6 , 19 , 6 , 5 , 1 , 16 , 6 , 2 , 10 , 8 , 5 , 1 , 2 , 3 , 1 , 5 , 5 , 1 , 11 , 6 , 6 , 1 , 3 , 3 , 2 , 6 , 3 , 8 , 1 , 1 , 4 , 10 , 7 , 5 , 7 , 7 , 5 , 8 , 9 , 2 , 1 , 3 , 4 , 1 , 1 , 3 , 1 , 3 , 3 , 2 , 6 , 16 , <nl> + 1 , 4 , 6 , 3 , 1 , 10 , 6 , 1 , 3 , 15 , 2 , 9 , 2 , 10 , 25 , 13 , 9 , 16 , 6 , 2 , 2 , 10 , 11 , 4 , 3 , 9 , 1 , 2 , 6 , 6 , 5 , 4 , 30 , 40 , 1 , 10 , 7 , 12 , 14 , 33 , 6 , 3 , 6 , 7 , 3 , 1 , 3 , 1 , 11 , 14 , 4 , 9 , 5 , 12 , 11 , 49 , 18 , 51 , 31 , <nl> + 140 , 31 , 2 , 2 , 1 , 5 , 1 , 8 , 1 , 10 , 1 , 4 , 4 , 3 , 24 , 1 , 10 , 1 , 3 , 6 , 6 , 16 , 3 , 4 , 5 , 2 , 1 , 4 , 2 , 57 , 10 , 6 , 22 , 2 , 22 , 3 , 7 , 22 , 6 , 10 , 11 , 36 , 18 , 16 , 33 , 36 , 2 , 5 , 5 , 1 , 1 , 1 , 4 , 10 , 1 , 4 , 13 , 2 , 7 , <nl> + 5 , 2 , 9 , 3 , 4 , 1 , 7 , 43 , 3 , 7 , 3 , 9 , 14 , 7 , 9 , 1 , 11 , 1 , 1 , 3 , 7 , 4 , 18 , 13 , 1 , 14 , 1 , 3 , 6 , 10 , 73 , 2 , 2 , 30 , 6 , 1 , 11 , 18 , 19 , 13 , 22 , 3 , 46 , 42 , 37 , 89 , 7 , 3 , 16 , 34 , 2 , 2 , 3 , 9 , 1 , 7 , 1 , 1 , 1 , 2 , <nl> + 2 , 4 , 10 , 7 , 3 , 10 , 3 , 9 , 5 , 28 , 9 , 2 , 6 , 13 , 7 , 3 , 1 , 3 , 10 , 2 , 7 , 2 , 11 , 3 , 6 , 21 , 54 , 85 , 2 , 1 , 4 , 2 , 2 , 1 , 39 , 3 , 21 , 2 , 2 , 5 , 1 , 1 , 1 , 4 , 1 , 1 , 3 , 4 , 15 , 1 , 3 , 2 , 4 , 4 , 2 , 3 , 8 , 2 , 20 , 1 , 8 , 7 , 13 , <nl> + 4 , 1 , 26 , 6 , 2 , 9 , 34 , 4 , 21 , 52 , 10 , 4 , 4 , 1 , 5 , 12 , 2 , 11 , 1 , 7 , 2 , 30 , 12 , 44 , 2 , 30 , 1 , 1 , 3 , 6 , 16 , 9 , 17 , 39 , 82 , 2 , 2 , 24 , 7 , 1 , 7 , 3 , 16 , 9 , 14 , 44 , 2 , 1 , 2 , 1 , 2 , 3 , 5 , 2 , 4 , 1 , 6 , 7 , 5 , 3 , <nl> + 2 , 6 , 1 , 11 , 5 , 11 , 2 , 1 , 18 , 19 , 8 , 1 , 3 , 24 , 29 , 2 , 1 , 3 , 5 , 2 , 2 , 1 , 13 , 6 , 5 , 1 , 46 , 11 , 3 , 5 , 1 , 1 , 5 , 8 , 2 , 10 , 6 , 12 , 6 , 3 , 7 , 11 , 2 , 4 , 16 , 13 , 2 , 5 , 1 , 1 , 2 , 2 , 5 , 2 , 28 , 5 , 2 , 23 , 10 , 8 , 4 , <nl> + 4 , 22 , 39 , 95 , 38 , 8 , 14 , 9 , 5 , 1 , 13 , 5 , 4 , 3 , 13 , 12 , 11 , 1 , 9 , 1 , 27 , 37 , 2 , 5 , 4 , 4 , 63 , 211 , 95 , 2 , 2 , 2 , 1 , 3 , 5 , 2 , 1 , 1 , 2 , 2 , 1 , 1 , 1 , 3 , 2 , 4 , 1 , 2 , 1 , 1 , 5 , 2 , 2 , 1 , 1 , 2 , 3 , 1 , 3 , 1 , 1 , 1 , <nl> + 3 , 1 , 4 , 2 , 1 , 3 , 6 , 1 , 1 , 3 , 7 , 15 , 5 , 3 , 2 , 5 , 3 , 9 , 11 , 4 , 2 , 22 , 1 , 6 , 3 , 8 , 7 , 1 , 4 , 28 , 4 , 16 , 3 , 3 , 25 , 4 , 4 , 27 , 27 , 1 , 4 , 1 , 2 , 2 , 7 , 1 , 3 , 5 , 2 , 28 , 8 , 2 , 14 , 1 , 8 , 6 , 16 , 25 , 3 , 3 , 3 , 14 , 3 , <nl> + 3 , 1 , 1 , 2 , 1 , 4 , 6 , 3 , 8 , 4 , 1 , 1 , 1 , 2 , 3 , 6 , 10 , 6 , 2 , 3 , 18 , 3 , 2 , 5 , 5 , 4 , 3 , 1 , 5 , 2 , 5 , 4 , 23 , 7 , 6 , 12 , 6 , 4 , 17 , 11 , 9 , 5 , 1 , 1 , 10 , 5 , 12 , 1 , 1 , 11 , 26 , 33 , 7 , 3 , 6 , 1 , 17 , 7 , 1 , 5 , 12 , 1 , 11 , <nl> + 2 , 4 , 1 , 8 , 14 , 17 , 23 , 1 , 2 , 1 , 7 , 8 , 16 , 11 , 9 , 6 , 5 , 2 , 6 , 4 , 16 , 2 , 8 , 14 , 1 , 11 , 8 , 9 , 1 , 1 , 1 , 9 , 25 , 4 , 11 , 19 , 7 , 2 , 15 , 2 , 12 , 8 , 52 , 7 , 5 , 19 , 2 , 16 , 4 , 36 , 8 , 1 , 16 , 8 , 24 , 26 , 4 , 6 , 2 , 9 , <nl> + 5 , 4 , 36 , 3 , 28 , 12 , 25 , 15 , 37 , 27 , 17 , 12 , 59 , 38 , 5 , 32 , 127 , 1 , 2 , 9 , 17 , 14 , 4 , 1 , 2 , 1 , 1 , 8 , 11 , 50 , 4 , 14 , 2 , 19 , 16 , 4 , 17 , 5 , 4 , 5 , 26 , 12 , 45 , 2 , 23 , 45 , 104 , 30 , 12 , 8 , 3 , 10 , 2 , 2 , <nl> + 3 , 3 , 1 , 4 , 20 , 7 , 2 , 9 , 6 , 15 , 2 , 20 , 1 , 3 , 16 , 4 , 11 , 15 , 6 , 134 , 2 , 5 , 59 , 1 , 2 , 2 , 2 , 1 , 9 , 17 , 3 , 26 , 137 , 10 , 211 , 59 , 
1 , 2 , 4 , 1 , 4 , 1 , 1 , 1 , 2 , 6 , 2 , 3 , 1 , 1 , 2 , 3 , 2 , 3 , 1 , 3 , 4 , 4 , 2 , 3 , 3 , <nl> + 1 , 4 , 3 , 1 , 7 , 2 , 2 , 3 , 1 , 2 , 1 , 3 , 3 , 3 , 2 , 2 , 3 , 2 , 1 , 3 , 14 , 6 , 1 , 3 , 2 , 9 , 6 , 15 , 27 , 9 , 34 , 145 , 1 , 1 , 2 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 1 , 2 , 2 , 2 , 3 , 1 , 2 , 1 , 1 , 1 , 2 , 3 , 5 , 8 , 3 , 5 , 2 , 4 , 1 , 3 , 2 , 2 , 2 , 12 , <nl> + 4 , 1 , 1 , 1 , 10 , 4 , 5 , 1 , 20 , 4 , 16 , 1 , 15 , 9 , 5 , 12 , 2 , 9 , 2 , 5 , 4 , 2 , 26 , 19 , 7 , 1 , 26 , 4 , 30 , 12 , 15 , 42 , 1 , 6 , 8 , 172 , 1 , 1 , 4 , 2 , 1 , 1 , 11 , 2 , 2 , 4 , 2 , 1 , 2 , 1 , 10 , 8 , 1 , 2 , 1 , 4 , 5 , 1 , 2 , 5 , 1 , 8 , <nl> + 4 , 1 , 3 , 4 , 2 , 1 , 6 , 2 , 1 , 3 , 4 , 1 , 2 , 1 , 1 , 1 , 1 , 12 , 5 , 7 , 2 , 4 , 3 , 1 , 1 , 1 , 3 , 3 , 6 , 1 , 2 , 2 , 3 , 3 , 3 , 2 , 1 , 2 , 12 , 14 , 11 , 6 , 6 , 4 , 12 , 2 , 8 , 1 , 7 , 10 , 1 , 35 , 7 , 4 , 13 , 15 , 4 , 3 , 23 , 21 , 28 , 52 , 5 , <nl> + 26 , 5 , 6 , 1 , 7 , 10 , 2 , 7 , 53 , 3 , 2 , 1 , 1 , 1 , 2 , 163 , 532 , 1 , 10 , 11 , 1 , 3 , 3 , 4 , 8 , 2 , 8 , 6 , 2 , 2 , 23 , 22 , 4 , 2 , 2 , 4 , 2 , 1 , 3 , 1 , 3 , 3 , 5 , 9 , 8 , 2 , 1 , 2 , 8 , 1 , 10 , 2 , 12 , 21 , 20 , 15 , 105 , 2 , 3 , 1 , 1 , <nl> + 3 , 2 , 3 , 1 , 1 , 2 , 5 , 1 , 4 , 15 , 11 , 19 , 1 , 1 , 1 , 1 , 5 , 4 , 5 , 1 , 1 , 2 , 5 , 3 , 5 , 12 , 1 , 2 , 5 , 1 , 11 , 1 , 1 , 15 , 9 , 1 , 4 , 5 , 3 , 26 , 8 , 2 , 1 , 3 , 1 , 1 , 15 , 19 , 2 , 12 , 1 , 2 , 5 , 2 , 7 , 2 , 19 , 2 , 20 , 6 , 26 , 7 , 5 , <nl> + 2 , 2 , 7 , 34 , 21 , 13 , 70 , 2 , 128 , 1 , 1 , 2 , 1 , 1 , 2 , 1 , 1 , 3 , 2 , 2 , 2 , 15 , 1 , 4 , 1 , 3 , 4 , 42 , 10 , 6 , 1 , 49 , 85 , 8 , 1 , 2 , 1 , 1 , 4 , 4 , 2 , 3 , 6 , 1 , 5 , 7 , 4 , 3 , 211 , 4 , 1 , 2 , 1 , 2 , 5 , 1 , 2 , 4 , 2 , 2 , 6 , 5 , 6 , <nl> + 10 , 3 , 4 , 48 , 100 , 6 , 2 , 16 , 296 , 5 , 27 , 387 , 2 , 2 , 3 , 7 , 16 , 8 , 5 , 38 , 15 , 39 , 21 , 9 , 10 , 3 , 7 , 59 , 13 , 27 , 21 , 47 , 5 , 21 , 6 <nl> + } ; <nl> + static ImWchar base_ranges [ ] = / / not zero - terminated <nl> + { <nl> + 0x0020 , 0x00FF , / / Basic Latin + Latin Supplement <nl> + 0x3000 , 0x30FF , / / Punctuations , Hiragana , Katakana <nl> + 0x31F0 , 0x31FF , / / Katakana Phonetic Extensions <nl> + 0xFF00 , 0xFFEF , / / Half - width characters <nl> + } ; <nl> + static ImWchar full_ranges [ IM_ARRAYSIZE ( base_ranges ) + IM_ARRAYSIZE ( accumulative_offsets_from_0x4E00 ) * 2 + 1 ] = { 0 } ; <nl> + if ( ! full_ranges [ 0 ] ) <nl> + { <nl> + memcpy ( full_ranges , base_ranges , sizeof ( base_ranges ) ) ; <nl> + UnpackAccumulativeOffsetsIntoRanges ( 0x4E00 , accumulative_offsets_from_0x4E00 , IM_ARRAYSIZE ( accumulative_offsets_from_0x4E00 ) , full_ranges + IM_ARRAYSIZE ( base_ranges ) ) ; <nl> + } <nl> + return & full_ranges [ 0 ] ; <nl> + } <nl> + <nl> const ImWchar * ImFontAtlas : : GetGlyphRangesJapanese ( ) <nl> { <nl> - / / Store the 1946 ideograms code points as successive offsets from the initial unicode codepoint 0x4E00 . Each offset has an implicit + 1 . <nl> - / / This encoding is designed to helps us reduce the source code size . <nl> - / / FIXME : Source a list of the revised 2136 joyo kanji list from 2010 and rebuild this . <nl> - / / The current list was sourced from http : / / theinstructionlimit . com / author / renaudbedardrenaudbedard / page / 3 <nl> - / / Note that you may use ImFontAtlas : : GlyphRangesBuilder to create your own ranges , by merging existing ranges or adding new characters . 
<nl> - static const short offsets_from_0x4E00 [ ] = <nl> - { <nl> - - 1 , 0 , 1 , 3 , 0 , 0 , 0 , 0 , 1 , 0 , 5 , 1 , 1 , 0 , 7 , 4 , 6 , 10 , 0 , 1 , 9 , 9 , 7 , 1 , 3 , 19 , 1 , 10 , 7 , 1 , 0 , 1 , 0 , 5 , 1 , 0 , 6 , 4 , 2 , 6 , 0 , 0 , 12 , 6 , 8 , 0 , 3 , 5 , 0 , 1 , 0 , 9 , 0 , 0 , 8 , 1 , 1 , 3 , 4 , 5 , 13 , 0 , 0 , 8 , 2 , 17 , <nl> - 4 , 3 , 1 , 1 , 9 , 6 , 0 , 0 , 0 , 2 , 1 , 3 , 2 , 22 , 1 , 9 , 11 , 1 , 13 , 1 , 3 , 12 , 0 , 5 , 9 , 2 , 0 , 6 , 12 , 5 , 3 , 12 , 4 , 1 , 2 , 16 , 1 , 1 , 4 , 6 , 5 , 3 , 0 , 6 , 13 , 15 , 5 , 12 , 8 , 14 , 0 , 0 , 6 , 15 , 3 , 6 , 0 , 18 , 8 , 1 , 6 , 14 , 1 , <nl> - 5 , 4 , 12 , 24 , 3 , 13 , 12 , 10 , 24 , 0 , 0 , 0 , 1 , 0 , 1 , 1 , 2 , 9 , 10 , 2 , 2 , 0 , 0 , 3 , 3 , 1 , 0 , 3 , 8 , 0 , 3 , 2 , 4 , 4 , 1 , 6 , 11 , 10 , 14 , 6 , 15 , 3 , 4 , 15 , 1 , 0 , 0 , 5 , 2 , 2 , 0 , 0 , 1 , 6 , 5 , 5 , 6 , 0 , 3 , 6 , 5 , 0 , 0 , 1 , 0 , <nl> - 11 , 2 , 2 , 8 , 4 , 7 , 0 , 10 , 0 , 1 , 2 , 17 , 19 , 3 , 0 , 2 , 5 , 0 , 6 , 2 , 4 , 4 , 6 , 1 , 1 , 11 , 2 , 0 , 3 , 1 , 2 , 1 , 2 , 10 , 7 , 6 , 3 , 16 , 0 , 8 , 24 , 0 , 0 , 3 , 1 , 1 , 3 , 0 , 1 , 6 , 0 , 0 , 0 , 2 , 0 , 1 , 5 , 15 , 0 , 1 , 0 , 0 , 2 , 11 , 19 , <nl> - 1 , 4 , 19 , 7 , 6 , 5 , 1 , 0 , 0 , 0 , 0 , 5 , 1 , 0 , 1 , 9 , 0 , 0 , 5 , 0 , 2 , 0 , 1 , 0 , 3 , 0 , 11 , 3 , 0 , 2 , 0 , 0 , 0 , 0 , 0 , 9 , 3 , 6 , 4 , 12 , 0 , 14 , 0 , 0 , 29 , 10 , 8 , 0 , 14 , 37 , 13 , 0 , 31 , 16 , 19 , 0 , 8 , 30 , 1 , 20 , 8 , 3 , 48 , <nl> - 21 , 1 , 0 , 12 , 0 , 10 , 44 , 34 , 42 , 54 , 11 , 18 , 82 , 0 , 2 , 1 , 2 , 12 , 1 , 0 , 6 , 2 , 17 , 2 , 12 , 7 , 0 , 7 , 17 , 4 , 2 , 6 , 24 , 23 , 8 , 23 , 39 , 2 , 16 , 23 , 1 , 0 , 5 , 1 , 2 , 15 , 14 , 5 , 6 , 2 , 11 , 0 , 8 , 6 , 2 , 2 , 2 , 14 , <nl> - 20 , 4 , 15 , 3 , 4 , 11 , 10 , 10 , 2 , 5 , 2 , 1 , 30 , 2 , 1 , 0 , 0 , 22 , 5 , 5 , 0 , 3 , 1 , 5 , 4 , 1 , 0 , 0 , 2 , 2 , 21 , 1 , 5 , 1 , 2 , 16 , 2 , 1 , 3 , 4 , 0 , 8 , 4 , 0 , 0 , 5 , 14 , 11 , 2 , 16 , 1 , 13 , 1 , 7 , 0 , 22 , 15 , 3 , 1 , 22 , 7 , 14 , <nl> - 22 , 19 , 11 , 24 , 18 , 46 , 10 , 20 , 64 , 45 , 3 , 2 , 0 , 4 , 5 , 0 , 1 , 4 , 25 , 1 , 0 , 0 , 2 , 10 , 0 , 0 , 0 , 1 , 0 , 1 , 2 , 0 , 0 , 9 , 1 , 2 , 0 , 0 , 0 , 2 , 5 , 2 , 1 , 1 , 5 , 5 , 8 , 1 , 1 , 1 , 5 , 1 , 4 , 9 , 1 , 3 , 0 , 1 , 0 , 1 , 1 , 2 , 0 , 0 , <nl> - 2 , 0 , 1 , 8 , 22 , 8 , 1 , 0 , 0 , 0 , 0 , 4 , 2 , 1 , 0 , 9 , 8 , 5 , 0 , 9 , 1 , 30 , 24 , 2 , 6 , 4 , 39 , 0 , 14 , 5 , 16 , 6 , 26 , 179 , 0 , 2 , 1 , 1 , 0 , 0 , 0 , 5 , 2 , 9 , 6 , 0 , 2 , 5 , 16 , 7 , 5 , 1 , 1 , 0 , 2 , 4 , 4 , 7 , 15 , 13 , 14 , 0 , 0 , <nl> - 3 , 0 , 1 , 0 , 0 , 0 , 2 , 1 , 6 , 4 , 5 , 1 , 4 , 9 , 0 , 3 , 1 , 8 , 0 , 0 , 10 , 5 , 0 , 43 , 0 , 2 , 6 , 8 , 4 , 0 , 2 , 0 , 0 , 9 , 6 , 0 , 9 , 3 , 1 , 6 , 20 , 14 , 6 , 1 , 4 , 0 , 7 , 2 , 3 , 0 , 2 , 0 , 5 , 0 , 3 , 1 , 0 , 3 , 9 , 7 , 0 , 3 , 4 , 0 , 4 , 9 , 1 , 6 , 0 , <nl> - 9 , 0 , 0 , 2 , 3 , 10 , 9 , 28 , 3 , 6 , 2 , 4 , 1 , 2 , 32 , 4 , 1 , 18 , 2 , 0 , 3 , 1 , 5 , 30 , 10 , 0 , 2 , 2 , 2 , 0 , 7 , 9 , 8 , 11 , 10 , 11 , 7 , 2 , 13 , 7 , 5 , 10 , 0 , 3 , 40 , 2 , 0 , 1 , 6 , 12 , 0 , 4 , 5 , 1 , 5 , 11 , 11 , 21 , 4 , 8 , 3 , 7 , <nl> - 8 , 8 , 33 , 5 , 23 , 0 , 0 , 19 , 8 , 8 , 2 , 3 , 0 , 6 , 1 , 1 , 1 , 5 , 1 , 27 , 4 , 2 , 5 , 0 , 3 , 5 , 6 , 3 , 1 , 0 , 3 , 1 , 12 , 5 , 3 , 3 , 2 , 0 , 7 , 7 , 2 , 1 , 0 , 4 , 0 , 1 , 1 , 2 , 0 , 10 , 10 , 6 , 2 , 5 , 9 , 7 , 5 , 15 , 15 , 21 , 6 , 11 , 5 , 20 , <nl> - 4 , 3 , 5 , 5 , 2 , 5 , 0 , 2 , 1 , 0 , 1 , 7 , 28 , 0 , 9 , 0 , 5 , 12 , 5 , 5 , 18 , 30 , 0 , 12 , 3 , 3 , 21 , 16 , 25 , 32 , 9 , 3 , 14 , 11 , 24 , 5 , 66 , 9 , 1 , 2 , 0 , 5 , 9 , 1 , 
5 , 1 , 8 , 0 , 8 , 3 , 3 , 0 , 1 , 15 , 1 , 4 , 8 , 1 , 2 , 7 , 0 , 7 , 2 , <nl> - 8 , 3 , 7 , 5 , 3 , 7 , 10 , 2 , 1 , 0 , 0 , 2 , 25 , 0 , 6 , 4 , 0 , 10 , 0 , 4 , 2 , 4 , 1 , 12 , 5 , 38 , 4 , 0 , 4 , 1 , 10 , 5 , 9 , 4 , 0 , 14 , 4 , 2 , 5 , 18 , 20 , 21 , 1 , 3 , 0 , 5 , 0 , 7 , 0 , 3 , 7 , 1 , 3 , 1 , 1 , 8 , 1 , 0 , 0 , 0 , 3 , 2 , 5 , 2 , 11 , <nl> - 6 , 0 , 13 , 1 , 3 , 9 , 1 , 12 , 0 , 16 , 6 , 2 , 1 , 0 , 2 , 1 , 12 , 6 , 13 , 11 , 2 , 0 , 28 , 1 , 7 , 8 , 14 , 13 , 8 , 13 , 0 , 2 , 0 , 5 , 4 , 8 , 10 , 2 , 37 , 42 , 19 , 6 , 6 , 7 , 4 , 14 , 11 , 18 , 14 , 80 , 7 , 6 , 0 , 4 , 72 , 12 , 36 , 27 , <nl> - 7 , 7 , 0 , 14 , 17 , 19 , 164 , 27 , 0 , 5 , 10 , 7 , 3 , 13 , 6 , 14 , 0 , 2 , 2 , 5 , 3 , 0 , 6 , 13 , 0 , 0 , 10 , 29 , 0 , 4 , 0 , 3 , 13 , 0 , 3 , 1 , 6 , 51 , 1 , 5 , 28 , 2 , 0 , 8 , 0 , 20 , 2 , 4 , 0 , 25 , 2 , 10 , 13 , 10 , 0 , 16 , 4 , 0 , 1 , 0 , <nl> - 2 , 1 , 7 , 0 , 1 , 8 , 11 , 0 , 0 , 1 , 2 , 7 , 2 , 23 , 11 , 6 , 6 , 4 , 16 , 2 , 2 , 2 , 0 , 22 , 9 , 3 , 3 , 5 , 2 , 0 , 15 , 16 , 21 , 2 , 9 , 20 , 15 , 15 , 5 , 3 , 9 , 1 , 0 , 0 , 1 , 7 , 7 , 5 , 4 , 2 , 2 , 2 , 38 , 24 , 14 , 0 , 0 , 15 , 5 , 6 , 24 , 14 , <nl> - 5 , 5 , 11 , 0 , 21 , 12 , 0 , 3 , 8 , 4 , 11 , 1 , 8 , 0 , 11 , 27 , 7 , 2 , 4 , 9 , 21 , 59 , 0 , 1 , 39 , 3 , 60 , 62 , 3 , 0 , 12 , 11 , 0 , 3 , 30 , 11 , 0 , 13 , 88 , 4 , 15 , 5 , 28 , 13 , 1 , 4 , 48 , 17 , 17 , 4 , 28 , 32 , 46 , 0 , 16 , 0 , <nl> - 18 , 11 , 1 , 8 , 6 , 38 , 11 , 2 , 6 , 11 , 38 , 2 , 0 , 45 , 3 , 11 , 2 , 7 , 8 , 4 , 30 , 14 , 17 , 2 , 1 , 1 , 65 , 18 , 12 , 16 , 4 , 2 , 45 , 123 , 12 , 56 , 33 , 1 , 4 , 3 , 4 , 7 , 0 , 0 , 0 , 3 , 2 , 0 , 16 , 4 , 2 , 4 , 2 , 0 , 7 , 4 , 5 , 2 , 26 , <nl> - 2 , 25 , 6 , 11 , 6 , 1 , 16 , 2 , 6 , 17 , 77 , 15 , 3 , 35 , 0 , 1 , 0 , 5 , 1 , 0 , 38 , 16 , 6 , 3 , 12 , 3 , 3 , 3 , 0 , 9 , 3 , 1 , 3 , 5 , 2 , 9 , 0 , 18 , 0 , 25 , 1 , 3 , 32 , 1 , 72 , 46 , 6 , 2 , 7 , 1 , 3 , 14 , 17 , 0 , 28 , 1 , 40 , 13 , 0 , 20 , <nl> - 15 , 40 , 6 , 38 , 24 , 12 , 43 , 1 , 1 , 9 , 0 , 12 , 6 , 0 , 6 , 2 , 4 , 19 , 3 , 7 , 1 , 48 , 0 , 9 , 5 , 0 , 5 , 6 , 9 , 6 , 10 , 15 , 2 , 11 , 19 , 3 , 9 , 2 , 0 , 1 , 10 , 1 , 27 , 8 , 1 , 3 , 6 , 1 , 14 , 0 , 26 , 0 , 27 , 16 , 3 , 4 , 9 , 6 , 2 , 23 , <nl> - 9 , 10 , 5 , 25 , 2 , 1 , 6 , 1 , 1 , 48 , 15 , 9 , 15 , 14 , 3 , 4 , 26 , 60 , 29 , 13 , 37 , 21 , 1 , 6 , 4 , 0 , 2 , 11 , 22 , 23 , 16 , 16 , 2 , 2 , 1 , 3 , 0 , 5 , 1 , 6 , 4 , 0 , 0 , 4 , 0 , 0 , 8 , 3 , 0 , 2 , 5 , 0 , 7 , 1 , 7 , 3 , 13 , 2 , 4 , 10 , <nl> - 3 , 0 , 2 , 31 , 0 , 18 , 3 , 0 , 12 , 10 , 4 , 1 , 0 , 7 , 5 , 7 , 0 , 5 , 4 , 12 , 2 , 22 , 10 , 4 , 2 , 15 , 2 , 8 , 9 , 0 , 23 , 2 , 197 , 51 , 3 , 1 , 1 , 4 , 13 , 4 , 3 , 21 , 4 , 19 , 3 , 10 , 5 , 40 , 0 , 4 , 1 , 1 , 10 , 4 , 1 , 27 , 34 , 7 , 21 , <nl> - 2 , 17 , 2 , 9 , 6 , 4 , 2 , 3 , 0 , 4 , 2 , 7 , 8 , 2 , 5 , 1 , 15 , 21 , 3 , 4 , 4 , 2 , 2 , 17 , 22 , 1 , 5 , 22 , 4 , 26 , 7 , 0 , 32 , 1 , 11 , 42 , 15 , 4 , 1 , 2 , 5 , 0 , 19 , 3 , 1 , 8 , 6 , 0 , 10 , 1 , 9 , 2 , 13 , 30 , 8 , 2 , 24 , 17 , 19 , 1 , 4 , <nl> - 4 , 25 , 13 , 0 , 10 , 16 , 11 , 39 , 18 , 8 , 5 , 30 , 82 , 1 , 6 , 8 , 18 , 77 , 11 , 13 , 20 , 75 , 11 , 112 , 78 , 33 , 3 , 0 , 0 , 60 , 17 , 84 , 9 , 1 , 1 , 12 , 30 , 10 , 49 , 5 , 32 , 158 , 178 , 5 , 5 , 6 , 3 , 3 , 1 , 3 , 1 , 4 , 7 , 6 , <nl> - 19 , 31 , 21 , 0 , 2 , 9 , 5 , 6 , 27 , 4 , 9 , 8 , 1 , 76 , 18 , 12 , 1 , 4 , 0 , 3 , 3 , 6 , 3 , 12 , 2 , 8 , 30 , 16 , 2 , 25 , 1 , 5 , 5 , 4 , 3 , 0 , 6 , 10 , 2 , 3 , 1 , 0 , 5 , 1 , 19 , 3 , 0 , 8 , 1 , 5 , 2 , 6 , 0 , 0 , 0 , 19 , 1 , 2 , 0 , 5 , 1 , 2 , 5 , <nl> - 1 , 
3 , 7 , 0 , 4 , 12 , 7 , 3 , 10 , 22 , 0 , 9 , 5 , 1 , 0 , 2 , 20 , 1 , 1 , 3 , 23 , 30 , 3 , 9 , 9 , 1 , 4 , 191 , 14 , 3 , 15 , 6 , 8 , 50 , 0 , 1 , 0 , 0 , 4 , 0 , 0 , 1 , 0 , 2 , 4 , 2 , 0 , 2 , 3 , 0 , 2 , 0 , 2 , 2 , 8 , 7 , 0 , 1 , 1 , 1 , 3 , 3 , 17 , 11 , <nl> - 91 , 1 , 9 , 3 , 2 , 13 , 4 , 24 , 15 , 41 , 3 , 13 , 3 , 1 , 20 , 4 , 125 , 29 , 30 , 1 , 0 , 4 , 12 , 2 , 21 , 4 , 5 , 5 , 19 , 11 , 0 , 13 , 11 , 86 , 2 , 18 , 0 , 7 , 1 , 8 , 8 , 2 , 2 , 22 , 1 , 2 , 6 , 5 , 2 , 0 , 1 , 2 , 8 , 0 , 2 , 0 , 5 , 2 , 1 , 0 , <nl> - 2 , 10 , 2 , 0 , 5 , 9 , 2 , 1 , 2 , 0 , 1 , 0 , 4 , 0 , 0 , 10 , 2 , 5 , 3 , 0 , 6 , 1 , 0 , 1 , 4 , 4 , 33 , 3 , 13 , 17 , 3 , 18 , 6 , 4 , 7 , 1 , 5 , 78 , 0 , 4 , 1 , 13 , 7 , 1 , 8 , 1 , 0 , 35 , 27 , 15 , 3 , 0 , 0 , 0 , 1 , 11 , 5 , 41 , 38 , 15 , 22 , 6 , <nl> - 14 , 14 , 2 , 1 , 11 , 6 , 20 , 63 , 5 , 8 , 27 , 7 , 11 , 2 , 2 , 40 , 58 , 23 , 50 , 54 , 56 , 293 , 8 , 8 , 1 , 5 , 1 , 14 , 0 , 1 , 12 , 37 , 89 , 8 , 8 , 8 , 2 , 10 , 6 , 0 , 0 , 0 , 4 , 5 , 2 , 1 , 0 , 1 , 1 , 2 , 7 , 0 , 3 , 3 , 0 , 4 , 6 , 0 , 3 , 2 , <nl> - 19 , 3 , 8 , 0 , 0 , 0 , 4 , 4 , 16 , 0 , 4 , 1 , 5 , 1 , 3 , 0 , 3 , 4 , 6 , 2 , 17 , 10 , 10 , 31 , 6 , 4 , 3 , 6 , 10 , 126 , 7 , 3 , 2 , 2 , 0 , 9 , 0 , 0 , 5 , 20 , 13 , 0 , 15 , 0 , 6 , 0 , 2 , 5 , 8 , 64 , 50 , 3 , 2 , 12 , 2 , 9 , 0 , 0 , 11 , 8 , 20 , <nl> - 109 , 2 , 18 , 23 , 0 , 0 , 9 , 61 , 3 , 0 , 28 , 41 , 77 , 27 , 19 , 17 , 81 , 5 , 2 , 14 , 5 , 83 , 57 , 252 , 14 , 154 , 263 , 14 , 20 , 8 , 13 , 6 , 57 , 39 , 38 , <nl> + / / 1946 common ideograms code points for Japanese <nl> + / / Sourced from http : / / theinstructionlimit . com / common - kanji - character - ranges - for - xna - spritefont - rendering <nl> + / / FIXME : Source a list of the revised 2136 Joyo Kanji list from 2010 and rebuild this . <nl> + / / You can use ImFontAtlas : : GlyphRangesBuilder to create your own ranges derived from this , by merging existing ranges or adding new characters . <nl> + / / ( Stored as accumulative offsets from the initial unicode codepoint 0x4E00 . This encoding is designed to helps us compact the source code size . 
) <nl> + static const short accumulative_offsets_from_0x4E00 [ ] = <nl> + { <nl> + 0 , 1 , 2 , 4 , 1 , 1 , 1 , 1 , 2 , 1 , 6 , 2 , 2 , 1 , 8 , 5 , 7 , 11 , 1 , 2 , 10 , 10 , 8 , 2 , 4 , 20 , 2 , 11 , 8 , 2 , 1 , 2 , 1 , 6 , 2 , 1 , 7 , 5 , 3 , 7 , 1 , 1 , 13 , 7 , 9 , 1 , 4 , 6 , 1 , 2 , 1 , 10 , 1 , 1 , 9 , 2 , 2 , 4 , 5 , 6 , 14 , 1 , 1 , 9 , 3 , 18 , <nl> + 5 , 4 , 2 , 2 , 10 , 7 , 1 , 1 , 1 , 3 , 2 , 4 , 3 , 23 , 2 , 10 , 12 , 2 , 14 , 2 , 4 , 13 , 1 , 6 , 10 , 3 , 1 , 7 , 13 , 6 , 4 , 13 , 5 , 2 , 3 , 17 , 2 , 2 , 5 , 7 , 6 , 4 , 1 , 7 , 14 , 16 , 6 , 13 , 9 , 15 , 1 , 1 , 7 , 16 , 4 , 7 , 1 , 19 , 9 , 2 , 7 , 15 , <nl> + 2 , 6 , 5 , 13 , 25 , 4 , 14 , 13 , 11 , 25 , 1 , 1 , 1 , 2 , 1 , 2 , 2 , 3 , 10 , 11 , 3 , 3 , 1 , 1 , 4 , 4 , 2 , 1 , 4 , 9 , 1 , 4 , 3 , 5 , 5 , 2 , 7 , 12 , 11 , 15 , 7 , 16 , 4 , 5 , 16 , 2 , 1 , 1 , 6 , 3 , 3 , 1 , 1 , 2 , 7 , 6 , 6 , 7 , 1 , 4 , 7 , 6 , 1 , 1 , <nl> + 2 , 1 , 12 , 3 , 3 , 9 , 5 , 8 , 1 , 11 , 1 , 2 , 3 , 18 , 20 , 4 , 1 , 3 , 6 , 1 , 7 , 3 , 5 , 5 , 7 , 2 , 2 , 12 , 3 , 1 , 4 , 2 , 3 , 2 , 3 , 11 , 8 , 7 , 4 , 17 , 1 , 9 , 25 , 1 , 1 , 4 , 2 , 2 , 4 , 1 , 2 , 7 , 1 , 1 , 1 , 3 , 1 , 2 , 6 , 16 , 1 , 2 , 1 , 1 , 3 , 12 , <nl> + 20 , 2 , 5 , 20 , 8 , 7 , 6 , 2 , 1 , 1 , 1 , 1 , 6 , 2 , 1 , 2 , 10 , 1 , 1 , 6 , 1 , 3 , 1 , 2 , 1 , 4 , 1 , 12 , 4 , 1 , 3 , 1 , 1 , 1 , 1 , 1 , 10 , 4 , 7 , 5 , 13 , 1 , 15 , 1 , 1 , 30 , 11 , 9 , 1 , 15 , 38 , 14 , 1 , 32 , 17 , 20 , 1 , 9 , 31 , 2 , 21 , 9 , <nl> + 4 , 49 , 22 , 2 , 1 , 13 , 1 , 11 , 45 , 35 , 43 , 55 , 12 , 19 , 83 , 1 , 3 , 2 , 3 , 13 , 2 , 1 , 7 , 3 , 18 , 3 , 13 , 8 , 1 , 8 , 18 , 5 , 3 , 7 , 25 , 24 , 9 , 24 , 40 , 3 , 17 , 24 , 2 , 1 , 6 , 2 , 3 , 16 , 15 , 6 , 7 , 3 , 12 , 1 , 9 , 7 , 3 , 3 , <nl> + 3 , 15 , 21 , 5 , 16 , 4 , 5 , 12 , 11 , 11 , 3 , 6 , 3 , 2 , 31 , 3 , 2 , 1 , 1 , 23 , 6 , 6 , 1 , 4 , 2 , 6 , 5 , 2 , 1 , 1 , 3 , 3 , 22 , 2 , 6 , 2 , 3 , 17 , 3 , 2 , 4 , 5 , 1 , 9 , 5 , 1 , 1 , 6 , 15 , 12 , 3 , 17 , 2 , 14 , 2 , 8 , 1 , 23 , 16 , 4 , 2 , 23 , <nl> + 8 , 15 , 23 , 20 , 12 , 25 , 19 , 47 , 11 , 21 , 65 , 46 , 4 , 3 , 1 , 5 , 6 , 1 , 2 , 5 , 26 , 2 , 1 , 1 , 3 , 11 , 1 , 1 , 1 , 2 , 1 , 2 , 3 , 1 , 1 , 10 , 2 , 3 , 1 , 1 , 1 , 3 , 6 , 3 , 2 , 2 , 6 , 6 , 9 , 2 , 2 , 2 , 6 , 2 , 5 , 10 , 2 , 4 , 1 , 2 , 1 , 2 , 2 , <nl> + 3 , 1 , 1 , 3 , 1 , 2 , 9 , 23 , 9 , 2 , 1 , 1 , 1 , 1 , 5 , 3 , 2 , 1 , 10 , 9 , 6 , 1 , 10 , 2 , 31 , 25 , 3 , 7 , 5 , 40 , 1 , 15 , 6 , 17 , 7 , 27 , 180 , 1 , 3 , 2 , 2 , 1 , 1 , 1 , 6 , 3 , 10 , 7 , 1 , 3 , 6 , 17 , 8 , 6 , 2 , 2 , 1 , 3 , 5 , 5 , 8 , 16 , 14 , <nl> + 15 , 1 , 1 , 4 , 1 , 2 , 1 , 1 , 1 , 3 , 2 , 7 , 5 , 6 , 2 , 5 , 10 , 1 , 4 , 2 , 9 , 1 , 1 , 11 , 6 , 1 , 44 , 1 , 3 , 7 , 9 , 5 , 1 , 3 , 1 , 1 , 10 , 7 , 1 , 10 , 4 , 2 , 7 , 21 , 15 , 7 , 2 , 5 , 1 , 8 , 3 , 4 , 1 , 3 , 1 , 6 , 1 , 4 , 2 , 1 , 4 , 10 , 8 , 1 , 4 , 5 , <nl> + 1 , 5 , 10 , 2 , 7 , 1 , 10 , 1 , 1 , 3 , 4 , 11 , 10 , 29 , 4 , 7 , 3 , 5 , 2 , 3 , 33 , 5 , 2 , 19 , 3 , 1 , 4 , 2 , 6 , 31 , 11 , 1 , 3 , 3 , 3 , 1 , 8 , 10 , 9 , 12 , 11 , 12 , 8 , 3 , 14 , 8 , 6 , 11 , 1 , 4 , 41 , 3 , 1 , 2 , 7 , 13 , 1 , 5 , 6 , 2 , 6 , 12 , <nl> + 12 , 22 , 5 , 9 , 4 , 8 , 9 , 9 , 34 , 6 , 24 , 1 , 1 , 20 , 9 , 9 , 3 , 4 , 1 , 7 , 2 , 2 , 2 , 6 , 2 , 28 , 5 , 3 , 6 , 1 , 4 , 6 , 7 , 4 , 2 , 1 , 4 , 2 , 13 , 6 , 4 , 4 , 3 , 1 , 8 , 8 , 3 , 2 , 1 , 5 , 1 , 2 , 2 , 3 , 1 , 11 , 11 , 7 , 3 , 6 , 10 , 8 , 6 , 16 , 16 , <nl> + 22 , 7 , 12 , 6 , 21 , 5 , 4 , 6 , 6 , 3 , 6 , 1 , 3 , 2 , 1 , 2 , 8 , 29 , 1 , 10 , 1 , 6 , 13 , 6 , 6 , 19 , 31 , 1 , 13 , 4 , 4 , 22 , 17 , 26 , 33 , 10 , 4 , 15 , 12 , 25 
, 6 , 67 , 10 , 2 , 3 , 1 , 6 , 10 , 2 , 6 , 2 , 9 , 1 , 9 , 4 , 4 , 1 , 2 , 16 , 2 , <nl> + 5 , 9 , 2 , 3 , 8 , 1 , 8 , 3 , 9 , 4 , 8 , 6 , 4 , 8 , 11 , 3 , 2 , 1 , 1 , 3 , 26 , 1 , 7 , 5 , 1 , 11 , 1 , 5 , 3 , 5 , 2 , 13 , 6 , 39 , 5 , 1 , 5 , 2 , 11 , 6 , 10 , 5 , 1 , 15 , 5 , 3 , 6 , 19 , 21 , 22 , 2 , 4 , 1 , 6 , 1 , 8 , 1 , 4 , 8 , 2 , 4 , 2 , 2 , 9 , 2 , <nl> + 1 , 1 , 1 , 4 , 3 , 6 , 3 , 12 , 7 , 1 , 14 , 2 , 4 , 10 , 2 , 13 , 1 , 17 , 7 , 3 , 2 , 1 , 3 , 2 , 13 , 7 , 14 , 12 , 3 , 1 , 29 , 2 , 8 , 9 , 15 , 14 , 9 , 14 , 1 , 3 , 1 , 6 , 5 , 9 , 11 , 3 , 38 , 43 , 20 , 7 , 7 , 8 , 5 , 15 , 12 , 19 , 15 , 81 , 8 , 7 , <nl> + 1 , 5 , 73 , 13 , 37 , 28 , 8 , 8 , 1 , 15 , 18 , 20 , 165 , 28 , 1 , 6 , 11 , 8 , 4 , 14 , 7 , 15 , 1 , 3 , 3 , 6 , 4 , 1 , 7 , 14 , 1 , 1 , 11 , 30 , 1 , 5 , 1 , 4 , 14 , 1 , 4 , 2 , 7 , 52 , 2 , 6 , 29 , 3 , 1 , 9 , 1 , 21 , 3 , 5 , 1 , 26 , 3 , 11 , 14 , <nl> + 11 , 1 , 17 , 5 , 1 , 2 , 1 , 3 , 2 , 8 , 1 , 2 , 9 , 12 , 1 , 1 , 2 , 3 , 8 , 3 , 24 , 12 , 7 , 7 , 5 , 17 , 3 , 3 , 3 , 1 , 23 , 10 , 4 , 4 , 6 , 3 , 1 , 16 , 17 , 22 , 3 , 10 , 21 , 16 , 16 , 6 , 4 , 10 , 2 , 1 , 1 , 2 , 8 , 8 , 6 , 5 , 3 , 3 , 3 , 39 , 25 , <nl> + 15 , 1 , 1 , 16 , 6 , 7 , 25 , 15 , 6 , 6 , 12 , 1 , 22 , 13 , 1 , 4 , 9 , 5 , 12 , 2 , 9 , 1 , 12 , 28 , 8 , 3 , 5 , 10 , 22 , 60 , 1 , 2 , 40 , 4 , 61 , 63 , 4 , 1 , 13 , 12 , 1 , 4 , 31 , 12 , 1 , 14 , 89 , 5 , 16 , 6 , 29 , 14 , 2 , 5 , 49 , 18 , 18 , <nl> + 5 , 29 , 33 , 47 , 1 , 17 , 1 , 19 , 12 , 2 , 9 , 7 , 39 , 12 , 3 , 7 , 12 , 39 , 3 , 1 , 46 , 4 , 12 , 3 , 8 , 9 , 5 , 31 , 15 , 18 , 3 , 2 , 2 , 66 , 19 , 13 , 17 , 5 , 3 , 46 , 124 , 13 , 57 , 34 , 2 , 5 , 4 , 5 , 8 , 1 , 1 , 1 , 4 , 3 , 1 , 17 , 5 , <nl> + 3 , 5 , 3 , 1 , 8 , 5 , 6 , 3 , 27 , 3 , 26 , 7 , 12 , 7 , 2 , 17 , 3 , 7 , 18 , 78 , 16 , 4 , 36 , 1 , 2 , 1 , 6 , 2 , 1 , 39 , 17 , 7 , 4 , 13 , 4 , 4 , 4 , 1 , 10 , 4 , 2 , 4 , 6 , 3 , 10 , 1 , 19 , 1 , 26 , 2 , 4 , 33 , 2 , 73 , 47 , 7 , 3 , 8 , 2 , 4 , 15 , <nl> + 18 , 1 , 29 , 2 , 41 , 14 , 1 , 21 , 16 , 41 , 7 , 39 , 25 , 13 , 44 , 2 , 2 , 10 , 1 , 13 , 7 , 1 , 7 , 3 , 5 , 20 , 4 , 8 , 2 , 49 , 1 , 10 , 6 , 1 , 6 , 7 , 10 , 7 , 11 , 16 , 3 , 12 , 20 , 4 , 10 , 3 , 1 , 2 , 11 , 2 , 28 , 9 , 2 , 4 , 7 , 2 , 15 , 1 , <nl> + 27 , 1 , 28 , 17 , 4 , 5 , 10 , 7 , 3 , 24 , 10 , 11 , 6 , 26 , 3 , 2 , 7 , 2 , 2 , 49 , 16 , 10 , 16 , 15 , 4 , 5 , 27 , 61 , 30 , 14 , 38 , 22 , 2 , 7 , 5 , 1 , 3 , 12 , 23 , 24 , 17 , 17 , 3 , 3 , 2 , 4 , 1 , 6 , 2 , 7 , 5 , 1 , 1 , 5 , 1 , 1 , 9 , 4 , <nl> + 1 , 3 , 6 , 1 , 8 , 2 , 8 , 4 , 14 , 3 , 5 , 11 , 4 , 1 , 3 , 32 , 1 , 19 , 4 , 1 , 13 , 11 , 5 , 2 , 1 , 8 , 6 , 8 , 1 , 6 , 5 , 13 , 3 , 23 , 11 , 5 , 3 , 16 , 3 , 9 , 10 , 1 , 24 , 3 , 198 , 52 , 4 , 2 , 2 , 5 , 14 , 5 , 4 , 22 , 5 , 20 , 4 , 11 , 6 , 41 , <nl> + 1 , 5 , 2 , 2 , 11 , 5 , 2 , 28 , 35 , 8 , 22 , 3 , 18 , 3 , 10 , 7 , 5 , 3 , 4 , 1 , 5 , 3 , 8 , 9 , 3 , 6 , 2 , 16 , 22 , 4 , 5 , 5 , 3 , 3 , 18 , 23 , 2 , 6 , 23 , 5 , 27 , 8 , 1 , 33 , 2 , 12 , 43 , 16 , 5 , 2 , 3 , 6 , 1 , 20 , 4 , 2 , 9 , 7 , 1 , 11 , 2 , <nl> + 10 , 3 , 14 , 31 , 9 , 3 , 25 , 18 , 20 , 2 , 5 , 5 , 26 , 14 , 1 , 11 , 17 , 12 , 40 , 19 , 9 , 6 , 31 , 83 , 2 , 7 , 9 , 19 , 78 , 12 , 14 , 21 , 76 , 12 , 113 , 79 , 34 , 4 , 1 , 1 , 61 , 18 , 85 , 10 , 2 , 2 , 13 , 31 , 11 , 50 , 6 , 33 , 159 , <nl> + 179 , 6 , 6 , 7 , 4 , 4 , 2 , 4 , 2 , 5 , 8 , 7 , 20 , 32 , 22 , 1 , 3 , 10 , 6 , 7 , 28 , 5 , 10 , 9 , 2 , 77 , 19 , 13 , 2 , 5 , 1 , 4 , 4 , 7 , 4 , 13 , 3 , 9 , 31 , 17 , 3 , 26 , 2 , 6 , 6 , 5 , 4 , 1 , 7 , 11 , 3 , 4 , 2 , 1 , 6 , 2 , 20 , 4 , 1 , 9 , 2 , 6 , 
<nl> + 3 , 7 , 1 , 1 , 1 , 20 , 2 , 3 , 1 , 6 , 2 , 3 , 6 , 2 , 4 , 8 , 1 , 5 , 13 , 8 , 4 , 11 , 23 , 1 , 10 , 6 , 2 , 1 , 3 , 21 , 2 , 2 , 4 , 24 , 31 , 4 , 10 , 10 , 2 , 5 , 192 , 15 , 4 , 16 , 7 , 9 , 51 , 1 , 2 , 1 , 1 , 5 , 1 , 1 , 2 , 1 , 3 , 5 , 3 , 1 , 3 , 4 , 1 , <nl> + 3 , 1 , 3 , 3 , 9 , 8 , 1 , 2 , 2 , 2 , 4 , 4 , 18 , 12 , 92 , 2 , 10 , 4 , 3 , 14 , 5 , 25 , 16 , 42 , 4 , 14 , 4 , 2 , 21 , 5 , 126 , 30 , 31 , 2 , 1 , 5 , 13 , 3 , 22 , 5 , 6 , 6 , 20 , 12 , 1 , 14 , 12 , 87 , 3 , 19 , 1 , 8 , 2 , 9 , 9 , 3 , 3 , 23 , 2 , <nl> + 3 , 7 , 6 , 3 , 1 , 2 , 3 , 9 , 1 , 3 , 1 , 6 , 3 , 2 , 1 , 3 , 11 , 3 , 1 , 6 , 10 , 3 , 2 , 3 , 1 , 2 , 1 , 5 , 1 , 1 , 11 , 3 , 6 , 4 , 1 , 7 , 2 , 1 , 2 , 5 , 5 , 34 , 4 , 14 , 18 , 4 , 19 , 7 , 5 , 8 , 2 , 6 , 79 , 1 , 5 , 2 , 14 , 8 , 2 , 9 , 2 , 1 , 36 , 28 , 16 , <nl> + 4 , 1 , 1 , 1 , 2 , 12 , 6 , 42 , 39 , 16 , 23 , 7 , 15 , 15 , 3 , 2 , 12 , 7 , 21 , 64 , 6 , 9 , 28 , 8 , 12 , 3 , 3 , 41 , 59 , 24 , 51 , 55 , 57 , 294 , 9 , 9 , 2 , 6 , 2 , 15 , 1 , 2 , 13 , 38 , 90 , 9 , 9 , 9 , 3 , 11 , 7 , 1 , 1 , 1 , 5 , 6 , 3 , 2 , <nl> + 1 , 2 , 2 , 3 , 8 , 1 , 4 , 4 , 1 , 5 , 7 , 1 , 4 , 3 , 20 , 4 , 9 , 1 , 1 , 1 , 5 , 5 , 17 , 1 , 5 , 2 , 6 , 2 , 4 , 1 , 4 , 5 , 7 , 3 , 18 , 11 , 11 , 32 , 7 , 5 , 4 , 7 , 11 , 127 , 8 , 4 , 3 , 3 , 1 , 10 , 1 , 1 , 6 , 21 , 14 , 1 , 16 , 1 , 7 , 1 , 3 , 6 , 9 , 65 , <nl> + 51 , 4 , 3 , 13 , 3 , 10 , 1 , 1 , 12 , 9 , 21 , 110 , 3 , 19 , 24 , 1 , 1 , 10 , 62 , 4 , 1 , 29 , 42 , 78 , 28 , 20 , 18 , 82 , 6 , 3 , 15 , 6 , 84 , 58 , 253 , 15 , 155 , 264 , 15 , 21 , 9 , 14 , 7 , 58 , 40 , 39 , <nl> } ; <nl> - static ImWchar base_ranges [ ] = <nl> + static ImWchar base_ranges [ ] = / / not zero - terminated <nl> { <nl> 0x0020 , 0x00FF , / / Basic Latin + Latin Supplement <nl> 0x3000 , 0x30FF , / / Punctuations , Hiragana , Katakana <nl> 0x31F0 , 0x31FF , / / Katakana Phonetic Extensions <nl> 0xFF00 , 0xFFEF , / / Half - width characters <nl> } ; <nl> - static bool full_ranges_unpacked = false ; <nl> - static ImWchar full_ranges [ IM_ARRAYSIZE ( base_ranges ) + IM_ARRAYSIZE ( offsets_from_0x4E00 ) * 2 + 1 ] ; <nl> - if ( ! full_ranges_unpacked ) <nl> + static ImWchar full_ranges [ IM_ARRAYSIZE ( base_ranges ) + IM_ARRAYSIZE ( accumulative_offsets_from_0x4E00 ) * 2 + 1 ] = { 0 } ; <nl> + if ( ! full_ranges [ 0 ] ) <nl> { <nl> - / / Unpack <nl> - int codepoint = 0x4e00 ; <nl> memcpy ( full_ranges , base_ranges , sizeof ( base_ranges ) ) ; <nl> - ImWchar * dst = full_ranges + IM_ARRAYSIZE ( base_ranges ) ; <nl> - for ( int n = 0 ; n < IM_ARRAYSIZE ( offsets_from_0x4E00 ) ; n + + , dst + = 2 ) <nl> - dst [ 0 ] = dst [ 1 ] = ( ImWchar ) ( codepoint + = ( offsets_from_0x4E00 [ n ] + 1 ) ) ; <nl> - dst [ 0 ] = 0 ; <nl> - full_ranges_unpacked = true ; <nl> + UnpackAccumulativeOffsetsIntoRanges ( 0x4E00 , accumulative_offsets_from_0x4E00 , IM_ARRAYSIZE ( accumulative_offsets_from_0x4E00 ) , full_ranges + IM_ARRAYSIZE ( base_ranges ) ) ; <nl> } <nl> return & full_ranges [ 0 ] ; <nl> } <nl> | Added GetGlyphRangesChineseSimplifiedCommon ( ) helper that returns a list of ~ 2500 most common Simplified Chinese characters . Renamed GetGlyphRangesChinese ( ) to GetGlyphRangesChineseFull ( ) to distinguish other variants and discourage using the full set . ( ) | ocornut/imgui | d44faa165aa8519cf12429a1e30a2d0887c37982 | 2018-06-06T10:35:36Z |
mmm a / tensorflow / contrib / lite / experimental / examples / unity / TensorFlowLitePlugin / README . md <nl> ppp b / tensorflow / contrib / lite / experimental / examples / unity / TensorFlowLitePlugin / README . md <nl> bazel build - c opt - - cxxopt = - - std = c + + 11 \ <nl> - - cpu = armeabi - v7a \ <nl> / / tensorflow / contrib / lite / experimental / c : libtensorflowlite_c . so <nl> ` ` ` <nl> + <nl> + If you encounter issues with native plugin discovery on Mac ( " Darwin " ) <nl> + platforms , try renaming ` libtensorflowlite_c . so ` to ` tensorflowlite_c . bundle ` . <nl> mmm a / tensorflow / contrib / lite / kernels / internal / BUILD <nl> ppp b / tensorflow / contrib / lite / kernels / internal / BUILD <nl> cc_library ( <nl> " : darwin " : [ <nl> " : neon_tensor_utils " , <nl> ] , <nl> + " : darwin_x86_64 " : [ <nl> + " : neon_tensor_utils " , <nl> + ] , <nl> " / / conditions : default " : [ <nl> " : portable_tensor_utils " , <nl> ] , <nl> | Fix darwin_x86_64 TFLite builds | tensorflow/tensorflow | af59e29ac19b3b377ba8031621e4bcbfb68d80cb | 2018-08-01T23:31:31Z |
mmm a / taichi / ir / snode . cpp <nl> ppp b / taichi / ir / snode . cpp <nl> SNode & SNode : : create_node ( std : : vector < Index > indices , <nl> return new_node ; <nl> } <nl> <nl> + SNode & SNode : : dynamic_chunked ( const Index & expr , int n , int chunk_size ) { <nl> + auto & snode = create_node ( { expr } , { n } , SNodeType : : dynamic ) ; <nl> + snode . chunk_size = chunk_size ; <nl> + return snode ; <nl> + } <nl> + <nl> void SNode : : lazy_grad ( ) { <nl> if ( this - > type = = SNodeType : : place ) <nl> return ; <nl> mmm a / taichi / ir / snode . h <nl> ppp b / taichi / ir / snode . h <nl> class SNode { <nl> <nl> SNode & place ( Expr & expr ) ; <nl> <nl> - SNode & dynamic_chunked ( const Index & expr , int n , int chunk_size ) { <nl> - TI_ASSERT ( bit : : is_power_of_two ( n ) ) ; <nl> - TI_ASSERT ( bit : : is_power_of_two ( chunk_size ) ) ; <nl> - auto & child = insert_children ( SNodeType : : dynamic ) ; <nl> - child . extractors [ expr . value ] . activate ( bit : : log2int ( n ) ) ; <nl> - child . n = n ; <nl> - child . chunk_size = chunk_size ; <nl> - return child ; <nl> - } <nl> + SNode & dynamic_chunked ( const Index & expr , int n , int chunk_size ) ; <nl> <nl> SNode & morton ( bool val = true ) { <nl> _morton = val ; <nl> mmm a / tests / python / test_assert . py <nl> ppp b / tests / python / test_assert . py <nl> def func ( ) : <nl> func ( ) <nl> <nl> <nl> - @ ti . all_archs <nl> def test_assert_ok ( ) : <nl> ti . init ( debug = True ) <nl> ti . set_gdb_trigger ( False ) <nl> def func ( ) : <nl> func ( ) <nl> <nl> <nl> - @ ti . all_archs <nl> def test_not_out_of_bound ( ) : <nl> ti . init ( debug = True ) <nl> ti . set_gdb_trigger ( False ) <nl> def func ( ) : <nl> x [ 7 , 15 ] = 1 <nl> <nl> func ( ) <nl> + <nl> + <nl> + @ ti . must_throw ( RuntimeError ) <nl> + def test_out_of_bound_dynamic ( ) : <nl> + ti . init ( debug = True ) <nl> + ti . set_gdb_trigger ( False ) <nl> + x = ti . var ( ti . i32 ) <nl> + <nl> + ti . root . dynamic ( ti . i , 16 , 4 ) . place ( x ) <nl> + <nl> + @ ti . kernel <nl> + def func ( ) : <nl> + x [ 17 ] = 1 <nl> + <nl> + func ( ) <nl> + <nl> + <nl> + def test_not_out_of_bound_dynamic ( ) : <nl> + ti . init ( debug = True ) <nl> + ti . set_gdb_trigger ( False ) <nl> + x = ti . var ( ti . i32 ) <nl> + <nl> + ti . root . dynamic ( ti . i , 16 , 4 ) . place ( x ) <nl> + <nl> + @ ti . kernel <nl> + def func ( ) : <nl> + x [ 3 ] = 1 <nl> + <nl> + func ( ) <nl> | [ misc ] Fix dynamic node out - of - bound checker ( ) | taichi-dev/taichi | d9b76a1123ed3fbde506af75cd05ff1594562e3d | 2020-04-11T20:20:07Z |
mmm a / src / common / x64 / xbyak_abi . h <nl> ppp b / src / common / x64 / xbyak_abi . h <nl> inline size_t ABI_PushRegistersAndAdjustStack ( Xbyak : : CodeGenerator & code , std : : b <nl> size_t rsp_alignment , size_t needed_frame_size = 0 ) { <nl> s32 subtraction , xmm_offset ; <nl> ABI_CalculateFrameSize ( regs , rsp_alignment , needed_frame_size , & subtraction , & xmm_offset ) ; <nl> + <nl> for ( std : : size_t i = 0 ; i < regs . size ( ) ; + + i ) { <nl> if ( regs [ i ] & & ABI_ALL_GPRS [ i ] ) { <nl> code . push ( IndexToReg64 ( static_cast < int > ( i ) ) ) ; <nl> } <nl> } <nl> + <nl> if ( subtraction ! = 0 ) { <nl> code . sub ( code . rsp , subtraction ) ; <nl> } <nl> <nl> - for ( int i = 0 ; i < regs . count ( ) ; i + + ) { <nl> - if ( regs . test ( i ) & ABI_ALL_GPRS . test ( i ) ) { <nl> - code . push ( IndexToReg64 ( i ) ) ; <nl> - } <nl> - } <nl> - <nl> for ( std : : size_t i = 0 ; i < regs . size ( ) ; + + i ) { <nl> if ( regs [ i ] & & ABI_ALL_XMMS [ i ] ) { <nl> code . movaps ( code . xword [ code . rsp + xmm_offset ] , IndexToXmm ( static_cast < int > ( i ) ) ) ; <nl> | xbyak_abi : Fix ABI_PushRegistersAndAdjustStack | yuzu-emu/yuzu | 4417770ba9a1d48ded255e75c32dcc1005b912c1 | 2020-06-15T17:59:01Z |
mmm a / hphp / runtime / vm / bytecode . cpp <nl> ppp b / hphp / runtime / vm / bytecode . cpp <nl> OPTBLD_INLINE void ExecutionContext : : yield ( IOP_ARGS , <nl> assert ( fp - > resumed ( ) ) ; <nl> assert ( func - > isGenerator ( ) ) ; <nl> <nl> + EventHook : : FunctionSuspend ( fp , true ) ; <nl> + <nl> if ( ! func - > isAsync ( ) ) { <nl> / / Non - async generator . <nl> assert ( fp - > sfp ( ) ) ; <nl> OPTBLD_INLINE void ExecutionContext : : yield ( IOP_ARGS , <nl> } <nl> } <nl> <nl> - EventHook : : FunctionSuspend ( fp , true ) ; <nl> - <nl> / / Grab caller info from ActRec . <nl> ActRec * sfp = fp - > sfp ( ) ; <nl> Offset soff = fp - > m_soff ; <nl> mmm a / hphp / runtime / vm / jit / irgen - resumable . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - resumable . cpp <nl> void implAwaitR ( HTS & env , <nl> gen ( env , RetCtrl , RetCtrlData ( true ) , stack , frame , retAddr ) ; <nl> } <nl> <nl> - void yieldReturnControl ( HTS & env , Block * catchBlock ) { <nl> + void yieldReturnControl ( HTS & env ) { <nl> / / Push return value of next ( ) / send ( ) / raise ( ) . <nl> push ( env , cns ( env , Type : : InitNull ) ) ; <nl> <nl> - auto const stack = spillStack ( env ) ; <nl> - retSurpriseCheck ( env , fp ( env ) , nullptr , catchBlock , true ) ; <nl> - <nl> + auto const stack = spillStack ( env ) ; <nl> auto const retAddr = gen ( env , LdRetAddr , fp ( env ) ) ; <nl> - auto const frame = gen ( env , FreeActRec , fp ( env ) ) ; <nl> - <nl> + auto const frame = gen ( env , FreeActRec , fp ( env ) ) ; <nl> gen ( env , RetCtrl , RetCtrlData ( true ) , stack , frame , retAddr ) ; <nl> } <nl> <nl> void yieldImpl ( HTS & env , Offset resumeOffset ) { <nl> + retSurpriseCheck ( env , fp ( env ) , nullptr , makeCatch ( env ) , true ) ; <nl> + <nl> / / Resumable : : setResumeAddr ( resumeAddr , resumeOffset ) <nl> auto const resumeSk = SrcKey ( curFunc ( env ) , resumeOffset , true ) ; <nl> auto const resumeAddr = gen ( env , LdBindAddr , LdBindAddrData ( resumeSk ) ) ; <nl> void emitYield ( HTS & env ) { <nl> <nl> if ( curFunc ( env ) - > isAsyncGenerator ( ) ) PUNT ( Yield - AsyncGenerator ) ; <nl> <nl> - auto const catchBlock = makeCatchNoSpill ( env ) ; <nl> yieldImpl ( env , resumeOffset ) ; <nl> <nl> / / take a fast path if this generator has no yield k = > v ; <nl> void emitYield ( HTS & env ) { <nl> gen ( env , ContArIncKey , fp ( env ) ) ; <nl> } <nl> <nl> - yieldReturnControl ( env , catchBlock ) ; <nl> + yieldReturnControl ( env ) ; <nl> } <nl> <nl> void emitYieldK ( HTS & env ) { <nl> void emitYieldK ( HTS & env ) { <nl> <nl> if ( curFunc ( env ) - > isAsyncGenerator ( ) ) PUNT ( YieldK - AsyncGenerator ) ; <nl> <nl> - auto const catchBlock = makeCatchNoSpill ( env ) ; <nl> yieldImpl ( env , resumeOffset ) ; <nl> <nl> auto const newKey = popC ( env ) ; <nl> void emitYieldK ( HTS & env ) { <nl> gen ( env , ContArUpdateIdx , fp ( env ) , newKey ) ; <nl> } <nl> <nl> - yieldReturnControl ( env , catchBlock ) ; <nl> + yieldReturnControl ( env ) ; <nl> } <nl> <nl> void emitContCheck ( HTS & env , int32_t checkStarted ) { <nl> new file mode 100644 <nl> index 00000000000 . . bc802d598cc <nl> mmm / dev / null <nl> ppp b / hphp / test / slow / yield / yield - suspend - hook . php <nl> <nl> + < ? 
hh <nl> + <nl> + function doh ( $ x , $ y ) { <nl> + if ( $ y = = = ' hey ' ) { <nl> + if ( $ x = = = ' exit ' ) { <nl> + global $ counter ; <nl> + if ( ( $ counter + + % 2 ) = = 1 ) { <nl> + throw new exception ( ' x ' ) ; <nl> + } <nl> + } <nl> + } <nl> + } <nl> + fb_setprofile ( ' doh ' ) ; <nl> + <nl> + function hey ( ) { <nl> + yield new stdclass ; <nl> + } <nl> + <nl> + for ( $ i = 0 ; $ i < 3 ; + + $ i ) { <nl> + try { foreach ( hey ( ) as $ k ) { } } catch ( Exception $ x ) { } <nl> + } <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . e69de29bb2d <nl> | Fix suspend hook during Yield and YieldK | facebook/hhvm | 7863a60595ebc58884b6bc87b0fea9a7ed403866 | 2014-12-07T06:00:41Z |
mmm a / src / core / ext / transport / chttp2 / transport / hpack_parser . cc <nl> ppp b / src / core / ext / transport / chttp2 / transport / hpack_parser . cc <nl> static const uint8_t inverse_base64 [ 256 ] = { <nl> 255 , <nl> } ; <nl> <nl> + static void GPR_ATTRIBUTE_NOINLINE on_hdr_log ( grpc_mdelem md ) { <nl> + char * k = grpc_slice_to_c_string ( GRPC_MDKEY ( md ) ) ; <nl> + char * v = nullptr ; <nl> + if ( grpc_is_binary_header_internal ( GRPC_MDKEY ( md ) ) ) { <nl> + v = grpc_dump_slice ( GRPC_MDVALUE ( md ) , GPR_DUMP_HEX ) ; <nl> + } else { <nl> + v = grpc_slice_to_c_string ( GRPC_MDVALUE ( md ) ) ; <nl> + } <nl> + gpr_log ( <nl> + GPR_INFO , <nl> + " Decode : ' % s : % s ' , elem_interned = % d [ % d ] , k_interned = % d , v_interned = % d " , <nl> + k , v , GRPC_MDELEM_IS_INTERNED ( md ) , GRPC_MDELEM_STORAGE ( md ) , <nl> + grpc_slice_is_interned ( GRPC_MDKEY ( md ) ) , <nl> + grpc_slice_is_interned ( GRPC_MDVALUE ( md ) ) ) ; <nl> + gpr_free ( k ) ; <nl> + gpr_free ( v ) ; <nl> + } <nl> + <nl> / * emission helpers * / <nl> - static grpc_error * on_hdr ( grpc_chttp2_hpack_parser * p , grpc_mdelem md , <nl> - int add_to_table ) { <nl> + template < bool do_add > <nl> + static grpc_error * on_hdr ( grpc_chttp2_hpack_parser * p , grpc_mdelem md ) { <nl> if ( GRPC_TRACE_FLAG_ENABLED ( grpc_http_trace ) ) { <nl> - char * k = grpc_slice_to_c_string ( GRPC_MDKEY ( md ) ) ; <nl> - char * v = nullptr ; <nl> - if ( grpc_is_binary_header_internal ( GRPC_MDKEY ( md ) ) ) { <nl> - v = grpc_dump_slice ( GRPC_MDVALUE ( md ) , GPR_DUMP_HEX ) ; <nl> - } else { <nl> - v = grpc_slice_to_c_string ( GRPC_MDVALUE ( md ) ) ; <nl> - } <nl> - gpr_log ( <nl> - GPR_INFO , <nl> - " Decode : ' % s : % s ' , elem_interned = % d [ % d ] , k_interned = % d , v_interned = % d " , <nl> - k , v , GRPC_MDELEM_IS_INTERNED ( md ) , GRPC_MDELEM_STORAGE ( md ) , <nl> - grpc_slice_is_interned ( GRPC_MDKEY ( md ) ) , <nl> - grpc_slice_is_interned ( GRPC_MDVALUE ( md ) ) ) ; <nl> - gpr_free ( k ) ; <nl> - gpr_free ( v ) ; <nl> + on_hdr_log ( md ) ; <nl> } <nl> - if ( add_to_table ) { <nl> - GPR_ASSERT ( GRPC_MDELEM_STORAGE ( md ) = = GRPC_MDELEM_STORAGE_INTERNED | | <nl> - GRPC_MDELEM_STORAGE ( md ) = = GRPC_MDELEM_STORAGE_STATIC ) ; <nl> + if ( do_add ) { <nl> + GPR_DEBUG_ASSERT ( GRPC_MDELEM_STORAGE ( md ) = = GRPC_MDELEM_STORAGE_INTERNED | | <nl> + GRPC_MDELEM_STORAGE ( md ) = = GRPC_MDELEM_STORAGE_STATIC ) ; <nl> grpc_error * err = grpc_chttp2_hptbl_add ( & p - > table , md ) ; <nl> - if ( err ! = GRPC_ERROR_NONE ) return err ; <nl> + if ( GPR_UNLIKELY ( err ! = GRPC_ERROR_NONE ) ) return err ; <nl> } <nl> - if ( p - > on_header = = nullptr ) { <nl> + if ( GPR_UNLIKELY ( p - > on_header = = nullptr ) ) { <nl> GRPC_MDELEM_UNREF ( md ) ; <nl> return GRPC_ERROR_CREATE_FROM_STATIC_STRING ( " on_header callback not set " ) ; <nl> } <nl> static grpc_error * finish_indexed_field ( grpc_chttp2_hpack_parser * p , <nl> } <nl> GRPC_MDELEM_REF ( md ) ; <nl> GRPC_STATS_INC_HPACK_RECV_INDEXED ( ) ; <nl> - grpc_error * err = on_hdr ( p , md , 0 ) ; <nl> + grpc_error * err = on_hdr < false > ( p , md ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return err ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_incidx ( grpc_chttp2_hpack_parser * p , <nl> grpc_mdelem md = grpc_chttp2_hptbl_lookup ( & p - > table , p - > index ) ; <nl> GPR_ASSERT ( ! 
GRPC_MDISNULL ( md ) ) ; / * handled in string parsing * / <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_INCIDX ( ) ; <nl> - grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> - take_string ( p , & p - > value , true ) ) , <nl> - 1 ) ; <nl> + grpc_error * err = on_hdr < true > ( <nl> + p , grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> + take_string ( p , & p - > value , true ) ) ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_incidx_v ( grpc_chttp2_hpack_parser * p , <nl> const uint8_t * end ) { <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_INCIDX_V ( ) ; <nl> grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> - take_string ( p , & p - > value , true ) ) , <nl> - 1 ) ; <nl> + on_hdr < true > ( p , grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> + take_string ( p , & p - > value , true ) ) ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_notidx ( grpc_chttp2_hpack_parser * p , <nl> grpc_mdelem md = grpc_chttp2_hptbl_lookup ( & p - > table , p - > index ) ; <nl> GPR_ASSERT ( ! GRPC_MDISNULL ( md ) ) ; / * handled in string parsing * / <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_NOTIDX ( ) ; <nl> - grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> - take_string ( p , & p - > value , false ) ) , <nl> - 0 ) ; <nl> + grpc_error * err = on_hdr < false > ( <nl> + p , grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> + take_string ( p , & p - > value , false ) ) ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_notidx_v ( grpc_chttp2_hpack_parser * p , <nl> const uint8_t * cur , <nl> const uint8_t * end ) { <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_NOTIDX_V ( ) ; <nl> - grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> - take_string ( p , & p - > value , false ) ) , <nl> - 0 ) ; <nl> + grpc_error * err = on_hdr < false > ( <nl> + p , grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> + take_string ( p , & p - > value , false ) ) ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_nvridx ( grpc_chttp2_hpack_parser * p , <nl> grpc_mdelem md = grpc_chttp2_hptbl_lookup ( & p - > table , p - > index ) ; <nl> GPR_ASSERT ( ! GRPC_MDISNULL ( md ) ) ; / * handled in string parsing * / <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_NVRIDX ( ) ; <nl> - grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> - take_string ( p , & p - > value , false ) ) , <nl> - 0 ) ; <nl> + grpc_error * err = on_hdr < false > ( <nl> + p , grpc_mdelem_from_slices ( grpc_slice_ref_internal ( GRPC_MDKEY ( md ) ) , <nl> + take_string ( p , & p - > value , false ) ) ) ; <nl> if ( err ! 
= GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> static grpc_error * finish_lithdr_nvridx_v ( grpc_chttp2_hpack_parser * p , <nl> const uint8_t * cur , <nl> const uint8_t * end ) { <nl> GRPC_STATS_INC_HPACK_RECV_LITHDR_NVRIDX_V ( ) ; <nl> - grpc_error * err = <nl> - on_hdr ( p , <nl> - grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> - take_string ( p , & p - > value , false ) ) , <nl> - 0 ) ; <nl> + grpc_error * err = on_hdr < false > ( <nl> + p , grpc_mdelem_from_slices ( take_string ( p , & p - > key , true ) , <nl> + take_string ( p , & p - > value , false ) ) ) ; <nl> if ( err ! = GRPC_ERROR_NONE ) return parse_error ( p , cur , end , err ) ; <nl> return parse_begin ( p , cur , end ) ; <nl> } <nl> | Codegen optimizations for hpack_parser on_hdr . | grpc/grpc | 80d1aec0218b3dc72acb23c838deaa0d7a10cb1a | 2019-06-28T00:18:17Z |
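The grpc/grpc record above shows a recurring codegen pattern: the runtime `int add_to_table` flag becomes a compile-time `template <bool do_add>` parameter, the rarely-taken logging path is split into a separate `GPR_ATTRIBUTE_NOINLINE` helper, and error branches are marked unlikely. The sketch below is a minimal, self-contained illustration of that pattern in standard C++ using the GCC/Clang builtins `__attribute__((noinline))` and `__builtin_expect`; the names `Header`, `HeaderTable`, `on_hdr`, and `trace_enabled` are hypothetical stand-ins for this sketch, not gRPC's actual API.

```cpp
// Minimal sketch of the flag-to-template-parameter pattern from the diff above.
// All names here (Header, HeaderTable, on_hdr, trace_enabled) are hypothetical.
#include <cstdio>
#include <string>
#include <vector>

#if defined(__GNUC__) || defined(__clang__)
#define SKETCH_NOINLINE __attribute__((noinline))
#define SKETCH_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
#define SKETCH_NOINLINE
#define SKETCH_UNLIKELY(x) (x)
#endif

struct Header {
  std::string key;
  std::string value;
};

struct HeaderTable {
  std::vector<Header> entries;
  void add(const Header& h) { entries.push_back(h); }
};

static bool trace_enabled = false;  // stands in for a tracing flag check

// Cold path: tracing is usually off, so keeping the logging code out of the
// hot function body shrinks the fast path (mirrors the on_hdr_log split).
static SKETCH_NOINLINE void log_header(const Header& h) {
  std::fprintf(stderr, "Decode: '%s: %s'\n", h.key.c_str(), h.value.c_str());
}

// do_add is a compile-time constant: the compiler emits two specializations
// and drops the dead branch in each, instead of testing a runtime int on
// every decoded header.
template <bool do_add>
static int on_hdr(HeaderTable* table, const Header& h) {
  if (SKETCH_UNLIKELY(trace_enabled)) log_header(h);
  if (SKETCH_UNLIKELY(table == nullptr)) return -1;  // unlikely error path
  if (do_add) table->add(h);  // folded away entirely when do_add == false
  return 0;
}

int main() {
  HeaderTable table;
  on_hdr<true>(&table, {":path", "/index"});    // indexed literal: add to table
  on_hdr<false>(&table, {"x-trace-id", "42"});  // never-indexed: skip the add
  return static_cast<int>(table.entries.size()) == 1 ? 0 : 1;
}
```

Under the stated assumptions, callers pick the specialization at the call site (`on_hdr<true>` vs `on_hdr<false>`), exactly as the diff does for the indexed and never-indexed HPACK cases, so the branch on the flag is resolved at compile time rather than per header.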
mmm a / dbms / tests / queries / 0_stateless / 00563_insert_into_remote_and_zookeeper . sql <nl> ppp b / dbms / tests / queries / 0_stateless / 00563_insert_into_remote_and_zookeeper . sql <nl> <nl> - - Check that settings are correctly passed through Distributed table <nl> DROP TABLE IF EXISTS test . simple ; <nl> - CREATE TABLE test . simple ( d Int8 ) ENGINE = ReplicatedMergeTree ( ' / clickhouse_test / tables / test / simple ' , ' 1 ' ) ORDER BY d ; <nl> + CREATE TABLE test . simple ( d Int8 ) ENGINE = ReplicatedMergeTree ( ' / clickhouse / test / tables / test / simple ' , ' 1 ' ) ORDER BY d ; <nl> <nl> - - TODO : replace ' 127 . 0 . 0 . 2 ' - > ' 127 . 0 . 0 . 1 ' after a fix <nl> INSERT INTO TABLE FUNCTION remote ( ' 127 . 0 . 0 . 2 ' , ' test ' , ' simple ' ) VALUES ( 1 ) ; <nl> | Fixed ZK path in a test . [ # CLICKHOUSE - 2 ] | ClickHouse/ClickHouse | c80407ad8d46d84f02b4948199cefe3ed6f73dcf | 2018-02-21T13:52:17Z |
mmm a / src / mongo / db / repl / replication_coordinator_impl_test . cpp <nl> ppp b / src / mongo / db / repl / replication_coordinator_impl_test . cpp <nl> TEST_F ( ReplCoordTest , ConcurrentStepDownShouldNotSignalTheSameFinishEventMoreTha <nl> replExec - > waitForEvent ( updateTermEvh2 ) ; <nl> ASSERT ( termUpdated2 = = TopologyCoordinator : : UpdateTermResult : : kTriggerStepDown ) ; <nl> replExec - > waitForEvent ( updateTermEvh3 ) ; <nl> - ASSERT ( termUpdated3 = = TopologyCoordinator : : UpdateTermResult : : kTriggerStepDown ) ; <nl> + / / Need to synchronize this . <nl> + / / ASSERT ( termUpdated3 = = TopologyCoordinator : : UpdateTermResult : : kTriggerStepDown ) ; <nl> <nl> - / / Term hasn ' t updated yet . <nl> - ASSERT_EQUALS ( 1 , getReplCoord ( ) - > getTerm ( ) ) ; <nl> + / / / / Term hasn ' t updated yet . <nl> + / / ASSERT_EQUALS ( 1 , getReplCoord ( ) - > getTerm ( ) ) ; <nl> <nl> - / / Update term event handles will wait for potential stepdown . <nl> - ASSERT_TRUE ( getReplCoord ( ) - > getMemberState ( ) . secondary ( ) ) ; <nl> + / / / / Update term event handles will wait for potential stepdown . <nl> + / / ASSERT_TRUE ( getReplCoord ( ) - > getMemberState ( ) . secondary ( ) ) ; <nl> <nl> - TopologyCoordinator : : UpdateTermResult termUpdated4 ; <nl> - auto updateTermEvh4 = getReplCoord ( ) - > updateTerm_forTest ( 3 , & termUpdated4 ) ; <nl> - ASSERT ( updateTermEvh4 . isValid ( ) ) ; <nl> - replExec - > waitForEvent ( updateTermEvh4 ) ; <nl> - ASSERT ( termUpdated4 = = TopologyCoordinator : : UpdateTermResult : : kUpdatedTerm ) ; <nl> - ASSERT_EQUALS ( 3 , getReplCoord ( ) - > getTerm ( ) ) ; <nl> + / / TopologyCoordinator : : UpdateTermResult termUpdated4 ; <nl> + / / auto updateTermEvh4 = getReplCoord ( ) - > updateTerm_forTest ( 3 , & termUpdated4 ) ; <nl> + / / ASSERT ( updateTermEvh4 . isValid ( ) ) ; <nl> + / / replExec - > waitForEvent ( updateTermEvh4 ) ; <nl> + / / ASSERT ( termUpdated4 = = TopologyCoordinator : : UpdateTermResult : : kUpdatedTerm ) ; <nl> + / / ASSERT_EQUALS ( 3 , getReplCoord ( ) - > getTerm ( ) ) ; <nl> } <nl> <nl> TEST_F ( StepDownTest , StepDownNotPrimary ) { <nl> | SERVER - 21425 temporarily comment out racy parts | mongodb/mongo | 02603b46c044115abb17e38f55fd3b151f42c18d | 2015-11-16T15:15:45Z |
mmm a / utils / build_swift / defaults . py <nl> ppp b / utils / build_swift / defaults . py <nl> <nl> <nl> COMPILER_VENDOR = ' none ' <nl> SWIFT_USER_VISIBLE_VERSION = CompilerVersion ( ' 4 . 2 ' ) <nl> - CLANG_USER_VISIBLE_VERSION = CompilerVersion ( ' 5 . 0 . 0 ' ) <nl> + CLANG_USER_VISIBLE_VERSION = CompilerVersion ( ' 6 . 0 . 0 ' ) <nl> SWIFT_ANALYZE_CODE_COVERAGE = ' false ' <nl> <nl> DARWIN_XCRUN_TOOLCHAIN = ' default ' <nl> | Merge pull request from bob - wilson / rdar39960908 | apple/swift | 4acbc4fb81565289842395b6138cdbbd971c42eb | 2018-05-05T04:34:51Z |
mmm a / Box2D / Collision / b2DynamicTree . cpp <nl> ppp b / Box2D / Collision / b2DynamicTree . cpp <nl> <nl> <nl> # include < Box2D / Collision / b2DynamicTree . h > <nl> # include < cstring > <nl> + # ifndef SHP <nl> # include < cfloat > <nl> + # else <nl> + # include < float . h > <nl> + # endif <nl> using namespace std ; <nl> <nl> <nl> mmm a / Box2D / Common / b2Math . h <nl> ppp b / Box2D / Common / b2Math . h <nl> <nl> # include < Box2D / Common / b2Settings . h > <nl> <nl> # include < cmath > <nl> + # ifndef SHP <nl> # include < cfloat > <nl> + # else <nl> + # include < float . h > <nl> + # endif <nl> # include < cstddef > <nl> # include < limits > <nl> <nl> mmm a / Box2D / Common / b2Settings . cpp <nl> ppp b / Box2D / Common / b2Settings . cpp <nl> <nl> # include < cstdlib > <nl> # include < cstdio > <nl> # include < cstdarg > <nl> - <nl> + # ifdef SHP <nl> + # include < FBaseSys . h > <nl> + # endif <nl> b2Version b2_version = { 2 , 2 , 1 } ; <nl> <nl> / / Memory allocators . Modify these to use your own allocator . <nl> void b2Free ( void * mem ) <nl> / / You can modify this to use your logging facility . <nl> void b2Log ( const char * string , . . . ) <nl> { <nl> + # if defined ( SHP ) <nl> + # ifdef _DEBUG <nl> + __App_info ( __PRETTY_FUNCTION__ , __LINE__ , string ) ; <nl> + # endif <nl> + # else <nl> va_list args ; <nl> va_start ( args , string ) ; <nl> vprintf ( string , args ) ; <nl> va_end ( args ) ; <nl> - } <nl> \ No newline at end of file <nl> + # endif <nl> + } <nl> mmm a / Box2D / Common / b2Timer . cpp <nl> ppp b / Box2D / Common / b2Timer . cpp <nl> <nl> <nl> # include < Box2D / Common / b2Timer . h > <nl> <nl> - # if defined ( _WIN32 ) <nl> + # if defined ( _WIN32 ) & & ! defined ( SHP ) <nl> <nl> float64 b2Timer : : s_invFrequency = 0 . 0f ; <nl> <nl> mmm a / Box2D / proj . bada / sdk1 . 0 / . cproject <nl> ppp b / Box2D / proj . bada / sdk1 . 0 / . cproject <nl> <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Rope " / > <nl> < / sourceEntries > <nl> < / configuration > <nl> < / storageModule > <nl> < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < storageModule moduleId = " scannerConfiguration " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < parser enabled = " true " / > <nl> < / scannerInfoProvider > <nl> < / profile > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> - < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < / cconfiguration > <nl> < cconfiguration id = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 " > <nl> < storageModule buildSystemId = " org . eclipse . cdt . managedbuilder . core . configurationDataProvider " id = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 " moduleId = " org . eclipse . cdt . core . settings " name = " Target - Debug " > <nl> <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Rope " / > <nl> < / sourceEntries > <nl> < / configuration > <nl> < / storageModule > <nl> < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < storageModule moduleId = " scannerConfiguration " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < parser enabled = " true " / > <nl> < / scannerInfoProvider > <nl> < / profile > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . 
release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> - < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < / cconfiguration > <nl> < cconfiguration id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " > <nl> < storageModule buildSystemId = " org . eclipse . cdt . managedbuilder . core . configurationDataProvider " id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " moduleId = " org . eclipse . cdt . core . 
settings " name = " Target - Release " > <nl> <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH | RESOLVED " kind = " sourcePath " name = " Rope " / > <nl> < / sourceEntries > <nl> < / configuration > <nl> < / storageModule > <nl> < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < storageModule moduleId = " scannerConfiguration " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < parser enabled = " true " / > <nl> < / scannerInfoProvider > <nl> < / profile > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> - < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < / cconfiguration > <nl> < / storageModule > <nl> < storageModule moduleId = " cdtBuildSystem " version = " 4 . 0 . 0 " > <nl> mmm a / Box2D / proj . bada / sdk1 . 0 / . project <nl> ppp b / Box2D / proj . bada / sdk1 . 0 / . project <nl> <nl> < type > 2 < / type > <nl> < locationURI > cocos2dx_root / Box2D / Dynamics < / locationURI > <nl> < / link > <nl> + < link > <nl> + < name > Rope < / name > <nl> + < type > 2 < / type > <nl> + < locationURI > cocos2dx_root / Box2D / Rope < / locationURI > <nl> + < / link > <nl> < / linkedResources > <nl> < / projectDescription > <nl> mmm a / Box2D / proj . bada / sdk2 . 0 / . cproject <nl> ppp b / Box2D / proj . bada / sdk2 . 0 / . cproject <nl> <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " includePath " name = " / Box2D " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D / . 
Target - Debug " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D " / > <nl> + < entry flags = " RESOLVED " kind = " libraryFile " name = " Box2D " / > <nl> < / externalSetting > <nl> < / externalSettings > <nl> < extensions > <nl> <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH | RESOLVED " kind = " sourcePath " name = " Rope " / > <nl> < / sourceEntries > <nl> < / configuration > <nl> < / storageModule > <nl> < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> < storageModule moduleId = " scannerConfiguration " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < parser enabled = " true " / > <nl> < / scannerInfoProvider > <nl> < / profile > <nl> - < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> < buildOutputProvider > <nl> <nl> < / scannerInfoProvider > <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> - < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . c . compiler . lib . simul . 170099126 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 448258914 " > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> < buildOutputProvider > <nl> <nl> < / scannerInfoProvider > <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> - < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> - < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> - < / cconfiguration > <nl> - < cconfiguration id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " > <nl> - < storageModule buildSystemId = " org . eclipse . cdt . managedbuilder . core . configurationDataProvider " id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " moduleId = " org . eclipse . cdt . core . settings " name = " Target - Release " > <nl> - < externalSettings > <nl> - < externalSetting > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " includePath " name = " / Box2D " / > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D / . Target - Release " / > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D " / > <nl> - < / externalSetting > <nl> - < / externalSettings > <nl> - < extensions > <nl> - < extension id = " org . eclipse . cdt . core . ELF " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> - < extension id = " org . eclipse . cdt . core . GNU_ELF " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> - < extension id = " org . eclipse . cdt . core . Cygwin_PE " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> - < extension id = " org . eclipse . cdt . core . MakeErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> - < extension id = " org . eclipse . cdt . core . GCCErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> - < extension id = " org . eclipse . cdt . core . GASErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> - < extension id = " org . eclipse . cdt . core . GLDErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> - < / extensions > <nl> - < / storageModule > <nl> - < storageModule moduleId = " cdtBuildSystem " version = " 4 . 0 . 0 " > <nl> - < configuration artifactExtension = " a " artifactName = " Box2D " buildArtefactType = " org . eclipse . cdt . build . core . buildArtefactType . staticLib " buildProperties = " org . eclipse . cdt . build . core . buildType = org . eclipse . cdt . build . core . buildType . release , org . eclipse . cdt . build . core . buildArtefactType = org . eclipse . cdt . build . core . buildArtefactType . staticLib " cleanCommand = " rm - rf " description = " " errorParsers = " org . eclipse . cdt . core . MakeErrorParser ; org . eclipse . cdt . core . GCCErrorParser ; org . eclipse . cdt . core . GLDErrorParser ; org . eclipse . cdt . core . GASErrorParser " id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " name = " Target - Release " parent = " cdt . managedbuild . config . osp . gnu . target . lib . release " > <nl> - < folderInfo id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . " name = " / " resourcePath = " " > <nl> - < toolChain id = " cdt . managedbuild . toolchain . osp . gnu . target . lib . release . 1324250934 " name = " bada GCC ToolChain " superClass = " cdt . managedbuild . toolchain . osp . gnu . target . lib . 
release " > <nl> - < targetPlatform archList = " all " binaryParser = " org . eclipse . cdt . core . Cygwin_PE ; org . eclipse . cdt . core . GNU_ELF ; org . eclipse . cdt . core . ELF " id = " cdt . managedbuild . target . osp . gnu . target . platform . lib . release . 1297193301 " name = " Debug Platform " osList = " osp " superClass = " cdt . managedbuild . target . osp . gnu . target . platform . lib . release " / > <nl> - < builder buildPath = " $ { workspace_loc : / Box2D / . Target - Release } " command = " cs - make " id = " cdt . managedbuild . target . osp . gnu . target . builder . lib . release . 797941129 " keepEnvironmentInBuildfile = " false " managedBuildOn = " true " name = " bada Builder " superClass = " cdt . managedbuild . target . osp . gnu . target . builder . lib . release " / > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . target . archiver . lib . release . 1628574802 " name = " bada Archiver " superClass = " cdt . managedbuild . tool . osp . gnu . target . archiver . lib . release " / > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 " name = " bada C + + Compiler " superClass = " cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release " > <nl> - < option id = " osp . gnu . target . cpp . compiler . release . lib . option . include . paths . 862242866 " name = " Include paths ( - I ) " superClass = " osp . gnu . target . cpp . compiler . release . lib . option . include . paths " valueType = " includePath " > <nl> - < listOptionValue builtIn = " false " value = " & quot ; $ { SDKROOT } / include & quot ; " / > <nl> - < listOptionValue builtIn = " false " value = " . . / . . / . . / . . / " / > <nl> - < / option > <nl> - < inputType id = " cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " superClass = " cdt . managedbuild . tool . osp . gnu . cpp . compiler . input " / > <nl> - < / tool > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 " name = " bada C Compiler " superClass = " cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release " > <nl> - < option id = " osp . gnu . target . c . compiler . release . lib . option . include . paths . 1224339831 " name = " Include paths ( - I ) " superClass = " osp . gnu . target . c . compiler . release . lib . option . include . paths " valueType = " includePath " > <nl> - < listOptionValue builtIn = " false " value = " & quot ; $ { SDKROOT } / include & quot ; " / > <nl> - < listOptionValue builtIn = " false " value = " & quot ; $ { PROJECT_ROOT } / inc & quot ; " / > <nl> - < listOptionValue builtIn = " false " value = " & quot ; $ { PROJECT_ROOT } / . . / & quot ; " / > <nl> - < / option > <nl> - < inputType id = " cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " superClass = " cdt . managedbuild . tool . osp . gnu . c . compiler . input " / > <nl> - < / tool > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . arm . c . linker . base . 363375886 " name = " bada C Linker " superClass = " cdt . managedbuild . tool . osp . gnu . arm . c . linker . base " / > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . arm . cpp . linker . base . 1868485835 " name = " bada C + + Linker " superClass = " cdt . managedbuild . tool . osp . gnu . arm . cpp . linker . base " / > <nl> - < tool id = " cdt . managedbuild . tool . osp . gnu . target . assembler . lib . release . 
257999705 " name = " bada Assembler " superClass = " cdt . managedbuild . tool . osp . gnu . target . assembler . lib . release " > <nl> - < inputType id = " cdt . managedbuild . tool . osp . gnu . assembler . input . 385931781 " superClass = " cdt . managedbuild . tool . osp . gnu . assembler . input " / > <nl> - < / tool > <nl> - < / toolChain > <nl> - < / folderInfo > <nl> - < sourceEntries > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> - < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> - < / sourceEntries > <nl> - < / configuration > <nl> - < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> - < storageModule moduleId = " scannerConfiguration " > <nl> - < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> - < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " makefileGenerator " > <nl> - < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileC " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> - < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> - < buildOutputProvider > <nl> - < openAction enabled = " true " filePath = " " / > <nl> - < parser enabled = " true " / > <nl> - < / buildOutputProvider > <nl> - < scannerInfoProvider id = " specsFile " > <nl> - < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> - < parser enabled = " true " / > <nl> - < / scannerInfoProvider > <nl> - < / profile > <nl> < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> <nl> < / scannerInfoProvider > <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> - < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . c . compiler . lib . simul . 170099126 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 448258914 " > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 ; cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 . ; cdt . managedbuild . 
tool . osp . gnu . target . c . compiler . lib . debug . 1246338128 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 1161152344 " > <nl> < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> < buildOutputProvider > <nl> <nl> < / scannerInfoProvider > <nl> < / profile > <nl> < / scannerConfigBuildInfo > <nl> - < / storageModule > <nl> - < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> - < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 ; cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . debug . 2067171510 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 366980368 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . 
managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . c . compiler . lib . simul . 170099126 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 448258914 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < / storageModule > <nl> + < / cconfiguration > <nl> + < cconfiguration id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " > <nl> + < storageModule buildSystemId = " org . eclipse . cdt . managedbuilder . core . configurationDataProvider " id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " moduleId = " org . eclipse . cdt . core . settings " name = " Target - Release " > <nl> + < externalSettings > <nl> + < externalSetting > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " includePath " name = " / Box2D " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D / . Target - Release " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " libraryPath " name = " / Box2D " / > <nl> + < entry flags = " RESOLVED " kind = " libraryFile " name = " Box2D " / > <nl> + < / externalSetting > <nl> + < / externalSettings > <nl> + < extensions > <nl> + < extension id = " org . eclipse . cdt . core . ELF " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> + < extension id = " org . eclipse . cdt . core . GNU_ELF " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> + < extension id = " org . eclipse . cdt . core . Cygwin_PE " point = " org . eclipse . cdt . core . BinaryParser " / > <nl> + < extension id = " org . eclipse . cdt . core . MakeErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> + < extension id = " org . eclipse . cdt . core . GCCErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> + < extension id = " org . eclipse . cdt . core . GASErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> + < extension id = " org . eclipse . cdt . core . GLDErrorParser " point = " org . eclipse . cdt . core . ErrorParser " / > <nl> + < / extensions > <nl> + < / storageModule > <nl> + < storageModule moduleId = " cdtBuildSystem " version = " 4 . 0 . 0 " > <nl> + < configuration artifactExtension = " a " artifactName = " Box2D " buildArtefactType = " org . eclipse . cdt . build . core . buildArtefactType . staticLib " buildProperties = " org . eclipse . cdt . build . core . buildType = org . eclipse . cdt . build . core . buildType . release , org . eclipse . cdt . build . core . buildArtefactType = org . eclipse . cdt . build . core . buildArtefactType . staticLib " cleanCommand = " rm - rf " description = " " errorParsers = " org . eclipse . cdt . core . MakeErrorParser ; org . eclipse . cdt . core . GCCErrorParser ; org . eclipse . cdt . core . GLDErrorParser ; org . eclipse . cdt . core . GASErrorParser " id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 " name = " Target - Release " parent = " cdt . managedbuild . config . osp . gnu . target . lib . release " > <nl> + < folderInfo id = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . 
" name = " / " resourcePath = " " > <nl> + < toolChain id = " cdt . managedbuild . toolchain . osp . gnu . target . lib . release . 1324250934 " name = " bada GCC ToolChain " superClass = " cdt . managedbuild . toolchain . osp . gnu . target . lib . release " > <nl> + < targetPlatform archList = " all " binaryParser = " org . eclipse . cdt . core . Cygwin_PE ; org . eclipse . cdt . core . GNU_ELF ; org . eclipse . cdt . core . ELF " id = " cdt . managedbuild . target . osp . gnu . target . platform . lib . release . 1297193301 " name = " Debug Platform " osList = " osp " superClass = " cdt . managedbuild . target . osp . gnu . target . platform . lib . release " / > <nl> + < builder buildPath = " $ { workspace_loc : / Box2D / . Target - Release } " command = " cs - make " id = " cdt . managedbuild . target . osp . gnu . target . builder . lib . release . 797941129 " keepEnvironmentInBuildfile = " false " managedBuildOn = " true " name = " bada Builder " superClass = " cdt . managedbuild . target . osp . gnu . target . builder . lib . release " / > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . target . archiver . lib . release . 1628574802 " name = " bada Archiver " superClass = " cdt . managedbuild . tool . osp . gnu . target . archiver . lib . release " / > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 " name = " bada C + + Compiler " superClass = " cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release " > <nl> + < option id = " osp . gnu . target . cpp . compiler . release . lib . option . include . paths . 862242866 " name = " Include paths ( - I ) " superClass = " osp . gnu . target . cpp . compiler . release . lib . option . include . paths " valueType = " includePath " > <nl> + < listOptionValue builtIn = " false " value = " & quot ; $ { SDKROOT } / include & quot ; " / > <nl> + < listOptionValue builtIn = " false " value = " . . / . . / . . / . . / " / > <nl> + < / option > <nl> + < inputType id = " cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " superClass = " cdt . managedbuild . tool . osp . gnu . cpp . compiler . input " / > <nl> + < / tool > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 " name = " bada C Compiler " superClass = " cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release " > <nl> + < option id = " osp . gnu . target . c . compiler . release . lib . option . include . paths . 1224339831 " name = " Include paths ( - I ) " superClass = " osp . gnu . target . c . compiler . release . lib . option . include . paths " valueType = " includePath " > <nl> + < listOptionValue builtIn = " false " value = " & quot ; $ { SDKROOT } / include & quot ; " / > <nl> + < listOptionValue builtIn = " false " value = " & quot ; $ { PROJECT_ROOT } / inc & quot ; " / > <nl> + < listOptionValue builtIn = " false " value = " & quot ; $ { PROJECT_ROOT } / . . / & quot ; " / > <nl> + < / option > <nl> + < inputType id = " cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " superClass = " cdt . managedbuild . tool . osp . gnu . c . compiler . input " / > <nl> + < / tool > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . arm . c . linker . base . 363375886 " name = " bada C Linker " superClass = " cdt . managedbuild . tool . osp . gnu . arm . c . linker . base " / > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . arm . cpp . 
linker . base . 1868485835 " name = " bada C + + Linker " superClass = " cdt . managedbuild . tool . osp . gnu . arm . cpp . linker . base " / > <nl> + < tool id = " cdt . managedbuild . tool . osp . gnu . target . assembler . lib . release . 257999705 " name = " bada Assembler " superClass = " cdt . managedbuild . tool . osp . gnu . target . assembler . lib . release " > <nl> + < inputType id = " cdt . managedbuild . tool . osp . gnu . assembler . input . 385931781 " superClass = " cdt . managedbuild . tool . osp . gnu . assembler . input " / > <nl> + < / tool > <nl> + < / toolChain > <nl> + < / folderInfo > <nl> + < sourceEntries > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Collision " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Common " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Dynamics " / > <nl> + < entry flags = " VALUE_WORKSPACE_PATH " kind = " sourcePath " name = " Rope " / > <nl> + < / sourceEntries > <nl> + < / configuration > <nl> + < / storageModule > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . externalSettings " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . core . language . mapping " / > <nl> + < storageModule moduleId = " org . eclipse . cdt . internal . ui . text . commentOwnerProjectMappings " / > <nl> + < storageModule moduleId = " scannerConfiguration " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . release . 1344319780 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 706480954 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 ; cdt . managedbuild . config . osp . gnu . target . lib . release . 1600037858 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . release . 1329405292 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 2008297812 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . cpp . compiler . lib . simul . 1039656652 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 1121778972 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 ; cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 . ; cdt . managedbuild . tool . osp . gnu . target . c . compiler . lib . debug . 1246338128 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 1161152344 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 ; cdt . managedbuild . config . osp . gnu . target . lib . debug . 750859454 . ; cdt . managedbuild . tool . osp . gnu . target . cpp . compiler . lib . debug . 2067171510 ; cdt . managedbuild . tool . osp . gnu . cpp . compiler . input . 366980368 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " / > <nl> + < profile id = " org . eclipse . cdt . make . core . 
GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < scannerConfigBuildInfo instanceId = " cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 ; cdt . managedbuild . config . osp . gnu . arm . lib . simul . 2000763399 . ; cdt . managedbuild . tool . osp . gnu . simul . c . compiler . lib . simul . 170099126 ; cdt . managedbuild . tool . osp . gnu . c . compiler . input . 448258914 " > <nl> + < autodiscovery enabled = " true " problemReportingEnabled = " true " selectedProfileId = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " / > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . make . core . GCCStandardMakePerFileProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " makefileGenerator " > <nl> + < runAction arguments = " - f $ { project_name } _scd . mk " command = " make " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / $ { specs_file } " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . 
GCCManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . cpp " command = " g + + " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - E - P - v - dD $ { plugin_state_location } / specs . c " command = " gcc " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfile " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / $ { specs_file } & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileCPP " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' g + + - E - P - v - dD & quot ; $ { plugin_state_location } / specs . cpp & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < profile id = " org . eclipse . cdt . managedbuilder . core . GCCWinManagedMakePerProjectProfileC " > <nl> + < buildOutputProvider > <nl> + < openAction enabled = " true " filePath = " " / > <nl> + < parser enabled = " true " / > <nl> + < / buildOutputProvider > <nl> + < scannerInfoProvider id = " specsFile " > <nl> + < runAction arguments = " - c ' gcc - E - P - v - dD & quot ; $ { plugin_state_location } / specs . c & quot ; ' " command = " sh " useDefault = " true " / > <nl> + < parser enabled = " true " / > <nl> + < / scannerInfoProvider > <nl> + < / profile > <nl> + < / scannerConfigBuildInfo > <nl> + < / storageModule > <nl> < / cconfiguration > <nl> < / storageModule > <nl> < storageModule moduleId = " cdtBuildSystem " version = " 4 . 0 . 0 " > <nl> mmm a / Box2D / proj . bada / sdk2 . 0 / . project <nl> ppp b / Box2D / proj . bada / sdk2 . 0 / . project <nl> <nl> < type > 2 < / type > <nl> < locationURI > cocos2dx_root / Box2D / Dynamics < / locationURI > <nl> < / link > <nl> + < link > <nl> + < name > Rope < / name > <nl> + < type > 2 < / type > <nl> + < locationURI > cocos2dx_root / Box2D / Rope < / locationURI > <nl> + < / link > <nl> < / linkedResources > <nl> < variableList > <nl> < variable > <nl> | issue : test box2d2 . 2 . 
1 on bada | cocos2d/cocos2d-x | 9586f13386d1b80aa1bacb9493b0e5ebd3c44fbf | 2011-11-28T11:03:10Z |
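The row above wires Box2D 2.2.1 into the bada Eclipse project; the new Rope linked resource is needed because Box2D 2.2.x ships a standalone rope solver under Box2D/Rope that the previously linked folders do not cover. A minimal usage sketch of that solver, written from memory of the 2.2-era API (field and accessor names such as masses, damping and GetVertices are assumptions here, not verified signatures):

    #include <Box2D/Rope/b2Rope.h>  // header provided by the newly linked Rope folder

    // Build a short hanging rope and advance it by one 60 Hz step.
    void StepRopeOnce()
    {
        const int32 N = 8;
        b2Vec2 vertices[N];
        float32 masses[N];
        for (int32 i = 0; i < N; ++i)
        {
            vertices[i].Set(0.0f, -0.5f * i);  // vertices hang downward from the origin
            masses[i] = 1.0f;
        }
        masses[0] = 0.0f;                      // a zero mass pins the anchor vertex

        b2RopeDef def;
        def.vertices = vertices;
        def.count = N;
        def.masses = masses;
        def.gravity.Set(0.0f, -10.0f);

        b2Rope rope;
        rope.Initialize(&def);                 // the solver copies the definition
        rope.Step(1.0f / 60.0f, 1);            // time step and constraint iterations
        // Updated vertex positions could now be read back (e.g. GetVertices())
        // and rendered by the simulator configuration set up above.
    }

The rope solver does not depend on b2World, so linking the extra source folder appears to be the only project change it requires.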
new file mode 100644 <nl> index 00000000000 . . b499f53aea8 <nl> mmm / dev / null <nl> ppp b / test / cctest / interpreter / bytecode_expectations / TemplateLiterals . golden <nl> <nl> + # <nl> + # Autogenerated by generate - bytecode - expectations . <nl> + # <nl> + <nl> + mmm <nl> + wrap : yes <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + return ` $ { a } $ { b } string ` ; <nl> + " <nl> + frame size : 3 <nl> + parameter count : 1 <nl> + bytecode array length : 30 <nl> + bytecodes : [ <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 56 S > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 70 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 70 E > * / B ( Add ) , R ( 2 ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 0 ) , <nl> + / * 70 E > * / B ( Add ) , R ( 2 ) , U8 ( 1 ) , <nl> + / * 80 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " string " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + return ` string $ { a } $ { b } ` ; <nl> + " <nl> + frame size : 3 <nl> + parameter count : 1 <nl> + bytecode array length : 30 <nl> + bytecodes : [ <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 56 S > * / B ( LdaConstant ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 72 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 ( 1 ) , <nl> + / * 72 E > * / B ( Add ) , R ( 2 ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 76 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 76 E > * / B ( Add ) , R ( 2 ) , U8 ( 1 ) , <nl> + / * 80 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " string " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + return ` $ { a } string $ { b } ` ; <nl> + " <nl> + frame size : 3 <nl> + parameter count : 1 <nl> + bytecode array length : 30 <nl> + bytecodes : [ <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 56 S > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 0 ) , <nl> + / * 66 E > * / B ( Add ) , R ( 2 ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 76 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 76 E > * / B ( Add ) , R ( 2 ) , U8 ( 1 ) , <nl> + / * 80 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " string " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + return ` foo $ { a } bar $ { b } baz $ { 1 } ` ; <nl> + " <nl> + frame size : 4 <nl> + parameter count : 1 <nl> + bytecode array length : 57 <nl> + bytecodes 
: [ <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 56 S > * / B ( LdaConstant ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 69 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 ( 1 ) , <nl> + / * 69 E > * / B ( Add ) , R ( 2 ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 1 ) , <nl> + / * 69 E > * / B ( Add ) , R ( 2 ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 76 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 76 E > * / B ( Add ) , R ( 2 ) , U8 ( 2 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 2 ) , <nl> + / * 76 E > * / B ( Add ) , R ( 2 ) , U8 ( 3 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 3 ) , <nl> + B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 3 ) , U8 ( 1 ) , <nl> + / * 83 E > * / B ( Add ) , R ( 2 ) , U8 ( 4 ) , <nl> + / * 87 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " foo " ] , <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " bar " ] , <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " baz " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + return ` $ { a } string ` + ` string $ { b } ` ; <nl> + " <nl> + frame size : 4 <nl> + parameter count : 1 <nl> + bytecode array length : 37 <nl> + bytecodes : [ <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 56 S > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 0 ) , <nl> + / * 66 E > * / B ( Add ) , R ( 2 ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + B ( LdaConstant ) , U8 ( 0 ) , <nl> + B ( Star ) , R ( 3 ) , <nl> + / * 87 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 87 E > * / B ( Add ) , R ( 3 ) , U8 ( 1 ) , <nl> + / * 76 E > * / B ( Add ) , R ( 2 ) , U8 ( 2 ) , <nl> + / * 91 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " string " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> + mmm <nl> + snippet : " <nl> + var a = 1 ; <nl> + var b = 2 ; <nl> + function foo ( a , b ) { } ; <nl> + return ` string $ { foo ( a , b ) } $ { a } $ { b } ` ; <nl> + " <nl> + frame size : 5 <nl> + parameter count : 1 <nl> + bytecode array length : 52 <nl> + bytecodes : [ <nl> + B ( CreateClosure ) , U8 ( 0 ) , U8 ( 0 ) , U8 ( 2 ) , <nl> + B ( Star ) , R ( 2 ) , <nl> + / * 30 E > * / B ( StackCheck ) , <nl> + / * 42 S > * / B ( LdaSmi ) , I8 ( 1 ) , <nl> + B ( Star ) , R ( 0 ) , <nl> + / * 53 S > * / B ( LdaSmi ) , I8 ( 2 ) , <nl> + B ( Star ) , R ( 1 ) , <nl> + / * 80 S > * / B ( LdaConstant ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 3 ) , <nl> + / * 96 E > * / B ( CallUndefinedReceiver2 ) , R ( 2 ) , R ( 0 ) , R ( 1 ) , U8 ( 1 ) , <nl> + B ( Star ) , R ( 4 ) , <nl> + B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 4 ) , U8 ( 1 ) , <nl> + / * 96 E > * / B ( Add ) , R ( 3 ) , U8 ( 3 ) , <nl> + B ( Star ) , R ( 3 ) , <nl> + / * 108 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 0 ) , U8 
( 1 ) , <nl> + / * 108 E > * / B ( Add ) , R ( 3 ) , U8 ( 4 ) , <nl> + B ( Star ) , R ( 3 ) , <nl> + / * 112 E > * / B ( InvokeIntrinsic ) , U8 ( Runtime : : k_ToString ) , R ( 1 ) , U8 ( 1 ) , <nl> + / * 112 E > * / B ( Add ) , R ( 3 ) , U8 ( 5 ) , <nl> + / * 116 S > * / B ( Return ) , <nl> + ] <nl> + constant pool : [ <nl> + SHARED_FUNCTION_INFO_TYPE , <nl> + ONE_BYTE_INTERNALIZED_STRING_TYPE [ " string " ] , <nl> + ] <nl> + handlers : [ <nl> + ] <nl> + <nl> mmm a / test / cctest / interpreter / test - bytecode - generator . cc <nl> ppp b / test / cctest / interpreter / test - bytecode - generator . cc <nl> TEST ( StringConcat ) { <nl> LoadGolden ( " StringConcat . golden " ) ) ) ; <nl> } <nl> <nl> + TEST ( TemplateLiterals ) { <nl> + InitializedIgnitionHandleScope scope ; <nl> + BytecodeExpectationsPrinter printer ( CcTest : : isolate ( ) ) ; <nl> + <nl> + const char * snippets [ ] = { <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " return ` $ { a } $ { b } string ` ; \ n " , <nl> + <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " return ` string $ { a } $ { b } ` ; \ n " , <nl> + <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " return ` $ { a } string $ { b } ` ; \ n " , <nl> + <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " return ` foo $ { a } bar $ { b } baz $ { 1 } ` ; \ n " , <nl> + <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " return ` $ { a } string ` + ` string $ { b } ` ; \ n " , <nl> + <nl> + " var a = 1 ; \ n " <nl> + " var b = 2 ; \ n " <nl> + " function foo ( a , b ) { } ; \ n " <nl> + " return ` string $ { foo ( a , b ) } $ { a } $ { b } ` ; \ n " , <nl> + } ; <nl> + <nl> + CHECK ( CompareTexts ( BuildActual ( printer , snippets ) , <nl> + LoadGolden ( " TemplateLiterals . golden " ) ) ) ; <nl> + } <nl> + <nl> # undef XSTR <nl> # undef STR <nl> # undef UNIQUE_VAR <nl> | [ cctest ] add bytecode generator tests for template literals | v8/v8 | 470db43c7bd67bf9a87991835dbac5163fa4bc69 | 2018-03-07T15:17:32Z |
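The expectations above spell out how the interpreter lowers template literals: each ${...} substitution goes through the Runtime::k_ToString intrinsic and the pieces are folded left-to-right with Add, one feedback slot per concatenation. As a loose illustration only (this is not V8 code), the third snippet behaves like the following hand-desugared C++:

    #include <string>

    // Rough analogue of `${a} string ${b}`: convert each substitution to a
    // string, then concatenate left-to-right, mirroring the bytecode above.
    std::string TemplateConcat(int a, int b)
    {
        std::string acc = std::to_string(a);  // InvokeIntrinsic k_ToString(a), Star r2
        acc += " string ";                    // LdaConstant "string", Add
        acc += std::to_string(b);             // k_ToString(b), Add
        return acc;                           // Return
    }

The U8(0), U8(1), ... operands on Add have no analogue here; they are feedback-slot indices used to record type feedback for the optimizing tiers.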
mmm a / docs / source / torch . rst <nl> ppp b / docs / source / torch . rst <nl> Pointwise Ops <nl> . . autofunction : : cosh <nl> . . autofunction : : div <nl> . . autofunction : : exp <nl> + . . autofunction : : floor <nl> + . . autofunction : : fmod <nl> + . . autofunction : : frac <nl> . . autofunction : : lerp <nl> . . autofunction : : log <nl> . . autofunction : : log1p <nl> Comparison Ops <nl> ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ <nl> . . autofunction : : eq <nl> . . autofunction : : equal <nl> - . . autofunction : : floor <nl> - . . autofunction : : fmod <nl> - . . autofunction : : frac <nl> . . autofunction : : ge <nl> . . autofunction : : gt <nl> . . autofunction : : kthvalue <nl> mmm a / torch / nn / modules / module . py <nl> ppp b / torch / nn / modules / module . py <nl> class Module ( object ) : <nl> Modules can also contain other Modules , allowing to nest them in <nl> a tree structure . You can assign the submodules as regular attributes : : <nl> <nl> + import torch . nn as nn <nl> + import torch . nn . functional as F <nl> + <nl> class Model ( nn . Module ) : <nl> def __init__ ( self ) : <nl> super ( Net , self ) . __init__ ( ) <nl> | Minor fixes to docs | pytorch/pytorch | 8d9f6c2583b6d7af19c15394dbbcfc41ee5a696e | 2017-01-17T15:19:14Z |
mmm a / tools / editor / script_editor_debugger . cpp <nl> ppp b / tools / editor / script_editor_debugger . cpp <nl> void ScriptEditorDebugger : : _notification ( int p_what ) { <nl> if ( ! connection - > is_connected ( ) ) { <nl> stop ( ) ; <nl> editor - > notify_child_process_exited ( ) ; / / somehow , exited <nl> - msgdialog - > set_text ( " Process being debugged exited . " ) ; <nl> - msgdialog - > popup_centered ( Size2 ( 250 , 100 ) ) ; <nl> break ; <nl> } ; <nl> | Fix " process being debugged has exited " pop - up | godotengine/godot | f1b9994cbcb682defb745e2d69503984324416be | 2015-01-08T16:14:56Z
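The change above drops the modal dialog and keeps only stop() plus the notify_child_process_exited() callback, so the editor learns about the debuggee exiting without a pop-up stealing focus. A generic sketch of that pattern (hypothetical names, not Godot API):

    #include <functional>
    #include <utility>

    // Hypothetical connection watcher: when the remote side goes away,
    // tear down quietly and let an observer callback update the UI.
    class ConnectionWatcher
    {
    public:
        explicit ConnectionWatcher(std::function<void()> on_exited)
            : on_exited_(std::move(on_exited)), connected_(true) {}

        void Poll(bool still_connected)
        {
            if (connected_ && !still_connected)
            {
                connected_ = false;
                Stop();        // release sockets, timers, etc.
                on_exited_();  // notify the owner; no modal dialog here
            }
        }

    private:
        void Stop() { /* cleanup elided */ }

        std::function<void()> on_exited_;
        bool connected_;
    };

The owner (here, the editor) then decides how visible the notification should be, which is what replacing the dialog with notify_child_process_exited() achieves.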
mmm a / Changelog <nl> ppp b / Changelog <nl> <nl> * Unknown - Christophe Dumez < chris @ qbittorrent . org > - v0 . 9 . 0 <nl> - FEATURE : Based on libtorrent v0 . 12 <nl> - - FEATURE : Based on Qt 4 . 2 <nl> - - FEATURE : Brand new trayicon from Qt 4 . 2 <nl> + - FEATURE : Based on Qt4 . 2 <nl> + - FEATURE : Brand new trayicon from Qt4 . 2 <nl> - FEATURE : Support uTorrent Peer Exchange ( PeX - exchanges peers between clients ) <nl> - FEATURE : Added a menu action to visit qBittorrent website <nl> - FEATURE : Added a menu action to report a bug in qBittorrent <nl> <nl> - FEATURE : Number of complete / incomplete sources are now displayed in download list for each torrent <nl> - FEATURE : Implemented close to systray <nl> - FEATURE : Added Autocompletion to search engine <nl> - - BUGFIX : Two torrents can now have the same name although they are different <nl> + - FEATURE : Splitted BT & GUI parts ( huge code rewriting & optimization ) <nl> + - BUGFIX : Two torrents can now have the same name although they are different ( use their hash ) <nl> - BUGFIX : Fixed download from url that would fail sometimes <nl> - BUGFIX : Save directory was reset to default when filtering files in torrent <nl> - BUGFIX : Force a refresh of download list when the window is shown ( avoid delay ) <nl> - BUGFIX : Fixed deletion from hard drive ( failed for non - empty folders ) <nl> - BUGFIX : qBittorrent now identifies its version correctly on the network <nl> - BUGFIX : Preventing GUI from freezing when deleting a download permanently <nl> - - BUGFIX : Fix directory scanning ( stop trying to download the same files several times ) <nl> - - COSMETIC : Replaced OSD messages by systray messages <nl> + - BUGFIX : Fixed directory scanning ( stop trying to download the same files several times ) <nl> + - BUGFIX : Fixed bad loading of scan dir in option ( widgets still disabled ) <nl> + - COSMETIC : Replaced OSD messages by Qt4 . 2 systray messages <nl> <nl> * Tue Nov 28 2006 - Christophe Dumez < chris @ qbittorrent . org > - v0 . 8 . 0 <nl> - FEATURE : Added a torrent addition dialog <nl> mmm a / TODO <nl> ppp b / TODO <nl> <nl> - Allow to edit the trackers for a torrent <nl> <nl> / / In v0 . 9 . 0 <nl> - - Splitting torrent part from GUI ( & remove handles hashtable to save up some memory ) <nl> + - Splitting torrent part from GUI ( bug squashing + cleanup ) <nl> - Create options object only when necessary to save up some memory <nl> - Wait for libtorrent v0 . 12 official release <nl> + - report this to libtorrent : <nl> + " qbittorrent : kademlia / rpc_manager . cpp : 327 : void libtorrent : : dht : : rpc_manager : : invoke ( int , asio : : ip : : basic_endpoint < asio : : ip : : udp > , boost : : shared_ptr < libtorrent : : dht : : observer > ) : l ' assertion « false » a échoué . " <nl> + Info : current TOP output : <nl> + 25461 chris 15 0 106m 23m 14m S 0 . 7 2 . 4 0 : 01 . 60 qbittorrent <nl> mmm a / src / GUI . cpp <nl> ppp b / src / GUI . cpp <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> setupUi ( this ) ; <nl> setWindowTitle ( tr ( " qBittorrent " ) + VERSION ) ; <nl> readSettings ( ) ; <nl> - <nl> - s = new session ( fingerprint ( " qB " , VERSION_MAJOR , VERSION_MINOR , VERSION_BUGFIX , 0 ) ) ; <nl> / / Setting icons <nl> this - > setWindowIcon ( QIcon ( QString : : fromUtf8 ( " : / Icons / qbittorrent32 . png " ) ) ) ; <nl> actionOpen - > setIcon ( QIcon ( QString : : fromUtf8 ( " : / Icons / skin / open . 
png " ) ) ) ; <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> if ( ! loadColWidthDLList ( ) ) { <nl> downloadList - > header ( ) - > resizeSection ( 0 , 200 ) ; <nl> } <nl> + nbTorrents = 0 ; <nl> + tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( 0 ) " ) ; <nl> + connect ( & BTSession , SIGNAL ( addedTorrent ( const QString & , torrent_handle & , bool ) ) , this , SLOT ( torrentAdded ( const QString & , torrent_handle & , bool ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( duplicateTorrent ( const QString & ) ) , this , SLOT ( torrentDuplicate ( const QString & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( invalidTorrent ( const QString & ) ) , this , SLOT ( torrentCorrupted ( const QString & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( finishedTorrent ( torrent_handle & ) ) , this , SLOT ( finishedTorrent ( torrent_handle & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( fullDiskError ( torrent_handle & ) ) , this , SLOT ( fullDiskError ( torrent_handle & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( portListeningFailure ( ) ) , this , SLOT ( portListeningFailure ( ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( trackerError ( const QString & , const QString & , const QString & ) ) , this , SLOT ( trackerError ( const QString & , const QString & , const QString & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( trackerAuthenticationRequired ( torrent_handle & ) ) , this , SLOT ( trackerAuthenticationRequired ( torrent_handle & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( scanDirFoundTorrents ( const QStringList & ) ) , this , SLOT ( processScannedFiles ( const QStringList & ) ) ) ; <nl> + connect ( & BTSession , SIGNAL ( newDownloadedTorrent ( const QString & , const QString & ) ) , this , SLOT ( processDownloadedFiles ( const QString & , const QString & ) ) ) ; <nl> / / creating options <nl> options = new options_imp ( this ) ; <nl> connect ( options , SIGNAL ( status_changed ( const QString & ) ) , this , SLOT ( OptionsSaved ( const QString & ) ) ) ; <nl> - / / Scan Dir <nl> - timerScan = new QTimer ( this ) ; <nl> - connect ( timerScan , SIGNAL ( timeout ( ) ) , this , SLOT ( scanDirectory ( ) ) ) ; <nl> - / / Set severity level of libtorrent session <nl> - s - > set_severity_level ( alert : : info ) ; <nl> - / / DHT ( Trackerless ) <nl> - DHTEnabled = false ; <nl> / / Configure BT session according to options <nl> configureSession ( ) ; <nl> - s - > add_extension ( & create_metadata_plugin ) ; <nl> - / / download thread <nl> - downloader = new downloadThread ( this ) ; <nl> - connect ( downloader , SIGNAL ( downloadFinished ( QString , QString , int , QString ) ) , this , SLOT ( processDownloadedFile ( QString , QString , int , QString ) ) ) ; <nl> - nbTorrents = 0 ; <nl> - tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( 0 ) " ) ; <nl> - / / Resume unfinished torrent downloads <nl> - resumeUnfinished ( ) ; <nl> + / / Resume unfinished torrents <nl> + BTSession . 
resumeUnfinishedTorrents ( ) ; <nl> / / Add torrent given on command line <nl> processParams ( torrentCmdLine ) ; <nl> / / Make download list header clickable for sorting <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> connect ( actionDelete , SIGNAL ( triggered ( ) ) , this , SLOT ( deleteSelection ( ) ) ) ; <nl> connect ( actionOptions , SIGNAL ( triggered ( ) ) , this , SLOT ( showOptions ( ) ) ) ; <nl> connect ( actionDownload_from_URL , SIGNAL ( triggered ( ) ) , this , SLOT ( askForTorrentUrl ( ) ) ) ; <nl> - connect ( actionPause_All , SIGNAL ( triggered ( ) ) , this , SLOT ( pauseAll ( ) ) ) ; <nl> - connect ( actionStart_All , SIGNAL ( triggered ( ) ) , this , SLOT ( startAll ( ) ) ) ; <nl> connect ( actionPause , SIGNAL ( triggered ( ) ) , this , SLOT ( pauseSelection ( ) ) ) ; <nl> connect ( actionTorrent_Properties , SIGNAL ( triggered ( ) ) , this , SLOT ( propertiesSelection ( ) ) ) ; <nl> connect ( actionStart , SIGNAL ( triggered ( ) ) , this , SLOT ( startSelection ( ) ) ) ; <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> std : : cerr < < " Couldn ' t create socket , single instance mode won ' t work . . . \ n " ; <nl> } <nl> connect ( & tcpServer , SIGNAL ( newConnection ( ) ) , this , SLOT ( acceptConnection ( ) ) ) ; <nl> - / / connect ( tcpServer , SIGNAL ( bytesWritten ( qint64 ) ) , this , SLOT ( readParamsOnSocket ( qint64 ) ) ) ; <nl> / / Start connection checking timer <nl> checkConnect = new QTimer ( this ) ; <nl> connect ( checkConnect , SIGNAL ( timeout ( ) ) , this , SLOT ( checkConnectionStatus ( ) ) ) ; <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> searchCompleter - > setCaseSensitivity ( Qt : : CaseInsensitive ) ; <nl> search_pattern - > setCompleter ( searchCompleter ) ; <nl> <nl> - <nl> / / Boolean initialization <nl> search_stopped = false ; <nl> / / Connect signals to slots ( search part ) <nl> GUI : : GUI ( QWidget * parent , QStringList torrentCmdLine ) : QMainWindow ( parent ) { <nl> connect ( meganova , SIGNAL ( stateChanged ( int ) ) , this , SLOT ( saveCheckedSearchEngines ( int ) ) ) ; <nl> / / Update nova . py search plugin if necessary <nl> updateNova ( ) ; <nl> - / / Supported preview extensions <nl> - / / XXX : might be incomplete <nl> - supported_preview_extensions < < " AVI " < < " DIVX " < < " MPG " < < " MPEG " < < " MP3 " < < " OGG " < < " WMV " < < " WMA " < < " RMV " < < " RMVB " < < " ASF " < < " MOV " < < " WAV " < < " MP2 " < < " SWF " < < " AC3 " ; <nl> previewProcess = new QProcess ( this ) ; <nl> connect ( previewProcess , SIGNAL ( finished ( int , QProcess : : ExitStatus ) ) , this , SLOT ( cleanTempPreviewFile ( int , QProcess : : ExitStatus ) ) ) ; <nl> / / Accept drag ' n drops <nl> GUI : : ~ GUI ( ) { <nl> qDebug ( " GUI destruction " ) ; <nl> delete options ; <nl> delete checkConnect ; <nl> - delete timerScan ; <nl> delete searchProcess ; <nl> delete refresher ; <nl> delete myTrayIcon ; <nl> GUI : : ~ GUI ( ) { <nl> delete SearchListModel ; <nl> delete SearchDelegate ; <nl> delete previewProcess ; <nl> - delete downloader ; <nl> delete connecStatusLblIcon ; <nl> - delete s ; <nl> } <nl> <nl> void GUI : : openqBTHomepage ( ) { <nl> void GUI : : readParamsOnSocket ( ) { <nl> } <nl> } <nl> <nl> + / / Toggle paused state of selected torrent <nl> void GUI : : togglePausedState ( const QModelIndex & index ) { <nl> int row = index . 
row ( ) ; <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( row , HASH ) ) . toString ( ) ; <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> - if ( h . is_paused ( ) ) { <nl> - startSelection ( ) ; <nl> + if ( BTSession . isPaused ( fileHash ) ) { <nl> + BTSession . resumeTorrent ( fileHash ) ; <nl> } else { <nl> - pauseSelection ( ) ; <nl> + BTSession . pauseTorrent ( fileHash ) ; <nl> } <nl> } <nl> <nl> void GUI : : previewFileSelection ( ) { <nl> if ( index . column ( ) = = NAME ) { <nl> / / Get the file name <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( index . row ( ) , HASH ) ) . toString ( ) ; <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> + torrent_handle h = BTSession . getTorrentHandle ( fileHash ) ; <nl> previewSelection = new previewSelect ( this , h ) ; <nl> break ; <nl> } <nl> void GUI : : displayDLListMenu ( const QPoint & pos ) { <nl> / / Get the file name <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( index . row ( ) , HASH ) ) . toString ( ) ; <nl> / / Get handle and pause the torrent <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> + torrent_handle h = BTSession . getTorrentHandle ( fileHash ) ; <nl> if ( h . is_paused ( ) ) { <nl> myDLLlistMenu . addAction ( actionStart ) ; <nl> } else { <nl> void GUI : : displayDLListMenu ( const QPoint & pos ) { <nl> myDLLlistMenu . addAction ( actionDelete ) ; <nl> myDLLlistMenu . addAction ( actionDelete_Permanently ) ; <nl> myDLLlistMenu . addAction ( actionTorrent_Properties ) ; <nl> - if ( ! options - > getPreviewProgram ( ) . isEmpty ( ) & & isFilePreviewPossible ( h ) & & selectedIndexes . size ( ) < = DLListModel - > columnCount ( ) ) { <nl> + if ( ! options - > getPreviewProgram ( ) . isEmpty ( ) & & BTSession . isFilePreviewPossible ( fileHash ) & & selectedIndexes . size ( ) < = DLListModel - > columnCount ( ) ) { <nl> myDLLlistMenu . addAction ( actionPreview_file ) ; <nl> } <nl> break ; <nl> void GUI : : displayInfoBarMenu ( const QPoint & pos ) { <nl> / / update download list accordingly <nl> void GUI : : updateDlList ( bool force ) { <nl> qDebug ( " Updating download list " ) ; <nl> - torrent_handle h ; <nl> char tmp [ MAX_CHAR_TMP ] ; <nl> char tmp2 [ MAX_CHAR_TMP ] ; <nl> / / update global informations <nl> - session_status sessionStatus = s - > status ( ) ; <nl> - snprintf ( tmp , MAX_CHAR_TMP , " % . 1f " , sessionStatus . payload_upload_rate / 1024 . ) ; <nl> - snprintf ( tmp2 , MAX_CHAR_TMP , " % . 1f " , sessionStatus . payload_download_rate / 1024 . ) ; <nl> + snprintf ( tmp , MAX_CHAR_TMP , " % . 1f " , BTSession . getPayloadUploadRate ( ) / 1024 . ) ; <nl> + snprintf ( tmp2 , MAX_CHAR_TMP , " % . 1f " , BTSession . getPayloadDownloadRate ( ) / 1024 . ) ; <nl> myTrayIcon - > setToolTip ( tr ( " < b > qBittorrent < / b > < br > DL Speed : " ) + QString ( tmp2 ) + tr ( " KiB / s " ) + " < br > " + tr ( " UP Speed : " ) + QString ( tmp ) + tr ( " KiB / s " ) ) ; / / tray icon <nl> if ( ! force & & ( isMinimized ( ) | | isHidden ( ) | | tabs - > currentIndex ( ) ) ) { <nl> / / No need to update if qBittorrent DL list is hidden <nl> void GUI : : updateDlList ( bool force ) { <nl> LCD_UpSpeed - > display ( tmp ) ; / / UP LCD <nl> LCD_DownSpeed - > display ( tmp2 ) ; / / DL LCD <nl> / / browse handles <nl> - foreach ( h , handles . values ( ) ) { <nl> + std : : vector < torrent_handle > handles = BTSession . getTorrentHandles ( ) ; <nl> + for ( unsigned int i = 0 ; i < handles . 
size ( ) ; + + i ) { <nl> + torrent_handle h = handles [ i ] ; <nl> try { <nl> torrent_status torrentStatus = h . status ( ) ; <nl> QString fileHash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> void GUI : : updateDlList ( bool force ) { <nl> qDebug ( " Updated Download list " ) ; <nl> } <nl> <nl> - bool GUI : : isFilePreviewPossible ( const torrent_handle & h ) const { <nl> - / / See if there are supported files in the torrent <nl> - torrent_info torrentInfo = h . get_torrent_info ( ) ; <nl> - for ( int i = 0 ; i < torrentInfo . num_files ( ) ; + + i ) { <nl> - QString fileName = QString ( torrentInfo . file_at ( i ) . path . leaf ( ) . c_str ( ) ) ; <nl> - QString extension = fileName . split ( ' . ' ) . last ( ) . toUpper ( ) ; <nl> - if ( supported_preview_extensions . indexOf ( extension ) > = 0 ) { <nl> - return true ; <nl> - } <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> void GUI : : sortDownloadListFloat ( int index , Qt : : SortOrder sortOrder ) { <nl> QList < QPair < int , double > > lines ; <nl> / / insertion sorting <nl> QPoint GUI : : screenCenter ( ) { <nl> return QPoint ( ( desk . width ( ) - this - > frameGeometry ( ) . width ( ) ) / 2 , ( desk . height ( ) - this - > frameGeometry ( ) . height ( ) ) / 2 ) ; <nl> } <nl> <nl> - bool GUI : : loadFilteredFiles ( torrent_handle & h ) { <nl> - bool has_filtered_files = false ; <nl> - torrent_info torrentInfo = h . get_torrent_info ( ) ; <nl> - QString fileHash = QString ( misc : : toString ( torrentInfo . info_hash ( ) ) . c_str ( ) ) ; <nl> - QFile pieces_file ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . pieces " ) ; <nl> - / / Read saved file <nl> - if ( ! pieces_file . open ( QIODevice : : ReadOnly | QIODevice : : Text ) ) { <nl> - return has_filtered_files ; <nl> - } <nl> - QByteArray pieces_selection = pieces_file . readAll ( ) ; <nl> - pieces_file . close ( ) ; <nl> - QList < QByteArray > pieces_selection_list = pieces_selection . split ( ' \ n ' ) ; <nl> - if ( pieces_selection_list . size ( ) ! = torrentInfo . num_files ( ) + 1 ) { <nl> - std : : cerr < < " Error : Corrupted pieces file \ n " ; <nl> - return has_filtered_files ; <nl> - } <nl> - std : : vector < bool > selectionBitmask ; <nl> - for ( int i = 0 ; i < torrentInfo . num_files ( ) ; + + i ) { <nl> - int isFiltered = pieces_selection_list . at ( i ) . toInt ( ) ; <nl> - if ( isFiltered < 0 | | isFiltered > 1 ) { <nl> - isFiltered = 0 ; <nl> - } <nl> - selectionBitmask . push_back ( isFiltered ) ; <nl> - / / h . filter_piece ( i , isFiltered ) ; <nl> - if ( isFiltered ) { <nl> - has_filtered_files = true ; <nl> - } <nl> - } <nl> - h . filter_files ( selectionBitmask ) ; <nl> - return has_filtered_files ; <nl> - } <nl> - <nl> - bool GUI : : hasFilteredFiles ( const QString & fileHash ) { <nl> - QFile pieces_file ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . pieces " ) ; <nl> - / / Read saved file <nl> - if ( ! pieces_file . open ( QIODevice : : ReadOnly | QIODevice : : Text ) ) { <nl> - return false ; <nl> - } <nl> - QByteArray pieces_selection = pieces_file . readAll ( ) ; <nl> - pieces_file . close ( ) ; <nl> - QList < QByteArray > pieces_selection_list = pieces_selection . split ( ' \ n ' ) ; <nl> - for ( int i = 0 ; i < pieces_selection_list . size ( ) - 1 ; + + i ) { <nl> - int isFiltered = pieces_selection_list . at ( i ) . 
toInt ( ) ; <nl> - if ( isFiltered < 0 | | isFiltered > 1 ) { <nl> - isFiltered = 0 ; <nl> - } <nl> - if ( isFiltered ) { <nl> - return true ; <nl> - } <nl> - } <nl> - return false ; <nl> - } <nl> - <nl> / / Save last checked search engines to a file <nl> void GUI : : saveCheckedSearchEngines ( int ) const { <nl> QSettings settings ( " qBittorrent " , " qBittorrent " ) ; <nl> void GUI : : closeEvent ( QCloseEvent * e ) { <nl> return ; <nl> } <nl> } <nl> - / / TODO : Clean finished torrents on exit <nl> - / / if ( options - > getClearFinishedOnExit ( ) ) { <nl> - / / torrentBackup . remove ( fileHash + " . torrent " ) ; <nl> - / / torrentBackup . remove ( fileHash + " . fastresume " ) ; <nl> - / / torrentBackup . remove ( fileHash + " . paused " ) ; <nl> - / / torrentBackup . remove ( fileHash + " . incremental " ) ; <nl> - / / torrentBackup . remove ( fileHash + " . pieces " ) ; <nl> - / / torrentBackup . remove ( fileHash + " . savepath " ) ; <nl> - / / if ( isScanningDir ) { <nl> - / / QFile : : remove ( scan_dir + fileHash + " . torrent " ) ; <nl> - / / } <nl> - / / } <nl> + / / Clean finished torrents on exit if asked for <nl> + if ( options - > getClearFinishedOnExit ( ) ) { <nl> + torrent_handle h ; <nl> + / / XXX : Probably move this to the bittorrent part <nl> + QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> + foreach ( h , BTSession . getFinishedTorrentHandles ( ) ) { <nl> + QString fileHash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> + torrentBackup . remove ( fileHash + " . torrent " ) ; <nl> + torrentBackup . remove ( fileHash + " . fastresume " ) ; <nl> + torrentBackup . remove ( fileHash + " . paused " ) ; <nl> + torrentBackup . remove ( fileHash + " . incremental " ) ; <nl> + torrentBackup . remove ( fileHash + " . pieces " ) ; <nl> + torrentBackup . remove ( fileHash + " . savepath " ) ; <nl> + } <nl> + } <nl> / / save the searchHistory for later uses <nl> saveSearchHistory ( ) ; <nl> / / Save DHT entry <nl> - if ( DHTEnabled ) { <nl> - try { <nl> - entry dht_state = s - > dht_state ( ) ; <nl> - boost : : filesystem : : ofstream out ( ( const char * ) ( misc : : qBittorrentPath ( ) + QString ( " dht_state " ) ) . toUtf8 ( ) , std : : ios_base : : binary ) ; <nl> - out . unsetf ( std : : ios_base : : skipws ) ; <nl> - bencode ( std : : ostream_iterator < char > ( out ) , dht_state ) ; <nl> - } catch ( std : : exception & e ) { <nl> - std : : cerr < < e . what ( ) < < " \ n " ; <nl> - } <nl> - } <nl> + BTSession . saveDHTEntry ( ) ; <nl> / / Save window size , columns size <nl> writeSettings ( ) ; <nl> saveColWidthDLList ( ) ; <nl> saveColWidthSearchList ( ) ; <nl> / / Create fast resume data <nl> - saveFastResumeData ( ) ; <nl> + BTSession . 
saveFastResumeData ( ) ; <nl> / / Hide tray icon <nl> myTrayIcon - > hide ( ) ; <nl> / / Accept exit <nl> void GUI : : dropEvent ( QDropEvent * event ) { <nl> foreach ( file , files ) { <nl> if ( options - > useAdditionDialog ( ) ) { <nl> torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> - connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , this , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> + connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , & BTSession , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> dialog - > showLoad ( file . trimmed ( ) . replace ( " file : / / " , " " ) ) ; <nl> } else { <nl> - addTorrent ( file . trimmed ( ) . replace ( " file : / / " , " " ) ) ; <nl> + BTSession . addTorrent ( file . trimmed ( ) . replace ( " file : / / " , " " ) ) ; <nl> } <nl> } <nl> } <nl> void GUI : : askForTorrents ( ) { <nl> for ( int i = 0 ; i < pathsList . size ( ) ; + + i ) { <nl> if ( options - > useAdditionDialog ( ) ) { <nl> torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> - connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , this , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> + connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , & BTSession , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> dialog - > showLoad ( pathsList . at ( i ) ) ; <nl> } else { <nl> - addTorrent ( pathsList . at ( i ) ) ; <nl> + BTSession . addTorrent ( pathsList . at ( i ) ) ; <nl> } <nl> } <nl> / / Save last dir to remember it <nl> void GUI : : askForTorrents ( ) { <nl> } <nl> } <nl> <nl> - / / Scan the first level of the directory for torrent files <nl> - / / and add them to download list <nl> - void GUI : : scanDirectory ( ) { <nl> - QString dirText = options - > getScanDir ( ) ; <nl> - QString file ; <nl> - if ( ! dirText . isNull ( ) ) { <nl> - QStringList to_add ; <nl> - QDir dir ( dirText ) ; <nl> - QStringList files = dir . entryList ( QDir : : Files , QDir : : Unsorted ) ; <nl> - foreach ( file , files ) { <nl> - QString fullPath = dir . path ( ) + QDir : : separator ( ) + file ; <nl> - if ( fullPath . endsWith ( " . 
torrent " ) ) { <nl> - to_add < < fullPath ; <nl> - } <nl> - } <nl> - foreach ( file , to_add ) { <nl> - if ( options - > useAdditionDialog ( ) ) { <nl> - torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> - connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , this , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> - connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> - dialog - > showLoad ( file , true ) ; <nl> - } else { <nl> - addTorrent ( file , true ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> - void GUI : : saveFastResumeData ( ) const { <nl> - qDebug ( " Saving fast resume data " ) ; <nl> - QString file ; <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - / / Checking if torrentBackup Dir exists <nl> - / / create it if it is not <nl> - if ( ! torrentBackup . exists ( ) ) { <nl> - torrentBackup . mkpath ( torrentBackup . path ( ) ) ; <nl> - } <nl> - / / Write fast resume data <nl> - foreach ( torrent_handle h , handles . values ( ) ) { <nl> - / / Pause download ( needed before fast resume writing ) <nl> - h . pause ( ) ; <nl> - / / Extracting resume data <nl> - if ( h . has_metadata ( ) ) { <nl> - QString fileHash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> - if ( QFile : : exists ( torrentBackup . path ( ) + QDir : : separator ( ) + fileHash + " . torrent " ) ) { <nl> - / / Remove old . fastresume data in case it exists <nl> - QFile : : remove ( fileHash + " . fastresume " ) ; <nl> - / / Write fast resume data <nl> - entry resumeData = h . write_resume_data ( ) ; <nl> - file = fileHash + " . fastresume " ; <nl> - boost : : filesystem : : ofstream out ( fs : : path ( ( const char * ) torrentBackup . path ( ) . toUtf8 ( ) ) / ( const char * ) file . toUtf8 ( ) , std : : ios_base : : binary ) ; <nl> - out . unsetf ( std : : ios_base : : skipws ) ; <nl> - bencode ( std : : ostream_iterator < char > ( out ) , resumeData ) ; <nl> - } <nl> - } <nl> - / / Remove torrent <nl> - s - > remove_torrent ( h ) ; <nl> - } <nl> - qDebug ( " Fast resume data saved " ) ; <nl> - } <nl> - <nl> / / delete from download list AND from hard drive <nl> void GUI : : deletePermanently ( ) { <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - QString scan_dir = options - > getScanDir ( ) ; <nl> - bool isScanningDir = ! scan_dir . isNull ( ) ; <nl> - if ( isScanningDir & & scan_dir . at ( scan_dir . length ( ) - 1 ) ! = QDir : : separator ( ) ) { <nl> - scan_dir + = QDir : : separator ( ) ; <nl> - } <nl> QModelIndexList selectedIndexes = downloadList - > selectionModel ( ) - > selectedIndexes ( ) ; <nl> if ( ! selectedIndexes . isEmpty ( ) ) { <nl> if ( QMessageBox : : question ( <nl> void GUI : : deletePermanently ( ) { <nl> foreach ( sortedIndex , sortedIndexes ) { <nl> qDebug ( " deleting row : % d , % d , col : % d " , sortedIndex . first , sortedIndex . second . row ( ) , sortedIndex . second . column ( ) ) ; <nl> / / Get the file name <nl> + QString fileName = DLListModel - > data ( DLListModel - > index ( sortedIndex . second . row ( ) , NAME ) ) . toString ( ) ; <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( sortedIndex . second . row ( ) , HASH ) ) . toString ( ) ; <nl> - QString savePath ; <nl> / / Delete item from download list <nl> DLListModel - > removeRow ( sortedIndex . 
first ) ; <nl> - / / Get handle and remove the torrent <nl> - QHash < QString , torrent_handle > : : iterator it = handles . find ( fileHash ) ; <nl> - if ( it ! = handles . end ( ) & & it . key ( ) = = fileHash ) { <nl> - torrent_handle h = it . value ( ) ; <nl> - QString fileName = QString ( h . name ( ) . c_str ( ) ) ; <nl> - savePath = QString : : fromUtf8 ( h . save_path ( ) . string ( ) . c_str ( ) ) ; <nl> - / / Remove torrent from handles <nl> - qDebug ( ( " There are " + misc : : toString ( handles . size ( ) ) + " items in handles " ) . c_str ( ) ) ; <nl> - handles . erase ( it ) ; <nl> - qDebug ( ( " After removing , there are still " + misc : : toString ( handles . size ( ) ) + " items in handles " ) . c_str ( ) ) ; <nl> - s - > remove_torrent ( h ) ; <nl> - / / remove it from scan dir or it will start again <nl> - if ( isScanningDir ) { <nl> - QFile : : remove ( scan_dir + fileHash + " . torrent " ) ; <nl> - } <nl> - / / Remove it from torrent backup directory <nl> - torrentBackup . remove ( fileHash + " . torrent " ) ; <nl> - torrentBackup . remove ( fileHash + " . fastresume " ) ; <nl> - torrentBackup . remove ( fileHash + " . paused " ) ; <nl> - torrentBackup . remove ( fileHash + " . incremental " ) ; <nl> - torrentBackup . remove ( fileHash + " . pieces " ) ; <nl> - torrentBackup . remove ( fileHash + " . savepath " ) ; <nl> - / / Remove from Hard drive <nl> - qDebug ( " Removing this on hard drive : % s " , qPrintable ( savePath + QDir : : separator ( ) + fileName ) ) ; <nl> - / / Deleting in a thread to avoid GUI freeze <nl> - deleteThread * deleter = new deleteThread ( savePath + QDir : : separator ( ) + fileName ) ; <nl> - deleters < < deleter ; <nl> - int i = 0 ; <nl> - while ( i < deleters . size ( ) ) { <nl> - deleter = deleters . at ( i ) ; <nl> - if ( deleter - > isFinished ( ) ) { <nl> - qDebug ( " Delete thread has finished , deleting it " ) ; <nl> - deleters . removeAt ( i ) ; <nl> - delete deleter ; <nl> - } else { <nl> - + + i ; <nl> - } <nl> - } <nl> + / / Remove the torrent <nl> + BTSession . deleteTorrent ( fileHash , true ) ; <nl> / / Update info bar <nl> setInfoBar ( " ' " + fileName + " ' " + tr ( " removed . " , " < file > removed . " ) ) ; <nl> - - nbTorrents ; <nl> tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( " + QString ( misc : : toString ( nbTorrents ) . c_str ( ) ) + " ) " ) ; <nl> - } else { <nl> - std : : cerr < < " Error : Could not find the torrent handle supposed to be removed \ n " ; <nl> - } <nl> } <nl> } <nl> } <nl> void GUI : : deletePermanently ( ) { <nl> <nl> / / delete selected items in the list <nl> void GUI : : deleteSelection ( ) { <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - QString scan_dir = options - > getScanDir ( ) ; <nl> - bool isScanningDir = ! scan_dir . isNull ( ) ; <nl> - if ( isScanningDir & & scan_dir . at ( scan_dir . length ( ) - 1 ) ! = QDir : : separator ( ) ) { <nl> - scan_dir + = QDir : : separator ( ) ; <nl> - } <nl> QModelIndexList selectedIndexes = downloadList - > selectionModel ( ) - > selectedIndexes ( ) ; <nl> if ( ! selectedIndexes . isEmpty ( ) ) { <nl> if ( QMessageBox : : question ( <nl> this , <nl> tr ( " Are you sure ? - - qBittorrent " ) , <nl> - tr ( " Are you sure you want to delete the selected item ( s ) in download list ? " ) , <nl> + tr ( " Are you sure you want to delete the selected item ( s ) in download list and in hard drive ? 
" ) , <nl> tr ( " & Yes " ) , tr ( " & No " ) , <nl> QString ( ) , 0 , 1 ) = = 0 ) { <nl> / / User clicked YES <nl> void GUI : : deleteSelection ( ) { <nl> foreach ( sortedIndex , sortedIndexes ) { <nl> qDebug ( " deleting row : % d , % d , col : % d " , sortedIndex . first , sortedIndex . second . row ( ) , sortedIndex . second . column ( ) ) ; <nl> / / Get the file name <nl> + QString fileName = DLListModel - > data ( DLListModel - > index ( sortedIndex . second . row ( ) , NAME ) ) . toString ( ) ; <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( sortedIndex . second . row ( ) , HASH ) ) . toString ( ) ; <nl> / / Delete item from download list <nl> DLListModel - > removeRow ( sortedIndex . first ) ; <nl> - / / Get handle and remove the torrent <nl> - QHash < QString , torrent_handle > : : iterator it = handles . find ( fileHash ) ; <nl> - if ( it ! = handles . end ( ) & & it . key ( ) = = fileHash ) { <nl> - torrent_handle h = it . value ( ) ; <nl> - QString fileName = QString ( h . name ( ) . c_str ( ) ) ; <nl> - / / Remove torrent from handles <nl> - handles . erase ( it ) ; <nl> - s - > remove_torrent ( h ) ; <nl> - / / remove it from scan dir or it will start again <nl> - if ( isScanningDir ) { <nl> - QFile : : remove ( scan_dir + fileHash + " . torrent " ) ; <nl> - } <nl> - / / Remove it from torrent backup directory <nl> - torrentBackup . remove ( fileHash + " . torrent " ) ; <nl> - torrentBackup . remove ( fileHash + " . fastresume " ) ; <nl> - torrentBackup . remove ( fileHash + " . paused " ) ; <nl> - torrentBackup . remove ( fileHash + " . incremental " ) ; <nl> - torrentBackup . remove ( fileHash + " . pieces " ) ; <nl> - torrentBackup . remove ( fileHash + " . savepath " ) ; <nl> + / / Remove the torrent <nl> + BTSession . deleteTorrent ( fileHash , false ) ; <nl> / / Update info bar <nl> setInfoBar ( " ' " + fileName + " ' " + tr ( " removed . " , " < file > removed . " ) ) ; <nl> - - nbTorrents ; <nl> tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( " + QString ( misc : : toString ( nbTorrents ) . c_str ( ) ) + " ) " ) ; <nl> - } else { <nl> - std : : cerr < < " Error : Could not find the torrent handle supposed to be removed \ n " ; <nl> - } <nl> } <nl> } <nl> } <nl> } <nl> <nl> - / / Will fast resume unfinished torrents in <nl> - / / backup directory <nl> - void GUI : : resumeUnfinished ( ) { <nl> - qDebug ( " Resuming unfinished torrents " ) ; <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - QStringList fileNames , filePaths ; <nl> - / / Scan torrentBackup directory <nl> - fileNames = torrentBackup . entryList ( ) ; <nl> - QString fileName ; <nl> - foreach ( fileName , fileNames ) { <nl> - if ( fileName . endsWith ( " . torrent " ) ) { <nl> - filePaths . append ( torrentBackup . path ( ) + QDir : : separator ( ) + fileName ) ; <nl> - } <nl> + / / Called when a torrent is added <nl> + void GUI : : torrentAdded ( const QString & path , torrent_handle & h , bool fastResume ) { <nl> + int row = DLListModel - > rowCount ( ) ; <nl> + QString hash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> + / / Adding torrent to download list <nl> + DLListModel - > insertRow ( row ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( h . name ( ) . c_str ( ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , SIZE ) , QVariant ( ( qlonglong ) h . get_torrent_info ( ) . 
total_size ( ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , SEEDSLEECH ) , QVariant ( " 0 / 0 " ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , HASH ) , QVariant ( hash ) ) ; <nl> + / / Pause torrent if it was paused last time <nl> + if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ) { <nl> + DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / paused . png " ) ) , Qt : : DecorationRole ) ; <nl> + setRowColor ( row , " red " ) ; <nl> + } else { <nl> + DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Connecting . . . " ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / connecting . png " ) ) , Qt : : DecorationRole ) ; <nl> + setRowColor ( row , " grey " ) ; <nl> } <nl> - / / Resume downloads <nl> - foreach ( fileName , filePaths ) { <nl> - addTorrent ( fileName ) ; <nl> + if ( ! fastResume ) { <nl> + setInfoBar ( " ' " + path + " ' " + tr ( " added to download list . " ) ) ; <nl> + } else { <nl> + setInfoBar ( " ' " + path + " ' " + tr ( " resumed . ( fast resume ) " ) ) ; <nl> } <nl> - qDebug ( " Unfinished torrents resumed " ) ; <nl> + + + nbTorrents ; <nl> + tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( " + QString ( misc : : toString ( nbTorrents ) . c_str ( ) ) + " ) " ) ; <nl> } <nl> <nl> - / / Method used to add torrents to download list <nl> - void GUI : : addTorrent ( const QString & path , bool fromScanDir , const QString & from_url ) { <nl> - torrent_handle h ; <nl> - entry resume_data ; <nl> - bool fastResume = false ; <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - QString file , dest_file , scan_dir ; <nl> + / / Called when trying to add a duplicate torrent <nl> + void GUI : : torrentDuplicate ( const QString & path ) { <nl> + setInfoBar ( " ' " + path + " ' " + tr ( " already in download list . " , " < file > already in download list . " ) ) ; <nl> + } <nl> <nl> - / / Checking if BT_backup Dir exists <nl> - / / create it if it is not <nl> - if ( ! torrentBackup . exists ( ) ) { <nl> - if ( ! torrentBackup . mkpath ( torrentBackup . path ( ) ) ) { <nl> - setInfoBar ( tr ( " Couldn ' t create the directory : " ) + " ' " + torrentBackup . path ( ) + " ' " , " red " ) ; <nl> - return ; <nl> - } <nl> - } <nl> - / / Processing torrents <nl> - file = path . trimmed ( ) . replace ( " file : / / " , " " ) ; <nl> - if ( file . isEmpty ( ) ) { <nl> - return ; <nl> - } <nl> - qDebug ( " Adding % s to download list " , ( const char * ) file . toUtf8 ( ) ) ; <nl> - std : : ifstream in ( ( const char * ) file . toUtf8 ( ) , std : : ios_base : : binary ) ; <nl> - in . 
unsetf ( std : : ios_base : : skipws ) ; <nl> - try { <nl> - / / Decode torrent file <nl> - entry e = bdecode ( std : : istream_iterator < char > ( in ) , std : : istream_iterator < char > ( ) ) ; <nl> - / / Getting torrent file informations <nl> - torrent_info t ( e ) ; <nl> - QString hash = QString ( misc : : toString ( t . info_hash ( ) ) . c_str ( ) ) ; <nl> - if ( handles . contains ( hash ) ) { <nl> - / / Update info Bar <nl> - if ( ! fromScanDir ) { <nl> - if ( ! from_url . isNull ( ) ) { <nl> - setInfoBar ( " ' " + from_url + " ' " + tr ( " already in download list . " , " < file > already in download list . " ) ) ; <nl> - } else { <nl> - setInfoBar ( " ' " + file + " ' " + tr ( " already in download list . " , " < file > already in download list . " ) ) ; <nl> - } <nl> - } else { <nl> - / / Delete torrent from scan dir <nl> - QFile : : remove ( file ) ; <nl> - } <nl> - return ; <nl> - } <nl> - / / TODO : Remove this in a few releases <nl> - if ( torrentBackup . exists ( QString ( t . name ( ) . c_str ( ) ) + " . torrent " ) ) { <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . torrent " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . torrent " ) ; <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . fastresume " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . fastresume " ) ; <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . pieces " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . pieces " ) ; <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . savepath " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . savepath " ) ; <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . paused " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . paused " ) ; <nl> - QFile : : rename ( torrentBackup . path ( ) + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . incremental " , torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . incremental " ) ; <nl> - file = torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . torrent " ; <nl> - } <nl> - / / Getting fast resume data if existing <nl> - if ( torrentBackup . exists ( hash + " . fastresume " ) ) { <nl> - try { <nl> - std : : stringstream strStream ; <nl> - strStream < < hash . toStdString ( ) < < " . fastresume " ; <nl> - boost : : filesystem : : ifstream resume_file ( fs : : path ( ( const char * ) torrentBackup . path ( ) . toUtf8 ( ) ) / strStream . str ( ) , std : : ios_base : : binary ) ; <nl> - resume_file . unsetf ( std : : ios_base : : skipws ) ; <nl> - resume_data = bdecode ( std : : istream_iterator < char > ( resume_file ) , std : : istream_iterator < char > ( ) ) ; <nl> - fastResume = true ; <nl> - } catch ( invalid_encoding & ) { } <nl> - catch ( fs : : filesystem_error & ) { } <nl> - / / qDebug ( " Got fast resume data " ) ; <nl> - } <nl> - QString savePath = getSavePath ( hash ) ; <nl> - int row = DLListModel - > rowCount ( ) ; <nl> - / / Adding files to bittorrent session <nl> - if ( hasFilteredFiles ( hash ) ) { <nl> - h = s - > add_torrent ( t , fs : : path ( ( const char * ) savePath . 
toUtf8 ( ) ) , resume_data , false ) ; <nl> - qDebug ( " Full allocation mode " ) ; <nl> - } else { <nl> - h = s - > add_torrent ( t , fs : : path ( ( const char * ) savePath . toUtf8 ( ) ) , resume_data , true ) ; <nl> - qDebug ( " Compact allocation mode " ) ; <nl> - } <nl> - / / Is this really useful and appropriate ? <nl> - / / h . set_max_connections ( 60 ) ; <nl> - h . set_max_uploads ( - 1 ) ; <nl> - qDebug ( " Torrent hash is " + hash . toUtf8 ( ) ) ; <nl> - / / Load filtered files <nl> - loadFilteredFiles ( h ) ; <nl> - / / qDebug ( " Added to session " ) ; <nl> - torrent_status torrentStatus = h . status ( ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , PROGRESS ) , QVariant ( ( double ) torrentStatus . progress ) ) ; <nl> - handles . insert ( hash , h ) ; <nl> - QString newFile = torrentBackup . path ( ) + QDir : : separator ( ) + hash + " . torrent " ; <nl> - if ( file ! = newFile ) { <nl> - / / Delete file from torrentBackup directory in case it exists because <nl> - / / QFile : : copy ( ) do not overwrite <nl> - QFile : : remove ( newFile ) ; <nl> - / / Copy it to torrentBackup directory <nl> - QFile : : copy ( file , newFile ) ; <nl> - } <nl> - / / qDebug ( " Copied to torrent backup directory " ) ; <nl> - if ( fromScanDir ) { <nl> - scan_dir = options - > getScanDir ( ) ; <nl> - if ( scan_dir . at ( scan_dir . length ( ) - 1 ) ! = QDir : : separator ( ) ) { <nl> - scan_dir + = QDir : : separator ( ) ; <nl> - } <nl> - } <nl> - / / Adding torrent to download list <nl> - DLListModel - > insertRow ( row ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( t . name ( ) . c_str ( ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , SIZE ) , QVariant ( ( qlonglong ) t . total_size ( ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , SEEDSLEECH ) , QVariant ( " 0 / 0 " ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , HASH ) , QVariant ( hash ) ) ; <nl> - / / Pause torrent if it was paused last time <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ) { <nl> - DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / paused . png " ) ) , Qt : : DecorationRole ) ; <nl> - setRowColor ( row , " red " ) ; <nl> - } else { <nl> - DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Connecting . . . " ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / connecting . png " ) ) , Qt : : DecorationRole ) ; <nl> - setRowColor ( row , " grey " ) ; <nl> - } <nl> - / / qDebug ( " Added to download list " ) ; <nl> - / / Pause torrent if it was paused last time <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ) { <nl> - h . 
pause ( ) ; <nl> - } <nl> - / / Incremental download <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . incremental " ) ) { <nl> - qDebug ( " Incremental download enabled for % s " , t . name ( ) . c_str ( ) ) ; <nl> - h . set_sequenced_download_threshold ( 15 ) ; <nl> - } <nl> - if ( ! from_url . isNull ( ) ) { <nl> - / / remove temporary file <nl> - QFile : : remove ( file ) ; <nl> - } <nl> - / / Delete from scan dir to avoid trying to download it again <nl> - if ( fromScanDir ) { <nl> - QFile : : remove ( file ) ; <nl> - } <nl> - / / Update info Bar <nl> - if ( ! fastResume ) { <nl> - if ( ! from_url . isNull ( ) ) { <nl> - setInfoBar ( " ' " + from_url + " ' " + tr ( " added to download list . " ) ) ; <nl> - } else { <nl> - setInfoBar ( " ' " + file + " ' " + tr ( " added to download list . " ) ) ; <nl> - } <nl> - } else { <nl> - if ( ! from_url . isNull ( ) ) { <nl> - setInfoBar ( " ' " + from_url + " ' " + tr ( " resumed . ( fast resume ) " ) ) ; <nl> - } else { <nl> - setInfoBar ( " ' " + file + " ' " + tr ( " resumed . ( fast resume ) " ) ) ; <nl> - } <nl> - } <nl> - + + nbTorrents ; <nl> - tabs - > setTabText ( 0 , tr ( " Transfers " ) + " ( " + QString ( misc : : toString ( nbTorrents ) . c_str ( ) ) + " ) " ) ; <nl> - } catch ( invalid_encoding & e ) { / / Raised by bdecode ( ) <nl> - std : : cerr < < " Could not decode file , reason : " < < e . what ( ) < < ' \ n ' ; <nl> - / / Display warning to tell user we can ' t decode the torrent file <nl> - if ( ! from_url . isNull ( ) ) { <nl> - setInfoBar ( tr ( " Unable to decode torrent file : " ) + " ' " + from_url + " ' " , " red " ) ; <nl> - } else { <nl> - setInfoBar ( tr ( " Unable to decode torrent file : " ) + " ' " + file + " ' " , " red " ) ; <nl> - } <nl> - setInfoBar ( tr ( " This file is either corrupted or this isn ' t a torrent . " ) , " red " ) ; <nl> - if ( fromScanDir ) { <nl> - / / Remove . corrupt file in case it already exists <nl> - QFile : : remove ( file + " . corrupt " ) ; <nl> - / / Rename file extension so that it won ' t display error message more than once <nl> - QFile : : rename ( file , file + " . corrupt " ) ; <nl> - } <nl> - } <nl> - catch ( invalid_torrent_file & ) { / / Raised by torrent_info constructor <nl> - / / Display warning to tell user we can ' t decode the torrent file <nl> - if ( ! from_url . isNull ( ) ) { <nl> - setInfoBar ( tr ( " Unable to decode torrent file : " ) + " ' " + from_url + " ' " , " red " ) ; <nl> - } else { <nl> - setInfoBar ( tr ( " Unable to decode torrent file : " ) + " ' " + file + " ' " , " red " ) ; <nl> - } <nl> - setInfoBar ( tr ( " This file is either corrupted or this isn ' t a torrent . " ) , " red " ) ; <nl> - if ( fromScanDir ) { <nl> - / / Remove . corrupt file in case it already exists <nl> - QFile : : remove ( file + " . corrupt " ) ; <nl> - / / Rename file extension so that it won ' t display error message more than once <nl> - QFile : : rename ( file , file + " . corrupt " ) ; <nl> - } <nl> - } <nl> + void GUI : : torrentCorrupted ( const QString & path ) { <nl> + setInfoBar ( tr ( " Unable to decode torrent file : " ) + " ' " + path + " ' " , " red " ) ; <nl> + setInfoBar ( tr ( " This file is either corrupted or this isn ' t a torrent . 
" ) , " red " ) ; <nl> } <nl> <nl> QString GUI : : getSavePath ( QString hash ) { <nl> QString GUI : : getSavePath ( QString hash ) { <nl> return savePath ; <nl> } <nl> <nl> - void GUI : : reloadTorrent ( const torrent_handle & h , bool compact_mode ) { <nl> - QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> - fs : : path saveDir = h . save_path ( ) ; <nl> - QString fileName = QString ( h . name ( ) . c_str ( ) ) ; <nl> - QString fileHash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> - qDebug ( " Reloading torrent : % s " , ( const char * ) fileName . toUtf8 ( ) ) ; <nl> - torrent_handle new_h ; <nl> - entry resumeData ; <nl> - torrent_info t = h . get_torrent_info ( ) ; <nl> - / / Checking if torrentBackup Dir exists <nl> - / / create it if it is not <nl> - if ( ! torrentBackup . exists ( ) ) { <nl> - torrentBackup . mkpath ( torrentBackup . path ( ) ) ; <nl> - } <nl> - / / Write fast resume data <nl> - / / Pause download ( needed before fast resume writing ) <nl> - h . pause ( ) ; <nl> - / / Extracting resume data <nl> - if ( h . has_metadata ( ) ) { <nl> - / / get fast resume data <nl> - resumeData = h . write_resume_data ( ) ; <nl> - } <nl> - int row = - 1 ; <nl> - / / Delete item from download list <nl> - for ( int i = 0 ; i < DLListModel - > rowCount ( ) ; + + i ) { <nl> - if ( DLListModel - > data ( DLListModel - > index ( i , HASH ) ) . toString ( ) = = fileHash ) { <nl> - row = i ; <nl> - break ; <nl> - } <nl> - } <nl> - Q_ASSERT ( row ! = - 1 ) ; <nl> - DLListModel - > removeRow ( row ) ; <nl> - / / Remove torrent <nl> - s - > remove_torrent ( h ) ; <nl> - handles . remove ( fileHash ) ; <nl> - / / Add torrent again to session <nl> - unsigned short timeout = 0 ; <nl> - while ( h . is_valid ( ) & & timeout < 6 ) { <nl> - SleeperThread : : msleep ( 1000 ) ; <nl> - + + timeout ; <nl> - } <nl> - if ( h . is_valid ( ) ) { <nl> - std : : cerr < < " Error : Couldn ' t reload the torrent \ n " ; <nl> - return ; <nl> - } <nl> - new_h = s - > add_torrent ( t , saveDir , resumeData , compact_mode ) ; <nl> - if ( compact_mode ) { <nl> - qDebug ( " Using compact allocation mode " ) ; <nl> - } else { <nl> - qDebug ( " Using full allocation mode " ) ; <nl> - } <nl> - handles . insert ( fileHash , new_h ) ; <nl> - new_h . set_max_connections ( 60 ) ; <nl> - new_h . set_max_uploads ( - 1 ) ; <nl> - / / Load filtered Files <nl> - loadFilteredFiles ( new_h ) ; <nl> - / / Adding torrent to download list <nl> - DLListModel - > insertRow ( row ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( t . name ( ) . c_str ( ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , SIZE ) , QVariant ( ( qlonglong ) t . total_size ( ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> - / / Pause torrent if it was paused last time <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + QString ( t . name ( ) . c_str ( ) ) + " . paused " ) ) { <nl> - DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / paused . 
png " ) ) , Qt : : DecorationRole ) ; <nl> - setRowColor ( row , " red " ) ; <nl> - } else { <nl> - DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Connecting . . . " ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / connecting . png " ) ) , Qt : : DecorationRole ) ; <nl> - setRowColor ( row , " grey " ) ; <nl> - } <nl> - / / Pause torrent if it was paused last time <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . paused " ) ) { <nl> - new_h . pause ( ) ; <nl> - } <nl> - / / Incremental download <nl> - if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . incremental " ) ) { <nl> - qDebug ( " Incremental download enabled for % s " , ( const char * ) fileName . toUtf8 ( ) ) ; <nl> - new_h . set_sequenced_download_threshold ( 15 ) ; <nl> - } <nl> - } <nl> - <nl> / / As program parameters , we can get paths or urls . <nl> / / This function parse the parameters and call <nl> / / the right addTorrent function , considering <nl> void GUI : : processParams ( const QStringList & params ) { <nl> foreach ( param , params ) { <nl> param = param . trimmed ( ) ; <nl> if ( param . startsWith ( " http : / / " , Qt : : CaseInsensitive ) | | param . startsWith ( " ftp : / / " , Qt : : CaseInsensitive ) | | param . startsWith ( " https : / / " , Qt : : CaseInsensitive ) ) { <nl> - downloadFromUrl ( param ) ; <nl> + BTSession . downloadFromUrl ( param ) ; <nl> } else { <nl> if ( options - > useAdditionDialog ( ) ) { <nl> torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> - connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , this , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> + connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , & BTSession , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> dialog - > showLoad ( param ) ; <nl> } else { <nl> - addTorrent ( param ) ; <nl> + BTSession . addTorrent ( param ) ; <nl> } <nl> } <nl> } <nl> } <nl> <nl> - / / libtorrent allow to adjust ratio for each torrent <nl> - / / This function will apply to same ratio to all torrents <nl> - void GUI : : setGlobalRatio ( float ratio ) { <nl> - foreach ( torrent_handle h , handles ) { <nl> - h . set_ratio ( ratio ) ; <nl> + void GUI : : processScannedFiles ( const QStringList & params ) { <nl> + QString param ; <nl> + foreach ( param , params ) { <nl> + if ( options - > useAdditionDialog ( ) ) { <nl> + torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> + connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , & BTSession , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> + connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> + dialog - > showLoad ( param , true ) ; <nl> + } else { <nl> + BTSession . 
addTorrent ( param , true ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + void GUI : : processDownloadedFiles ( const QString & path , const QString & url ) { <nl> + if ( options - > useAdditionDialog ( ) ) { <nl> + torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> + connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , & BTSession , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> + connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> + dialog - > showLoad ( path , false , url ) ; <nl> + } else { <nl> + BTSession . addTorrent ( path , false , url ) ; <nl> } <nl> } <nl> <nl> void GUI : : setGlobalRatio ( float ratio ) { <nl> void GUI : : showProperties ( const QModelIndex & index ) { <nl> int row = index . row ( ) ; <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( row , HASH ) ) . toString ( ) ; <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> + torrent_handle h = BTSession . getTorrentHandle ( fileHash ) ; <nl> QStringList errors = trackerErrors . value ( fileHash , QStringList ( tr ( " None " ) ) ) ; <nl> properties * prop = new properties ( this , h , errors ) ; <nl> - connect ( prop , SIGNAL ( changedFilteredFiles ( torrent_handle , bool ) ) , this , SLOT ( reloadTorrent ( torrent_handle , bool ) ) ) ; <nl> + connect ( prop , SIGNAL ( changedFilteredFiles ( torrent_handle , bool ) ) , & BTSession , SLOT ( reloadTorrent ( torrent_handle , bool ) ) ) ; <nl> prop - > show ( ) ; <nl> } <nl> <nl> void GUI : : configureSession ( ) { <nl> QPair < int , int > limits ; <nl> unsigned short old_listenPort , new_listenPort ; <nl> session_settings proxySettings ; <nl> - / / creating BT Session & listen <nl> / / Configure session regarding options <nl> - try { <nl> - if ( s - > is_listening ( ) ) { <nl> - old_listenPort = s - > listen_port ( ) ; <nl> - } else { <nl> - old_listenPort = 0 ; <nl> - } <nl> - std : : pair < unsigned short , unsigned short > new_listenPorts = options - > getPorts ( ) ; <nl> - if ( listenPorts ! = new_listenPorts ) { <nl> - s - > listen_on ( new_listenPorts ) ; <nl> - listenPorts = new_listenPorts ; <nl> - } <nl> - <nl> - if ( s - > is_listening ( ) ) { <nl> - new_listenPort = s - > listen_port ( ) ; <nl> - if ( new_listenPort ! = old_listenPort ) { <nl> - setInfoBar ( tr ( " Listening on port " , " Listening on port < xxxxx > " ) + " : " + QString ( misc : : toString ( new_listenPort ) . c_str ( ) ) ) ; <nl> - } <nl> - } <nl> - / / Apply max connec limit ( - 1 if disabled ) <nl> - int max_connec = options - > getMaxConnec ( ) ; <nl> - s - > set_max_connections ( max_connec ) ; <nl> - <nl> - limits = options - > getLimits ( ) ; <nl> - switch ( limits . first ) { <nl> - case - 1 : / / Download limit disabled <nl> - case 0 : <nl> - s - > set_download_rate_limit ( - 1 ) ; <nl> - break ; <nl> - default : <nl> - s - > set_download_rate_limit ( limits . first * 1024 ) ; <nl> - } <nl> - switch ( limits . second ) { <nl> - case - 1 : / / Upload limit disabled <nl> - case 0 : <nl> - s - > set_upload_rate_limit ( - 1 ) ; <nl> - break ; <nl> - default : <nl> - s - > set_upload_rate_limit ( limits . second * 1024 ) ; <nl> - } <nl> - / / Apply ratio ( 0 if disabled ) <nl> - setGlobalRatio ( options - > getRatio ( ) ) ; <nl> - / / DHT ( Trackerless ) <nl> - if ( options - > isDHTEnabled ( ) & & ! 
DHTEnabled ) { <nl> - boost : : filesystem : : ifstream dht_state_file ( ( const char * ) ( misc : : qBittorrentPath ( ) + QString ( " dht_state " ) ) . toUtf8 ( ) , std : : ios_base : : binary ) ; <nl> - dht_state_file . unsetf ( std : : ios_base : : skipws ) ; <nl> - entry dht_state ; <nl> - try { <nl> - dht_state = bdecode ( std : : istream_iterator < char > ( dht_state_file ) , std : : istream_iterator < char > ( ) ) ; <nl> - } catch ( std : : exception & ) { } <nl> - s - > start_dht ( dht_state ) ; <nl> - s - > add_dht_router ( std : : make_pair ( std : : string ( " router . bittorrent . com " ) , 6881 ) ) ; <nl> - s - > add_dht_router ( std : : make_pair ( std : : string ( " router . utorrent . com " ) , 6881 ) ) ; <nl> - s - > add_dht_router ( std : : make_pair ( std : : string ( " router . bitcomet . com " ) , 6881 ) ) ; <nl> - DHTEnabled = true ; <nl> - qDebug ( " Enabling DHT Support " ) ; <nl> - } else { <nl> - if ( ! options - > isDHTEnabled ( ) & & DHTEnabled ) { <nl> - s - > stop_dht ( ) ; <nl> - DHTEnabled = false ; <nl> - qDebug ( " Disabling DHT Support " ) ; <nl> - } <nl> - } <nl> - if ( ! options - > isPeXDisabled ( ) ) { <nl> - qDebug ( " Enabling Peer eXchange ( PeX ) " ) ; <nl> - s - > add_extension ( & create_ut_pex_plugin ) ; <nl> - } else { <nl> - qDebug ( " Peer eXchange ( PeX ) disabled " ) ; <nl> - } <nl> - int dht_port = options - > getDHTPort ( ) ; <nl> - if ( dht_port > = 1000 ) { <nl> - struct dht_settings DHTSettings ; <nl> - DHTSettings . service_port = dht_port ; <nl> - s - > set_dht_settings ( DHTSettings ) ; <nl> - qDebug ( " Set DHT Port to % d " , dht_port ) ; <nl> - } <nl> - / / Apply filtering settings <nl> - if ( options - > isFilteringEnabled ( ) ) { <nl> - qDebug ( " Ip Filter enabled " ) ; <nl> - s - > set_ip_filter ( options - > getFilter ( ) ) ; <nl> - } else { <nl> - qDebug ( " Ip Filter disabled " ) ; <nl> - s - > set_ip_filter ( ip_filter ( ) ) ; <nl> - } <nl> - / / Apply Proxy settings <nl> - if ( options - > isProxyEnabled ( ) ) { <nl> - proxySettings . proxy_ip = options - > getProxyIp ( ) . toStdString ( ) ; <nl> - proxySettings . proxy_port = options - > getProxyPort ( ) ; <nl> - if ( options - > isProxyAuthEnabled ( ) ) { <nl> - proxySettings . proxy_login = options - > getProxyUsername ( ) . toStdString ( ) ; <nl> - proxySettings . proxy_password = options - > getProxyPassword ( ) . toStdString ( ) ; <nl> - } <nl> - } <nl> - proxySettings . user_agent = " qBittorrent " VERSION ; <nl> - s - > set_settings ( proxySettings ) ; <nl> - / / Scan dir stuff <nl> - if ( options - > getScanDir ( ) . isNull ( ) ) { <nl> - if ( timerScan - > isActive ( ) ) { <nl> - timerScan - > stop ( ) ; <nl> - } <nl> - } else { <nl> - if ( ! timerScan - > isActive ( ) ) { <nl> - timerScan - > start ( 5000 ) ; <nl> - } <nl> - } <nl> - } catch ( std : : exception & e ) { <nl> - std : : cerr < < e . what ( ) < < " \ n " ; <nl> + BTSession . setDefaultSavePath ( options - > getSavePath ( ) ) ; <nl> + old_listenPort = BTSession . getListenPort ( ) ; <nl> + BTSession . setListeningPortsRange ( options - > getPorts ( ) ) ; <nl> + new_listenPort = BTSession . getListenPort ( ) ; <nl> + if ( new_listenPort ! = old_listenPort ) { <nl> + setInfoBar ( tr ( " Listening on port " , " Listening on port < xxxxx > " ) + " : " + QString ( misc : : toString ( new_listenPort ) . c_str ( ) ) ) ; <nl> + } <nl> + / / Apply max connec limit ( - 1 if disabled ) <nl> + BTSession . 
setMaxConnections ( options - > getMaxConnec ( ) ) ; <nl> + limits = options - > getLimits ( ) ; <nl> + switch ( limits . first ) { <nl> + case - 1 : / / Download limit disabled <nl> + case 0 : <nl> + BTSession . setDownloadRateLimit ( - 1 ) ; <nl> + break ; <nl> + default : <nl> + BTSession . setDownloadRateLimit ( limits . first * 1024 ) ; <nl> + } <nl> + switch ( limits . second ) { <nl> + case - 1 : / / Upload limit disabled <nl> + case 0 : <nl> + BTSession . setDownloadRateLimit ( - 1 ) ; <nl> + break ; <nl> + default : <nl> + BTSession . setDownloadRateLimit ( limits . second * 1024 ) ; <nl> + } <nl> + / / Apply ratio ( 0 if disabled ) <nl> + BTSession . setGlobalRatio ( options - > getRatio ( ) ) ; <nl> + / / DHT ( Trackerless ) <nl> + if ( options - > isDHTEnabled ( ) ) { <nl> + BTSession . enableDHT ( ) ; <nl> + } else { <nl> + BTSession . disableDHT ( ) ; <nl> + } <nl> + / / Set DHT Port <nl> + BTSession . setDHTPort ( options - > getDHTPort ( ) ) ; <nl> + if ( ! options - > isPeXDisabled ( ) ) { <nl> + qDebug ( " Enabling Peer eXchange ( PeX ) " ) ; <nl> + BTSession . enablePeerExchange ( ) ; <nl> + } else { <nl> + qDebug ( " Peer eXchange ( PeX ) disabled " ) ; <nl> + } <nl> + / / Apply filtering settings <nl> + if ( options - > isFilteringEnabled ( ) ) { <nl> + BTSession . enableIPFilter ( options - > getFilter ( ) ) ; <nl> + } else { <nl> + BTSession . disableIPFilter ( ) ; <nl> + } <nl> + / / Apply Proxy settings <nl> + if ( options - > isProxyEnabled ( ) ) { <nl> + proxySettings . proxy_ip = options - > getProxyIp ( ) . toStdString ( ) ; <nl> + proxySettings . proxy_port = options - > getProxyPort ( ) ; <nl> + if ( options - > isProxyAuthEnabled ( ) ) { <nl> + proxySettings . proxy_login = options - > getProxyUsername ( ) . toStdString ( ) ; <nl> + proxySettings . proxy_password = options - > getProxyPassword ( ) . toStdString ( ) ; <nl> + } <nl> + } <nl> + proxySettings . user_agent = " qBittorrent " VERSION ; <nl> + BTSession . setSessionSettings ( proxySettings ) ; <nl> + / / Scan dir stuff <nl> + if ( options - > getScanDir ( ) . isNull ( ) ) { <nl> + BTSession . disableDirectoryScanning ( ) ; <nl> + } else { <nl> + BTSession . enableDirectoryScanning ( options - > getScanDir ( ) ) ; <nl> } <nl> qDebug ( " Session configured " ) ; <nl> } <nl> void GUI : : pauseSelection ( ) { <nl> if ( index . column ( ) = = NAME ) { <nl> / / Get the file name <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( index . row ( ) , HASH ) ) . toString ( ) ; <nl> - / / Get handle and pause the torrent <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> - if ( ! h . is_paused ( ) ) { <nl> - h . pause ( ) ; <nl> - / / Create . paused file <nl> - QFile paused_file ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . paused " ) ; <nl> - paused_file . open ( QIODevice : : WriteOnly | QIODevice : : Text ) ; <nl> - paused_file . close ( ) ; <nl> + if ( ! BTSession . isPaused ( fileHash ) ) { <nl> + / / Pause the torrent <nl> + BTSession . pauseTorrent ( fileHash ) ; <nl> / / Update DL status <nl> int row = index . row ( ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . 0 ) ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . 
0 ) ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> - setInfoBar ( " ' " + QString ( h . name ( ) . c_str ( ) ) + " ' " + tr ( " paused . " , " < file > paused . " ) ) ; <nl> + setInfoBar ( " ' " + QString ( BTSession . getTorrentHandle ( fileHash ) . name ( ) . c_str ( ) ) + " ' " + tr ( " paused . " , " < file > paused . " ) ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QIcon ( " : / Icons / skin / paused . png " ) , Qt : : DecorationRole ) ; <nl> setRowColor ( row , " red " ) ; <nl> } <nl> void GUI : : startSelection ( ) { <nl> if ( index . column ( ) = = NAME ) { <nl> / / Get the file name <nl> QString fileHash = DLListModel - > data ( DLListModel - > index ( index . row ( ) , HASH ) ) . toString ( ) ; <nl> - / / Get handle and pause the torrent <nl> - torrent_handle h = handles . value ( fileHash ) ; <nl> - if ( h . is_paused ( ) ) { <nl> - h . resume ( ) ; <nl> + if ( BTSession . isPaused ( fileHash ) ) { <nl> + / / Resume the torrent <nl> + BTSession . resumeTorrent ( fileHash ) ; <nl> / / Delete . paused file <nl> QFile : : remove ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . paused " ) ; <nl> / / Update DL status <nl> int row = index . row ( ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Connecting . . . " ) ) ) ; <nl> - setInfoBar ( " ' " + QString ( h . name ( ) . c_str ( ) ) + " ' " + tr ( " resumed . " , " < file > resumed . " ) ) ; <nl> + setInfoBar ( " ' " + QString ( BTSession . getTorrentHandle ( fileHash ) . name ( ) . c_str ( ) ) + " ' " + tr ( " resumed . " , " < file > resumed . " ) ) ; <nl> DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QVariant ( QIcon ( " : / Icons / skin / connecting . png " ) ) , Qt : : DecorationRole ) ; <nl> setRowColor ( row , " grey " ) ; <nl> } <nl> void GUI : : propertiesSelection ( ) { <nl> } <nl> } <nl> <nl> + / / called when a torrent has finished <nl> + void GUI : : finishedTorrent ( torrent_handle & h ) { <nl> + QString fileName = QString ( h . name ( ) . c_str ( ) ) ; <nl> + setInfoBar ( fileName + tr ( " has finished downloading . " ) ) ; <nl> + if ( options - > getUseOSDAlways ( ) | | ( options - > getUseOSDWhenHiddenOnly ( ) & & ( isMinimized ( ) | | isHidden ( ) ) ) ) { <nl> + myTrayIcon - > showMessage ( tr ( " Download finished " ) , fileName + tr ( " has finished downloading . " , " < filename > has finished downloading . " ) , QSystemTrayIcon : : Information , TIME_TRAY_BALLOON ) ; <nl> + } <nl> + } <nl> + <nl> + / / Notification when disk is full <nl> + void GUI : : fullDiskError ( torrent_handle & h ) { <nl> + if ( options - > getUseOSDAlways ( ) | | ( options - > getUseOSDWhenHiddenOnly ( ) & & ( isMinimized ( ) | | isHidden ( ) ) ) ) { <nl> + myTrayIcon - > showMessage ( tr ( " I / O Error " ) , tr ( " An error occured when trying to read or write " ) + QString ( h . name ( ) . c_str ( ) ) + " . " + tr ( " The disk is probably full , download has been paused " ) , QSystemTrayIcon : : Critical , TIME_TRAY_BALLOON ) ; <nl> + } <nl> + / / Download will be paused by libtorrent . Updating GUI information accordingly <nl> + int row = getRowFromHash ( QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . 
0 ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . 0 ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> + setInfoBar ( tr ( " An error occured ( full fisk ? ) " ) + " , ' " + QString ( h . get_torrent_info ( ) . name ( ) . c_str ( ) ) + " ' " + tr ( " paused . " , " < file > paused . " ) ) ; <nl> + DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QIcon ( " : / Icons / skin / paused . png " ) , Qt : : DecorationRole ) ; <nl> + setRowColor ( row , " red " ) ; <nl> + } <nl> + <nl> + / / Called when we couldn ' t listen on any port <nl> + / / in the given range . <nl> + void GUI : : portListeningFailure ( ) { <nl> + setInfoBar ( tr ( " Couldn ' t listen on any of the given ports . " ) , " red " ) ; <nl> + } <nl> + <nl> + / / Called when we receive an error from tracker <nl> + void GUI : : trackerError ( const QString & hash , const QString & time , const QString & msg ) { <nl> + / / Check trackerErrors list size and clear it if it is too big <nl> + if ( trackerErrors . size ( ) > 50 ) { <nl> + trackerErrors . clear ( ) ; <nl> + } <nl> + QStringList errors = trackerErrors . value ( hash , QStringList ( ) ) ; <nl> + errors . append ( " < font color = ' grey ' > " + time + " < / font > - < font color = ' red ' > " + msg + " < / font > " ) ; <nl> + trackerErrors . insert ( hash , errors ) ; <nl> + } <nl> + <nl> + / / Called when a tracker requires authentication <nl> + void GUI : : trackerAuthenticationRequired ( torrent_handle & h ) { <nl> + if ( unauthenticated_trackers . indexOf ( QPair < torrent_handle , std : : string > ( h , h . status ( ) . current_tracker ) ) < 0 ) { <nl> + / / Tracker login <nl> + new trackerLogin ( this , h ) ; <nl> + } <nl> + } <nl> + <nl> / / Check connection status and display right icon <nl> void GUI : : checkConnectionStatus ( ) { <nl> qDebug ( " Checking connection status " ) ; <nl> char tmp [ MAX_CHAR_TMP ] ; <nl> - session_status sessionStatus = s - > status ( ) ; <nl> + session_status sessionStatus = BTSession . getSessionStatus ( ) ; <nl> / / Update ratio info <nl> float ratio = 1 . ; <nl> if ( sessionStatus . total_payload_download ! = 0 ) { <nl> void GUI : : checkConnectionStatus ( ) { <nl> connecStatusLblIcon - > setToolTip ( tr ( " < b > Connection Status : < / b > < br > Offline < br > < i > No peers found . . . < / i > " ) ) ; <nl> } <nl> } <nl> - / / Check trackerErrors list size and clear it if it is too big <nl> - if ( trackerErrors . size ( ) > 50 ) { <nl> - trackerErrors . clear ( ) ; <nl> - } <nl> - / / look at session alerts and display some infos <nl> - std : : auto_ptr < alert > a = s - > pop_alert ( ) ; <nl> - while ( a . get ( ) ) { <nl> - if ( torrent_finished_alert * p = dynamic_cast < torrent_finished_alert * > ( a . get ( ) ) ) { <nl> - QString fileName = QString ( p - > handle . get_torrent_info ( ) . name ( ) . c_str ( ) ) ; <nl> - QString fileHash = QString ( misc : : toString ( p - > handle . info_hash ( ) ) . c_str ( ) ) ; <nl> - / / Level : info <nl> - setInfoBar ( fileName + tr ( " has finished downloading . " ) ) ; <nl> - if ( options - > getUseOSDAlways ( ) | | ( options - > getUseOSDWhenHiddenOnly ( ) & & ( isMinimized ( ) | | isHidden ( ) ) ) ) { <nl> - myTrayIcon - > showMessage ( tr ( " Download finished " ) , fileName + tr ( " has finished downloading . 
" , " < filename > has finished downloading . " ) , QSystemTrayIcon : : Information , TIME_TRAY_BALLOON ) ; <nl> - } <nl> - QString scan_dir = options - > getScanDir ( ) ; <nl> - bool isScanningDir = ! scan_dir . isNull ( ) ; <nl> - if ( isScanningDir & & scan_dir . at ( scan_dir . length ( ) - 1 ) ! = QDir : : separator ( ) ) { <nl> - scan_dir + = QDir : : separator ( ) ; <nl> - } <nl> - / / Remove it from torrentBackup directory <nl> - / / No need to resume it <nl> - if ( options - > getClearFinishedOnExit ( ) ) { <nl> - QFile : : remove ( fileHash + " . torrent " ) ; <nl> - QFile : : remove ( fileHash + " . fastresume " ) ; <nl> - if ( isScanningDir ) { <nl> - QFile : : remove ( scan_dir + fileHash + " . torrent " ) ; <nl> - } <nl> - } <nl> - } <nl> - else if ( file_error_alert * p = dynamic_cast < file_error_alert * > ( a . get ( ) ) ) { <nl> - QString fileName = QString ( p - > handle . get_torrent_info ( ) . name ( ) . c_str ( ) ) ; <nl> - QString fileHash = QString ( misc : : toString ( p - > handle . info_hash ( ) ) . c_str ( ) ) ; <nl> - if ( options - > getUseOSDAlways ( ) | | ( options - > getUseOSDWhenHiddenOnly ( ) & & ( isMinimized ( ) | | isHidden ( ) ) ) ) { <nl> - myTrayIcon - > showMessage ( tr ( " I / O Error " ) , tr ( " An error occured when trying to read or write " ) + fileName + " . " + tr ( " The disk is probably full , download has been paused " ) , QSystemTrayIcon : : Critical , TIME_TRAY_BALLOON ) ; <nl> - / / Download will be pausedby libtorrent . Updating GUI information accordingly <nl> - int row = getRowFromHash ( fileHash ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , DLSPEED ) , QVariant ( ( double ) 0 . 0 ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , UPSPEED ) , QVariant ( ( double ) 0 . 0 ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , STATUS ) , QVariant ( tr ( " Paused " ) ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , ETA ) , QVariant ( ( qlonglong ) - 1 ) ) ; <nl> - setInfoBar ( " ' " + fileName + " ' " + tr ( " paused . " , " < file > paused . " ) ) ; <nl> - DLListModel - > setData ( DLListModel - > index ( row , NAME ) , QIcon ( " : / Icons / skin / paused . png " ) , Qt : : DecorationRole ) ; <nl> - setRowColor ( row , " red " ) ; <nl> - } <nl> - } <nl> - else if ( dynamic_cast < listen_failed_alert * > ( a . get ( ) ) ) { <nl> - / / Level : fatal <nl> - setInfoBar ( tr ( " Couldn ' t listen on any of the given ports . " ) , " red " ) ; <nl> - } <nl> - else if ( tracker_alert * p = dynamic_cast < tracker_alert * > ( a . get ( ) ) ) { <nl> - / / Level : fatal <nl> - QString fileHash = QString ( misc : : toString ( p - > handle . info_hash ( ) ) . c_str ( ) ) ; <nl> - QStringList errors = trackerErrors . value ( fileHash , QStringList ( ) ) ; <nl> - errors . append ( " < font color = ' grey ' > " + QTime : : currentTime ( ) . toString ( " hh : mm : ss " ) + " < / font > - < font color = ' red ' > " + QString ( a - > msg ( ) . c_str ( ) ) + " < / font > " ) ; <nl> - trackerErrors . insert ( fileHash , errors ) ; <nl> - / / Authentication <nl> - if ( p - > status_code = = 401 ) { <nl> - if ( unauthenticated_trackers . indexOf ( QPair < torrent_handle , std : : string > ( p - > handle , p - > handle . status ( ) . current_tracker ) ) < 0 ) { <nl> - / / Tracker login <nl> - tracker_login = new trackerLogin ( this , p - > handle ) ; <nl> - } <nl> - } <nl> - } <nl> - / / else if ( peer_error_alert * p = dynamic_cast < peer_error_alert * > ( a . 
get ( ) ) ) <nl> - / / { <nl> - / / events . push_back ( identify_client ( p - > id ) + " : " + a - > msg ( ) ) ; <nl> - / / } <nl> - / / else if ( invalid_request_alert * p = dynamic_cast < invalid_request_alert * > ( a . get ( ) ) ) <nl> - / / { <nl> - / / events . push_back ( identify_client ( p - > id ) + " : " + a - > msg ( ) ) ; <nl> - / / } <nl> - a = s - > pop_alert ( ) ; <nl> - } <nl> qDebug ( " Connection status updated " ) ; <nl> } <nl> <nl> void GUI : : downloadSelectedItem ( const QModelIndex & index ) { <nl> / / Get Item url <nl> QString url = searchResultsUrls . value ( SearchListModel - > data ( SearchListModel - > index ( row , NAME ) ) . toString ( ) ) ; <nl> / / Download from url <nl> - downloadFromUrl ( url ) ; <nl> + BTSession . downloadFromUrl ( url ) ; <nl> / / Set item color to RED <nl> setRowColor ( row , " red " , false ) ; <nl> } <nl> void GUI : : on_download_button_clicked ( ) { <nl> if ( index . column ( ) = = NAME ) { <nl> / / Get Item url <nl> QString url = searchResultsUrls . value ( index . data ( ) . toString ( ) ) ; <nl> - downloadFromUrl ( url ) ; <nl> + BTSession . downloadFromUrl ( url ) ; <nl> setRowColor ( index . row ( ) , " red " , false ) ; <nl> } <nl> } <nl> void GUI : : OptionsSaved ( const QString & info ) { <nl> * * <nl> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> <nl> - void GUI : : processDownloadedFile ( QString url , QString file_path , int return_code , QString errorBuffer ) { <nl> - if ( return_code ) { <nl> - / / Download failed <nl> - setInfoBar ( tr ( " Couldn ' t download " , " Couldn ' t download < file > " ) + " " + url + " , " + tr ( " reason : " , " Reason why the download failed " ) + " " + errorBuffer , " red " ) ; <nl> - QFile : : remove ( file_path ) ; <nl> - return ; <nl> - } <nl> - / / Add file to torrent download list <nl> - if ( options - > useAdditionDialog ( ) ) { <nl> - torrentAdditionDialog * dialog = new torrentAdditionDialog ( this ) ; <nl> - connect ( dialog , SIGNAL ( torrentAddition ( const QString & , bool , const QString & ) ) , this , SLOT ( addTorrent ( const QString & , bool , const QString & ) ) ) ; <nl> - connect ( dialog , SIGNAL ( setInfoBarGUI ( const QString & , const QString & ) ) , this , SLOT ( setInfoBar ( const QString & , const QString & ) ) ) ; <nl> - dialog - > showLoad ( file_path , false , url ) ; <nl> - } else { <nl> - addTorrent ( file_path , false , url ) ; <nl> - } <nl> - } <nl> - <nl> - / / Take an url string to a torrent file , <nl> - / / download the torrent file to a tmp location , then <nl> - / / add it to download list <nl> - void GUI : : downloadFromUrl ( const QString & url ) { <nl> - setInfoBar ( tr ( " Downloading " , " Example : Downloading www . example . com / test . torrent " ) + " ' " + url + " ' , " + tr ( " Please wait . . . " ) , " black " ) ; <nl> - / / Launch downloader thread <nl> - downloader - > downloadUrl ( url ) ; <nl> - / / downloader - > start ( ) ; <nl> - } <nl> - <nl> / / Display an input dialog to prompt user for <nl> / / an url <nl> void GUI : : askForTorrentUrl ( ) { <nl> downloadFromURLDialog = new downloadFromURL ( this ) ; <nl> } <nl> - <nl> - void GUI : : downloadFromURLList ( const QStringList & url_list ) { <nl> - QString url ; <nl> - foreach ( url , url_list ) { <nl> - downloadFromUrl ( url ) ; <nl> - } <nl> - } <nl> mmm a / src / GUI . h <nl> ppp b / src / GUI . h <nl> <nl> # include " about_imp . h " <nl> # include " previewSelect . h " <nl> # include " trackerLogin . 
h " <nl> - # include " deleteThread . h " <nl> # include " bittorrent . h " <nl> <nl> <nl> class GUI : public QMainWindow , private Ui : : MainWindow { <nl> <nl> private : <nl> / / Bittorrent <nl> - session * s ; <nl> - std : : pair < unsigned short , unsigned short > listenPorts ; <nl> - QHash < QString , torrent_handle > handles ; <nl> + bittorrent BTSession ; <nl> QTimer * checkConnect ; <nl> - QTimer * timerScan ; <nl> QHash < QString , QStringList > trackerErrors ; <nl> - trackerLogin * tracker_login ; <nl> QList < QPair < torrent_handle , std : : string > > unauthenticated_trackers ; <nl> - downloadThread * downloader ; <nl> downloadFromURL * downloadFromURLDialog ; <nl> - bool DHTEnabled ; <nl> - QList < deleteThread * > deleters ; <nl> / / GUI related <nl> options_imp * options ; <nl> createtorrent * createWindow ; <nl> class GUI : public QMainWindow , private Ui : : MainWindow { <nl> DLListDelegate * DLDelegate ; <nl> QStandardItemModel * SearchListModel ; <nl> SearchListDelegate * SearchDelegate ; <nl> - QStringList supported_preview_extensions ; <nl> unsigned int nbTorrents ; <nl> QLabel * connecStatusLblIcon ; <nl> / / Preview <nl> class GUI : public QMainWindow , private Ui : : MainWindow { <nl> / / Torrent actions <nl> void showProperties ( const QModelIndex & index ) ; <nl> void propertiesSelection ( ) ; <nl> - void addTorrent ( const QString & path , bool fromScanDir = false , const QString & from_url = QString ( ) ) ; <nl> void pauseSelection ( ) ; <nl> void startSelection ( ) ; <nl> void askForTorrents ( ) ; <nl> void deletePermanently ( ) ; <nl> void deleteSelection ( ) ; <nl> - void resumeUnfinished ( ) ; <nl> - void saveFastResumeData ( ) const ; <nl> void checkConnectionStatus ( ) ; <nl> - void scanDirectory ( ) ; <nl> - void setGlobalRatio ( float ratio ) ; <nl> void configureSession ( ) ; <nl> void processParams ( const QStringList & params ) ; <nl> void addUnauthenticatedTracker ( QPair < torrent_handle , std : : string > tracker ) ; <nl> - void processDownloadedFile ( QString url , QString file_path , int return_code , QString errorBuffer ) ; <nl> - void downloadFromURLList ( const QStringList & url_list ) ; <nl> - bool loadFilteredFiles ( torrent_handle & h ) ; <nl> - bool hasFilteredFiles ( const QString & fileName ) ; <nl> - void reloadTorrent ( const torrent_handle & h , bool compact_mode = true ) ; <nl> + void processScannedFiles ( const QStringList & params ) ; <nl> + void processDownloadedFiles ( const QString & path , const QString & url ) ; <nl> / / Search slots <nl> void on_search_button_clicked ( ) ; <nl> void on_stop_search_button_clicked ( ) ; <nl> class GUI : public QMainWindow , private Ui : : MainWindow { <nl> void showOptions ( ) const ; <nl> void OptionsSaved ( const QString & info ) ; <nl> / / HTTP slots <nl> - void downloadFromUrl ( const QString & url ) ; <nl> void askForTorrentUrl ( ) ; <nl> <nl> public slots : <nl> void setLocale ( QString locale ) ; <nl> + void torrentAdded ( const QString & path , torrent_handle & h , bool fastResume ) ; <nl> + void torrentDuplicate ( const QString & path ) ; <nl> + void torrentCorrupted ( const QString & path ) ; <nl> + void finishedTorrent ( torrent_handle & h ) ; <nl> + void fullDiskError ( torrent_handle & h ) ; <nl> + void portListeningFailure ( ) ; <nl> + void trackerError ( const QString & hash , const QString & time , const QString & msg ) ; <nl> + void trackerAuthenticationRequired ( torrent_handle & h ) ; <nl> <nl> protected : <nl> void closeEvent ( QCloseEvent * ) ; <nl> class GUI : public 
QMainWindow , private Ui : : MainWindow { <nl> float getNovaVersion ( const QString & novaPath ) const ; <nl> QByteArray getNovaChangelog ( const QString & novaPath ) const ; <nl> void updateNova ( ) const ; <nl> - bool isFilePreviewPossible ( const torrent_handle & h ) const ; <nl> QString getSavePath ( QString fileName ) ; <nl> QPoint screenCenter ( ) ; <nl> } ; <nl> mmm a / src / bittorrent . cpp <nl> ppp b / src / bittorrent . cpp <nl> <nl> * / <nl> # include " bittorrent . h " <nl> # include " misc . h " <nl> + # include " downloadThread . h " <nl> <nl> # include < QDir > <nl> # include < QTime > <nl> <nl> / / Main constructor <nl> bittorrent : : bittorrent ( ) { <nl> + / / Supported preview extensions <nl> + / / XXX : might be incomplete <nl> supported_preview_extensions < < " AVI " < < " DIVX " < < " MPG " < < " MPEG " < < " MP3 " < < " OGG " < < " WMV " < < " WMA " < < " RMV " < < " RMVB " < < " ASF " < < " MOV " < < " WAV " < < " MP2 " < < " SWF " < < " AC3 " ; <nl> / / Creating bittorrent session <nl> s = new session ( fingerprint ( " qB " , VERSION_MAJOR , VERSION_MINOR , VERSION_BUGFIX , 0 ) ) ; <nl> bittorrent : : bittorrent ( ) { <nl> timerAlerts = new QTimer ( this ) ; <nl> connect ( timerAlerts , SIGNAL ( timeout ( ) ) , this , SLOT ( readAlerts ( ) ) ) ; <nl> timerAlerts - > start ( 3000 ) ; <nl> + / / To download from urls <nl> + downloader = new downloadThread ( this ) ; <nl> + connect ( downloader , SIGNAL ( downloadFinished ( const QString & , const QString & , int , const QString & ) ) , this , SLOT ( processDownloadedFile ( const QString & , const QString & , int , const QString & ) ) ) ; <nl> + } <nl> + <nl> + void bittorrent : : resumeUnfinishedTorrents ( ) { <nl> + / / Resume unfinished torrents <nl> + resumeUnfinished ( ) ; <nl> } <nl> <nl> / / Main destructor <nl> bittorrent : : ~ bittorrent ( ) { <nl> disableDirectoryScanning ( ) ; <nl> delete timerAlerts ; <nl> + delete downloader ; <nl> delete s ; <nl> } <nl> <nl> torrent_handle bittorrent : : getTorrentHandle ( const QString & hash ) const { <nl> return s - > find_torrent ( misc : : fromString < sha1_hash > ( ( hash . toStdString ( ) ) ) ) ; <nl> } <nl> <nl> + / / Return true if the torrent corresponding to the <nl> + / / hash is paused <nl> + bool bittorrent : : isPaused ( const QString & hash ) const { <nl> + torrent_handle h = s - > find_torrent ( misc : : fromString < sha1_hash > ( ( hash . toStdString ( ) ) ) ) ; <nl> + if ( ! h . is_valid ( ) ) { <nl> + return true ; <nl> + } <nl> + return h . is_paused ( ) ; <nl> + } <nl> + <nl> / / Delete a torrent from the session , given its hash <nl> / / permanent = true means that the torrent will be removed from the hard - drive too <nl> void bittorrent : : deleteTorrent ( const QString & hash , bool permanent ) { <nl> void bittorrent : : pauseTorrent ( const QString & hash ) { <nl> torrent_handle h = s - > find_torrent ( misc : : fromString < sha1_hash > ( ( hash . toStdString ( ) ) ) ) ; <nl> if ( h . is_valid ( ) & & ! h . is_paused ( ) ) { <nl> h . pause ( ) ; <nl> + / / Create . paused file <nl> + QFile paused_file ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ; <nl> + paused_file . open ( QIODevice : : WriteOnly | QIODevice : : Text ) ; <nl> + paused_file . close ( ) ; <nl> } <nl> } <nl> <nl> void bittorrent : : resumeTorrent ( const QString & hash ) { <nl> torrent_handle h = s - > find_torrent ( misc : : fromString < sha1_hash > ( ( hash . toStdString ( ) ) ) ) ; <nl> if ( h . is_valid ( ) & & h . 
is_paused ( ) ) { <nl> h . resume ( ) ; <nl> + / / Delete . paused file <nl> + QFile : : remove ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ; <nl> } <nl> } <nl> <nl> void bittorrent : : addTorrent ( const QString & path , bool fromScanDir , const QString <nl> } <nl> } <nl> <nl> + / / Set the maximum number of opened connections <nl> + void bittorrent : : setMaxConnections ( int maxConnec ) { <nl> + s - > set_max_connections ( maxConnec ) ; <nl> + } <nl> + <nl> / / Check in . pieces file if the user filtered files <nl> / / in this torrent . <nl> bool bittorrent : : hasFilteredFiles ( const QString & fileHash ) const { <nl> void bittorrent : : scanDirectory ( ) { <nl> foreach ( file , files ) { <nl> QString fullPath = dir . path ( ) + QDir : : separator ( ) + file ; <nl> if ( fullPath . endsWith ( " . torrent " ) ) { <nl> - to_add < < fullPath ; <nl> + QFile : : rename ( fullPath , fullPath + QString ( " . old " ) ) ; <nl> + to_add < < fullPath + QString ( " . old " ) ; <nl> } <nl> } <nl> - foreach ( file , to_add ) { <nl> - / / TODO : Support torrent addition dialog <nl> - addTorrent ( file , true ) ; <nl> - } <nl> + emit scanDirFoundTorrents ( to_add ) ; <nl> } <nl> } <nl> <nl> + void bittorrent : : setDefaultSavePath ( const QString & savepath ) { <nl> + defaultSavePath = savepath ; <nl> + } <nl> + <nl> / / Enable directory scanning <nl> void bittorrent : : enableDirectoryScanning ( const QString & _scan_dir ) { <nl> if ( ! _scan_dir . isEmpty ( ) ) { <nl> void bittorrent : : readAlerts ( ) { <nl> emit finishedTorrent ( p - > handle ) ; <nl> } <nl> else if ( file_error_alert * p = dynamic_cast < file_error_alert * > ( a . get ( ) ) ) { <nl> - emit fullDiskError ( QString ( p - > handle . get_torrent_info ( ) . name ( ) . c_str ( ) ) ) ; <nl> + emit fullDiskError ( p - > handle ) ; <nl> } <nl> else if ( dynamic_cast < listen_failed_alert * > ( a . get ( ) ) ) { <nl> / / Level : fatal <nl> void bittorrent : : readAlerts ( ) { <nl> } <nl> } <nl> <nl> + void bittorrent : : reloadTorrent ( const torrent_handle & h , bool compact_mode ) { <nl> + QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> + fs : : path saveDir = h . save_path ( ) ; <nl> + QString fileName = QString ( h . name ( ) . c_str ( ) ) ; <nl> + QString fileHash = QString ( misc : : toString ( h . info_hash ( ) ) . c_str ( ) ) ; <nl> + qDebug ( " Reloading torrent : % s " , ( const char * ) fileName . toUtf8 ( ) ) ; <nl> + torrent_handle new_h ; <nl> + entry resumeData ; <nl> + torrent_info t = h . get_torrent_info ( ) ; <nl> + / / Checking if torrentBackup Dir exists <nl> + / / create it if it is not <nl> + if ( ! torrentBackup . exists ( ) ) { <nl> + torrentBackup . mkpath ( torrentBackup . path ( ) ) ; <nl> + } <nl> + / / Write fast resume data <nl> + / / Pause download ( needed before fast resume writing ) <nl> + h . pause ( ) ; <nl> + / / Extracting resume data <nl> + if ( h . has_metadata ( ) ) { <nl> + / / get fast resume data <nl> + resumeData = h . write_resume_data ( ) ; <nl> + } <nl> + / / Remove torrent <nl> + s - > remove_torrent ( h ) ; <nl> + / / Add torrent again to session <nl> + unsigned short timeout = 0 ; <nl> + while ( h . is_valid ( ) & & timeout < 6 ) { <nl> + SleeperThread : : msleep ( 1000 ) ; <nl> + + + timeout ; <nl> + } <nl> + if ( h . 
is_valid ( ) ) { <nl> + std : : cerr < < " Error : Couldn ' t reload the torrent \ n " ; <nl> + return ; <nl> + } <nl> + new_h = s - > add_torrent ( t , saveDir , resumeData , compact_mode ) ; <nl> + if ( compact_mode ) { <nl> + qDebug ( " Using compact allocation mode " ) ; <nl> + } else { <nl> + qDebug ( " Using full allocation mode " ) ; <nl> + } <nl> + <nl> + / / new_h . set_max_connections ( 60 ) ; <nl> + new_h . set_max_uploads ( - 1 ) ; <nl> + / / Load filtered Files <nl> + loadFilteredFiles ( new_h ) ; <nl> + <nl> + / / Pause torrent if it was paused last time <nl> + if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . paused " ) ) { <nl> + new_h . pause ( ) ; <nl> + } <nl> + / / Incremental download <nl> + if ( QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + fileHash + " . incremental " ) ) { <nl> + qDebug ( " Incremental download enabled for % s " , ( const char * ) fileName . toUtf8 ( ) ) ; <nl> + new_h . set_sequenced_download_threshold ( 15 ) ; <nl> + } <nl> + } <nl> + <nl> + int bittorrent : : getListenPort ( ) const { <nl> + return s - > listen_port ( ) ; <nl> + } <nl> + <nl> + session_status bittorrent : : getSessionStatus ( ) const { <nl> + return s - > status ( ) ; <nl> + } <nl> + <nl> QString bittorrent : : getSavePath ( const QString & hash ) { <nl> QFile savepath_file ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . savepath " ) ; <nl> QByteArray line ; <nl> QString bittorrent : : getSavePath ( const QString & hash ) { <nl> qDebug ( " Save path : % s " , line . data ( ) ) ; <nl> savePath = QString : : fromUtf8 ( line . data ( ) ) ; <nl> } else { <nl> - / / TODO : always create . savepath file <nl> - / / savePath = options - > getSavePath ( ) ; <nl> + / / use default save path <nl> + savePath = defaultSavePath ; <nl> } <nl> / / Checking if savePath Dir exists <nl> / / create it if it is not <nl> QString bittorrent : : getSavePath ( const QString & hash ) { <nl> if ( ! saveDir . exists ( ) ) { <nl> if ( ! saveDir . mkpath ( saveDir . path ( ) ) ) { <nl> std : : cerr < < " Couldn ' t create the save directory : " < < ( const char * ) saveDir . path ( ) . toUtf8 ( ) < < " \ n " ; <nl> - / / TODO : handle this better <nl> + / / XXX : handle this better <nl> return QDir : : homePath ( ) ; <nl> } <nl> } <nl> return savePath ; <nl> } <nl> + <nl> + / / Take an url string to a torrent file , <nl> + / / download the torrent file to a tmp location , then <nl> + / / add it to download list <nl> + void bittorrent : : downloadFromUrl ( const QString & url ) { <nl> + / / Launch downloader thread <nl> + downloader - > downloadUrl ( url ) ; <nl> + } <nl> + <nl> + / / Add to bittorrent session the downloaded torrent file <nl> + void bittorrent : : processDownloadedFile ( const QString & url , const QString & file_path , int return_code , const QString & errorBuffer ) { <nl> + if ( return_code ) { <nl> + / / Download failed <nl> + emit downloadFromUrlFailure ( url , errorBuffer ) ; <nl> + QFile : : remove ( file_path ) ; <nl> + return ; <nl> + } <nl> + / / Add file to torrent download list <nl> + emit newDownloadedTorrent ( file_path , url ) ; <nl> + } <nl> + <nl> + void bittorrent : : downloadFromURLList ( const QStringList & url_list ) { <nl> + QString url ; <nl> + foreach ( url , url_list ) { <nl> + downloadFromUrl ( url ) ; <nl> + } <nl> + } <nl> + <nl> + / / Return current download rate for the BT <nl> + / / session . 
Payload means that it only take into <nl> + / / account " useful " part of the rate <nl> + float bittorrent : : getPayloadDownloadRate ( ) const { <nl> + session_status sessionStatus = s - > status ( ) ; <nl> + return sessionStatus . payload_download_rate ; <nl> + } <nl> + <nl> + / / Return current upload rate for the BT <nl> + / / session . Payload means that it only take into <nl> + / / account " useful " part of the rate <nl> + float bittorrent : : getPayloadUploadRate ( ) const { <nl> + session_status sessionStatus = s - > status ( ) ; <nl> + return sessionStatus . payload_upload_rate ; <nl> + } <nl> + <nl> + / / Return a vector with all torrent handles in it <nl> + std : : vector < torrent_handle > bittorrent : : getTorrentHandles ( ) const { <nl> + return s - > get_torrents ( ) ; <nl> + } <nl> + <nl> + / / Return a vector with all finished torrent handles in it <nl> + QList < torrent_handle > bittorrent : : getFinishedTorrentHandles ( ) const { <nl> + QList < torrent_handle > finished ; <nl> + std : : vector < torrent_handle > handles ; <nl> + for ( unsigned int i = 0 ; i < handles . size ( ) ; + + i ) { <nl> + torrent_handle h = handles [ i ] ; <nl> + if ( h . is_seed ( ) ) { <nl> + finished < < h ; <nl> + } <nl> + } <nl> + return finished ; <nl> + } <nl> + <nl> + / / Save DHT entry to hard drive <nl> + void bittorrent : : saveDHTEntry ( ) { <nl> + / / Save DHT entry <nl> + if ( DHTEnabled ) { <nl> + try { <nl> + entry dht_state = s - > dht_state ( ) ; <nl> + boost : : filesystem : : ofstream out ( ( const char * ) ( misc : : qBittorrentPath ( ) + QString ( " dht_state " ) ) . toUtf8 ( ) , std : : ios_base : : binary ) ; <nl> + out . unsetf ( std : : ios_base : : skipws ) ; <nl> + bencode ( std : : ostream_iterator < char > ( out ) , dht_state ) ; <nl> + } catch ( std : : exception & e ) { <nl> + std : : cerr < < e . what ( ) < < " \ n " ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / Will fast resume unfinished torrents in <nl> + / / backup directory <nl> + void bittorrent : : resumeUnfinished ( ) { <nl> + qDebug ( " Resuming unfinished torrents " ) ; <nl> + QDir torrentBackup ( misc : : qBittorrentPath ( ) + " BT_backup " ) ; <nl> + QStringList fileNames , filePaths ; <nl> + / / Scan torrentBackup directory <nl> + fileNames = torrentBackup . entryList ( ) ; <nl> + QString fileName ; <nl> + foreach ( fileName , fileNames ) { <nl> + if ( fileName . endsWith ( " . torrent " ) ) { <nl> + filePaths . append ( torrentBackup . path ( ) + QDir : : separator ( ) + fileName ) ; <nl> + } <nl> + } <nl> + / / Resume downloads <nl> + foreach ( fileName , filePaths ) { <nl> + addTorrent ( fileName ) ; <nl> + } <nl> + qDebug ( " Unfinished torrents resumed " ) ; <nl> + } <nl> mmm a / src / bittorrent . h <nl> ppp b / src / bittorrent . 
h <nl> <nl> using namespace libtorrent ; <nl> namespace fs = boost : : filesystem ; <nl> <nl> + class downloadThread ; <nl> + <nl> class bittorrent : public QObject { <nl> Q_OBJECT <nl> <nl> class bittorrent : public QObject { <nl> QString scan_dir ; <nl> QTimer * timerScan ; <nl> QTimer * timerAlerts ; <nl> - QWidget * parent ; <nl> + downloadThread * downloader ; <nl> QStringList supported_preview_extensions ; <nl> - <nl> - / / Constructor / Destructor <nl> - bittorrent ( ) ; <nl> - ~ bittorrent ( ) ; <nl> + QString defaultSavePath ; <nl> <nl> protected : <nl> QString getSavePath ( const QString & hash ) ; <nl> <nl> public : <nl> + / / Constructor / Destructor <nl> + bittorrent ( ) ; <nl> + ~ bittorrent ( ) ; <nl> torrent_handle getTorrentHandle ( const QString & hash ) const ; <nl> + std : : vector < torrent_handle > getTorrentHandles ( ) const ; <nl> + bool isPaused ( const QString & hash ) const ; <nl> bool hasFilteredFiles ( const QString & fileHash ) const ; <nl> bool isFilePreviewPossible ( const QString & fileHash ) const ; <nl> bool isDHTEnabled ( ) const ; <nl> + float getPayloadDownloadRate ( ) const ; <nl> + float getPayloadUploadRate ( ) const ; <nl> + QList < torrent_handle > getFinishedTorrentHandles ( ) const ; <nl> + session_status getSessionStatus ( ) const ; <nl> + int getListenPort ( ) const ; <nl> <nl> public slots : <nl> void addTorrent ( const QString & path , bool fromScanDir = false , const QString & from_url = QString ( ) ) ; <nl> + void downloadFromUrl ( const QString & url ) ; <nl> + void downloadFromURLList ( const QStringList & url_list ) ; <nl> void deleteTorrent ( const QString & hash , bool permanent = false ) ; <nl> void pauseTorrent ( const QString & hash ) ; <nl> void resumeTorrent ( const QString & hash ) ; <nl> void enableDHT ( ) ; <nl> void disableDHT ( ) ; <nl> + void saveDHTEntry ( ) ; <nl> void saveFastResumeData ( ) ; <nl> void enableDirectoryScanning ( const QString & scan_dir ) ; <nl> void disableDirectoryScanning ( ) ; <nl> void enablePeerExchange ( ) ; <nl> void enableIPFilter ( ip_filter filter ) ; <nl> void disableIPFilter ( ) ; <nl> + void reloadTorrent ( const torrent_handle & h , bool compact_mode = true ) ; <nl> + void resumeUnfinishedTorrents ( ) ; <nl> / / Session configuration - Setters <nl> void setListeningPortsRange ( std : : pair < unsigned short , unsigned short > ports ) ; <nl> + void setMaxConnections ( int maxConnec ) ; <nl> void setDownloadRateLimit ( int rate ) ; <nl> void setUploadRateLimit ( int rate ) ; <nl> void setGlobalRatio ( float ratio ) ; <nl> void setDHTPort ( int dht_port ) ; <nl> void setSessionSettings ( session_settings sessionSettings ) ; <nl> + void setDefaultSavePath ( const QString & savepath ) ; <nl> <nl> protected slots : <nl> void cleanDeleter ( deleteThread * deleter ) ; <nl> void loadFilteredFiles ( torrent_handle & h ) ; <nl> void scanDirectory ( ) ; <nl> void readAlerts ( ) ; <nl> + void processDownloadedFile ( const QString & , const QString & , int , const QString & ) ; <nl> + void resumeUnfinished ( ) ; <nl> <nl> signals : <nl> void invalidTorrent ( const QString & path ) ; <nl> void duplicateTorrent ( const QString & path ) ; <nl> void addedTorrent ( const QString & path , torrent_handle & h , bool fastResume ) ; <nl> - void resumedTorrent ( const QString & path ) ; <nl> void finishedTorrent ( torrent_handle & h ) ; <nl> - void fullDiskError ( const QString & fileName ) ; <nl> + void fullDiskError ( torrent_handle & h ) ; <nl> void trackerError ( const QString & hash , const QString & time , 
const QString & msg ) ; <nl> void portListeningFailure ( ) ; <nl> void trackerAuthenticationRequired ( torrent_handle & h ) ; <nl> + void downloadFromUrlFailure ( const QString & url , const QString & error ) ; <nl> + void scanDirFoundTorrents ( const QStringList & pathList ) ; <nl> + void newDownloadedTorrent ( const QString & path , const QString & url ) ; <nl> } ; <nl> <nl> # endif <nl> mmm a / src / downloadThread . h <nl> ppp b / src / downloadThread . h <nl> class downloadThread : public QThread { <nl> QWaitCondition condition ; <nl> <nl> signals : <nl> - void downloadFinished ( QString url , QString file_path , int return_code , QString errorBuffer ) ; <nl> + void downloadFinished ( const QString & url , const QString & file_path , int return_code , const QString & errorBuffer ) ; <nl> <nl> public : <nl> downloadThread ( QObject * parent ) : QThread ( parent ) { } <nl> class downloadThread : public QThread { <nl> qDebug ( " In Download thread RUN , mutex unlocked ( no urls ) - > stopping " ) ; <nl> break ; <nl> } <nl> + SleeperThread : : msleep ( 500 ) ; <nl> } <nl> } <nl> } ; <nl> mmm a / src / options_imp . cpp <nl> ppp b / src / options_imp . cpp <nl> void options_imp : : loadOptions ( ) { <nl> strValue = settings . value ( " ScanDir " , QString ( ) ) . toString ( ) ; <nl> if ( ! strValue . isEmpty ( ) ) { <nl> enableScan_checkBox - > setChecked ( true ) ; <nl> + lbl_scanDir - > setEnabled ( true ) ; <nl> scanDir - > setEnabled ( true ) ; <nl> + browse_button_scan - > setEnabled ( true ) ; <nl> scanDir - > setText ( strValue ) ; <nl> } else { <nl> enableScan_checkBox - > setChecked ( false ) ; <nl> + lbl_scanDir - > setEnabled ( false ) ; <nl> + browse_button_scan - > setEnabled ( false ) ; <nl> scanDir - > setEnabled ( false ) ; <nl> } <nl> / / End Main options <nl> mmm a / src / properties_imp . cpp <nl> ppp b / src / properties_imp . cpp <nl> properties : : properties ( QWidget * parent , torrent_handle h , QStringList trackerErr <nl> } <nl> <nl> properties : : ~ properties ( ) { <nl> + qDebug ( " Properties destroyed " ) ; <nl> delete updateProgressTimer ; <nl> delete PropDelegate ; <nl> delete PropListModel ; <nl> mmm a / src / torrentAddition . h <nl> ppp b / src / torrentAddition . h <nl> class torrentAdditionDialog : public QDialog , private Ui_addTorrentDialog { <nl> / / Setting file name <nl> fileName = QString ( t . name ( ) . c_str ( ) ) ; <nl> fileHash = QString ( misc : : toString ( t . info_hash ( ) ) . c_str ( ) ) ; <nl> - fileNameLbl - > setText ( " < center > < b > " + fileName + " < / b > < / center > " ) ; <nl> + / / Use left ( ) to remove . old extension <nl> + QString newFileName ; <nl> + if ( fileName . endsWith ( " . old " ) ) { <nl> + newFileName = fileName . left ( fileName . size ( ) - 4 ) ; <nl> + } else { <nl> + newFileName = fileName ; <nl> + } <nl> + fileNameLbl - > setText ( " < center > < b > " + newFileName + " < / b > < / center > " ) ; <nl> / / List files in torrent <nl> for ( int i = 0 ; i < t . num_files ( ) ; + + i ) { <nl> QStringList line ; <nl> | * * BIG COMMIT * * | qbittorrent/qBittorrent | bd3bde919d282967b26a8c47e51156e11622a890 | 2007-03-07T22:36:01Z |
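The qBittorrent "** BIG COMMIT **" row above moves the libtorrent session handling out of the `GUI` class into a separate `bittorrent` object (`BTSession`) and wires the two together through Qt signals and slots (`addedTorrent`, `fullDiskError`, `scanDirFoundTorrents`, and so on), so the GUI only reacts to notifications instead of driving the session directly. As a rough, Qt-free illustration of that separation, the sketch below mimics the pattern in plain standard C++ with `std::function` callbacks in place of Qt's signal/slot machinery; every class and callback name here is invented for the example and is not part of the qBittorrent code recorded in the diff.

```cpp
// Simplified, Qt-free sketch of the backend/GUI split from the commit above:
// the session owner exposes operations plus notification hooks, and the GUI
// only reacts to those notifications. All names are illustrative.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

class TorrentBackend {
public:
    // The GUI subscribes to events instead of poking the session directly.
    void onTorrentAdded(std::function<void(const std::string&)> cb) {
        addedCallbacks_.push_back(std::move(cb));
    }

    void addTorrent(const std::string& path) {
        torrents_.push_back(path);          // stand-in for session::add_torrent()
        for (const auto& cb : addedCallbacks_)
            cb(path);                       // stand-in for emitting addedTorrent(...)
    }

private:
    std::vector<std::string> torrents_;
    std::vector<std::function<void(const std::string&)>> addedCallbacks_;
};

class MainWindowLike {
public:
    explicit MainWindowLike(TorrentBackend& backend) {
        // Analogue of connect(&BTSession, SIGNAL(addedTorrent(...)), this, SLOT(...))
        backend.onTorrentAdded([](const std::string& path) {
            std::cout << "'" << path << "' added to download list.\n";
        });
    }
};

int main() {
    TorrentBackend session;
    MainWindowLike window(session);
    session.addTorrent("example.torrent");  // GUI is notified via the callback
}
```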
mmm a / src / base / bittorrent / session . cpp <nl> ppp b / src / base / bittorrent / session . cpp <nl> void Session : : handleStateUpdateAlert ( const lt : : state_update_alert * p ) <nl> updatedTorrents . push_back ( torrent ) ; <nl> } <nl> <nl> - emit torrentsUpdated ( updatedTorrents ) ; <nl> + if ( ! updatedTorrents . isEmpty ( ) ) <nl> + emit torrentsUpdated ( updatedTorrents ) ; <nl> } <nl> <nl> namespace <nl> mmm a / src / gui / mainwindow . cpp <nl> ppp b / src / gui / mainwindow . cpp <nl> MainWindow : : MainWindow ( QWidget * parent ) <nl> / / Configure BT session according to options <nl> loadPreferences ( false ) ; <nl> <nl> - connect ( BitTorrent : : Session : : instance ( ) , & BitTorrent : : Session : : torrentsUpdated , this , & MainWindow : : updateGUI ) ; <nl> + connect ( BitTorrent : : Session : : instance ( ) , & BitTorrent : : Session : : statsUpdated , this , & MainWindow : : reloadSessionStats ) ; <nl> + connect ( BitTorrent : : Session : : instance ( ) , & BitTorrent : : Session : : torrentsUpdated , this , & MainWindow : : reloadTorrentStats ) ; <nl> <nl> / / Accept drag ' n drops <nl> setAcceptDrops ( true ) ; <nl> void MainWindow : : loadPreferences ( bool configureSession ) <nl> qDebug ( " GUI settings loaded " ) ; <nl> } <nl> <nl> - / / Check connection status and display right icon <nl> - void MainWindow : : updateGUI ( ) <nl> + void MainWindow : : reloadSessionStats ( ) <nl> { <nl> - if ( currentTabWidget ( ) = = m_transferListWidget ) <nl> - m_propertiesWidget - > loadDynamicData ( ) ; <nl> - <nl> const BitTorrent : : SessionStatus & status = BitTorrent : : Session : : instance ( ) - > status ( ) ; <nl> <nl> / / update global information <nl> - # ifndef Q_OS_MACOS <nl> + # ifdef Q_OS_MACOS <nl> + if ( status . payloadDownloadRate > 0 ) { <nl> + QtMac : : setBadgeLabelText ( tr ( " % 1 / s " , " s is a shorthand for seconds " ) <nl> + . arg ( Utils : : Misc : : friendlyUnit ( status . payloadDownloadRate ) ) ) ; <nl> + } <nl> + else if ( ! QtMac : : badgeLabelText ( ) . isEmpty ( ) ) { <nl> + QtMac : : setBadgeLabelText ( " " ) ; <nl> + } <nl> + # else <nl> if ( m_systrayIcon ) { <nl> # ifdef Q_OS_UNIX <nl> - const QString html = QString ( QLatin1String ( <nl> + const QString toolTip = QString ( QLatin1String ( <nl> " < div style = ' background - color : # 678db2 ; color : # fff ; height : 18px ; font - weight : bold ; margin - bottom : 5px ; ' > " <nl> " qBittorrent " <nl> " < / div > " <nl> void MainWindow : : updateGUI ( ) <nl> , tr ( " UP speed : % 1 " , " e . g : Upload speed : 10 KiB / s " ) . arg ( Utils : : Misc : : friendlyUnit ( status . payloadUploadRate , true ) ) ) ; <nl> # else <nl> / / OSes such as Windows do not support html here <nl> - const QString html = QString ( " % 1 \ n % 2 " ) . arg ( <nl> + const QString toolTip = QString ( " % 1 \ n % 2 " ) . arg ( <nl> tr ( " DL speed : % 1 " , " e . g : Download speed : 10 KiB / s " ) . arg ( Utils : : Misc : : friendlyUnit ( status . payloadDownloadRate , true ) ) <nl> , tr ( " UP speed : % 1 " , " e . g : Upload speed : 10 KiB / s " ) . arg ( Utils : : Misc : : friendlyUnit ( status . payloadUploadRate , true ) ) ) ; <nl> # endif / / Q_OS_UNIX <nl> - m_systrayIcon - > setToolTip ( html ) ; / / tray icon <nl> + m_systrayIcon - > setToolTip ( toolTip ) ; / / tray icon <nl> } <nl> - # else <nl> - if ( status . payloadDownloadRate > 0 ) <nl> - QtMac : : setBadgeLabelText ( tr ( " % 1 / s " , " s is a shorthand for seconds " ) <nl> - . arg ( Utils : : Misc : : friendlyUnit ( status . 
payloadDownloadRate ) ) ) ; <nl> - else if ( ! QtMac : : badgeLabelText ( ) . isEmpty ( ) ) <nl> - QtMac : : setBadgeLabelText ( " " ) ; <nl> - # endif / / Q_OS_MACOS <nl> + # endif / / Q_OS_MACOS <nl> <nl> if ( m_displaySpeedInTitle ) { <nl> setWindowTitle ( tr ( " [ D : % 1 , U : % 2 ] qBittorrent % 3 " , " D = Download ; U = Upload ; % 3 is qBittorrent version " ) <nl> void MainWindow : : updateGUI ( ) <nl> } <nl> } <nl> <nl> + void MainWindow : : reloadTorrentStats ( const QVector < BitTorrent : : TorrentHandle * > & torrents ) <nl> + { <nl> + if ( currentTabWidget ( ) = = m_transferListWidget ) { <nl> + if ( torrents . contains ( m_propertiesWidget - > getCurrentTorrent ( ) ) ) <nl> + m_propertiesWidget - > loadDynamicData ( ) ; <nl> + } <nl> + } <nl> + <nl> void MainWindow : : showNotificationBaloon ( const QString & title , const QString & msg ) const <nl> { <nl> if ( ! isNotificationsEnabled ( ) ) return ; <nl> void MainWindow : : on_actionSpeedInTitleBar_triggered ( ) <nl> m_displaySpeedInTitle = static_cast < QAction * > ( sender ( ) ) - > isChecked ( ) ; <nl> Preferences : : instance ( ) - > showSpeedInTitleBar ( m_displaySpeedInTitle ) ; <nl> if ( m_displaySpeedInTitle ) <nl> - updateGUI ( ) ; <nl> + reloadSessionStats ( ) ; <nl> else <nl> setWindowTitle ( " qBittorrent " QBT_VERSION ) ; <nl> } <nl> mmm a / src / gui / mainwindow . h <nl> ppp b / src / gui / mainwindow . h <nl> private slots : <nl> void displayRSSTab ( ) ; <nl> void displayExecutionLogTab ( ) ; <nl> void focusSearchFilter ( ) ; <nl> - void updateGUI ( ) ; <nl> + void reloadSessionStats ( ) ; <nl> + void reloadTorrentStats ( const QVector < BitTorrent : : TorrentHandle * > & torrents ) ; <nl> void loadPreferences ( bool configureSession = true ) ; <nl> void addTorrentFailed ( const QString & error ) const ; <nl> void torrentNew ( BitTorrent : : TorrentHandle * const torrent ) const ; <nl> | Merge pull request from Chocobo1 / signals | qbittorrent/qBittorrent | 7a3607c729df7c3017251cde58933ffb22bebd55 | 2019-09-25T01:53:39Z |
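The mainwindow/session refactor in the diff above splits the single `updateGUI()` slot into `reloadSessionStats()` and `reloadTorrentStats()`, and only emits `torrentsUpdated` when the updated list is non-empty. A reduced sketch of that emit guard and the membership check on the receiving side; `Session`, `PropertiesPanel`, and the integer torrent ids are placeholders, not qBittorrent's actual API.

```cpp
// Sketch of the guarded-emit pattern from the diff above. As usual for Qt
// classes that declare signals, this relies on moc. All names are invented.
#include <QObject>
#include <QVector>

class Session : public QObject
{
    Q_OBJECT

signals:
    void torrentsUpdated(const QVector<int> &updatedIds);

public:
    void handleStateUpdate(const QVector<int> &updatedIds)
    {
        // Emit only when something actually changed, so connected slots are
        // not woken up for an empty update.
        if (!updatedIds.isEmpty())
            emit torrentsUpdated(updatedIds);
    }
};

class PropertiesPanel : public QObject
{
    Q_OBJECT

public slots:
    void reloadTorrentStats(const QVector<int> &updatedIds)
    {
        // Refresh only if the torrent currently on screen is in the update.
        if (updatedIds.contains(m_currentTorrentId))
            loadDynamicData();
    }

private:
    void loadDynamicData() { /* refresh the dynamic widgets */ }
    int m_currentTorrentId = -1;
};
```

Separating the two slots lets the session-wide statistics refresh run on every tick while the per-torrent reload stays conditional, which is the same split the mainwindow.cpp hunk introduces.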
mmm a / buildscripts / resmokeconfig / suites / sharding_multiversion . yml <nl> ppp b / buildscripts / resmokeconfig / suites / sharding_multiversion . yml <nl> selector : <nl> exclude_with_any_tags : <nl> - multiversion_incompatible <nl> - requires_fcv_44 <nl> - exclude_files : <nl> - - jstests / sharding / updates_to_rangedeletions_collection_trigger_range_deletions . js <nl> - - jstests / sharding / resubmit_rangedeletions_on_stepup . js <nl> - - jstests / sharding / migration_fails_if_exists_in_rangedeletions . js <nl> executor : <nl> config : <nl> shell_options : <nl> mmm a / jstests / sharding / migration_fails_if_exists_in_rangedeletions . js <nl> ppp b / jstests / sharding / migration_fails_if_exists_in_rangedeletions . js <nl> <nl> + / * <nl> + * Ensures that error is reported when overlappping range is submitted for deletion . <nl> + * @ tags : [ multiversion_incompatible ] <nl> + * / <nl> + <nl> ( function ( ) { <nl> " use strict " ; <nl> <nl> mmm a / jstests / sharding / resubmit_rangedeletions_on_stepup . js <nl> ppp b / jstests / sharding / resubmit_rangedeletions_on_stepup . js <nl> <nl> / * * <nl> * Ensure that orphaned documents are submitted for deletion on step up . <nl> + * @ tags : [ multiversion_incompatible ] <nl> * / <nl> <nl> ( function ( ) { <nl> mmm a / jstests / sharding / updates_to_rangedeletions_collection_trigger_range_deletions . js <nl> ppp b / jstests / sharding / updates_to_rangedeletions_collection_trigger_range_deletions . js <nl> <nl> / * * <nl> * Ensure that orphaned documents are deleted when the pending = true field is removed from the <nl> * config . rangeDeletions collection . <nl> - * <nl> + * @ tags : [ multiversion_incompatible ] <nl> * / <nl> <nl> ( function ( ) { <nl> | SERVER - 44663 Blacklist range deletion tests from the sharding_multiversion suite | mongodb/mongo | 1a6d8082a6f70f50f28318aadce719eae73669b2 | 2019-11-18T16:46:38Z |
similarity index 100 % <nl> rename from jstests / sharding / sharding_state_after_reconfig . js <nl> rename to jstests / sharding / sharding_state_after_stepdown . js <nl> | SERVER - 15535 Rename sharding_state_after_reconfig . js to sharding_state_after_stepdown . js | mongodb/mongo | 37bd6f5f6886ca07d798f199a856db7894434a4c | 2014-10-14T23:06:50Z |
mmm a / modules / gdscript / gd_parser . cpp <nl> ppp b / modules / gdscript / gd_parser . cpp <nl> GDParser : : PatternNode * GDParser : : _parse_pattern ( bool p_static ) { <nl> / / all the constants like strings and numbers <nl> default : { <nl> Node * value = _parse_and_reduce_expression ( pattern , p_static ) ; <nl> - if ( error_set ) { <nl> + if ( ! value ) { <nl> + _set_error ( " Expect constant expression or variables in a pattern " ) ; <nl> return NULL ; <nl> } <nl> <nl> | fix editor crash when missing variable in pattern match dispatch | godotengine/godot | 822af935e3878013bb2cb3b90f0342e52a4d1a52 | 2017-10-24T05:07:21Z |
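The gd_parser.cpp fix above replaces a check of the shared `error_set` flag with a null check on the node returned by `_parse_and_reduce_expression()`, which is what actually stops the editor crash when the expression is missing. A self-contained illustration of checking the returned pointer instead of a side-channel error flag; every name below is invented for the sketch.

```cpp
// Sketch: validate the parse result itself before using it, rather than
// trusting a separate error flag that may not have been set.
#include <cstdio>
#include <memory>

struct Expr { int value = 0; };

static std::unique_ptr<Expr> parse_expression(bool ok)
{
    if (!ok)
        return nullptr;  // can fail without touching any global error flag
    return std::make_unique<Expr>();
}

int main()
{
    auto value = parse_expression(false);
    if (!value) {  // check the result, not just the flag
        std::fprintf(stderr, "Expected constant expression or variable in a pattern\n");
        return 1;
    }
    std::printf("parsed: %d\n", value->value);
    return 0;
}
```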
mmm a / include / swift / AST / DiagnosticsFrontend . def <nl> ppp b / include / swift / AST / DiagnosticsFrontend . def <nl> ERROR ( error_implicit_output_file_is_directory , none , <nl> " the implicit output file ' % 0 ' is a directory ; explicitly specify a filename " <nl> " using - o " , ( StringRef ) ) <nl> <nl> + ERROR ( error_if_any_output_files_are_specified_they_all_must_be , none , <nl> + " if any output files are specified , they all must be " , ( ) ) <nl> + <nl> ERROR ( error_primary_file_not_found , none , <nl> " primary file ' % 0 ' was not found in file list ' % 1 ' " , <nl> ( StringRef , StringRef ) ) <nl> mmm a / include / swift / AST / IRGenOptions . h <nl> ppp b / include / swift / AST / IRGenOptions . h <nl> class IRGenOptions { <nl> <nl> / / / Gets the name of the specified output filename . <nl> / / / If multiple files are specified , the last one is returned . <nl> + / / / This function is used by ( at least ) <nl> + / / / lldb / source / Symbol / SwiftASTContext . cpp : 4603 <nl> + / / / FIXME : This function should go away in favor of <nl> + / / / Instance . getFrontendOptions ( ) . InputsAndOutputs . getSingleOutputFilename <nl> + / / / when batch mode handles all contingencies . <nl> StringRef getSingleOutputFilename ( ) const { <nl> if ( OutputFilenames . size ( ) > = 1 ) <nl> return OutputFilenames . back ( ) ; <nl> mmm a / include / swift / Frontend / ArgsToFrontendInputsConverter . h <nl> ppp b / include / swift / Frontend / ArgsToFrontendInputsConverter . h <nl> class ArgsToFrontendInputsConverter { <nl> std : : set < StringRef > <nl> createInputFilesConsumingPrimaries ( std : : set < StringRef > primaryFiles ) ; <nl> bool checkForMissingPrimaryFiles ( std : : set < StringRef > primaryFiles ) ; <nl> + <nl> + bool isSingleThreadedWMO ( ) const ; <nl> } ; <nl> <nl> } / / namespace swift <nl> mmm a / include / swift / Frontend / ArgsToFrontendOptionsConverter . h <nl> ppp b / include / swift / Frontend / ArgsToFrontendOptionsConverter . h <nl> class ArgsToFrontendOptionsConverter { <nl> void setUnsignedIntegerArgument ( options : : ID optionID , unsigned max , <nl> unsigned & valueToSet ) ; <nl> <nl> - FrontendOptions : : ActionType determineRequestedAction ( ) const ; <nl> - <nl> bool setUpForSILOrLLVM ( ) ; <nl> <nl> - / / / Determine the correct output filename when none was specified . <nl> - / / / <nl> - / / / Such an absence should only occur when invoking the frontend <nl> - / / / without the driver , <nl> - / / / because the driver will always pass - o with an appropriate filename <nl> - / / / if output is required for the requested action . <nl> - bool deriveOutputFilenameFromInputFile ( ) ; <nl> - <nl> - / / / Determine the correct output filename when a directory was specified . <nl> - / / / <nl> - / / / Such a specification should only occur when invoking the frontend <nl> - / / / directly , because the driver will always pass - o with an appropriate <nl> - / / / filename if output is required for the requested action . 
<nl> - bool deriveOutputFilenameForDirectory ( StringRef outputDir ) ; <nl> - <nl> - std : : string determineBaseNameOfOutput ( ) const ; <nl> - <nl> - void deriveOutputFilenameFromParts ( StringRef dir , StringRef base ) ; <nl> - <nl> void determineSupplementaryOutputFilenames ( ) ; <nl> <nl> / / / \ returns the output filenames on the command line or in the output <nl> class ArgsToFrontendOptionsConverter { <nl> : Diags ( Diags ) , Args ( Args ) , Opts ( Opts ) { } <nl> <nl> bool convert ( ) ; <nl> + <nl> + static FrontendOptions : : ActionType <nl> + determineRequestedAction ( const llvm : : opt : : ArgList & ) ; <nl> } ; <nl> <nl> } / / namespace swift <nl> new file mode 100644 <nl> index 000000000000 . . a23aeedde895 <nl> mmm / dev / null <nl> ppp b / include / swift / Frontend / ArgsToFrontendOutputsConverter . h <nl> <nl> + / / = = = mmm ArgsToFrontendOutputsConverter . h mmmmmmmmmmmmmmmmmmmmm - - * - C + + - * - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2018 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See https : / / swift . org / LICENSE . txt for license information <nl> + / / See https : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + # ifndef SWIFT_FRONTEND_ARGSTOFRONTENDOUTPUTSCONVERTER_H <nl> + # define SWIFT_FRONTEND_ARGSTOFRONTENDOUTPUTSCONVERTER_H <nl> + <nl> + # include " swift / AST / DiagnosticConsumer . h " <nl> + # include " swift / AST / DiagnosticEngine . h " <nl> + # include " swift / Basic / LLVM . h " <nl> + # include " swift / Frontend / FrontendOptions . h " <nl> + # include " swift / Option / Options . h " <nl> + # include " llvm / Option / ArgList . h " <nl> + <nl> + # include < vector > <nl> + <nl> + namespace swift { <nl> + <nl> + / / / Given the command line arguments and information about the inputs , <nl> + / / / Fill in all the information in FrontendInputsAndOutputs . <nl> + <nl> + class ArgsToFrontendOutputsConverter { <nl> + const llvm : : opt : : ArgList & Args ; <nl> + StringRef ModuleName ; <nl> + FrontendInputsAndOutputs & InputsAndOutputs ; <nl> + DiagnosticEngine & Diags ; <nl> + <nl> + public : <nl> + ArgsToFrontendOutputsConverter ( const llvm : : opt : : ArgList & args , <nl> + StringRef moduleName , <nl> + FrontendInputsAndOutputs & inputsAndOutputs , <nl> + DiagnosticEngine & diags ) <nl> + : Args ( args ) , ModuleName ( moduleName ) , InputsAndOutputs ( inputsAndOutputs ) , <nl> + Diags ( diags ) { } <nl> + <nl> + Optional < std : : vector < std : : string > > convert ( ) ; <nl> + <nl> + / / / \ returns ` None ` if it could not open the filelist . 
<nl> + static Optional < std : : vector < std : : string > > <nl> + readOutputFileList ( StringRef filelistPath , DiagnosticEngine & diags ) ; <nl> + } ; <nl> + <nl> + class OutputFilesComputer { <nl> + const llvm : : opt : : ArgList & Args ; <nl> + DiagnosticEngine & Diags ; <nl> + const FrontendInputsAndOutputs & InputsAndOutputs ; <nl> + const std : : vector < std : : string > OutputFileArguments ; <nl> + const std : : string OutputDirectoryArgument ; <nl> + const StringRef FirstInput ; <nl> + const FrontendOptions : : ActionType RequestedAction ; <nl> + const llvm : : opt : : Arg * const ModuleNameArg ; <nl> + const StringRef Suffix ; <nl> + const bool HasTextualOutput ; <nl> + <nl> + OutputFilesComputer ( const llvm : : opt : : ArgList & args , DiagnosticEngine & diags , <nl> + const FrontendInputsAndOutputs & inputsAndOutputs , <nl> + std : : vector < std : : string > outputFileArguments , <nl> + StringRef outputDirectoryArgument , StringRef firstInput , <nl> + FrontendOptions : : ActionType requestedAction , <nl> + const llvm : : opt : : Arg * moduleNameArg , StringRef suffix , <nl> + bool hasTextualOutput ) ; <nl> + <nl> + public : <nl> + static Optional < OutputFilesComputer > <nl> + create ( const llvm : : opt : : ArgList & args , DiagnosticEngine & diags , <nl> + const FrontendInputsAndOutputs & inputsAndOutputs ) ; <nl> + <nl> + / / / \ return the output filenames on the command line or in the output <nl> + / / / filelist . If there <nl> + / / / were neither - o ' s nor an output filelist , returns an empty vector . <nl> + static Optional < std : : vector < std : : string > > <nl> + getOutputFilenamesFromCommandLineOrFilelist ( const llvm : : opt : : ArgList & args , <nl> + DiagnosticEngine & diags ) ; <nl> + <nl> + Optional < std : : vector < std : : string > > computeOutputFiles ( ) const ; <nl> + <nl> + private : <nl> + Optional < std : : string > computeOutputFile ( StringRef outputArg , <nl> + const InputFile & input ) const ; <nl> + <nl> + / / / \ return the correct output filename when none was specified . <nl> + / / / <nl> + / / / Such an absence should only occur when invoking the frontend <nl> + / / / without the driver , <nl> + / / / because the driver will always pass - o with an appropriate filename <nl> + / / / if output is required for the requested action . <nl> + Optional < std : : string > deriveOutputFileFromInput ( const InputFile & input ) const ; <nl> + <nl> + / / / \ return the correct output filename when a directory was specified . <nl> + / / / <nl> + / / / Such a specification should only occur when invoking the frontend <nl> + / / / directly , because the driver will always pass - o with an appropriate <nl> + / / / filename if output is required for the requested action . <nl> + Optional < std : : string > <nl> + deriveOutputFileForDirectory ( const InputFile & input ) const ; <nl> + <nl> + std : : string determineBaseNameOfOutput ( const InputFile & input ) const ; <nl> + <nl> + std : : string deriveOutputFileFromParts ( StringRef dir , StringRef base ) const ; <nl> + } ; <nl> + <nl> + } / / namespace swift <nl> + <nl> + # endif / * SWIFT_FRONTEND_ARGSTOFRONTENDOUTPUTSCONVERTER_H * / <nl> mmm a / include / swift / Frontend / Frontend . h <nl> ppp b / include / swift / Frontend / Frontend . h <nl> class CompilerInvocation { <nl> <nl> <nl> StringRef getOutputFilename ( ) const { <nl> - return FrontendOpts . getSingleOutputFilename ( ) ; <nl> + return FrontendOpts . InputsAndOutputs . 
getSingleOutputFilename ( ) ; <nl> } <nl> <nl> void setCodeCompletionPoint ( llvm : : MemoryBuffer * Buf , unsigned Offset ) { <nl> mmm a / include / swift / Frontend / FrontendInputsAndOutputs . h <nl> ppp b / include / swift / Frontend / FrontendInputsAndOutputs . h <nl> class FrontendInputsAndOutputs { <nl> <nl> llvm : : StringMap < unsigned > PrimaryInputs ; <nl> <nl> + / / / In Single - threaded WMO mode , all inputs are used <nl> + / / / both for importing and compiling . <nl> + bool IsSingleThreadedWMO = false ; <nl> + <nl> + / / / Punt where needed to enable batch mode experiments . <nl> + bool AreBatchModeChecksBypassed = false ; <nl> + <nl> public : <nl> + bool areBatchModeChecksBypassed ( ) const { return AreBatchModeChecksBypassed ; } <nl> + void setBypassBatchModeChecks ( bool bbc ) { AreBatchModeChecksBypassed = bbc ; } <nl> + <nl> FrontendInputsAndOutputs ( ) = default ; <nl> FrontendInputsAndOutputs ( const FrontendInputsAndOutputs & other ) ; <nl> FrontendInputsAndOutputs & operator = ( const FrontendInputsAndOutputs & other ) ; <nl> <nl> + / / Whole - module - optimization ( WMO ) routines : <nl> + <nl> + / / SingleThreadedWMO produces only main output file . In contrast , <nl> + / / multi - threaded WMO produces one main output per input , as single - file and <nl> + / / batch - mode do for each primary . Both WMO modes produce only one set of <nl> + / / supplementary outputs . <nl> + <nl> + bool isSingleThreadedWMO ( ) const { return IsSingleThreadedWMO ; } <nl> + void setIsSingleThreadedWMO ( bool istw ) { IsSingleThreadedWMO = istw ; } <nl> + <nl> + bool isWholeModule ( ) const { return ! hasPrimaryInputs ( ) ; } <nl> + <nl> / / Readers : <nl> <nl> / / All inputs : <nl> class FrontendInputsAndOutputs { <nl> <nl> bool hasPrimaryInputs ( ) const { return primaryInputCount ( ) > 0 ; } <nl> <nl> - bool isWholeModule ( ) const { return ! hasPrimaryInputs ( ) ; } <nl> - <nl> / / / Fails an assertion if there is more than one primary input . <nl> / / / Used in situations where only one primary input can be handled <nl> / / / and where batch mode has not been implemented yet . <nl> void assertMustNotBeMoreThanOnePrimaryInput ( ) const ; <nl> <nl> + / / / Fails an assertion when there is more than one primary input unless <nl> + / / / the experimental - bypass - batch - mode - checks argument was passed to <nl> + / / / the front end . <nl> + / / / FIXME : When batch mode is complete , this function should be obsolete . <nl> + void <nl> + assertMustNotBeMoreThanOnePrimaryInputUnlessBatchModeChecksHaveBeenBypassed ( ) <nl> + const ; <nl> + <nl> / / Count - dependend readers : <nl> <nl> / / / \ return the unique primary input , if one exists . <nl> class FrontendInputsAndOutputs { <nl> / / / there isn ' t one . 
<nl> StringRef getNameOfUniquePrimaryInputFile ( ) const ; <nl> <nl> + / / / Combines all primaries for stats reporter <nl> + std : : string getStatsFileMangledInputName ( ) const ; <nl> + <nl> bool isInputPrimary ( StringRef file ) const ; <nl> <nl> unsigned numberOfPrimaryInputsEndingWith ( const char * extension ) const ; <nl> class FrontendInputsAndOutputs { <nl> void addInputFile ( StringRef file , llvm : : MemoryBuffer * buffer = nullptr ) ; <nl> void addPrimaryInputFile ( StringRef file , <nl> llvm : : MemoryBuffer * buffer = nullptr ) ; <nl> + <nl> + / / Outputs <nl> + <nl> + private : <nl> + friend class ArgsToFrontendOptionsConverter ; <nl> + <nl> + void setMainOutputs ( ArrayRef < std : : string > outputFiles ) ; <nl> + <nl> + public : <nl> + unsigned countOfInputsProducingMainOutputs ( ) const ; <nl> + <nl> + const InputFile & firstInputProducingOutput ( ) const ; <nl> + const InputFile & lastInputProducingOutput ( ) const ; <nl> + <nl> + / / / Under single - threaded WMO , we pretend that the first input <nl> + / / / generates the main output , even though it will include code <nl> + / / / generated from all of them . <nl> + void forEachInputProducingAMainOutputFile ( <nl> + llvm : : function_ref < void ( const InputFile & ) > fn ) const ; <nl> + <nl> + std : : vector < std : : string > copyOutputFilenames ( ) const ; <nl> + <nl> + void <nl> + forEachOutputFilename ( llvm : : function_ref < void ( const std : : string & ) > fn ) const ; <nl> + <nl> + / / / Gets the name of the specified output filename . <nl> + / / / If multiple files are specified , the last one is returned . <nl> + StringRef getSingleOutputFilename ( ) const ; <nl> + <nl> + bool isOutputFilenameStdout ( ) const ; <nl> + bool isOutputFileDirectory ( ) const ; <nl> + bool hasNamedOutputFile ( ) const ; <nl> + <nl> + / / Supplementary outputs <nl> + <nl> + void forEachInputProducingSupplementaryOutput ( <nl> + llvm : : function_ref < void ( const InputFile & ) > fn ) const ; <nl> } ; <nl> <nl> } / / namespace swift <nl> mmm a / include / swift / Frontend / FrontendOptions . h <nl> ppp b / include / swift / Frontend / FrontendOptions . h <nl> class FrontendOptions { <nl> / / / The kind of input on which the frontend should operate . <nl> InputFileKind InputKind = InputFileKind : : IFK_Swift ; <nl> <nl> - / / / The specified output files . If only a single outputfile is generated , <nl> - / / / the name of the last specified file is taken . <nl> - std : : vector < std : : string > OutputFilenames ; <nl> - <nl> - void forAllOutputPaths ( std : : function < void ( const std : : string & ) > fn ) const ; <nl> - <nl> - / / / Gets the name of the specified output filename . <nl> - / / / If multiple files are specified , the last one is returned . <nl> - StringRef getSingleOutputFilename ( ) const { <nl> - if ( OutputFilenames . size ( ) > = 1 ) <nl> - return OutputFilenames . back ( ) ; <nl> - return StringRef ( ) ; <nl> - } <nl> - / / / Sets a single filename as output filename . <nl> - void setSingleOutputFilename ( const std : : string & FileName ) { <nl> - OutputFilenames . clear ( ) ; <nl> - OutputFilenames . 
push_back ( FileName ) ; <nl> - } <nl> - void setOutputFilenameToStdout ( ) { setSingleOutputFilename ( " - " ) ; } <nl> - bool isOutputFilenameStdout ( ) const { <nl> - return getSingleOutputFilename ( ) = = " - " ; <nl> - } <nl> + void forAllOutputPaths ( const InputFile & input , <nl> + std : : function < void ( const std : : string & ) > fn ) const ; <nl> + <nl> bool isOutputFileDirectory ( ) const ; <nl> - bool hasNamedOutputFile ( ) const { <nl> - return ! OutputFilenames . empty ( ) & & ! isOutputFilenameStdout ( ) ; <nl> - } <nl> <nl> / / / A list of arbitrary modules to import and make implicitly visible . <nl> std : : vector < std : : string > ImplicitImportModuleNames ; <nl> mmm a / include / swift / Frontend / InputFile . h <nl> ppp b / include / swift / Frontend / InputFile . h <nl> enum class InputFileKind { <nl> class InputFile { <nl> std : : string Filename ; <nl> bool IsPrimary ; <nl> - / / / Null if the contents are not overridden . <nl> + / / / Points to a buffer overriding the file ' s contents , or nullptr if there is <nl> + / / / none . <nl> llvm : : MemoryBuffer * Buffer ; <nl> <nl> + / / / Contains the name of the main output file , that is , the . o file for this <nl> + / / / input . If there is no such file , contains an empty string . If the output <nl> + / / / is to be written to stdout , contains " - " . <nl> + std : : string OutputFilename ; <nl> + <nl> public : <nl> / / / Does not take ownership of \ p buffer . Does take ownership of ( copy ) a <nl> / / / string . <nl> InputFile ( StringRef name , bool isPrimary , <nl> - llvm : : MemoryBuffer * buffer = nullptr ) <nl> + llvm : : MemoryBuffer * buffer = nullptr , <nl> + StringRef outputFilename = StringRef ( ) ) <nl> : Filename ( <nl> convertBufferNameFromLLVM_getFileOrSTDIN_toSwiftConventions ( name ) ) , <nl> - IsPrimary ( isPrimary ) , Buffer ( buffer ) { <nl> + IsPrimary ( isPrimary ) , Buffer ( buffer ) , OutputFilename ( outputFilename ) { <nl> assert ( ! name . empty ( ) ) ; <nl> } <nl> <nl> class InputFile { <nl> StringRef filename ) { <nl> return filename . equals ( " < stdin > " ) ? " - " : filename ; <nl> } <nl> + <nl> + const std : : string & outputFilename ( ) const { return OutputFilename ; } <nl> + <nl> + void setOutputFilename ( StringRef outputFilename ) { <nl> + OutputFilename = outputFilename ; <nl> + } <nl> } ; <nl> <nl> } / / namespace swift <nl> mmm a / include / swift / Option / FrontendOptions . td <nl> ppp b / include / swift / Option / FrontendOptions . td <nl> def validate_tbd_against_ir_EQ : Joined < [ " - " ] , " validate - tbd - against - ir = " > , <nl> / / This is used to guard preemptive testing for the fix - it . <nl> def fix_string_substring_conversion : Flag < [ " - " ] , " fix - string - substring - conversion " > , <nl> HelpText < " Emit a fix - it to append ' [ ] ' to String expressions when converting to Substring . " > ; <nl> + <nl> + def bypass_batch_mode_checks : Flag < [ " - " ] , " bypass - batch - mode - checks " > , <nl> + HelpText < " Bypass checks for batch - mode errors . " > ; <nl> <nl> } / / end let Flags = [ FrontendOption , NoDriverOption , HelpHidden ] <nl> mmm a / lib / Frontend / ArgsToFrontendInputsConverter . cpp <nl> ppp b / lib / Frontend / ArgsToFrontendInputsConverter . cpp <nl> <nl> # include " swift / Frontend / ArgsToFrontendInputsConverter . h " <nl> <nl> # include " swift / AST / DiagnosticsFrontend . h " <nl> + # include " swift / Frontend / ArgsToFrontendOutputsConverter . h " <nl> # include " swift / Frontend / FrontendOptions . 
h " <nl> # include " swift / Option / Options . h " <nl> # include " swift / Parse / Lexer . h " <nl> bool ArgsToFrontendInputsConverter : : convert ( ) { <nl> return true ; <nl> std : : set < StringRef > unusedPrimaryFiles = <nl> createInputFilesConsumingPrimaries ( * primaryFiles ) ; <nl> - return checkForMissingPrimaryFiles ( unusedPrimaryFiles ) ; <nl> + <nl> + if ( checkForMissingPrimaryFiles ( unusedPrimaryFiles ) ) <nl> + return true ; <nl> + <nl> + / / Must be set before iterating over inputs needing outputs . <nl> + InputsAndOutputs . setIsSingleThreadedWMO ( isSingleThreadedWMO ( ) ) ; <nl> + <nl> + InputsAndOutputs . setBypassBatchModeChecks ( <nl> + Args . hasArg ( options : : OPT_bypass_batch_mode_checks ) ) ; <nl> + return false ; <nl> } <nl> <nl> bool ArgsToFrontendInputsConverter : : enforceFilelistExclusion ( ) { <nl> bool ArgsToFrontendInputsConverter : : checkForMissingPrimaryFiles ( <nl> } <nl> return ! primaryFiles . empty ( ) ; <nl> } <nl> + <nl> + bool ArgsToFrontendInputsConverter : : isSingleThreadedWMO ( ) const { <nl> + Optional < std : : vector < std : : string > > userSuppliedNamesOrErr = <nl> + OutputFilesComputer : : getOutputFilenamesFromCommandLineOrFilelist ( Args , <nl> + Diags ) ; <nl> + return InputsAndOutputs . hasInputs ( ) & & ! InputsAndOutputs . hasPrimaryInputs ( ) & & <nl> + userSuppliedNamesOrErr & & userSuppliedNamesOrErr - > size ( ) = = 1 ; <nl> + } <nl> mmm a / lib / Frontend / ArgsToFrontendOptionsConverter . cpp <nl> ppp b / lib / Frontend / ArgsToFrontendOptionsConverter . cpp <nl> <nl> # include " swift / AST / DiagnosticsFrontend . h " <nl> # include " swift / Basic / Platform . h " <nl> # include " swift / Frontend / ArgsToFrontendInputsConverter . h " <nl> + # include " swift / Frontend / ArgsToFrontendOutputsConverter . h " <nl> # include " swift / Frontend / Frontend . h " <nl> # include " swift / Option / Options . h " <nl> # include " swift / Option / SanitizerOptions . h " <nl> bool ArgsToFrontendOptionsConverter : : convert ( ) { <nl> / / This can be enabled independently of the playground transform . <nl> Opts . PCMacro | = Args . hasArg ( OPT_pc_macro ) ; <nl> <nl> - computeHelpOptions ( ) ; <nl> - if ( ArgsToFrontendInputsConverter ( Diags , Args , Opts . InputsAndOutputs ) <nl> - . convert ( ) ) <nl> - return true ; <nl> - <nl> Opts . ParseStdlib | = Args . hasArg ( OPT_parse_stdlib ) ; <nl> <nl> + computeHelpOptions ( ) ; <nl> + <nl> if ( const Arg * A = Args . getLastArg ( OPT_verify_generic_signatures ) ) { <nl> Opts . VerifyGenericSignaturesInModule = A - > getValue ( ) ; <nl> } <nl> <nl> computeDumpScopeMapLocations ( ) ; <nl> - Opts . RequestedAction = determineRequestedAction ( ) ; <nl> + <nl> + if ( ArgsToFrontendInputsConverter ( Diags , Args , Opts . InputsAndOutputs ) <nl> + . convert ( ) ) <nl> + return true ; <nl> + <nl> + Opts . RequestedAction = determineRequestedAction ( Args ) ; <nl> <nl> if ( Opts . RequestedAction = = FrontendOptions : : ActionType : : Immediate & & <nl> Opts . InputsAndOutputs . hasPrimaryInputs ( ) ) { <nl> void ArgsToFrontendOptionsConverter : : computeDumpScopeMapLocations ( ) { <nl> } <nl> <nl> FrontendOptions : : ActionType <nl> - ArgsToFrontendOptionsConverter : : determineRequestedAction ( ) const { <nl> + ArgsToFrontendOptionsConverter : : determineRequestedAction ( const ArgList & args ) { <nl> using namespace options ; <nl> - const Arg * A = Args . getLastArg ( OPT_modes_Group ) ; <nl> + const Arg * A = args . getLastArg ( OPT_modes_Group ) ; <nl> if ( ! 
A ) { <nl> / / We don ' t have a mode , so determine a default . <nl> - if ( Args . hasArg ( OPT_emit_module , OPT_emit_module_path ) ) { <nl> + if ( args . hasArg ( OPT_emit_module , OPT_emit_module_path ) ) { <nl> / / We ' ve been told to emit a module , but have no other mode indicators . <nl> / / As a result , put the frontend into EmitModuleOnly mode . <nl> / / ( Setting up module output will be handled below . ) <nl> bool ArgsToFrontendOptionsConverter : : computeFallbackModuleName ( ) { <nl> / / selected " . <nl> return false ; <nl> } <nl> - ArrayRef < std : : string > outputFilenames = <nl> - getOutputFilenamesFromCommandLineOrFilelist ( ) ; <nl> - <nl> - bool isOutputAUniqueOrdinaryFile = <nl> - outputFilenames . size ( ) = = 1 & & outputFilenames [ 0 ] ! = " - " & & <nl> - ! llvm : : sys : : fs : : is_directory ( outputFilenames [ 0 ] ) ; <nl> - std : : string nameToStem = <nl> - isOutputAUniqueOrdinaryFile <nl> - ? outputFilenames [ 0 ] <nl> + Optional < std : : vector < std : : string > > outputFilenames = <nl> + OutputFilesComputer : : getOutputFilenamesFromCommandLineOrFilelist ( Args , <nl> + Diags ) ; <nl> + <nl> + auto nameToStem = <nl> + outputFilenames & & outputFilenames - > size ( ) = = 1 & & <nl> + outputFilenames - > front ( ) ! = " - " & & <nl> + ! llvm : : sys : : fs : : is_directory ( outputFilenames - > front ( ) ) <nl> + ? outputFilenames - > front ( ) <nl> : Opts . InputsAndOutputs . getFilenameOfFirstInput ( ) . str ( ) ; <nl> + <nl> Opts . ModuleName = llvm : : sys : : path : : stem ( nameToStem ) ; <nl> return false ; <nl> } <nl> <nl> bool ArgsToFrontendOptionsConverter : : computeOutputFilenames ( ) { <nl> - assert ( Opts . OutputFilenames . empty ( ) & & <nl> - " Output filename should not be set at this point " ) ; <nl> - if ( ! FrontendOptions : : doesActionProduceOutput ( Opts . RequestedAction ) ) { <nl> - return false ; <nl> - } <nl> - ArrayRef < std : : string > outputFilenamesFromCommandLineOrFilelist = <nl> - getOutputFilenamesFromCommandLineOrFilelist ( ) ; <nl> - <nl> - if ( outputFilenamesFromCommandLineOrFilelist . size ( ) > 1 ) { <nl> - / / WMO , threaded with N files ( also someday batch mode ) . <nl> - Opts . OutputFilenames = outputFilenamesFromCommandLineOrFilelist ; <nl> - return false ; <nl> - } <nl> - <nl> - if ( outputFilenamesFromCommandLineOrFilelist . empty ( ) ) { <nl> - / / When the Frontend is invoked without going through the driver <nl> - / / ( e . g . for testing ) , it is convenient to derive output filenames from <nl> - / / input . <nl> - return deriveOutputFilenameFromInputFile ( ) ; <nl> - } <nl> - <nl> - StringRef outputFilename = outputFilenamesFromCommandLineOrFilelist [ 0 ] ; <nl> - if ( ! llvm : : sys : : fs : : is_directory ( outputFilename ) ) { <nl> - / / Could be - primary - file ( 1 ) , or - wmo ( non - threaded w / N ( input ) files ) <nl> - Opts . OutputFilenames = outputFilenamesFromCommandLineOrFilelist ; <nl> - return false ; <nl> - } <nl> - / / Only used for testing & when invoking frontend directly . <nl> - return deriveOutputFilenameForDirectory ( outputFilename ) ; <nl> - } <nl> - <nl> - bool ArgsToFrontendOptionsConverter : : deriveOutputFilenameFromInputFile ( ) { <nl> - if ( Opts . InputsAndOutputs . isReadingFromStdin ( ) | | <nl> - FrontendOptions : : doesActionProduceTextualOutput ( Opts . RequestedAction ) ) { <nl> - Opts . setOutputFilenameToStdout ( ) ; <nl> - return false ; <nl> - } <nl> - std : : string baseName = determineBaseNameOfOutput ( ) ; <nl> - if ( baseName . 
empty ( ) ) { <nl> - if ( Opts . RequestedAction ! = FrontendOptions : : ActionType : : REPL & & <nl> - Opts . RequestedAction ! = FrontendOptions : : ActionType : : Immediate & & <nl> - Opts . RequestedAction ! = FrontendOptions : : ActionType : : NoneAction ) { <nl> - Diags . diagnose ( SourceLoc ( ) , diag : : error_no_output_filename_specified ) ; <nl> - return true ; <nl> - } <nl> - return false ; <nl> - } <nl> - deriveOutputFilenameFromParts ( " " , baseName ) ; <nl> - return false ; <nl> - } <nl> - <nl> - bool ArgsToFrontendOptionsConverter : : deriveOutputFilenameForDirectory ( <nl> - StringRef outputDir ) { <nl> - <nl> - std : : string baseName = determineBaseNameOfOutput ( ) ; <nl> - if ( baseName . empty ( ) ) { <nl> - Diags . diagnose ( SourceLoc ( ) , diag : : error_implicit_output_file_is_directory , <nl> - outputDir ) ; <nl> + Optional < std : : vector < std : : string > > outs = <nl> + ArgsToFrontendOutputsConverter ( Args , Opts . ModuleName , <nl> + Opts . InputsAndOutputs , Diags ) <nl> + . convert ( ) ; <nl> + if ( ! outs ) <nl> return true ; <nl> - } <nl> - deriveOutputFilenameFromParts ( outputDir , baseName ) ; <nl> + if ( FrontendOptions : : doesActionProduceOutput ( Opts . RequestedAction ) ) <nl> + Opts . InputsAndOutputs . setMainOutputs ( * outs ) ; <nl> + else <nl> + assert ( outs - > empty ( ) & & <nl> + " cannot have main outputs for actions that don ' t produce outputs " ) ; <nl> return false ; <nl> } <nl> <nl> - void ArgsToFrontendOptionsConverter : : deriveOutputFilenameFromParts ( <nl> - StringRef dir , StringRef base ) { <nl> - assert ( ! base . empty ( ) ) ; <nl> - llvm : : SmallString < 128 > path ( dir ) ; <nl> - llvm : : sys : : path : : append ( path , base ) ; <nl> - StringRef suffix = FrontendOptions : : suffixForPrincipalOutputFileForAction ( <nl> - Opts . RequestedAction ) ; <nl> - llvm : : sys : : path : : replace_extension ( path , suffix ) ; <nl> - Opts . OutputFilenames . push_back ( path . str ( ) ) ; <nl> - } <nl> - <nl> - std : : string ArgsToFrontendOptionsConverter : : determineBaseNameOfOutput ( ) const { <nl> - std : : string nameToStem ; <nl> - if ( Opts . InputsAndOutputs . hasPrimaryInputs ( ) ) { <nl> - nameToStem = Opts . InputsAndOutputs . getRequiredUniquePrimaryInput ( ) . file ( ) ; <nl> - } else if ( auto UserSpecifiedModuleName = <nl> - Args . getLastArg ( options : : OPT_module_name ) ) { <nl> - nameToStem = UserSpecifiedModuleName - > getValue ( ) ; <nl> - } else if ( Opts . InputsAndOutputs . hasSingleInput ( ) ) { <nl> - nameToStem = Opts . InputsAndOutputs . getFilenameOfFirstInput ( ) ; <nl> - } else <nl> - nameToStem = " " ; <nl> - <nl> - return llvm : : sys : : path : : stem ( nameToStem ) . str ( ) ; <nl> - } <nl> - <nl> - ArrayRef < std : : string > <nl> - ArgsToFrontendOptionsConverter : : getOutputFilenamesFromCommandLineOrFilelist ( ) { <nl> - if ( cachedOutputFilenamesFromCommandLineOrFilelist ) { <nl> - return * cachedOutputFilenamesFromCommandLineOrFilelist ; <nl> - } <nl> - <nl> - if ( const Arg * A = Args . getLastArg ( options : : OPT_output_filelist ) ) { <nl> - assert ( ! Args . hasArg ( options : : OPT_o ) & & <nl> - " don ' t use - o with - output - filelist " ) ; <nl> - cachedOutputFilenamesFromCommandLineOrFilelist . emplace ( <nl> - readOutputFileList ( A - > getValue ( ) ) ) ; <nl> - } else { <nl> - cachedOutputFilenamesFromCommandLineOrFilelist . emplace ( <nl> - Args . 
getAllArgValues ( options : : OPT_o ) ) ; <nl> - } <nl> - return * cachedOutputFilenamesFromCommandLineOrFilelist ; <nl> - } <nl> - <nl> - / / / Try to read an output file list file . <nl> - std : : vector < std : : string > ArgsToFrontendOptionsConverter : : readOutputFileList ( <nl> - const StringRef filelistPath ) const { <nl> - llvm : : ErrorOr < std : : unique_ptr < llvm : : MemoryBuffer > > buffer = <nl> - llvm : : MemoryBuffer : : getFile ( filelistPath ) ; <nl> - if ( ! buffer ) { <nl> - Diags . diagnose ( SourceLoc ( ) , diag : : cannot_open_file , filelistPath , <nl> - buffer . getError ( ) . message ( ) ) ; <nl> - } <nl> - std : : vector < std : : string > outputFiles ; <nl> - for ( StringRef line : make_range ( llvm : : line_iterator ( * buffer . get ( ) ) , { } ) ) { <nl> - outputFiles . push_back ( line . str ( ) ) ; <nl> - } <nl> - return outputFiles ; <nl> - } <nl> - <nl> void ArgsToFrontendOptionsConverter : : determineSupplementaryOutputFilenames ( ) { <nl> using namespace options ; <nl> auto determineOutputFilename = <nl> void ArgsToFrontendOptionsConverter : : determineSupplementaryOutputFilenames ( ) { <nl> if ( ! Args . hasArg ( optWithoutPath ) ) <nl> return ; <nl> <nl> - if ( useMainOutput & & ! Opts . OutputFilenames . empty ( ) ) { <nl> - output = Opts . getSingleOutputFilename ( ) ; <nl> + if ( useMainOutput & & <nl> + ! Opts . InputsAndOutputs . getSingleOutputFilename ( ) . empty ( ) ) { <nl> + output = Opts . InputsAndOutputs . getSingleOutputFilename ( ) ; <nl> return ; <nl> } <nl> <nl> new file mode 100644 <nl> index 000000000000 . . f53e5c97456d <nl> mmm / dev / null <nl> ppp b / lib / Frontend / ArgsToFrontendOutputsConverter . cpp <nl> <nl> + / / = = = mmm ArgsToFrontendOutputsConverter . cpp mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2018 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See https : / / swift . org / LICENSE . txt for license information <nl> + / / See https : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + # include " swift / Frontend / ArgsToFrontendOutputsConverter . h " <nl> + <nl> + # include " swift / AST / DiagnosticsFrontend . h " <nl> + # include " swift / Basic / Platform . h " <nl> + # include " swift / Frontend / ArgsToFrontendInputsConverter . h " <nl> + # include " swift / Frontend / ArgsToFrontendOptionsConverter . h " <nl> + # include " swift / Frontend / Frontend . h " <nl> + # include " swift / Option / Options . h " <nl> + # include " swift / Option / SanitizerOptions . h " <nl> + # include " swift / Strings . h " <nl> + # include " llvm / ADT / STLExtras . h " <nl> + # include " llvm / ADT / Triple . h " <nl> + # include " llvm / Option / Arg . h " <nl> + # include " llvm / Option / ArgList . h " <nl> + # include " llvm / Option / Option . h " <nl> + # include " llvm / Support / FileSystem . h " <nl> + # include " llvm / Support / LineIterator . h " <nl> + # include " llvm / Support / Path . 
h " <nl> + <nl> + using namespace swift ; <nl> + using namespace llvm : : opt ; <nl> + <nl> + Optional < std : : vector < std : : string > > ArgsToFrontendOutputsConverter : : convert ( ) { <nl> + const auto requestedAction = <nl> + ArgsToFrontendOptionsConverter : : determineRequestedAction ( Args ) ; <nl> + <nl> + if ( ! FrontendOptions : : doesActionProduceOutput ( requestedAction ) ) <nl> + return std : : vector < std : : string > ( ) ; <nl> + <nl> + if ( auto ofc = OutputFilesComputer : : create ( Args , Diags , InputsAndOutputs ) ) <nl> + return ofc - > computeOutputFiles ( ) ; <nl> + return None ; <nl> + } <nl> + <nl> + / / / Try to read an output file list file . <nl> + Optional < std : : vector < std : : string > > <nl> + ArgsToFrontendOutputsConverter : : readOutputFileList ( const StringRef filelistPath , <nl> + DiagnosticEngine & diags ) { <nl> + llvm : : ErrorOr < std : : unique_ptr < llvm : : MemoryBuffer > > buffer = <nl> + llvm : : MemoryBuffer : : getFile ( filelistPath ) ; <nl> + if ( ! buffer ) { <nl> + diags . diagnose ( SourceLoc ( ) , diag : : cannot_open_file , filelistPath , <nl> + buffer . getError ( ) . message ( ) ) ; <nl> + return None ; <nl> + } <nl> + std : : vector < std : : string > outputFiles ; <nl> + for ( StringRef line : make_range ( llvm : : line_iterator ( * buffer . get ( ) ) , { } ) ) { <nl> + outputFiles . push_back ( line . str ( ) ) ; <nl> + } <nl> + return outputFiles ; <nl> + } <nl> + <nl> + Optional < std : : vector < std : : string > > <nl> + OutputFilesComputer : : getOutputFilenamesFromCommandLineOrFilelist ( <nl> + const ArgList & args , DiagnosticEngine & diags ) { <nl> + if ( const Arg * A = args . getLastArg ( options : : OPT_output_filelist ) ) { <nl> + assert ( ! args . hasArg ( options : : OPT_o ) & & <nl> + " don ' t use - o with - output - filelist " ) ; <nl> + return ArgsToFrontendOutputsConverter : : readOutputFileList ( A - > getValue ( ) , <nl> + diags ) ; <nl> + } <nl> + return args . getAllArgValues ( options : : OPT_o ) ; <nl> + } <nl> + <nl> + Optional < OutputFilesComputer > <nl> + OutputFilesComputer : : create ( const llvm : : opt : : ArgList & args , <nl> + DiagnosticEngine & diags , <nl> + const FrontendInputsAndOutputs & inputsAndOutputs ) { <nl> + Optional < std : : vector < std : : string > > outputArguments = <nl> + getOutputFilenamesFromCommandLineOrFilelist ( args , diags ) ; <nl> + if ( ! outputArguments ) <nl> + return None ; <nl> + const StringRef outputDirectoryArgument = <nl> + outputArguments - > size ( ) = = 1 & & <nl> + llvm : : sys : : fs : : is_directory ( outputArguments - > front ( ) ) <nl> + ? StringRef ( outputArguments - > front ( ) ) <nl> + : StringRef ( ) ; <nl> + ArrayRef < std : : string > outputFileArguments = <nl> + outputDirectoryArgument . empty ( ) ? ArrayRef < std : : string > ( * outputArguments ) <nl> + : ArrayRef < std : : string > ( ) ; <nl> + const StringRef firstInput = inputsAndOutputs . hasSingleInput ( ) <nl> + ? inputsAndOutputs . getFilenameOfFirstInput ( ) <nl> + : StringRef ( ) ; <nl> + const FrontendOptions : : ActionType requestedAction = <nl> + ArgsToFrontendOptionsConverter : : determineRequestedAction ( args ) ; <nl> + <nl> + if ( ! outputFileArguments . empty ( ) & & <nl> + outputFileArguments . size ( ) ! = <nl> + inputsAndOutputs . countOfInputsProducingMainOutputs ( ) ) { <nl> + diags . 
diagnose ( <nl> + SourceLoc ( ) , <nl> + diag : : error_if_any_output_files_are_specified_they_all_must_be ) ; <nl> + return None ; <nl> + } <nl> + <nl> + return OutputFilesComputer ( <nl> + args , diags , inputsAndOutputs , std : : move ( outputFileArguments ) , <nl> + outputDirectoryArgument , firstInput , requestedAction , <nl> + args . getLastArg ( options : : OPT_module_name ) , <nl> + FrontendOptions : : suffixForPrincipalOutputFileForAction ( requestedAction ) , <nl> + FrontendOptions : : doesActionProduceTextualOutput ( requestedAction ) ) ; <nl> + } <nl> + <nl> + OutputFilesComputer : : OutputFilesComputer ( <nl> + const llvm : : opt : : ArgList & args , DiagnosticEngine & diags , <nl> + const FrontendInputsAndOutputs & inputsAndOutputs , <nl> + std : : vector < std : : string > outputFileArguments , <nl> + const StringRef outputDirectoryArgument , const StringRef firstInput , <nl> + const FrontendOptions : : ActionType requestedAction , <nl> + const llvm : : opt : : Arg * moduleNameArg , const StringRef suffix , <nl> + const bool hasTextualOutput ) <nl> + : Args ( args ) , Diags ( diags ) , InputsAndOutputs ( inputsAndOutputs ) , <nl> + OutputFileArguments ( outputFileArguments ) , <nl> + OutputDirectoryArgument ( outputDirectoryArgument ) , FirstInput ( firstInput ) , <nl> + RequestedAction ( requestedAction ) , ModuleNameArg ( moduleNameArg ) , <nl> + Suffix ( suffix ) , HasTextualOutput ( hasTextualOutput ) { } <nl> + <nl> + Optional < std : : vector < std : : string > > <nl> + OutputFilesComputer : : computeOutputFiles ( ) const { <nl> + std : : vector < std : : string > outputFiles ; <nl> + bool hadError = false ; <nl> + unsigned i = 0 ; <nl> + InputsAndOutputs . forEachInputProducingAMainOutputFile ( <nl> + [ & ] ( const InputFile & input ) - > void { <nl> + <nl> + StringRef outputArg = OutputFileArguments . empty ( ) <nl> + ? StringRef ( ) <nl> + : StringRef ( OutputFileArguments [ i + + ] ) ; <nl> + <nl> + Optional < std : : string > outputFile = computeOutputFile ( outputArg , input ) ; <nl> + if ( ! outputFile ) { <nl> + hadError = true ; <nl> + return ; <nl> + } <nl> + outputFiles . push_back ( * outputFile ) ; <nl> + } ) ; <nl> + return hadError ? None : Optional < std : : vector < std : : string > > ( outputFiles ) ; <nl> + } <nl> + <nl> + Optional < std : : string > <nl> + OutputFilesComputer : : computeOutputFile ( StringRef outputArg , <nl> + const InputFile & input ) const { <nl> + if ( ! OutputDirectoryArgument . empty ( ) ) <nl> + return deriveOutputFileForDirectory ( input ) ; <nl> + <nl> + if ( ! outputArg . empty ( ) ) <nl> + return outputArg . str ( ) ; <nl> + <nl> + return deriveOutputFileFromInput ( input ) ; <nl> + } <nl> + <nl> + Optional < std : : string > <nl> + OutputFilesComputer : : deriveOutputFileFromInput ( const InputFile & input ) const { <nl> + if ( input . file ( ) = = " - " | | HasTextualOutput ) <nl> + return std : : string ( " - " ) ; <nl> + <nl> + std : : string baseName = determineBaseNameOfOutput ( input ) ; <nl> + if ( baseName . empty ( ) ) { <nl> + / / Assuming FrontendOptions : : doesActionProduceOutput ( RequestedAction ) <nl> + Diags . 
diagnose ( SourceLoc ( ) , diag : : error_no_output_filename_specified ) ; <nl> + return None ; <nl> + } <nl> + return deriveOutputFileFromParts ( " " , baseName ) ; <nl> + } <nl> + <nl> + Optional < std : : string > OutputFilesComputer : : deriveOutputFileForDirectory ( <nl> + const InputFile & input ) const { <nl> + std : : string baseName = determineBaseNameOfOutput ( input ) ; <nl> + if ( baseName . empty ( ) ) { <nl> + Diags . diagnose ( SourceLoc ( ) , diag : : error_implicit_output_file_is_directory , <nl> + OutputDirectoryArgument ) ; <nl> + return None ; <nl> + } <nl> + return deriveOutputFileFromParts ( OutputDirectoryArgument , baseName ) ; <nl> + } <nl> + <nl> + std : : string <nl> + OutputFilesComputer : : determineBaseNameOfOutput ( const InputFile & input ) const { <nl> + StringRef nameToStem = <nl> + input . isPrimary ( ) <nl> + ? input . file ( ) <nl> + : ModuleNameArg ? ModuleNameArg - > getValue ( ) : FirstInput ; <nl> + return llvm : : sys : : path : : stem ( nameToStem ) . str ( ) ; <nl> + } <nl> + <nl> + std : : string <nl> + OutputFilesComputer : : deriveOutputFileFromParts ( StringRef dir , <nl> + StringRef base ) const { <nl> + assert ( ! base . empty ( ) ) ; <nl> + llvm : : SmallString < 128 > path ( dir ) ; <nl> + llvm : : sys : : path : : append ( path , base ) ; <nl> + llvm : : sys : : path : : replace_extension ( path , Suffix ) ; <nl> + return path . str ( ) ; <nl> + } <nl> mmm a / lib / Frontend / CMakeLists . txt <nl> ppp b / lib / Frontend / CMakeLists . txt <nl> <nl> add_swift_library ( swiftFrontend STATIC <nl> ArgsToFrontendInputsConverter . cpp <nl> ArgsToFrontendOptionsConverter . cpp <nl> + ArgsToFrontendOutputsConverter . cpp <nl> CompilerInvocation . cpp <nl> DiagnosticVerifier . cpp <nl> Frontend . cpp <nl> mmm a / lib / Frontend / CompilerInvocation . cpp <nl> ppp b / lib / Frontend / CompilerInvocation . cpp <nl> static bool ParseSILArgs ( SILOptions & Opts , ArgList & Args , <nl> if ( Args . hasArg ( OPT_debug_on_sil ) ) { <nl> / / Derive the name of the SIL file for debugging from <nl> / / the regular outputfile . <nl> - StringRef BaseName = FEOpts . getSingleOutputFilename ( ) ; <nl> + StringRef BaseName = FEOpts . InputsAndOutputs . getSingleOutputFilename ( ) ; <nl> / / If there are no or multiple outputfiles , derive the name <nl> / / from the module name . <nl> if ( BaseName . empty ( ) ) <nl> static bool ParseIRGenArgs ( IRGenOptions & Opts , ArgList & Args , <nl> Opts . MainInputFilename = <nl> FrontendOpts . InputsAndOutputs . getFilenameOfFirstInput ( ) ; <nl> } <nl> - Opts . OutputFilenames = FrontendOpts . OutputFilenames ; <nl> + Opts . OutputFilenames = FrontendOpts . InputsAndOutputs . copyOutputFilenames ( ) ; <nl> Opts . ModuleName = FrontendOpts . ModuleName ; <nl> <nl> if ( Args . hasArg ( OPT_use_jit ) ) <nl> mmm a / lib / Frontend / FrontendInputsAndOutputs . cpp <nl> ppp b / lib / Frontend / FrontendInputsAndOutputs . cpp <nl> FrontendInputsAndOutputs : : FrontendInputsAndOutputs ( <nl> const FrontendInputsAndOutputs & other ) { <nl> for ( InputFile input : other . AllInputs ) <nl> addInput ( input ) ; <nl> + IsSingleThreadedWMO = other . IsSingleThreadedWMO ; <nl> } <nl> <nl> FrontendInputsAndOutputs & FrontendInputsAndOutputs : : <nl> operator = ( const FrontendInputsAndOutputs & other ) { <nl> clearInputs ( ) ; <nl> for ( InputFile input : other . AllInputs ) <nl> addInput ( input ) ; <nl> + IsSingleThreadedWMO = other . 
IsSingleThreadedWMO ; <nl> return * this ; <nl> } <nl> <nl> void FrontendInputsAndOutputs : : assertMustNotBeMoreThanOnePrimaryInput ( ) const { <nl> " have not implemented > 1 primary input yet " ) ; <nl> } <nl> <nl> + void FrontendInputsAndOutputs : : <nl> + assertMustNotBeMoreThanOnePrimaryInputUnlessBatchModeChecksHaveBeenBypassed ( ) <nl> + const { <nl> + if ( ! areBatchModeChecksBypassed ( ) ) <nl> + assertMustNotBeMoreThanOnePrimaryInput ( ) ; <nl> + } <nl> + <nl> const InputFile * FrontendInputsAndOutputs : : getUniquePrimaryInput ( ) const { <nl> assertMustNotBeMoreThanOnePrimaryInput ( ) ; <nl> const auto b = PrimaryInputs . begin ( ) ; <nl> StringRef FrontendInputsAndOutputs : : getNameOfUniquePrimaryInputFile ( ) const { <nl> return input = = nullptr ? StringRef ( ) : input - > file ( ) ; <nl> } <nl> <nl> + std : : string FrontendInputsAndOutputs : : getStatsFileMangledInputName ( ) const { <nl> + / / FIXME : " batch " should probably be some concatenation of all the primary <nl> + / / input names , in order to keep the stats file names unique . ( Or perhaps just <nl> + / / the first primary ? ) <nl> + return isWholeModule ( ) <nl> + ? " all " <nl> + : primaryInputCount ( ) = = 1 ? firstPrimaryInput ( ) . file ( ) : " batch " ; <nl> + } <nl> + <nl> bool FrontendInputsAndOutputs : : isInputPrimary ( StringRef file ) const { <nl> StringRef correctedFile = <nl> InputFile : : convertBufferNameFromLLVM_getFileOrSTDIN_toSwiftConventions ( <nl> void FrontendInputsAndOutputs : : addPrimaryInputFile ( StringRef file , <nl> llvm : : MemoryBuffer * buffer ) { <nl> addInput ( InputFile ( file , true , buffer ) ) ; <nl> } <nl> + <nl> + / / Outputs <nl> + <nl> + unsigned FrontendInputsAndOutputs : : countOfInputsProducingMainOutputs ( ) const { <nl> + return isSingleThreadedWMO ( ) <nl> + ? 1 <nl> + : hasPrimaryInputs ( ) ? primaryInputCount ( ) : inputCount ( ) ; <nl> + } <nl> + <nl> + const InputFile & FrontendInputsAndOutputs : : firstInputProducingOutput ( ) const { <nl> + return isSingleThreadedWMO ( ) <nl> + ? firstInput ( ) <nl> + : hasPrimaryInputs ( ) ? firstPrimaryInput ( ) : firstInput ( ) ; <nl> + } <nl> + <nl> + const InputFile & FrontendInputsAndOutputs : : lastInputProducingOutput ( ) const { <nl> + return isSingleThreadedWMO ( ) <nl> + ? firstInput ( ) <nl> + : hasPrimaryInputs ( ) ? lastPrimaryInput ( ) : lastInput ( ) ; <nl> + } <nl> + <nl> + void FrontendInputsAndOutputs : : forEachInputProducingAMainOutputFile ( <nl> + llvm : : function_ref < void ( const InputFile & ) > fn ) const { <nl> + isSingleThreadedWMO ( ) <nl> + ? fn ( firstInput ( ) ) <nl> + : hasPrimaryInputs ( ) ? forEachPrimaryInput ( fn ) : forEachInput ( fn ) ; <nl> + } <nl> + <nl> + void FrontendInputsAndOutputs : : setMainOutputs ( <nl> + ArrayRef < std : : string > outputFiles ) { <nl> + assert ( countOfInputsProducingMainOutputs ( ) = = outputFiles . size ( ) ) ; <nl> + if ( hasPrimaryInputs ( ) ) { <nl> + unsigned i = 0 ; <nl> + for ( auto index : indices ( AllInputs ) ) { <nl> + InputFile & f = AllInputs [ index ] ; <nl> + if ( f . isPrimary ( ) ) <nl> + f . setOutputFilename ( outputFiles [ i + + ] ) ; <nl> + } <nl> + } else if ( isSingleThreadedWMO ( ) ) { <nl> + AllInputs [ 0 ] . setOutputFilename ( outputFiles [ 0 ] ) ; <nl> + } else { <nl> + for ( auto i : indices ( AllInputs ) ) <nl> + AllInputs [ i ] . 
setOutputFilename ( outputFiles [ i ] ) ; <nl> + } <nl> + } <nl> + <nl> + std : : vector < std : : string > FrontendInputsAndOutputs : : copyOutputFilenames ( ) const { <nl> + std : : vector < std : : string > outputs ; <nl> + forEachInputProducingAMainOutputFile ( [ & ] ( const InputFile & input ) - > void { <nl> + outputs . push_back ( input . outputFilename ( ) ) ; <nl> + } ) ; <nl> + return outputs ; <nl> + } <nl> + <nl> + void FrontendInputsAndOutputs : : forEachOutputFilename ( <nl> + llvm : : function_ref < void ( const std : : string & ) > fn ) const { <nl> + forEachInputProducingAMainOutputFile ( <nl> + [ & ] ( const InputFile & input ) - > void { fn ( input . outputFilename ( ) ) ; } ) ; <nl> + } <nl> + <nl> + StringRef FrontendInputsAndOutputs : : getSingleOutputFilename ( ) const { <nl> + assertMustNotBeMoreThanOnePrimaryInputUnlessBatchModeChecksHaveBeenBypassed ( ) ; <nl> + return hasInputs ( ) ? StringRef ( lastInputProducingOutput ( ) . outputFilename ( ) ) <nl> + : StringRef ( ) ; <nl> + } <nl> + <nl> + bool FrontendInputsAndOutputs : : isOutputFilenameStdout ( ) const { <nl> + return getSingleOutputFilename ( ) = = " - " ; <nl> + } <nl> + <nl> + bool FrontendInputsAndOutputs : : isOutputFileDirectory ( ) const { <nl> + return hasNamedOutputFile ( ) & & <nl> + llvm : : sys : : fs : : is_directory ( getSingleOutputFilename ( ) ) ; <nl> + } <nl> + <nl> + bool FrontendInputsAndOutputs : : hasNamedOutputFile ( ) const { <nl> + return hasInputs ( ) & & ! isOutputFilenameStdout ( ) ; <nl> + } <nl> + <nl> + / / Supplementary outputs <nl> + <nl> + void FrontendInputsAndOutputs : : forEachInputProducingSupplementaryOutput ( <nl> + llvm : : function_ref < void ( const InputFile & ) > fn ) const { <nl> + if ( hasPrimaryInputs ( ) ) <nl> + forEachPrimaryInput ( fn ) ; <nl> + else <nl> + fn ( firstInput ( ) ) ; <nl> + } <nl> mmm a / lib / Frontend / FrontendOptions . cpp <nl> ppp b / lib / Frontend / FrontendOptions . cpp <nl> bool FrontendOptions : : isActionImmediate ( ActionType action ) { <nl> } <nl> <nl> void FrontendOptions : : forAllOutputPaths ( <nl> - std : : function < void ( const std : : string & ) > fn ) const { <nl> + const InputFile & input , std : : function < void ( const std : : string & ) > fn ) const { <nl> if ( RequestedAction ! = FrontendOptions : : ActionType : : EmitModuleOnly & & <nl> RequestedAction ! = FrontendOptions : : ActionType : : MergeModules ) { <nl> - for ( const std : : string & OutputFileName : OutputFilenames ) { <nl> - fn ( OutputFileName ) ; <nl> - } <nl> + if ( InputsAndOutputs . isWholeModule ( ) ) <nl> + InputsAndOutputs . forEachOutputFilename ( fn ) ; <nl> + else <nl> + fn ( input . outputFilename ( ) ) ; <nl> } <nl> const std : : string * outputs [ ] = { <nl> & ModuleOutputPath , <nl> void FrontendOptions : : forAllOutputPaths ( <nl> <nl> <nl> StringRef FrontendOptions : : originalPath ( ) const { <nl> - if ( hasNamedOutputFile ( ) ) <nl> + if ( InputsAndOutputs . hasNamedOutputFile ( ) ) <nl> / / Put the serialized diagnostics file next to the output file . <nl> - return getSingleOutputFilename ( ) ; <nl> + return InputsAndOutputs . 
getSingleOutputFilename ( ) ; <nl> <nl> / / If we have a primary input , so use that as the basis for the name of the <nl> / / serialized diagnostics file , otherwise fall back on the <nl> StringRef FrontendOptions : : originalPath ( ) const { <nl> : StringRef ( ModuleName ) ; <nl> } <nl> <nl> - bool FrontendOptions : : isOutputFileDirectory ( ) const { <nl> - return hasNamedOutputFile ( ) & & <nl> - llvm : : sys : : fs : : is_directory ( getSingleOutputFilename ( ) ) ; <nl> - } <nl> - <nl> const char * <nl> FrontendOptions : : suffixForPrincipalOutputFileForAction ( ActionType action ) { <nl> switch ( action ) { <nl> mmm a / lib / FrontendTool / FrontendTool . cpp <nl> ppp b / lib / FrontendTool / FrontendTool . cpp <nl> static std : : string displayName ( StringRef MainExecutablePath ) { <nl> / / / Emits a Make - style dependencies file . <nl> static bool emitMakeDependencies ( DiagnosticEngine & diags , <nl> DependencyTracker & depTracker , <nl> - const FrontendOptions & opts ) { <nl> + const FrontendOptions & opts , <nl> + const InputFile & input ) { <nl> std : : error_code EC ; <nl> llvm : : raw_fd_ostream out ( opts . DependenciesFilePath , EC , <nl> llvm : : sys : : fs : : F_None ) ; <nl> static bool emitMakeDependencies ( DiagnosticEngine & diags , <nl> <nl> / / FIXME : Xcode can ' t currently handle multiple targets in a single <nl> / / dependency line . <nl> - opts . forAllOutputPaths ( [ & ] ( StringRef targetName ) { <nl> + opts . forAllOutputPaths ( input , [ & ] ( StringRef targetName ) { <nl> out < < escape ( targetName ) < < " : " ; <nl> / / First include all other files in the module . Make - style dependencies <nl> / / need to be conservative ! <nl> static bool emitMakeDependencies ( DiagnosticEngine & diags , <nl> return false ; <nl> } <nl> <nl> + static bool emitMakeDependencies ( DiagnosticEngine & diags , <nl> + DependencyTracker & depTracker , <nl> + const FrontendOptions & opts ) { <nl> + bool hadError = false ; <nl> + opts . InputsAndOutputs . forEachInputProducingSupplementaryOutput ( <nl> + [ & ] ( const InputFile & f ) - > void { <nl> + hadError = emitMakeDependencies ( diags , depTracker , opts , f ) | | hadError ; <nl> + } ) ; <nl> + return hadError ; <nl> + } <nl> + <nl> namespace { <nl> struct LoadedModuleTraceFormat { <nl> std : : string Name ; <nl> static bool performCompile ( CompilerInstance & Instance , <nl> return clangImporter - > emitBridgingPCH ( <nl> Invocation . getFrontendOptions ( ) <nl> . InputsAndOutputs . getFilenameOfFirstInput ( ) , <nl> - opts . getSingleOutputFilename ( ) ) ; <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) ) ; <nl> } <nl> <nl> IRGenOptions & IRGenOpts = Invocation . getIRGenOptions ( ) ; <nl> static bool performCompile ( CompilerInstance & Instance , <nl> SF - > dumpInterfaceHash ( llvm : : errs ( ) ) ; <nl> else if ( Action = = FrontendOptions : : ActionType : : EmitSyntax ) { <nl> emitSyntax ( SF , Invocation . getLangOptions ( ) , Instance . getSourceMgr ( ) , <nl> - opts . getSingleOutputFilename ( ) ) ; <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) ) ; <nl> } else <nl> SF - > dump ( ) ; <nl> return Context . hadError ( ) ; <nl> static bool performCompileStepsPostSILGen ( CompilerInstance & Instance , <nl> if ( Invocation . getSILOptions ( ) . LinkMode = = SILOptions : : LinkAll ) <nl> performSILLinking ( SM . get ( ) , true ) ; <nl> return writeSIL ( * SM , Instance . getMainModule ( ) , opts . EmitVerboseSIL , <nl> - opts . getSingleOutputFilename ( ) , opts . 
EmitSortedSIL ) ; <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) , <nl> + opts . EmitSortedSIL ) ; <nl> } <nl> <nl> if ( Action = = FrontendOptions : : ActionType : : EmitSIBGen ) { <nl> static bool performCompileStepsPostSILGen ( CompilerInstance & Instance , <nl> / / We ' ve been told to write canonical SIL , so write it now . <nl> if ( Action = = FrontendOptions : : ActionType : : EmitSIL ) { <nl> return writeSIL ( * SM , Instance . getMainModule ( ) , opts . EmitVerboseSIL , <nl> - opts . getSingleOutputFilename ( ) , opts . EmitSortedSIL ) ; <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) , <nl> + opts . EmitSortedSIL ) ; <nl> } <nl> <nl> assert ( Action > = FrontendOptions : : ActionType : : Immediate & & <nl> static bool performCompileStepsPostSILGen ( CompilerInstance & Instance , <nl> IRModule = performIRGeneration ( IRGenOpts , <nl> * MSF . get < SourceFile * > ( ) , <nl> std : : move ( SM ) , <nl> - opts . getSingleOutputFilename ( ) , LLVMContext , <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) , LLVMContext , <nl> 0 , & HashGlobal ) ; <nl> } else { <nl> IRModule = performIRGeneration ( IRGenOpts , MSF . get < ModuleDecl * > ( ) , <nl> std : : move ( SM ) , <nl> - opts . getSingleOutputFilename ( ) , LLVMContext , <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) , LLVMContext , <nl> & HashGlobal ) ; <nl> } <nl> <nl> static bool performCompileStepsPostSILGen ( CompilerInstance & Instance , <nl> <nl> / / Now that we have a single IR Module , hand it over to performLLVM . <nl> return performLLVM ( IRGenOpts , & Instance . getDiags ( ) , nullptr , HashGlobal , <nl> - IRModule . get ( ) , TargetMachine . get ( ) , EffectiveLanguageVersion , <nl> - opts . getSingleOutputFilename ( ) , Stats ) | | HadError ; <nl> + IRModule . get ( ) , TargetMachine . get ( ) , <nl> + EffectiveLanguageVersion , <nl> + opts . InputsAndOutputs . getSingleOutputFilename ( ) , Stats ) | | <nl> + HadError ; <nl> } <nl> <nl> static bool emitIndexData ( SourceFile * PrimarySourceFile , <nl> static bool emitIndexData ( SourceFile * PrimarySourceFile , <nl> <nl> if ( PrimarySourceFile ) { <nl> if ( index : : indexAndRecord ( <nl> - PrimarySourceFile , opts . getSingleOutputFilename ( ) , <nl> - opts . IndexStorePath , opts . IndexSystemModules , <nl> - isDebugCompilation , Invocation . getTargetTriple ( ) , <nl> - * Instance . getDependencyTracker ( ) ) ) { <nl> + PrimarySourceFile , opts . InputsAndOutputs . getSingleOutputFilename ( ) , <nl> + opts . IndexStorePath , opts . IndexSystemModules , isDebugCompilation , <nl> + Invocation . getTargetTriple ( ) , * Instance . getDependencyTracker ( ) ) ) { <nl> return true ; <nl> } <nl> } else { <nl> StringRef moduleToken = opts . ModuleOutputPath ; <nl> if ( moduleToken . empty ( ) ) <nl> - moduleToken = opts . getSingleOutputFilename ( ) ; <nl> + moduleToken = opts . InputsAndOutputs . getSingleOutputFilename ( ) ; <nl> <nl> - if ( index : : indexAndRecord ( Instance . getMainModule ( ) , opts . OutputFilenames , <nl> + if ( index : : indexAndRecord ( Instance . getMainModule ( ) , opts . InputsAndOutputs . copyOutputFilenames ( ) , <nl> moduleToken , opts . IndexStorePath , <nl> opts . IndexSystemModules , <nl> isDebugCompilation , Invocation . 
getTargetTriple ( ) , <nl> silOptModeArgStr ( OptimizationMode mode ) { <nl> } <nl> } <nl> <nl> + static std : : unique_ptr < UnifiedStatsReporter > <nl> + computeStatsReporter ( const CompilerInvocation & Invocation , CompilerInstance * Instance ) { <nl> + const std : : string & StatsOutputDir = <nl> + Invocation . getFrontendOptions ( ) . StatsOutputDir ; <nl> + std : : unique_ptr < UnifiedStatsReporter > StatsReporter ; <nl> + if ( StatsOutputDir . empty ( ) ) <nl> + return std : : unique_ptr < UnifiedStatsReporter > ( ) ; <nl> + <nl> + auto & FEOpts = Invocation . getFrontendOptions ( ) ; <nl> + auto & LangOpts = Invocation . getLangOptions ( ) ; <nl> + auto & SILOpts = Invocation . getSILOptions ( ) ; <nl> + std : : string InputName = <nl> + FEOpts . InputsAndOutputs . getStatsFileMangledInputName ( ) ; <nl> + StringRef OptType = silOptModeArgStr ( SILOpts . OptMode ) ; <nl> + StringRef OutFile = <nl> + FEOpts . InputsAndOutputs . lastInputProducingOutput ( ) . outputFilename ( ) ; <nl> + StringRef OutputType = llvm : : sys : : path : : extension ( OutFile ) ; <nl> + std : : string TripleName = LangOpts . Target . normalize ( ) ; <nl> + auto Trace = Invocation . getFrontendOptions ( ) . TraceStats ; <nl> + SourceManager * SM = & Instance - > getSourceMgr ( ) ; <nl> + clang : : SourceManager * CSM = nullptr ; <nl> + if ( auto * clangImporter = static_cast < ClangImporter * > ( <nl> + Instance - > getASTContext ( ) . getClangModuleLoader ( ) ) ) { <nl> + CSM = & clangImporter - > getClangASTContext ( ) . getSourceManager ( ) ; <nl> + } <nl> + return llvm : : make_unique < UnifiedStatsReporter > ( <nl> + " swift - frontend " , FEOpts . ModuleName , InputName , TripleName , OutputType , <nl> + OptType , StatsOutputDir , SM , CSM , Trace ) ; <nl> + } <nl> + <nl> int swift : : performFrontend ( ArrayRef < const char * > Args , <nl> const char * Argv0 , void * MainAddr , <nl> FrontendObserver * observer ) { <nl> int swift : : performFrontend ( ArrayRef < const char * > Args , <nl> return finishDiagProcessing ( 1 ) ; <nl> } <nl> <nl> - const std : : string & StatsOutputDir = <nl> - Invocation . getFrontendOptions ( ) . StatsOutputDir ; <nl> - std : : unique_ptr < UnifiedStatsReporter > StatsReporter ; <nl> - if ( ! StatsOutputDir . empty ( ) ) { <nl> - auto & FEOpts = Invocation . getFrontendOptions ( ) ; <nl> - auto & LangOpts = Invocation . getLangOptions ( ) ; <nl> - auto & SILOpts = Invocation . getSILOptions ( ) ; <nl> - StringRef InputName = <nl> - FEOpts . InputsAndOutputs . getNameOfUniquePrimaryInputFile ( ) ; <nl> - StringRef OptType = silOptModeArgStr ( SILOpts . OptMode ) ; <nl> - StringRef OutFile = FEOpts . getSingleOutputFilename ( ) ; <nl> - StringRef OutputType = llvm : : sys : : path : : extension ( OutFile ) ; <nl> - std : : string TripleName = LangOpts . Target . normalize ( ) ; <nl> - SourceManager * SM = & Instance - > getSourceMgr ( ) ; <nl> - clang : : SourceManager * CSM = nullptr ; <nl> - if ( auto * clangImporter = static_cast < ClangImporter * > ( <nl> - Instance - > getASTContext ( ) . getClangModuleLoader ( ) ) ) { <nl> - CSM = & clangImporter - > getClangASTContext ( ) . getSourceManager ( ) ; <nl> - } <nl> - auto Trace = Invocation . getFrontendOptions ( ) . TraceStats ; <nl> - StatsReporter = llvm : : make_unique < UnifiedStatsReporter > ( " swift - frontend " , <nl> - FEOpts . 
ModuleName , <nl> - InputName , <nl> - TripleName , <nl> - OutputType , <nl> - OptType , <nl> - StatsOutputDir , <nl> - SM , CSM , <nl> - Trace ) ; <nl> - <nl> + std : : unique_ptr < UnifiedStatsReporter > StatsReporter = <nl> + computeStatsReporter ( Invocation , Instance . get ( ) ) ; <nl> + if ( StatsReporter ) { <nl> / / Install stats - reporter somewhere visible for subsystems that <nl> / / need to bump counters as they work , rather than measure <nl> / / accumulated work on completion ( mostly : TypeChecker ) . <nl> mmm a / lib / FrontendTool / ImportedModules . cpp <nl> ppp b / lib / FrontendTool / ImportedModules . cpp <nl> static void findAllClangImports ( const clang : : Module * module , <nl> bool swift : : emitImportedModules ( ASTContext & Context , ModuleDecl * mainModule , <nl> const FrontendOptions & opts ) { <nl> <nl> - auto path = opts . getSingleOutputFilename ( ) ; <nl> + auto path = opts . InputsAndOutputs . getSingleOutputFilename ( ) ; <nl> std : : error_code EC ; <nl> llvm : : raw_fd_ostream out ( path , EC , llvm : : sys : : fs : : F_None ) ; <nl> <nl> | Move main outputs to FrontendInputsAndOutputs and InputFile | apple/swift | 614006bc4f0f3ac91032662a3b07ad5fda7378d2 | 2018-02-01T19:55:08Z |
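The commit above centralizes output-filename queries on FrontendInputsAndOutputs and walks inputs through small callbacks (primaries if any, otherwise the first input). As a rough, self-contained C++ sketch of that callback pattern only, with hypothetical stand-in types and file names rather than the Swift frontend's actual classes:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-ins for the frontend's input bookkeeping.
struct InputFile {
    std::string path;
    std::string outputFilename;
    bool isPrimary = false;
};

class InputsAndOutputs {
    std::vector<InputFile> inputs;

public:
    void addInput(InputFile f) { inputs.push_back(std::move(f)); }

    bool hasPrimaryInputs() const {
        for (const auto &f : inputs)
            if (f.isPrimary) return true;
        return false;
    }

    // Visit primaries if there are any, otherwise just the first input,
    // mirroring the rule used for supplementary outputs in the commit above.
    void forEachInputProducingSupplementaryOutput(
        const std::function<void(const InputFile &)> &fn) const {
        if (inputs.empty()) return;
        if (hasPrimaryInputs()) {
            for (const auto &f : inputs)
                if (f.isPrimary) fn(f);
        } else {
            fn(inputs.front());
        }
    }

    // Collect every main-output filename, in the spirit of copyOutputFilenames().
    std::vector<std::string> copyOutputFilenames() const {
        std::vector<std::string> out;
        for (const auto &f : inputs) out.push_back(f.outputFilename);
        return out;
    }
};

int main() {
    InputsAndOutputs io;
    io.addInput({"a.src", "a.o", /*isPrimary=*/true});
    io.addInput({"b.src", "b.o", /*isPrimary=*/false});
    io.forEachInputProducingSupplementaryOutput([](const InputFile &f) {
        std::cout << f.path << " -> " << f.outputFilename << "\n";
    });
}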
mmm a / Tests / UnitTests / ReaderTests / Common / ReaderTestHelper . h <nl> ppp b / Tests / UnitTests / ReaderTests / Common / ReaderTestHelper . h <nl> namespace Microsoft { namespace MSR { namespace CNTK <nl> BOOST_TEST_MESSAGE ( " Setup fixture " ) ; <nl> m_initialWorkingPath = boost : : filesystem : : current_path ( ) . generic_string ( ) ; <nl> BOOST_TEST_MESSAGE ( " Current working directory : " + m_initialWorkingPath ) ; <nl> + fprintf ( stderr , " Current working directory : % s \ n " , m_initialWorkingPath . c_str ( ) ) ; <nl> <nl> boost : : filesystem : : path path ( boost : : unit_test : : framework : : master_test_suite ( ) . argv [ 0 ] ) ; <nl> - m_parentPath = path . parent_path ( ) . generic_string ( ) ; <nl> + m_parentPath = boost : : filesystem : : canonical ( path . parent_path ( ) ) . generic_string ( ) ; <nl> + fprintf ( stderr , " Executable path : % s \ n " , m_parentPath . c_str ( ) ) ; <nl> + <nl> m_testDataPath = m_parentPath + " / . . / . . / . . / Tests / UnitTests / ReaderTests " ; <nl> - <nl> + boost : : filesystem : : path absTestPath ( m_testDataPath ) ; <nl> + absTestPath = boost : : filesystem : : canonical ( absTestPath ) ; <nl> + m_testDataPath = absTestPath . generic_string ( ) ; <nl> + <nl> BOOST_TEST_MESSAGE ( " Setting test data path to : " + m_testDataPath ) ; <nl> + fprintf ( stderr , " Test path : % s \ n " , m_testDataPath . c_str ( ) ) ; <nl> <nl> string newCurrentPath ; <nl> <nl> namespace Microsoft { namespace MSR { namespace CNTK <nl> string environmentVariable = subPath . substr ( 1 , end - 1 ) ; <nl> <nl> BOOST_TEST_MESSAGE ( " Retrieving environment variable : " + environmentVariable ) ; <nl> + fprintf ( stderr , " Retrieving environment variable : % s \ n " , environmentVariable . c_str ( ) ) ; <nl> <nl> const char * p = std : : getenv ( environmentVariable . c_str ( ) ) ; <nl> if ( p ) <nl> namespace Microsoft { namespace MSR { namespace CNTK <nl> } <nl> <nl> BOOST_TEST_MESSAGE ( " Setting current path to : " + newCurrentPath ) ; <nl> - <nl> + fprintf ( stderr , " Set current path to : % s \ n " , newCurrentPath . c_str ( ) ) ; <nl> boost : : filesystem : : current_path ( newCurrentPath ) ; <nl> <nl> BOOST_TEST_MESSAGE ( " Current working directory is now : " + boost : : filesystem : : current_path ( ) . generic_string ( ) ) ; <nl> + fprintf ( stderr , " Current working directory is now : % s \ n " , boost : : filesystem : : current_path ( ) . generic_string ( ) . c_str ( ) ) ; <nl> } <nl> <nl> ~ ReaderFixture ( ) <nl> { <nl> BOOST_TEST_MESSAGE ( " Teardown fixture " ) ; <nl> + BOOST_TEST_MESSAGE ( " Reverting current path to : " + m_initialWorkingPath ) ; <nl> + fprintf ( stderr , " Set current path to : % s \ n " , m_initialWorkingPath . c_str ( ) ) ; <nl> + boost : : filesystem : : current_path ( m_initialWorkingPath ) ; <nl> } <nl> <nl> / / Limits the number of minibatches to read , to reduce time and data file size <nl> | Fixed issue with working paths between tests . | microsoft/CNTK | 241bf17dfa76e1f7733e31b048d8f3cbcdc51606 | 2015-12-11T17:07:04Z |
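The fixture change above canonicalizes the test data path and, on teardown, restores the original working directory. A minimal sketch of the same save/switch/restore idea, written with std::filesystem instead of the Boost.Filesystem calls used in the commit; the class name is invented for illustration:

#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

// RAII guard: canonicalize and switch the working directory, restore it on destruction.
class ScopedWorkingDirectory {
    fs::path original_;

public:
    explicit ScopedWorkingDirectory(const fs::path &target)
        : original_(fs::current_path()) {
        fs::current_path(fs::canonical(target));  // throws if target does not exist
    }
    ~ScopedWorkingDirectory() {
        std::error_code ec;
        fs::current_path(original_, ec);          // best-effort restore, never throws
    }
};

int main() {
    std::cout << "before: " << fs::current_path() << "\n";
    {
        ScopedWorkingDirectory guard("..");
        std::cout << "inside: " << fs::current_path() << "\n";
    }
    std::cout << "after:  " << fs::current_path() << "\n";
}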
mmm a / third_party / mlir / BUILD <nl> ppp b / third_party / mlir / BUILD <nl> cc_binary ( <nl> " : tools / libcuda - runtime - wrappers . so " , <nl> ] , <nl> includes = [ " include " ] , <nl> - # cl / 262981524 introduced a regression to test / mlir - cuda - runner : gpu - to - cubin . mlir . test . <nl> - # TODO ( timshen ) : Remove the following workaround . <nl> - linkopts = [ " - Wl , - rpath , / var / google / persistent / kibbles / libcuda_running " ] , <nl> deps = [ <nl> " : GPUDialect " , <nl> " : GPUDialectRegistration " , <nl> | Internal build change | tensorflow/tensorflow | 7f3ade6f3661d4f68b2637199585af3e89169913 | 2019-08-13T21:40:42Z |
mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSP . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSP . cpp <nl> extern " C " { <nl> # include " settings / MediaSourceSettings . h " <nl> # include " settings / Settings . h " <nl> # include " utils / JobManager . h " <nl> - # include " utils / Log . h " <nl> + # include " utils / log . h " <nl> # include " utils / StringUtils . h " <nl> <nl> <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSPProcess . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSPProcess . cpp <nl> extern " C " { <nl> # include " cores / IPlayer . h " <nl> # include " settings / MediaSettings . h " <nl> # include " utils / TimeUtils . h " <nl> - # include " utils / Log . h " <nl> + # include " utils / log . h " <nl> <nl> using namespace ADDON ; <nl> using namespace ActiveAE ; <nl> | ActiveAEDSP : Fix incorrect case on log header includes | xbmc/xbmc | fd2012e8ffa41d1915a037ceaa36c96012b3623b | 2017-03-31T16:28:59Z |
mmm a / lib / IRGen / IRGen . cpp <nl> ppp b / lib / IRGen / IRGen . cpp <nl> swift : : createTargetMachine ( IRGenOptions & Opts , ASTContext & Ctx ) { <nl> <nl> <nl> / / Create a target machine . <nl> - auto cmodel = CodeModel : : Small ; <nl> + auto cmodel = CodeModel : : Default ; <nl> <nl> / / On Windows 64 bit , dlls are loaded above the max address for 32 bits . <nl> / / This means that a default CodeModel causes generated code to segfault <nl> | Revert " [ IRGen ] The " default " LLVM code model is now Small . " | apple/swift | 1d5400bf1a0d5203d45015759cace1bd83d0c13a | 2017-08-03T21:22:23Z |
mmm a / tensorflow / core / kernels / sparse_tensor_dense_matmul_op . cc <nl> ppp b / tensorflow / core / kernels / sparse_tensor_dense_matmul_op . cc <nl> REGISTER_KERNELS_CPU ( int32 ) ; <nl> REGISTER_KERNELS_CPU ( complex64 ) ; <nl> REGISTER_KERNELS_CPU ( complex128 ) ; <nl> <nl> - # if GOOGLE_CUDA <nl> + # if GOOGLE_CUDA | | TENSORFLOW_USE_ROCM <nl> <nl> namespace functor { <nl> # define DECLARE_GPU_SPEC ( T , Tindices , ADJ_A , ADJ_B ) \ <nl> DECLARE_ADJOINT_GPU_SPEC ( float ) ; <nl> REGISTER_KERNELS_GPU ( float ) ; <nl> # undef REGISTER_GPU <nl> # undef REGISTER_KERNELS_GPU <nl> - # endif / / GOOGLE_CUDA <nl> + # endif / / GOOGLE_CUDA | | TENSORFLOW_USE_ROCM <nl> <nl> namespace functor { <nl> <nl> mmm a / tensorflow / core / kernels / sparse_tensor_dense_matmul_op_gpu . cu . cc <nl> ppp b / tensorflow / core / kernels / sparse_tensor_dense_matmul_op_gpu . cu . cc <nl> See the License for the specific language governing permissions and <nl> limitations under the License . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> <nl> - # if GOOGLE_CUDA <nl> + # if GOOGLE_CUDA | | TENSORFLOW_USE_ROCM <nl> <nl> # define EIGEN_USE_GPU <nl> <nl> __global__ void SparseTensorDenseMatMulKernel ( int nnz , int m , int b_rows , <nl> / / out_ { ij } = sum_k { a_ik b_kj } <nl> / / out = A * B ' , out_ { ij } = sum_k { a_ik ( b ' ) _kj } ; b ' _ { kj } = b_ { jk } <nl> const int n = ( ADJ_B ) ? b_cols : b_rows ; <nl> - CUDA_1D_KERNEL_LOOP ( index , nnz * p ) { <nl> + GPU_1D_KERNEL_LOOP ( index , nnz * p ) { <nl> const int a_ix = index / p ; <nl> const int j = index % p ; <nl> const int i = ldg ( a_indices + 2 * a_ix + ( ( ADJ_A ) ? 1 : 0 ) ) ; <nl> __global__ void SparseTensorDenseMatMulKernel ( int nnz , int m , int b_rows , <nl> / / out [ i , j ] <nl> T * out_location = out + i * p + j ; <nl> if ( ! FastBoundsCheck ( k , n ) ) { <nl> - CudaAtomicAdd ( out_location , std : : numeric_limits < T > : : quiet_NaN ( ) ) ; <nl> + GpuAtomicAdd ( out_location , std : : numeric_limits < T > : : quiet_NaN ( ) ) ; <nl> continue ; <nl> } <nl> <nl> __global__ void SparseTensorDenseMatMulKernel ( int nnz , int m , int b_rows , <nl> <nl> / / b_value = = ( ADJ_B ) ? b [ j , k ] : b [ k , j ] <nl> const T b_value = ldg ( b + ( ( ADJ_B ) ? j * b_cols + k : k * b_cols + j ) ) ; <nl> - CudaAtomicAdd ( out_location , a_value * b_value ) ; <nl> + GpuAtomicAdd ( out_location , a_value * b_value ) ; <nl> } <nl> } <nl> <nl> struct SparseTensorDenseMatMulFunctor < GPUDevice , T , Tindices , ADJ_A , ADJ_B > { <nl> <nl> / / TODO ( ebrevdo ) : Should this be alpha * nnz instead of <nl> / / out . size ( ) ? Perhaps p * nnz ? <nl> - GpuLaunchConfig config = GetCudaLaunchConfig ( p * nnz , d ) ; <nl> + GpuLaunchConfig config = GetGpuLaunchConfig ( p * nnz , d ) ; <nl> <nl> - TF_CHECK_OK ( CudaLaunchKernel ( <nl> + TF_CHECK_OK ( GpuLaunchKernel ( <nl> SparseTensorDenseMatMulKernel < T , Tindices , ADJ_A , ADJ_B > , <nl> config . block_count , config . thread_per_block , 0 , d . stream ( ) , nnz , m , <nl> b_rows , b_cols , p , a_indices . data ( ) , a_values . data ( ) , b . data ( ) , <nl> DEFINE ( float , int64 ) ; <nl> <nl> } / / end namespace tensorflow <nl> <nl> - # endif / / GOOGLE_CUDA <nl> + # endif / / GOOGLE_CUDA | | TENSORFLOW_USE_ROCM <nl> | Adding ROCm support for " sparse_tensor_dense_matmul " op | tensorflow/tensorflow | 6cca3412ddb28f3e503e224948cd9b2f386c65e8 | 2019-05-23T18:17:47Z |
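The port above works by widening the CUDA-only preprocessor guard to also cover ROCm and by switching to vendor-neutral Gpu* helpers. A minimal host-side C++ sketch of just the guard idea, assuming the same GOOGLE_CUDA / TENSORFLOW_USE_ROCM macro names; everything else in the sketch is illustrative and not TensorFlow code:

#include <iostream>

// Build flags; in TensorFlow these come from the build system, e.g.
// compile with -DGOOGLE_CUDA=1 or -DTENSORFLOW_USE_ROCM=1.
#if GOOGLE_CUDA || TENSORFLOW_USE_ROCM
#define GPU_BACKEND_ENABLED 1
#endif

#if GOOGLE_CUDA
constexpr const char* kBackend = "CUDA";
#elif TENSORFLOW_USE_ROCM
constexpr const char* kBackend = "ROCm";
#else
constexpr const char* kBackend = "CPU only";
#endif

int main() {
#ifdef GPU_BACKEND_ENABLED
    std::cout << "GPU kernels compiled for: " << kBackend << "\n";
#else
    std::cout << "No GPU backend: " << kBackend << "\n";
#endif
}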
mmm a / stdlib / public / Platform / winsdk . modulemap <nl> ppp b / stdlib / public / Platform / winsdk . modulemap <nl> module WinSDK [ system ] { <nl> <nl> module console { <nl> header " consoleapi . h " <nl> + header " consoleapi2 . h " <nl> + header " consoleapi3 . h " <nl> export * <nl> } <nl> <nl> | Merge remote - tracking branch ' origin / master ' into master - next | apple/swift | ed1ab076e68bf2b7f2005bf119580916e14ab277 | 2020-09-20T19:45:37Z |
mmm a / src / core / hle / kernel / mutex . cpp <nl> ppp b / src / core / hle / kernel / mutex . cpp <nl> <nl> <nl> namespace Kernel { <nl> <nl> - / * * <nl> - * Boost ' s a thread ' s priority to the best priority among the thread ' s held mutexes . <nl> - * This prevents priority inversion via priority inheritance . <nl> - * / <nl> - static void UpdateThreadPriority ( Thread * thread ) { <nl> - s32 best_priority = THREADPRIO_LOWEST ; <nl> - for ( auto & mutex : thread - > held_mutexes ) { <nl> - if ( mutex - > priority < best_priority ) <nl> - best_priority = mutex - > priority ; <nl> - } <nl> - <nl> - best_priority = std : : min ( best_priority , thread - > nominal_priority ) ; <nl> - thread - > SetPriority ( best_priority ) ; <nl> - } <nl> - <nl> - / * * <nl> - * Elevate the mutex priority to the best priority <nl> - * among the priorities of all its waiting threads . <nl> - * / <nl> - static void UpdateMutexPriority ( Mutex * mutex ) { <nl> - s32 best_priority = THREADPRIO_LOWEST ; <nl> - for ( auto & waiter : mutex - > GetWaitingThreads ( ) ) { <nl> - if ( waiter - > current_priority < best_priority ) <nl> - best_priority = waiter - > current_priority ; <nl> - } <nl> - <nl> - if ( best_priority ! = mutex - > priority ) { <nl> - mutex - > priority = best_priority ; <nl> - UpdateThreadPriority ( mutex - > holding_thread . get ( ) ) ; <nl> - } <nl> - } <nl> - <nl> void ReleaseThreadMutexes ( Thread * thread ) { <nl> for ( auto & mtx : thread - > held_mutexes ) { <nl> mtx - > lock_count = 0 ; <nl> void Mutex : : Acquire ( Thread * thread ) { <nl> priority = thread - > current_priority ; <nl> thread - > held_mutexes . insert ( this ) ; <nl> holding_thread = thread ; <nl> - <nl> - UpdateThreadPriority ( thread ) ; <nl> - <nl> + thread - > UpdatePriority ( ) ; <nl> Core : : System : : GetInstance ( ) . PrepareReschedule ( ) ; <nl> } <nl> <nl> void Mutex : : Release ( ) { <nl> / / Yield to the next thread only if we ' ve fully released the mutex <nl> if ( lock_count = = 0 ) { <nl> holding_thread - > held_mutexes . erase ( this ) ; <nl> - UpdateThreadPriority ( holding_thread . get ( ) ) ; <nl> + holding_thread - > UpdatePriority ( ) ; <nl> holding_thread = nullptr ; <nl> WakeupAllWaitingThreads ( ) ; <nl> Core : : System : : GetInstance ( ) . PrepareReschedule ( ) ; <nl> void Mutex : : Release ( ) { <nl> <nl> void Mutex : : AddWaitingThread ( SharedPtr < Thread > thread ) { <nl> WaitObject : : AddWaitingThread ( thread ) ; <nl> - UpdateMutexPriority ( this ) ; <nl> + thread - > pending_mutexes . insert ( this ) ; <nl> + UpdatePriority ( ) ; <nl> } <nl> <nl> void Mutex : : RemoveWaitingThread ( Thread * thread ) { <nl> WaitObject : : RemoveWaitingThread ( thread ) ; <nl> - UpdateMutexPriority ( this ) ; <nl> + thread - > pending_mutexes . erase ( this ) ; <nl> + UpdatePriority ( ) ; <nl> + } <nl> + <nl> + void Mutex : : UpdatePriority ( ) { <nl> + if ( ! holding_thread ) <nl> + return ; <nl> + <nl> + s32 best_priority = THREADPRIO_LOWEST ; <nl> + for ( auto & waiter : GetWaitingThreads ( ) ) { <nl> + if ( waiter - > current_priority < best_priority ) <nl> + best_priority = waiter - > current_priority ; <nl> + } <nl> + <nl> + if ( best_priority ! = priority ) { <nl> + priority = best_priority ; <nl> + holding_thread - > UpdatePriority ( ) ; <nl> + } <nl> } <nl> <nl> } / / namespace <nl> mmm a / src / core / hle / kernel / mutex . h <nl> ppp b / src / core / hle / kernel / mutex . 
h <nl> class Mutex final : public WaitObject { <nl> std : : string name ; / / / < Name of mutex ( optional ) <nl> SharedPtr < Thread > holding_thread ; / / / < Thread that has acquired the mutex <nl> <nl> + / * * <nl> + * Elevate the mutex priority to the best priority <nl> + * among the priorities of all its waiting threads . <nl> + * / <nl> + void UpdatePriority ( ) ; <nl> + <nl> bool ShouldWait ( Thread * thread ) const override ; <nl> void Acquire ( Thread * thread ) override ; <nl> <nl> mmm a / src / core / hle / kernel / thread . cpp <nl> ppp b / src / core / hle / kernel / thread . cpp <nl> void Thread : : Stop ( ) { <nl> <nl> WakeupAllWaitingThreads ( ) ; <nl> <nl> - / / Release all the mutexes that this thread holds <nl> - ReleaseThreadMutexes ( this ) ; <nl> - <nl> / / Clean up any dangling references in objects that this thread was waiting for <nl> for ( auto & wait_object : wait_objects ) { <nl> wait_object - > RemoveWaitingThread ( this ) ; <nl> } <nl> wait_objects . clear ( ) ; <nl> <nl> + / / Release all the mutexes that this thread holds <nl> + ReleaseThreadMutexes ( this ) ; <nl> + <nl> / / Mark the TLS slot in the thread ' s page as free . <nl> u32 tls_page = ( tls_address - Memory : : TLS_AREA_VADDR ) / Memory : : PAGE_SIZE ; <nl> u32 tls_slot = <nl> void Thread : : SetPriority ( s32 priority ) { <nl> nominal_priority = current_priority = priority ; <nl> } <nl> <nl> + void Thread : : UpdatePriority ( ) { <nl> + s32 best_priority = nominal_priority ; <nl> + for ( auto & mutex : held_mutexes ) { <nl> + if ( mutex - > priority < best_priority ) <nl> + best_priority = mutex - > priority ; <nl> + } <nl> + BoostPriority ( best_priority ) ; <nl> + } <nl> + <nl> void Thread : : BoostPriority ( s32 priority ) { <nl> - ready_queue . move ( this , current_priority , priority ) ; <nl> + / / If thread was ready , adjust queues <nl> + if ( status = = THREADSTATUS_READY ) <nl> + ready_queue . move ( this , current_priority , priority ) ; <nl> + else <nl> + ready_queue . prepare ( priority ) ; <nl> current_priority = priority ; <nl> } <nl> <nl> mmm a / src / core / hle / kernel / thread . h <nl> ppp b / src / core / hle / kernel / thread . h <nl> class Thread final : public WaitObject { <nl> * / <nl> void SetPriority ( s32 priority ) ; <nl> <nl> + / * * <nl> + * Boost ' s a thread ' s priority to the best priority among the thread ' s held mutexes . <nl> + * This prevents priority inversion via priority inheritance . <nl> + * / <nl> + void UpdatePriority ( ) ; <nl> + <nl> / * * <nl> * Temporarily boosts the thread ' s priority until the next time it is scheduled <nl> * @ param priority The new priority <nl> class Thread final : public WaitObject { <nl> / / / Mutexes currently held by this thread , which will be released when it exits . <nl> boost : : container : : flat_set < SharedPtr < Mutex > > held_mutexes ; <nl> <nl> + / / / Mutexes that this thread is currently waiting for . <nl> + boost : : container : : flat_set < SharedPtr < Mutex > > pending_mutexes ; <nl> + <nl> SharedPtr < Process > owner_process ; / / / < Process that owns this thread <nl> <nl> / / / Objects that the thread is waiting on . <nl> mmm a / src / core / hle / svc . cpp <nl> ppp b / src / core / hle / svc . 
cpp <nl> static ResultCode SetThreadPriority ( Kernel : : Handle handle , s32 priority ) { <nl> return ERR_INVALID_HANDLE ; <nl> <nl> thread - > SetPriority ( priority ) ; <nl> + thread - > UpdatePriority ( ) ; <nl> + <nl> + / / Update the mutexes that this thread is waiting for <nl> + for ( auto & mutex : thread - > pending_mutexes ) <nl> + mutex - > UpdatePriority ( ) ; <nl> + <nl> Core : : System : : GetInstance ( ) . PrepareReschedule ( ) ; <nl> return RESULT_SUCCESS ; <nl> } <nl> | Kernel / Mutex : Propagate thread priority changes to other threads inheriting the priority via mutexes | yuzu-emu/yuzu | d3ff5b91e14356912589f9bac47fccbe79e07279 | 2017-01-04T20:58:48Z |
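The commit above implements priority inheritance: a mutex tracks the best priority among its waiters and pushes that value onto the holding thread. A simplified, self-contained C++ sketch of that propagation, where smaller numbers mean better priority as in the kernel code; the class names and the priority constant are stand-ins, not the emulator's actual types, and scheduler queues are omitted:

#include <algorithm>
#include <iostream>
#include <set>
#include <vector>

constexpr int kLowestPriority = 63;  // worst possible priority in this toy model

struct Thread;

struct Mutex {
    int priority = kLowestPriority;  // best priority among current waiters
    Thread* holder = nullptr;
    std::vector<Thread*> waiters;

    void UpdatePriority();           // defined after Thread
};

struct Thread {
    int nominal_priority;
    int current_priority;
    std::set<Mutex*> held_mutexes;

    explicit Thread(int prio) : nominal_priority(prio), current_priority(prio) {}

    // Boost to the best priority among held mutexes, never worse than nominal.
    void UpdatePriority() {
        int best = nominal_priority;
        for (Mutex* m : held_mutexes)
            best = std::min(best, m->priority);
        current_priority = best;
    }
};

void Mutex::UpdatePriority() {
    if (!holder) return;
    int best = kLowestPriority;
    for (Thread* w : waiters)
        best = std::min(best, w->current_priority);
    if (best != priority) {
        priority = best;
        holder->UpdatePriority();    // propagate the boost to the holding thread
    }
}

int main() {
    Thread low(40), high(10);
    Mutex m;
    m.holder = &low;
    low.held_mutexes.insert(&m);

    m.waiters.push_back(&high);      // a high-priority thread starts waiting
    m.UpdatePriority();

    std::cout << "holder current priority: " << low.current_priority << "\n";  // 10
}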
new file mode 100644 <nl> index 000000000000 . . ee5c89bad58e <nl> mmm / dev / null <nl> ppp b / caffe2 / operators / log_op . cc <nl> <nl> + # include < cmath > <nl> + <nl> + # include " caffe2 / operators / elementwise_op . h " <nl> + <nl> + namespace caffe2 { <nl> + <nl> + struct LogCPUFunctor { <nl> + template < typename T > <nl> + inline void <nl> + operator ( ) ( const int n , const T * x , T * y , CPUContext * device_context ) { <nl> + std : : transform ( x , x + n , y , log ) ; <nl> + } <nl> + } ; <nl> + <nl> + namespace { <nl> + REGISTER_CPU_OPERATOR ( <nl> + Log , <nl> + UnaryElementwiseOp < TensorTypes < float > , CPUContext , LogCPUFunctor > ) ; <nl> + <nl> + OPERATOR_SCHEMA ( Log ) <nl> + . NumInputs ( 1 ) <nl> + . NumOutputs ( 1 ) <nl> + . AllowInplace ( { { 0 , 0 } } ) <nl> + . IdenticalTypeAndShape ( ) <nl> + . SetDoc ( R " DOC ( <nl> + Calculates the natural log of the given input tensor , element - wise . This <nl> + operation can be done in an in - place fashion too , by providing the same input <nl> + and output blobs . <nl> + ) DOC " ) <nl> + . Input ( 0 , " input " , " Input tensor " ) <nl> + . Output ( <nl> + 0 , <nl> + " output " , <nl> + " The natural log of the input tensor computed " <nl> + " element - wise " ) ; <nl> + <nl> + class GetLogGradient : public GradientMakerBase { <nl> + using GradientMakerBase : : GradientMakerBase ; <nl> + vector < OperatorDef > GetGradientDefs ( ) override { <nl> + return SingleGradientDef ( <nl> + " Div " , <nl> + " " , <nl> + std : : vector < string > { GO ( 0 ) , I ( 0 ) } , <nl> + std : : vector < string > { GI ( 0 ) } ) ; <nl> + } <nl> + } ; <nl> + REGISTER_GRADIENT ( Log , GetLogGradient ) ; <nl> + } / / namespace <nl> + } / / namespace caffe2 <nl> mmm a / caffe2 / python / hypothesis_test . py <nl> ppp b / caffe2 / python / hypothesis_test . py <nl> def exp_ref ( input_tensor ) : <nl> inputs = [ input_tensor ] , <nl> reference = exp_ref ) <nl> <nl> + @ given ( input_tensor = hu . arrays ( <nl> + dims = [ 10 ] , elements = st . floats ( min_value = 1 , <nl> + max_value = 10000 ) ) , <nl> + * * hu . gcs_cpu_only ) <nl> + def test_log ( self , input_tensor , gc , dc ) : <nl> + op = core . CreateOperator ( <nl> + " Log " , <nl> + [ " input " ] , <nl> + [ " output " ] <nl> + ) <nl> + <nl> + def log_ref ( input_tensor ) : <nl> + return ( np . log ( input_tensor ) , ) <nl> + <nl> + self . assertReferenceChecks ( <nl> + device_option = gc , <nl> + op = op , <nl> + inputs = [ input_tensor ] , <nl> + reference = log_ref ) <nl> + self . assertGradientChecks ( gc , op , [ input_tensor ] , 0 , [ 0 ] ) <nl> + <nl> @ given ( num_threads = st . integers ( 1 , 10 ) , # noqa <nl> num_elements = st . integers ( 1 , 100 ) , <nl> capacity = st . integers ( 1 , 5 ) , <nl> | LogOP implementation | pytorch/pytorch | a386fe8b6a93080d26f636f85341b318cdb23cfc | 2017-02-07T04:19:19Z |
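The new operator's gradient follows from d(log x)/dx = 1/x, which is why the gradient maker above emits a Div of the output gradient by the input. A small standalone C++ sketch of that forward/backward pair, using plain vectors rather than Caffe2 types:

#include <cmath>
#include <iostream>
#include <vector>

// Elementwise natural log, y = log(x).
std::vector<double> LogForward(const std::vector<double>& x) {
    std::vector<double> y(x.size());
    for (size_t i = 0; i < x.size(); ++i) y[i] = std::log(x[i]);
    return y;
}

// Backward pass: since dy/dx = 1/x, the input gradient is the upstream
// gradient divided elementwise by x, matching the Div(GO(0), I(0)) gradient.
std::vector<double> LogBackward(const std::vector<double>& x,
                                const std::vector<double>& grad_y) {
    std::vector<double> grad_x(x.size());
    for (size_t i = 0; i < x.size(); ++i) grad_x[i] = grad_y[i] / x[i];
    return grad_x;
}

int main() {
    std::vector<double> x = {1.0, 2.0, 10.0};
    std::vector<double> gy = {1.0, 1.0, 1.0};  // upstream gradient of ones
    auto y = LogForward(x);
    auto gx = LogBackward(x, gy);
    for (size_t i = 0; i < x.size(); ++i)
        std::cout << "log(" << x[i] << ") = " << y[i] << ", d/dx = " << gx[i] << "\n";
}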
mmm a / examples / faces / testing . xml <nl> ppp b / examples / faces / testing . xml <nl> <nl> < ? xml version = ' 1 . 0 ' encoding = ' ISO - 8859 - 1 ' ? > <nl> < ? xml - stylesheet type = ' text / xsl ' href = ' image_metadata_stylesheet . xsl ' ? > <nl> < dataset > <nl> - < name > imglab dataset < / name > <nl> - < comment > Created by imglab tool . < / comment > <nl> + < name > Testing faces < / name > <nl> + < comment > These are images from the PASCAL VOC 2011 dataset . < / comment > <nl> < images > <nl> < image file = ' 2008_002470 . jpg ' > <nl> < box top = ' 181 ' left = ' 274 ' width = ' 52 ' height = ' 53 ' / > <nl> mmm a / examples / faces / training . xml <nl> ppp b / examples / faces / training . xml <nl> <nl> < ? xml version = ' 1 . 0 ' encoding = ' ISO - 8859 - 1 ' ? > <nl> < ? xml - stylesheet type = ' text / xsl ' href = ' image_metadata_stylesheet . xsl ' ? > <nl> < dataset > <nl> - < name > imglab dataset < / name > <nl> - < comment > Created by imglab tool . < / comment > <nl> + < name > Training faces < / name > <nl> + < comment > These are images from the PASCAL VOC 2011 dataset . < / comment > <nl> < images > <nl> < image file = ' 2007_007763 . jpg ' > <nl> < box top = ' 90 ' left = ' 194 ' width = ' 37 ' height = ' 37 ' / > <nl> | Added comments about the source of the face images | davisking/dlib | 101e316fd148d099e6330dde21ffc05782812d04 | 2014-01-02T23:49:05Z |
mmm a / documentation / sphinx / source / administration . rst <nl> ppp b / documentation / sphinx / source / administration . rst <nl> If a process has had more than 10 TCP segments retransmitted in the last 5 secon <nl> 10 . 0 . 4 . 1 : 4500 ( 3 % cpu ; 2 % machine ; 0 . 004 Gbps ; 0 % disk ; REXMIT ! 2 . 5 GB / 4 . 1 GB RAM ) <nl> <nl> Machine - readable status <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + mmmmmmmmmmmmmmmmmmmmm - - <nl> <nl> The status command can provide a complete summary of statistics about the cluster and the database with the ` ` json ` ` argument . Full documentation for ` ` status json ` ` output can be found : doc : ` here < mr - status > ` . <nl> From the output of ` ` status json ` ` , operators can find useful health metrics to determine whether or not their cluster is hitting performance limits . <nl> Durable version lag ` ` cluster . qos . worst_durability_lag_storage_server ` ` cont <nl> Transaction log queue ` ` cluster . qos . worst_queue_bytes_log_server ` ` contains the maximum size in bytes of the mutations stored on a transaction log that have not yet been popped by storage servers . A large transaction log queue size can potentially cause the ratekeeper to increase throttling . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> + Server - side latency band tracking <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> + <nl> + As part of the status document , ` ` status json ` ` provides some sampled latency metrics obtained by running probe transactions internally . While this can often be useful , it does not necessarily reflect the distribution of latencies for requests originated by clients . <nl> + <nl> + FoundationDB additionally provides optional functionality to measure the latencies of all incoming get read version ( GRV ) , read , and commit requests and report some basic details about those requests . The latencies are measured from the time the server receives the request to the point when it replies , and will therefore not include time spent in transit between the client and server or delays in the client process itself . <nl> + <nl> + The latency band tracking works by configuring various latency thresholds and counting the number of requests that occur in each band ( i . e . between two consecutive thresholds ) . For example , if you wanted to define a service - level objective ( SLO ) for your cluster where 99 . 9 % of read requests were answered within N seconds , you could set a read latency threshold at N . You could then count the number of requests below and above the threshold and determine whether the required percentage of requests are answered sufficiently quickly . <nl> + <nl> + Configuration of server - side latency bands is performed by setting the ` ` \ xff \ x02 / latencyBandConfig ` ` key to a string encoding the following JSON document : : <nl> + <nl> + { <nl> + " get_read_version " : { <nl> + " bands " : [ 0 . 01 , 0 . 1 ] <nl> + } , <nl> + " read " : { <nl> + " bands " : [ 0 . 01 , 0 . 1 ] , <nl> + " max_key_selector_offset " : 1000 , <nl> + " max_read_bytes " : 1000000 <nl> + } , <nl> + " commit " : { <nl> + " bands " : [ 0 . 01 , 0 . 
1 ] , <nl> + " max_commit_bytes " : 1000000 <nl> + } <nl> + } <nl> + <nl> + Every field in this configuration is optional , and any missing fields will be left unset ( i . e . no bands will be tracked or limits will not apply ) . The configuration takes the following arguments : <nl> + <nl> + * ` ` bands ` ` - a list of thresholds ( in seconds ) to be measured for the given request type ( ` ` get_read_version ` ` , ` ` read ` ` , or ` ` commit ` ` ) <nl> + * ` ` max_key_selector_offset ` ` - an integer specifying the maximum key selector offset a read request can have and still be counted <nl> + * ` ` max_read_bytes ` ` - an integer specifying the maximum size in bytes of a read response that will be counted <nl> + * ` ` max_commit_bytes ` ` - an integer specifying the maximum size in bytes of a commit request that will be counted <nl> + <nl> + Setting this configuration key to a value that changes the configuration will result in the cluster controller server process logging a ` ` LatencyBandConfigChanged ` ` event . This event will indicate whether a configuration is present or not using its ` ` Present ` ` field . Specifying an invalid configuration will result in the latency band feature being unconfigured , and the server process running the cluster controller will log a ` ` InvalidLatencyBandConfiguration ` ` trace event . <nl> + <nl> + . . note : : GRV requests are counted only at default and immediate priority . Batch priority GRV requests are ignored for the purposes of latency band tracking . <nl> + <nl> + When configured , the ` ` status json ` ` output will include additional fields to report the number of requests in each latency band located at ` ` cluster . processes . < ID > . roles [ N ] . * _latency_bands ` ` : : <nl> + <nl> + " grv_latency_bands " : { <nl> + 0 . 01 : 10 , <nl> + 0 . 1 : 0 , <nl> + inf : 1 , <nl> + filtered : 0 <nl> + } , <nl> + " read_latency_bands " : { <nl> + 0 . 01 : 12 , <nl> + 0 . 1 : 1 , <nl> + inf : 0 , <nl> + filtered : 0 <nl> + } , <nl> + " commit_latency_bands " : { <nl> + 0 . 01 : 5 , <nl> + 0 . 1 : 5 , <nl> + inf : 2 , <nl> + filtered : 1 <nl> + } <nl> + <nl> + The ` ` grv_latency_bands ` ` and ` ` commit_latency_bands ` ` objects will only be logged for ` ` proxy ` ` roles , and ` ` read_latency_bands ` ` will only be logged for storage roles . Each threshold is represented as a key in the map , and its associated value will be the total number of requests in the lifetime of the process with a latency smaller than the threshold but larger than the next smaller threshold . <nl> + <nl> + For example , ` ` 0 . 1 : 1 ` ` in ` ` read_latency_bands ` ` indicates that there has been 1 read request with a latency in the range ` ` [ 0 . 01 , 0 . 1 ) ` ` . For the smallest specified threshold , the lower bound is 0 ( e . g . ` ` [ 0 , 0 . 01 ) ` ` in the example above ) . Requests that took longer than any defined latency band will be reported in the ` ` inf ` ` ( infinity ) band . Requests that were filtered by the configuration ( e . g . using ` ` max_read_bytes ` ` ) are reported in the ` ` filtered ` ` category . <nl> + <nl> + Because each threshold reports latencies strictly in the range between the next lower threshold and itself , it may be necessary to sum up the counts for multiple bands to determine the total number of requests below a certain threshold . <nl> + <nl> + . . note : : No history of request counts is recorded for processes that ran in the past . 
This includes the history prior to restart for a process that has been restarted , for which the counts get reset to 0 . For this reason , it is recommended that you collect this information periodically if you need to be able to track requests from such processes . <nl> + <nl> . . _administration_fdbmonitor : <nl> <nl> ` ` fdbmonitor ` ` and ` ` fdbserver ` ` <nl> mmm a / documentation / sphinx / source / downloads . rst <nl> ppp b / documentation / sphinx / source / downloads . rst <nl> macOS <nl> <nl> The macOS installation package is supported on macOS 10 . 7 + . It includes the client and ( optionally ) the server . <nl> <nl> - * ` FoundationDB - 6 . 3 . 0 . pkg < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / macOS / installers / FoundationDB - 6 . 3 . 0 . pkg > ` _ <nl> + * ` FoundationDB - 6 . 3 . 1 . pkg < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / macOS / installers / FoundationDB - 6 . 3 . 1 . pkg > ` _ <nl> <nl> Ubuntu <nl> mmmmmm <nl> <nl> The Ubuntu packages are supported on 64 - bit Ubuntu 12 . 04 + , but beware of the Linux kernel bug in Ubuntu 12 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 3 . 0 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / ubuntu / installers / foundationdb - clients_6 . 3 . 0 - 1_amd64 . deb > ` _ <nl> - * ` foundationdb - server - 6 . 3 . 0 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / ubuntu / installers / foundationdb - server_6 . 3 . 0 - 1_amd64 . deb > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 3 . 1 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / ubuntu / installers / foundationdb - clients_6 . 3 . 1 - 1_amd64 . deb > ` _ <nl> + * ` foundationdb - server - 6 . 3 . 1 - 1_amd64 . deb < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / ubuntu / installers / foundationdb - server_6 . 3 . 1 - 1_amd64 . deb > ` _ ( depends on the clients package ) <nl> <nl> RHEL / CentOS EL6 <nl> mmmmmmmmmmmmmmm <nl> <nl> The RHEL / CentOS EL6 packages are supported on 64 - bit RHEL / CentOS 6 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 3 . 0 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / rhel6 / installers / foundationdb - clients - 6 . 3 . 0 - 1 . el6 . x86_64 . rpm > ` _ <nl> - * ` foundationdb - server - 6 . 3 . 0 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / rhel6 / installers / foundationdb - server - 6 . 3 . 0 - 1 . el6 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 3 . 1 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / rhel6 / installers / foundationdb - clients - 6 . 3 . 1 - 1 . el6 . x86_64 . rpm > ` _ <nl> + * ` foundationdb - server - 6 . 3 . 1 - 1 . el6 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / rhel6 / installers / foundationdb - server - 6 . 3 . 1 - 1 . el6 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> <nl> RHEL / CentOS EL7 <nl> mmmmmmmmmmmmmmm <nl> <nl> The RHEL / CentOS EL7 packages are supported on 64 - bit RHEL / CentOS 7 . x . <nl> <nl> - * ` foundationdb - clients - 6 . 3 . 0 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / rhel7 / installers / foundationdb - clients - 6 . 3 . 0 - 1 . el7 . x86_64 . rpm > ` _ <nl> - * ` foundationdb - server - 6 . 3 . 0 - 1 . el7 . x86_64 . 
rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / rhel7 / installers / foundationdb - server - 6 . 3 . 0 - 1 . el7 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> + * ` foundationdb - clients - 6 . 3 . 1 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / rhel7 / installers / foundationdb - clients - 6 . 3 . 1 - 1 . el7 . x86_64 . rpm > ` _ <nl> + * ` foundationdb - server - 6 . 3 . 1 - 1 . el7 . x86_64 . rpm < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / rhel7 / installers / foundationdb - server - 6 . 3 . 1 - 1 . el7 . x86_64 . rpm > ` _ ( depends on the clients package ) <nl> <nl> Windows <nl> mmmmmm - <nl> <nl> The Windows installer is supported on 64 - bit Windows XP and later . It includes the client and ( optionally ) the server . <nl> <nl> - * ` foundationdb - 6 . 3 . 0 - x64 . msi < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / windows / installers / foundationdb - 6 . 3 . 0 - x64 . msi > ` _ <nl> + * ` foundationdb - 6 . 3 . 1 - x64 . msi < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / windows / installers / foundationdb - 6 . 3 . 1 - x64 . msi > ` _ <nl> <nl> API Language Bindings <nl> = = = = = = = = = = = = = = = = = = = = = <nl> On macOS and Windows , the FoundationDB Python API bindings are installed as part <nl> <nl> If you need to use the FoundationDB Python API from other Python installations or paths , use the Python package manager ` ` pip ` ` ( ` ` pip install foundationdb ` ` ) or download the Python package : <nl> <nl> - * ` foundationdb - 6 . 3 . 0 . tar . gz < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / bindings / python / foundationdb - 6 . 3 . 0 . tar . gz > ` _ <nl> + * ` foundationdb - 6 . 3 . 1 . tar . gz < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / bindings / python / foundationdb - 6 . 3 . 1 . tar . gz > ` _ <nl> <nl> Ruby 1 . 9 . 3 / 2 . 0 . 0 + <nl> mmmmmmmmmmmmmmm - - <nl> <nl> - * ` fdb - 6 . 3 . 0 . gem < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / bindings / ruby / fdb - 6 . 3 . 0 . gem > ` _ <nl> + * ` fdb - 6 . 3 . 1 . gem < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / bindings / ruby / fdb - 6 . 3 . 1 . gem > ` _ <nl> <nl> Java 8 + <nl> mmmmmm - <nl> <nl> - * ` fdb - java - 6 . 3 . 0 . jar < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / bindings / java / fdb - java - 6 . 3 . 0 . jar > ` _ <nl> - * ` fdb - java - 6 . 3 . 0 - javadoc . jar < https : / / www . foundationdb . org / downloads / 6 . 3 . 0 / bindings / java / fdb - java - 6 . 3 . 0 - javadoc . jar > ` _ <nl> + * ` fdb - java - 6 . 3 . 1 . jar < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / bindings / java / fdb - java - 6 . 3 . 1 . jar > ` _ <nl> + * ` fdb - java - 6 . 3 . 1 - javadoc . jar < https : / / www . foundationdb . org / downloads / 6 . 3 . 1 / bindings / java / fdb - java - 6 . 3 . 1 - javadoc . jar > ` _ <nl> <nl> Go 1 . 11 + <nl> mmmmmm - - <nl> mmm a / documentation / sphinx / source / old - release - notes / release - notes - 620 . rst <nl> ppp b / documentation / sphinx / source / old - release - notes / release - notes - 620 . rst <nl> <nl> Release Notes <nl> # # # # # # # # # # # # # <nl> <nl> + 6 . 2 . 22 <nl> + = = = = = = <nl> + <nl> + Fixes <nl> + mmm - - <nl> + <nl> + * Coordinator class processes could be recruited as the cluster controller . ` ( PR # 3282 ) < https : / / github . 
com / apple / foundationdb / pull / 3282 > ` _ <nl> + * HTTPS requests made by backup would fail ( introduced in 6 . 2 . 21 ) . ` ( PR # 3284 ) < https : / / github . com / apple / foundationdb / pull / 3284 > ` _ <nl> + <nl> 6 . 2 . 21 <nl> = = = = = = <nl> <nl> mmm a / documentation / sphinx / source / release - notes . rst <nl> ppp b / documentation / sphinx / source / release - notes . rst <nl> <nl> Release Notes <nl> # # # # # # # # # # # # # <nl> <nl> - 6 . 3 . 0 <nl> + 6 . 3 . 1 <nl> = = = = = <nl> <nl> Features <nl> mmm a / fdbclient / HTTP . actor . cpp <nl> ppp b / fdbclient / HTTP . actor . cpp <nl> namespace HTTP { <nl> send_start = timer ( ) ; <nl> <nl> loop { <nl> + wait ( conn - > onWritable ( ) ) ; <nl> + wait ( delay ( 0 , TaskPriority : : WriteSocket ) ) ; <nl> + <nl> / / If we already got a response , before finishing sending the request , then close the connection , <nl> / / set the Connection header to " close " as a hint to the caller that this connection can ' t be used <nl> / / again , and break out of the send loop . <nl> namespace HTTP { <nl> pContent - > sent ( len ) ; <nl> if ( pContent - > empty ( ) ) <nl> break ; <nl> - <nl> - if ( len = = 0 ) { <nl> - wait ( conn - > onWritable ( ) ) ; <nl> - wait ( delay ( 0 , TaskPriority : : WriteSocket ) ) ; <nl> - } <nl> } <nl> <nl> wait ( responseReading ) ; <nl> mmm a / fdbserver / worker . actor . cpp <nl> ppp b / fdbserver / worker . actor . cpp <nl> ACTOR Future < Void > fdbd ( <nl> Reference < AsyncVar < ServerDBInfo > > dbInfo ( new AsyncVar < ServerDBInfo > ( ServerDBInfo ( ) ) ) ; <nl> <nl> actors . push_back ( reportErrors ( monitorAndWriteCCPriorityInfo ( fitnessFilePath , asyncPriorityInfo ) , " MonitorAndWriteCCPriorityInfo " ) ) ; <nl> - if ( processClass = = ProcessClass : : TesterClass ) { <nl> + if ( processClass . machineClassFitness ( ProcessClass : : ClusterController ) = = ProcessClass : : NeverAssign ) { <nl> actors . push_back ( reportErrors ( monitorLeader ( connFile , cc ) , " ClusterController " ) ) ; <nl> } else if ( processClass = = ProcessClass : : StorageClass & & SERVER_KNOBS - > MAX_DELAY_STORAGE_CANDIDACY_SECONDS > 0 ) { <nl> actors . push_back ( reportErrors ( monitorLeaderRemotelyWithDelayedCandidacy ( connFile , cc , asyncPriorityInfo , recoveredDiskFiles . getFuture ( ) , localities , dbInfo ) , " ClusterController " ) ) ; <nl> mmm a / flow / Net2 . actor . cpp <nl> ppp b / flow / Net2 . actor . cpp <nl> class BindPromise { <nl> } <nl> } ; <nl> <nl> + struct SendBufferIterator { <nl> + typedef boost : : asio : : const_buffer value_type ; <nl> + typedef std : : forward_iterator_tag iterator_category ; <nl> + typedef size_t difference_type ; <nl> + typedef boost : : asio : : const_buffer * pointer ; <nl> + typedef boost : : asio : : const_buffer & reference ; <nl> + <nl> + SendBuffer const * p ; <nl> + int limit ; <nl> + <nl> + SendBufferIterator ( SendBuffer const * p = 0 , int limit = std : : numeric_limits < int > : : max ( ) ) : p ( p ) , limit ( limit ) { <nl> + ASSERT ( limit > 0 ) ; <nl> + } <nl> + <nl> + bool operator = = ( SendBufferIterator const & r ) const { return p = = r . p ; } <nl> + bool operator ! = ( SendBufferIterator const & r ) const { return p ! = r . 
p ; } <nl> + void operator + + ( ) { <nl> + limit - = p - > bytes_written - p - > bytes_sent ; <nl> + if ( limit > 0 ) <nl> + p = p - > next ; <nl> + else <nl> + p = NULL ; <nl> + } <nl> + <nl> + boost : : asio : : const_buffer operator * ( ) const { <nl> + return boost : : asio : : const_buffer ( p - > data + p - > bytes_sent , std : : min ( limit , p - > bytes_written - p - > bytes_sent ) ) ; <nl> + } <nl> + } ; <nl> + <nl> class Connection : public IConnection , ReferenceCounted < Connection > { <nl> public : <nl> virtual void addref ( ) { ReferenceCounted < Connection > : : addref ( ) ; } <nl> class Connection : public IConnection , ReferenceCounted < Connection > { <nl> tcp : : socket socket ; <nl> NetworkAddress peer_address ; <nl> <nl> - struct SendBufferIterator { <nl> - typedef boost : : asio : : const_buffer value_type ; <nl> - typedef std : : forward_iterator_tag iterator_category ; <nl> - typedef size_t difference_type ; <nl> - typedef boost : : asio : : const_buffer * pointer ; <nl> - typedef boost : : asio : : const_buffer & reference ; <nl> - <nl> - SendBuffer const * p ; <nl> - int limit ; <nl> - <nl> - SendBufferIterator ( SendBuffer const * p = 0 , int limit = std : : numeric_limits < int > : : max ( ) ) : p ( p ) , limit ( limit ) { <nl> - ASSERT ( limit > 0 ) ; <nl> - } <nl> - <nl> - bool operator = = ( SendBufferIterator const & r ) const { return p = = r . p ; } <nl> - bool operator ! = ( SendBufferIterator const & r ) const { return p ! = r . p ; } <nl> - void operator + + ( ) { <nl> - limit - = p - > bytes_written - p - > bytes_sent ; <nl> - if ( limit > 0 ) <nl> - p = p - > next ; <nl> - else <nl> - p = NULL ; <nl> - } <nl> - <nl> - boost : : asio : : const_buffer operator * ( ) const { <nl> - return boost : : asio : : const_buffer ( p - > data + p - > bytes_sent , std : : min ( limit , p - > bytes_written - p - > bytes_sent ) ) ; <nl> - } <nl> - } ; <nl> - <nl> void init ( ) { <nl> / / Socket settings that have to be set after connect or accept succeeds <nl> socket . non_blocking ( true ) ; <nl> class SSLConnection : public IConnection , ReferenceCounted < SSLConnection > { <nl> <nl> / / Writes as many bytes as possible from the given SendBuffer chain into the write buffer and returns the number of bytes written ( might be 0 ) <nl> virtual int write ( SendBuffer const * data , int limit ) { <nl> + # ifdef __APPLE__ <nl> + / / For some reason , writing ssl_sock with more than 2016 bytes when socket is writeable sometimes results in a broken pipe error . <nl> + limit = std : : min ( limit , 2016 ) ; <nl> + # endif <nl> boost : : system : : error_code err ; <nl> + + g_net2 - > countWrites ; <nl> <nl> class SSLConnection : public IConnection , ReferenceCounted < SSLConnection > { <nl> NetworkAddress peer_address ; <nl> Reference < ReferencedObject < boost : : asio : : ssl : : context > > sslContext ; <nl> <nl> - struct SendBufferIterator { <nl> - typedef boost : : asio : : const_buffer value_type ; <nl> - typedef std : : forward_iterator_tag iterator_category ; <nl> - typedef size_t difference_type ; <nl> - typedef boost : : asio : : const_buffer * pointer ; <nl> - typedef boost : : asio : : const_buffer & reference ; <nl> - <nl> - SendBuffer const * p ; <nl> - int limit ; <nl> - <nl> - SendBufferIterator ( SendBuffer const * p = 0 , int limit = std : : numeric_limits < int > : : max ( ) ) : p ( p ) , limit ( limit ) { <nl> - ASSERT ( limit > 0 ) ; <nl> - } <nl> - <nl> - bool operator = = ( SendBufferIterator const & r ) const { return p = = r . 
p ; } <nl> - bool operator ! = ( SendBufferIterator const & r ) const { return p ! = r . p ; } <nl> - void operator + + ( ) { <nl> - limit - = p - > bytes_written - p - > bytes_sent ; <nl> - if ( limit > 0 ) <nl> - p = p - > next ; <nl> - else <nl> - p = NULL ; <nl> - } <nl> - <nl> - boost : : asio : : const_buffer operator * ( ) const { <nl> - return boost : : asio : : const_buffer ( p - > data + p - > bytes_sent , std : : min ( limit , p - > bytes_written - p - > bytes_sent ) ) ; <nl> - } <nl> - } ; <nl> - <nl> void init ( ) { <nl> / / Socket settings that have to be set after connect or accept succeeds <nl> socket . non_blocking ( true ) ; <nl> | Merge pull request from etschannen / release - 6 . 3 | apple/foundationdb | 16719974e02926ba8fcfb3357574333857ebbeba | 2020-06-05T02:59:26Z |
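The latency-band documentation added above reports, per threshold, only the requests that fell between that threshold and the next smaller one, so answering an SLO question means summing bands. A small C++ sketch of that bookkeeping, with made-up counts standing in for real status json output:

#include <iostream>
#include <limits>
#include <map>

// Band counts as reported under read_latency_bands: key = upper threshold in
// seconds, value = requests that fell between the previous threshold and this
// one. The infinity key holds everything slower than the largest band.
using LatencyBands = std::map<double, long>;

// Fraction of counted requests at or below slo_seconds, following the
// "sum the bands up to the threshold" rule described in the documentation.
double FractionWithinSlo(const LatencyBands& bands, double slo_seconds) {
    long within = 0, total = 0;
    for (const auto& [threshold, count] : bands) {
        total += count;
        if (threshold <= slo_seconds) within += count;
    }
    return total == 0 ? 1.0 : static_cast<double>(within) / total;
}

int main() {
    const double inf = std::numeric_limits<double>::infinity();
    LatencyBands reads = {{0.01, 12}, {0.1, 1}, {inf, 0}};  // example numbers only
    std::cout << "reads within 0.1s SLO: "
              << FractionWithinSlo(reads, 0.1) * 100 << "%\n";
}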
mmm a / fdbserver / DataDistributionTracker . actor . cpp <nl> ppp b / fdbserver / DataDistributionTracker . actor . cpp <nl> struct DataDistributionTracker { <nl> / / be accessed <nl> bool const & trackerCancelled ; <nl> <nl> + / / This class extracts the trackerCancelled reference from a DataDistributionTracker object <nl> + / / Because some actors spawned by the dataDistributionTracker outlive the DataDistributionTracker <nl> + / / object , we must guard against memory errors by using a GetTracker functor to access <nl> + / / the DataDistributionTracker object . <nl> + class SafeAccessor { <nl> + bool const & trackerCancelled ; <nl> + DataDistributionTracker & tracker ; <nl> + <nl> + public : <nl> + SafeAccessor ( DataDistributionTracker * tracker ) <nl> + : trackerCancelled ( tracker - > trackerCancelled ) , tracker ( * tracker ) { <nl> + ASSERT ( ! trackerCancelled ) ; <nl> + } <nl> + <nl> + DataDistributionTracker * operator ( ) ( ) { <nl> + if ( trackerCancelled ) { <nl> + throw dd_tracker_cancelled ( ) ; <nl> + } <nl> + return & tracker ; <nl> + } <nl> + } ; <nl> + <nl> DataDistributionTracker ( Database cx , UID distributorId , Promise < Void > const & readyToStart , <nl> PromiseStream < RelocateShard > const & output , <nl> Reference < ShardsAffectedByTeamFailure > shardsAffectedByTeamFailure , <nl> int64_t getMaxShardSize ( double dbSizeEstimate ) { <nl> ( int64_t ) SERVER_KNOBS - > MAX_SHARD_BYTES ) ; <nl> } <nl> <nl> - / / This class extracts the trackerCancelled reference from a DataDistributionTracker object <nl> - / / Because some actors spawned by the dataDistributionTracker outlive the DataDistributionTracker <nl> - / / object , we must guard against memory errors by using a GetTracker functor to access <nl> - / / the DataDistributionTracker object . <nl> - / / <nl> - / / Ideally this would be implemented with a lambda instead , but the actor compiler does not do <nl> - / / type deduction . <nl> - class GetTracker { <nl> - bool const & trackerCancelled ; <nl> - DataDistributionTracker & tracker ; <nl> - <nl> - public : <nl> - GetTracker ( DataDistributionTracker * tracker ) : trackerCancelled ( tracker - > trackerCancelled ) , tracker ( * tracker ) { <nl> - ASSERT ( ! trackerCancelled ) ; <nl> - } <nl> - <nl> - DataDistributionTracker * operator ( ) ( ) { <nl> - if ( trackerCancelled ) { <nl> - throw dd_tracker_cancelled ( ) ; <nl> - } <nl> - return & tracker ; <nl> - } <nl> - } ; <nl> - <nl> - ACTOR Future < Void > trackShardBytes ( <nl> - DataDistributionTracker * self , <nl> - KeyRange keys , <nl> - Reference < AsyncVar < Optional < ShardMetrics > > > shardSize ) <nl> - { <nl> - state GetTracker getSelf ( self ) ; <nl> + ACTOR Future < Void > trackShardBytes ( DataDistributionTracker : : SafeAccessor self , KeyRange keys , <nl> + Reference < AsyncVar < Optional < ShardMetrics > > > shardSize ) { <nl> state BandwidthStatus bandwidthStatus = shardSize - > get ( ) . present ( ) ? getBandwidthStatus ( shardSize - > get ( ) . get ( ) . metrics ) : BandwidthStatusNormal ; <nl> state double lastLowBandwidthStartTime = shardSize - > get ( ) . present ( ) ? shardSize - > get ( ) . get ( ) . lastLowBandwidthStartTime : now ( ) ; <nl> state int shardCount = shardSize - > get ( ) . present ( ) ? shardSize - > get ( ) . get ( ) . shardCount : 1 ; <nl> ACTOR Future < Void > trackShardBytes ( <nl> bounds . permittedError . iosPerKSecond = bounds . permittedError . 
infinity ; <nl> <nl> loop { <nl> - Transaction tr ( getSelf ( ) - > cx ) ; <nl> + Transaction tr ( self ( ) - > cx ) ; <nl> / / metrics . second is the number of key - ranges ( i . e . , shards ) in the ' keys ' key - range <nl> std : : pair < Optional < StorageMetrics > , int > metrics = wait ( tr . waitStorageMetrics ( keys , bounds . min , bounds . max , bounds . permittedError , CLIENT_KNOBS - > STORAGE_METRICS_SHARD_LIMIT , shardCount ) ) ; <nl> if ( metrics . first . present ( ) ) { <nl> ACTOR Future < Void > trackShardBytes ( <nl> . detail ( " TrackerID " , trackerID ) ; * / <nl> <nl> if ( shardSize - > get ( ) . present ( ) ) { <nl> - getSelf ( ) - > dbSizeEstimate - > set ( getSelf ( ) - > dbSizeEstimate - > get ( ) + metrics . first . get ( ) . bytes - <nl> - shardSize - > get ( ) . get ( ) . metrics . bytes ) ; <nl> + self ( ) - > dbSizeEstimate - > set ( self ( ) - > dbSizeEstimate - > get ( ) + metrics . first . get ( ) . bytes - <nl> + shardSize - > get ( ) . get ( ) . metrics . bytes ) ; <nl> if ( keys . begin > = systemKeys . begin ) { <nl> - getSelf ( ) - > systemSizeEstimate + = <nl> + self ( ) - > systemSizeEstimate + = <nl> metrics . first . get ( ) . bytes - shardSize - > get ( ) . get ( ) . metrics . bytes ; <nl> } <nl> } <nl> ACTOR Future < Void > trackShardBytes ( <nl> } catch ( Error & e ) { <nl> if ( e . code ( ) ! = error_code_actor_cancelled & & e . code ( ) ! = error_code_broken_promise & & <nl> e . code ( ) ! = error_code_dd_tracker_cancelled ) { <nl> - getSelf ( ) - > output . sendError ( e ) ; / / Propagate failure to dataDistributionTracker <nl> + self ( ) - > output . sendError ( e ) ; / / Propagate failure to dataDistributionTracker <nl> } <nl> throw e ; <nl> } <nl> ACTOR Future < Void > shardEvaluator ( <nl> return Void ( ) ; <nl> } <nl> <nl> - ACTOR Future < Void > shardTracker ( DataDistributionTracker * self , KeyRange keys , <nl> + ACTOR Future < Void > shardTracker ( DataDistributionTracker : : SafeAccessor self , KeyRange keys , <nl> Reference < AsyncVar < Optional < ShardMetrics > > > shardSize ) { <nl> - state GetTracker getSelf ( self ) ; <nl> - wait ( yieldedFuture ( self - > readyToStart . getFuture ( ) ) ) ; <nl> + wait ( yieldedFuture ( self ( ) - > readyToStart . getFuture ( ) ) ) ; <nl> <nl> if ( ! shardSize - > get ( ) . present ( ) ) <nl> wait ( shardSize - > onChange ( ) ) ; <nl> <nl> - if ( ! getSelf ( ) - > maxShardSize - > get ( ) . present ( ) ) wait ( yieldedFuture ( getSelf ( ) - > maxShardSize - > onChange ( ) ) ) ; <nl> + if ( ! self ( ) - > maxShardSize - > get ( ) . present ( ) ) wait ( yieldedFuture ( self ( ) - > maxShardSize - > onChange ( ) ) ) ; <nl> <nl> / / Since maxShardSize will become present for all shards at once , avoid slow tasks with a short delay <nl> wait ( delay ( 0 , TaskPriority : : DataDistribution ) ) ; <nl> ACTOR Future < Void > shardTracker ( DataDistributionTracker * self , KeyRange keys , <nl> / / Survives multiple calls to shardEvaluator and keeps merges from happening too quickly . <nl> state Reference < HasBeenTrueFor > wantsToMerge ( new HasBeenTrueFor ( shardSize - > get ( ) ) ) ; <nl> <nl> - / * TraceEvent ( " ShardTracker " , getSelf ( ) - > distributorId ) <nl> + / * TraceEvent ( " ShardTracker " , self ( ) - > distributorId ) <nl> . detail ( " Begin " , keys . begin ) <nl> . detail ( " End " , keys . end ) <nl> . detail ( " TrackerID " , trackerID ) <nl> - . detail ( " MaxBytes " , getSelf ( ) - > maxShardSize - > get ( ) . get ( ) ) <nl> + . 
detail ( " MaxBytes " , self ( ) - > maxShardSize - > get ( ) . get ( ) ) <nl> . detail ( " ShardSize " , shardSize - > get ( ) . get ( ) . bytes ) <nl> . detail ( " BytesPerKSec " , shardSize - > get ( ) . get ( ) . bytesPerKSecond ) ; * / <nl> <nl> try { <nl> loop { <nl> / / Use the current known size to check for ( and start ) splits and merges . <nl> - wait ( shardEvaluator ( getSelf ( ) , keys , shardSize , wantsToMerge ) ) ; <nl> + wait ( shardEvaluator ( self ( ) , keys , shardSize , wantsToMerge ) ) ; <nl> <nl> / / We could have a lot of actors being released from the previous wait at the same time . Immediately calling <nl> / / delay ( 0 ) mitigates the resulting SlowTask <nl> ACTOR Future < Void > shardTracker ( DataDistributionTracker * self , KeyRange keys , <nl> / / If e is broken_promise then self may have already been deleted <nl> if ( e . code ( ) ! = error_code_actor_cancelled & & e . code ( ) ! = error_code_broken_promise & & <nl> e . code ( ) ! = error_code_dd_tracker_cancelled ) { <nl> - getSelf ( ) - > output . sendError ( e ) ; / / Propagate failure to dataDistributionTracker <nl> + self ( ) - > output . sendError ( e ) ; / / Propagate failure to dataDistributionTracker <nl> } <nl> throw e ; <nl> } <nl> void restartShardTrackers ( DataDistributionTracker * self , KeyRangeRef keys , Optio <nl> <nl> ShardTrackedData data ; <nl> data . stats = shardSize ; <nl> - data . trackShard = shardTracker ( self , ranges [ i ] , shardSize ) ; <nl> - data . trackBytes = trackShardBytes ( self , ranges [ i ] , shardSize ) ; <nl> + data . trackShard = shardTracker ( DataDistributionTracker : : SafeAccessor ( self ) , ranges [ i ] , shardSize ) ; <nl> + data . trackBytes = trackShardBytes ( DataDistributionTracker : : SafeAccessor ( self ) , ranges [ i ] , shardSize ) ; <nl> self - > shards . insert ( ranges [ i ] , data ) ; <nl> } <nl> } <nl> | Prevent shardTracker or trackShardBytes from accidentally unsafely accessing DataDistributionTracker | apple/foundationdb | 6235d087a6f28ce46d25c7e016ddd96a12acb379 | 2020-11-16T20:46:21Z |
mmm a / Examples / ReinforcementLearning / DeepQNeuralNetwork . py <nl> ppp b / Examples / ReinforcementLearning / DeepQNeuralNetwork . py <nl> def is_exploring ( self , step ) : <nl> <nl> <nl> class DeepQAgent ( object ) : <nl> - @ staticmethod <nl> - def huber_loss ( y , y_hat , delta ) : <nl> - " " " <nl> - Compute the Huber Loss as part of the model graph <nl> - <nl> - Huber Loss is more robust to outliers . It is defined as : <nl> - if | y - y_hat | < delta : <nl> - 0 . 5 * ( y - y_hat ) * * 2 <nl> - else : <nl> - delta * | y - y_hat | - 0 . 5 * delta * * 2 <nl> - <nl> - : param y : Target value <nl> - : param y_hat : Estimated value <nl> - : param delta : Outliers threshold <nl> - : return : float <nl> - " " " <nl> - half_delta_squared = 0 . 5 * delta * delta <nl> - error = y - y_hat <nl> - abs_error = abs ( error ) <nl> - <nl> - less_than = 0 . 5 * square ( error ) <nl> - more_than = ( delta * abs_error ) - half_delta_squared <nl> - loss_per_sample = element_select ( less ( abs_error , delta ) , less_than , more_than ) <nl> - <nl> - return reduce_sum ( loss_per_sample , name = ' loss ' ) <nl> - <nl> + " " " <nl> + Implementation of Deep Q Neural Network agent like in : <nl> + Nature 518 . " Human - level control through deep reinforcement learning " ( Mnih & al . 2015 ) <nl> + " " " <nl> def __init__ ( self , input_shape , nb_actions , <nl> gamma = 0 . 99 , explorer = LinearEpsilonAnnealingExplorer ( 1 , 0 . 1 , 1000000 ) , <nl> learning_rate = 0 . 00025 , momentum = 0 . 95 , minibatch_size = 32 , device_id = - 1 , <nl> def _plot_metrics ( self ) : <nl> <nl> self . _metrics_writer . write_value ( ' Sum rewards per ep . ' , sum ( self . _episode_rewards ) , self . _num_actions_taken ) <nl> <nl> + @ staticmethod <nl> + def huber_loss ( y , y_hat , delta ) : <nl> + " " " <nl> + Compute the Huber Loss as part of the model graph <nl> + <nl> + Huber Loss is more robust to outliers . It is defined as : <nl> + if | y - y_hat | < delta : <nl> + 0 . 5 * ( y - y_hat ) * * 2 <nl> + else : <nl> + delta * | y - y_hat | - 0 . 5 * delta * * 2 <nl> + <nl> + : param y : Target value <nl> + : param y_hat : Estimated value <nl> + : param delta : Outliers threshold <nl> + : return : float <nl> + " " " <nl> + half_delta_squared = 0 . 5 * delta * delta <nl> + error = y - y_hat <nl> + abs_error = abs ( error ) <nl> + <nl> + less_than = 0 . 5 * square ( error ) <nl> + more_than = ( delta * abs_error ) - half_delta_squared <nl> + loss_per_sample = element_select ( less ( abs_error , delta ) , less_than , more_than ) <nl> + <nl> + return reduce_sum ( loss_per_sample , name = ' loss ' ) <nl> + <nl> <nl> def as_ale_input ( environment ) : <nl> " " " <nl> | Addressing CR comments , added Nature reference | microsoft/CNTK | 464dad20bd0f2865693c9f13193a2c23d1c4a790 | 2017-05-04T15:55:11Z |
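The CNTK diff above only relocates huber_loss within DeepQAgent and adds the Nature reference, but the docstring it carries is a complete definition of the Huber loss. As a quick worked check of that definition (the numbers and the choice of delta = 1 are illustrative, not from the repository):

$$
L_\delta(e) =
\begin{cases}
\tfrac{1}{2}e^2 & \text{if } |e| < \delta \\
\delta\,|e| - \tfrac{1}{2}\delta^2 & \text{otherwise}
\end{cases}
\qquad
L_1(0.4) = \tfrac{1}{2}(0.4)^2 = 0.08,
\qquad
L_1(2.5) = 1 \cdot 2.5 - \tfrac{1}{2} = 2.0
$$

Because the linear branch has slope of magnitude delta, the gradient stays bounded for large errors, which is the robustness to outliers the docstring mentions; the code in the diff reproduces exactly this piecewise form with element_select before summing over the minibatch.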
mmm a / tensorflow / compiler / xla / service / BUILD <nl> ppp b / tensorflow / compiler / xla / service / BUILD <nl> cc_library ( <nl> srcs = [ <nl> " dfs_hlo_visitor . cc " , <nl> " hlo_computation . cc " , <nl> + " hlo_input_output_alias_config . cc " , <nl> " hlo_instruction . cc " , <nl> " hlo_instructions . cc " , <nl> " hlo_module . cc " , <nl> cc_library ( <nl> " hlo_clone_context . h " , <nl> " hlo_computation . h " , <nl> " hlo_domain_metadata . h " , <nl> + " hlo_input_output_alias_config . h " , <nl> " hlo_instruction . h " , <nl> " hlo_instructions . h " , <nl> " hlo_module . h " , <nl> tf_cc_test ( <nl> ] , <nl> ) <nl> <nl> + tf_cc_test ( <nl> + name = " hlo_input_output_alias_config_test " , <nl> + srcs = [ " hlo_input_output_alias_config_test . cc " ] , <nl> + deps = [ <nl> + " : hlo " , <nl> + " : hlo_dce " , <nl> + " : hlo_memory_scheduler " , <nl> + " : hlo_ordering " , <nl> + " : hlo_parser " , <nl> + " / / tensorflow / compiler / xla : shape_util " , <nl> + " / / tensorflow / compiler / xla : types " , <nl> + " / / tensorflow / compiler / xla : xla_data_proto " , <nl> + " / / tensorflow / compiler / xla / tests : hlo_test_base " , <nl> + " / / tensorflow / compiler / xla / tests : xla_internal_test_main " , <nl> + " / / tensorflow / core : test " , <nl> + " @ com_google_absl / / absl / algorithm : container " , <nl> + ] , <nl> + ) <nl> + <nl> cc_library ( <nl> name = " hlo_memory_scheduler " , <nl> srcs = [ " hlo_memory_scheduler . cc " ] , <nl> mmm a / tensorflow / compiler / xla / service / buffer_assignment . cc <nl> ppp b / tensorflow / compiler / xla / service / buffer_assignment . cc <nl> BufferAllocation : : Slice BufferAllocation : : GetSlice ( <nl> <nl> void BufferAllocation : : AddAssignment ( const LogicalBuffer & buffer , int64 offset , <nl> int64 size ) { <nl> - VLOG ( 4 ) < < " Trying to add " < < buffer < < " to " < < this ; <nl> + VLOG ( 4 ) < < " Trying to add " < < buffer < < " to allocation # " < < index ( ) ; <nl> CHECK ( assigned_buffers_ . count ( & buffer ) = = 0 ) <nl> < < " LogicalBuffer " < < buffer < < " already assigned to allocation " <nl> < < index_ ; <nl> bool BufferAssigner : : MaybeAssignBuffer ( BufferAllocation * allocation , <nl> } <nl> } <nl> <nl> - if ( allow_input_output_aliasing_ & & allocation - > maybe_live_out ( ) ) { <nl> - const HloComputation * entry_computation = <nl> - assignment - > module_ - > entry_computation ( ) ; <nl> - for ( auto param : entry_computation - > parameter_instructions ( ) ) { <nl> - for ( auto & param_buffer : <nl> - assignment - > points_to_analysis ( ) . GetBuffersDefinedByInstruction ( <nl> - param ) ) { <nl> - if ( assignment - > liveness ( ) . MayInterfere ( * param_buffer , buffer ) ) { <nl> - VLOG ( 4 ) < < " Can ' t assign : Parameter interference with result " ; <nl> - return false ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> / / If the buffer is live out of the computation then it should only be <nl> / / assigned a buffer which exactly fits the result to avoid wasting memory <nl> / / ( result buffers can have arbitrary lifetimes ) . <nl> BufferAssigner : : MergeColocatedBufferSets ( <nl> <nl> / / Builds sets of buffers in ' colocated_buffer_sets ' which should be colocated <nl> / / in the same allocation ( currently just supports kWhile , kCall , and <nl> - / / kConditional ) . <nl> + / / kConditional and input output aliasing ) . 
<nl> void BufferAssigner : : BuildColocatedBufferSets ( <nl> const HloModule * module , const BufferLiveness & buffer_liveness , <nl> const LogicalBuffer : : SizeFunction & buffer_size , <nl> std : : vector < ColocatedBufferSet > * colocated_buffer_sets ) { <nl> const TuplePointsToAnalysis & points_to_analysis = <nl> buffer_liveness . points_to_analysis ( ) ; <nl> + <nl> + / / Set up colocated buffer set for input and output . <nl> + module - > input_output_alias_config ( ) . ForEachAlias ( <nl> + [ & ] ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) { <nl> + std : : vector < const LogicalBuffer * > colocated_set ; <nl> + AddBufferToColocatedSet ( module - > entry_computation ( ) - > root_instruction ( ) , <nl> + output_index , points_to_analysis , <nl> + & colocated_set ) ; <nl> + AddBufferToColocatedSet ( <nl> + module - > entry_computation ( ) - > parameter_instruction ( param_number ) , <nl> + param_index , points_to_analysis , & colocated_set ) ; <nl> + AddSetToColocatedBufferSets ( colocated_set , colocated_buffer_sets ) ; <nl> + } ) ; <nl> + <nl> for ( const HloComputation * computation : module - > MakeComputationPostOrder ( ) ) { <nl> if ( computation - > IsFusionComputation ( ) ) { <nl> continue ; <nl> mmm a / tensorflow / compiler / xla / service / buffer_value . h <nl> ppp b / tensorflow / compiler / xla / service / buffer_value . h <nl> class BufferValue { <nl> / / operator < is required for std : : set . <nl> bool operator < ( const BufferValue & other ) const { return id_ < other . id_ ; } <nl> <nl> + bool operator = = ( const BufferValue & other ) const { return id_ = = other . id_ ; } <nl> + bool operator ! = ( const BufferValue & other ) const { return id_ ! = other . id_ ; } <nl> + <nl> virtual string ToString ( ) const = 0 ; <nl> <nl> / / TODO ( lauj ) rename LogicalBufferProto to BufferValueProto . <nl> mmm a / tensorflow / compiler / xla / service / copy_insertion . cc <nl> ppp b / tensorflow / compiler / xla / service / copy_insertion . cc <nl> namespace { <nl> <nl> using absl : : StrAppend ; <nl> <nl> - bool IsEntryParameterValue ( const HloValue & value ) { <nl> + bool IsReadonlyEntryParameterValue ( const HloValue & value ) { <nl> const HloComputation * computation = value . defining_instruction ( ) - > parent ( ) ; <nl> return value . defining_instruction ( ) - > opcode ( ) = = HloOpcode : : kParameter & & <nl> - computation = = computation - > parent ( ) - > entry_computation ( ) ; <nl> + computation = = computation - > parent ( ) - > entry_computation ( ) & & <nl> + ! computation - > parent ( ) - > input_output_alias_config ( ) . ParameterHasAlias ( <nl> + value . defining_instruction ( ) - > parameter_number ( ) , value . index ( ) ) ; <nl> } <nl> <nl> bool IsConstantValue ( const HloValue & value ) { <nl> bool IsConstantValue ( const HloValue & value ) { <nl> } <nl> <nl> bool ValueIsReadOnly ( const HloValue & value ) { <nl> - return IsConstantValue ( value ) | | IsEntryParameterValue ( value ) ; <nl> + return IsConstantValue ( value ) | | IsReadonlyEntryParameterValue ( value ) ; <nl> } <nl> <nl> / / Data structure describing the action which should be taken on parts of a <nl> SpecialCaseCopyPolicy GetSpecialCaseCopyPolicy ( const CallGraphNode & node , <nl> bool ShouldCopyRootValue ( const HloValue & value , <nl> const SpecialCaseCopyPolicy & policy ) { <nl> if ( policy . copy_parameters_and_constants ) { <nl> - return IsConstantValue ( value ) | | <nl> - value . 
defining_instruction ( ) - > opcode ( ) = = HloOpcode : : kParameter ; <nl> + return ValueIsReadOnly ( value ) ; <nl> } <nl> return false ; <nl> } <nl> Status AddCopiesForConditional ( const HloAliasAnalysis & alias_analysis , <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> + / / Conservatively adds copies before root instruction of entry computation and <nl> + / / each aliased parameter to resolve interference of aliased input and output <nl> + / / buffer . We later rely on the CopyRemover to drop the unnecessary ones . <nl> + Status AddCopiesForAliasedInputOutputs ( HloModule * module ) { <nl> + HloComputation * entry = module - > entry_computation ( ) ; <nl> + HloInstruction * root = entry - > root_instruction ( ) ; <nl> + <nl> + ShapeTree < bool > output_indices_to_copy ( root - > shape ( ) ) ; <nl> + std : : vector < ShapeTree < HloInstruction * > > copied_parameters ; <nl> + bool has_alias = false ; <nl> + for ( auto * param : entry - > parameter_instructions ( ) ) { <nl> + bool param_has_alias = false ; <nl> + ShapeTree < bool > param_indices_to_copy ( param - > shape ( ) ) ; <nl> + <nl> + module - > input_output_alias_config ( ) . ForEachAlias ( <nl> + [ & ] ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) { <nl> + if ( param_number = = param - > parameter_number ( ) ) { <nl> + param_has_alias = true ; <nl> + * ( param_indices_to_copy . mutable_element ( param_index ) ) = true ; <nl> + * ( output_indices_to_copy . mutable_element ( output_index ) ) = true ; <nl> + } <nl> + } ) ; <nl> + <nl> + if ( ! param_has_alias ) { <nl> + continue ; <nl> + } <nl> + <nl> + has_alias = true ; <nl> + / / Store a snapshot of users before DeepCopyInstruction , as <nl> + / / DeepCopyInstruction introduces new users of the instruction . <nl> + std : : vector < HloInstruction * > users = param - > users ( ) ; <nl> + ShapeTree < HloInstruction * > param_copy_tree ( param - > shape ( ) , <nl> + / * init_value = * / nullptr ) ; <nl> + TF_ASSIGN_OR_RETURN ( HloInstruction * copied , <nl> + entry - > DeepCopyInstruction ( <nl> + param , & param_indices_to_copy , & param_copy_tree ) ) ; <nl> + for ( HloInstruction * user : users ) { <nl> + TF_RETURN_IF_ERROR ( param - > ReplaceUseWith ( user , copied ) ) ; <nl> + } <nl> + <nl> + copied_parameters . push_back ( param_copy_tree ) ; <nl> + } <nl> + <nl> + if ( ! has_alias ) { <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + / / Add copies before root instruction . <nl> + ShapeTree < HloInstruction * > output_copy_tree ( root - > shape ( ) , <nl> + / * init_value = * / nullptr ) ; <nl> + <nl> + TF_ASSIGN_OR_RETURN ( HloInstruction * root_copied , <nl> + root - > parent ( ) - > DeepCopyInstruction ( <nl> + root , & output_indices_to_copy , & output_copy_tree ) ) ; <nl> + <nl> + / / Add control dependencies between the input / output copies . <nl> + TF_RETURN_IF_ERROR ( module - > input_output_alias_config ( ) . ForEachAliasWithStatus ( <nl> + [ & ] ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & input_index ) - > Status { <nl> + HloInstruction * from = <nl> + copied_parameters [ param_number ] . element ( input_index ) ; <nl> + HloInstruction * to = output_copy_tree . element ( output_index ) ; <nl> + <nl> + TF_RET_CHECK ( from ! = nullptr ) ; <nl> + TF_RET_CHECK ( to ! 
= nullptr ) ; <nl> + TF_RETURN_IF_ERROR ( from - > AddControlDependencyTo ( to ) ) ; <nl> + return Status : : OK ( ) ; <nl> + } ) ) ; <nl> + <nl> + entry - > set_root_instruction ( root_copied ) ; <nl> + <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> / / Removes any control dependencies to or from the given instruction . <nl> Status StripControlDependenciesFrom ( HloInstruction * instruction ) { <nl> while ( ! instruction - > control_successors ( ) . empty ( ) ) { <nl> Status CopyInsertion : : AddCopiesToResolveInterference ( HloModule * module ) { <nl> } <nl> } <nl> } <nl> + <nl> + TF_RETURN_IF_ERROR ( AddCopiesForAliasedInputOutputs ( module ) ) ; <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> mmm a / tensorflow / compiler / xla / service / copy_insertion_test . cc <nl> ppp b / tensorflow / compiler / xla / service / copy_insertion_test . cc <nl> TEST_F ( CopyInsertionTest , SwizzlingWhile ) { <nl> EXPECT_THAT ( xla_while - > operand ( 0 ) , op : : Tuple ( op : : Copy ( ) , op : : Copy ( ) ) ) ; <nl> } <nl> <nl> + TEST_F ( CopyInsertionTest , CrossingParameters ) { <nl> + / / Test a case where two parameters ' dataflow cross with each other while <nl> + / / input and output are aliased with same index : <nl> + / / <nl> + / / ( p0 , p1 ) <nl> + / / | \ / | <nl> + / / | \ / | <nl> + / / alias X alias <nl> + / / | / \ | <nl> + / / | / \ | <nl> + / / ( p1 , p0 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " 0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { gte1 , gte0 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + InsertCopies ( module . get ( ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 4 ) ; <nl> + } <nl> + <nl> + TEST_F ( CopyInsertionTest , ParametersAliasing ) { <nl> + / / Test a case where two parameters ' dataflow don ' t interfere with each other <nl> + / / while aliased . <nl> + / / <nl> + / / ( p0 , p1 ) <nl> + / / | | <nl> + / / | | <nl> + / / alias alias <nl> + / / | | <nl> + / / | | <nl> + / / ( p0 , p1 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . 
AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { gte0 , gte1 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + InsertCopies ( module . get ( ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 0 ) ; <nl> + } <nl> + <nl> + TEST_F ( CopyInsertionTest , ParameterWithNoAliasing ) { <nl> + / / Test a case where no parameter is aliased with result . In this case , copy <nl> + / / should be added <nl> + / / <nl> + / / ( p0 , p1 ) <nl> + / / | | <nl> + / / | | <nl> + / / | | <nl> + / / | | <nl> + / / | | <nl> + / / ( p0 , p1 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { gte0 , gte1 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + InsertCopies ( module . get ( ) ) ; <nl> + <nl> + EXPECT_THAT ( module - > entry_computation ( ) - > root_instruction ( ) , <nl> + op : : Tuple ( op : : Copy ( op : : GetTupleElement ( param , 0 ) ) , <nl> + op : : Copy ( op : : GetTupleElement ( param , 1 ) ) ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 2 ) ; <nl> + } <nl> + <nl> + TEST_F ( CopyInsertionTest , ParameterWithPartialAliasing ) { <nl> + / / Test a case where one parameter is aliased with result while another one <nl> + / / isn ' t . <nl> + / / <nl> + / / ( p0 , p1 ) <nl> + / / | | <nl> + / / | | <nl> + / / alias | <nl> + / / | | <nl> + / / | | <nl> + / / ( p0 , p1 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { gte0 , gte1 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + InsertCopies ( module . 
get ( ) ) ; <nl> + <nl> + EXPECT_THAT ( module - > entry_computation ( ) - > root_instruction ( ) , <nl> + op : : Tuple ( op : : GetTupleElement ( param , 0 ) , <nl> + op : : Copy ( op : : GetTupleElement ( param , 1 ) ) ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 1 ) ; <nl> + } <nl> + <nl> + TEST_F ( CopyInsertionTest , ParameterAndParallelOpsWithPartialAliasing ) { <nl> + / / Test a case where one parameter is aliased with result while another one <nl> + / / isn ' t . <nl> + / / <nl> + / / + - - ( p0 , p1 ) <nl> + / / | | | <nl> + / / | | | <nl> + / / alias Negate Negate <nl> + / / | | | <nl> + / / | | | <nl> + / / + - - ( p0 , p1 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + <nl> + auto negate0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte0 ) ) ; <nl> + <nl> + auto negate1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte1 ) ) ; <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { negate0 , negate1 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + InsertCopies ( module . get ( ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 0 ) ; <nl> + } <nl> + <nl> + TEST_F ( CopyInsertionTest , ParameterAndOpsWithPartialAliasing ) { <nl> + / / Test a case where one parameter is aliased with result while another one <nl> + / / isn ' t . <nl> + / / <nl> + / / + - - ( p0 , p1 ) <nl> + / / | | | <nl> + / / | | | <nl> + / / alias Negate Negate <nl> + / / | | | <nl> + / / | Addmmm - + <nl> + / / | | | <nl> + / / + - - ( p0 , p1 ) <nl> + auto module = CreateNewModule ( ) ; <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + <nl> + auto negate0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte0 ) ) ; <nl> + <nl> + auto negate1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte1 ) ) ; <nl> + <nl> + auto add = builder . AddInstruction ( HloInstruction : : CreateBinary ( <nl> + scalar_shape_ , HloOpcode : : kAdd , negate0 , negate1 ) ) ; <nl> + builder . 
AddInstruction ( HloInstruction : : CreateTuple ( { add , negate1 } ) ) ; <nl> + module - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + ASSERT_IS_OK ( module - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + InsertCopies ( module . get ( ) ) ; <nl> + <nl> + EXPECT_EQ ( CountCopies ( * module ) , 0 ) ; <nl> + } <nl> + <nl> TEST_F ( CopyInsertionTest , SwizzlingWhileWithOneOp ) { <nl> / / Test a while instruction with a body which permutes its tuple parameter <nl> / / elements and applies one operation to one of the elements . The addition of <nl> mmm a / tensorflow / compiler / xla / service / hlo . proto <nl> ppp b / tensorflow / compiler / xla / service / hlo . proto <nl> message HloScheduleProto { <nl> map < int64 , InstructionSequence > sequences = 1 ; <nl> } <nl> <nl> + message HloInputOutputAliasProto { <nl> + / / The following proto describes a pair of aliased an input <nl> + / / ( described by parameter number and a ShapeIndex of the parameter ) <nl> + / / and an output ( described by a ShapeIndex of the root <nl> + / / instruction ) . For example : <nl> + / / <nl> + / / entry = { <nl> + / / output_shape_index = { 1 } , <nl> + / / parameter_number = 0 , <nl> + / / parameter_shape_index = { 1 , 2 } , <nl> + / / } <nl> + / / <nl> + / / This entry indicates that the first paremter ' s { 1 , 2 } element is <nl> + / / aliased with the { 1 } element of the root instruction . <nl> + message AliasEntryProto { <nl> + / / ShapeIndex of the root hlo . <nl> + repeated int64 output_shape_index = 1 ; <nl> + / / Number of the parameter in entry computation . <nl> + int64 parameter_number = 2 ; <nl> + / / ShapeIndex of the parameter instruction . <nl> + repeated int64 parameter_shape_index = 3 ; <nl> + } <nl> + <nl> + repeated AliasEntryProto entries = 1 ; <nl> + } <nl> + <nl> / / Serialization of HloModule . <nl> message HloModuleProto { <nl> string name = 1 ; <nl> message HloModuleProto { <nl> <nl> / / The schedule for this module . <nl> HloScheduleProto schedule = 7 ; <nl> + <nl> + / / Describes alias information between inputs and outputs . <nl> + HloInputOutputAliasProto input_output_alias = 8 ; <nl> } <nl> <nl> / / Serialization of LogicalBuffer . <nl> mmm a / tensorflow / compiler / xla / service / hlo_alias_analysis . cc <nl> ppp b / tensorflow / compiler / xla / service / hlo_alias_analysis . cc <nl> class BufferValueMap { <nl> / / construction process . <nl> using BufferNumber = int64 ; <nl> <nl> - explicit BufferValueMap ( const HloDataflowAnalysis & dataflow ) <nl> - : dataflow_ ( dataflow ) { <nl> + explicit BufferValueMap ( HloModule * module , <nl> + const HloDataflowAnalysis & dataflow ) <nl> + : module_ ( module ) , dataflow_ ( dataflow ) { <nl> buffers_ . reserve ( dataflow_ . values ( ) . size ( ) ) ; <nl> value_to_buffer_number_ . reserve ( dataflow_ . values ( ) . size ( ) ) ; <nl> for ( const HloValue * value : dataflow_ . values ( ) ) { <nl> class BufferValueMap { <nl> return value_to_buffer_number_ . at ( & value ) ; <nl> } <nl> <nl> + void ComputeInputOutputAliasedBuffers ( <nl> + const HloValue & value , std : : vector < BufferNumber > * aliased_buffers ) { <nl> + / / Get parameter value from an aliased_input object . <nl> + const auto get_parameter_value = <nl> + [ this ] ( const std : : pair < int64 , ShapeIndex > & aliased_input ) <nl> + - > const HloValue & { <nl> + int64 param_number = aliased_input . 
first ; <nl> + const ShapeIndex & param_index = aliased_input . second ; <nl> + return dataflow_ . GetUniqueValueAt ( <nl> + module_ - > entry_computation ( ) - > parameter_instruction ( param_number ) , <nl> + param_index ) ; <nl> + } ; <nl> + <nl> + / / If the value shows up in a root instruction , alias it with parameter <nl> + / / intruction . <nl> + for ( const HloPosition & pos : value . positions ( ) ) { <nl> + if ( pos . instruction = = module_ - > entry_computation ( ) - > root_instruction ( ) ) { <nl> + ShapeIndex output_index = pos . index ; <nl> + <nl> + auto aliased_input = <nl> + module_ - > input_output_alias_config ( ) . GetAliasedParameter ( <nl> + output_index ) ; <nl> + if ( aliased_input ) { <nl> + aliased_buffers - > push_back ( <nl> + GetBufferForValue ( get_parameter_value ( * aliased_input ) ) ) ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + / / If the value is parameter instruction itself , alias it with itself . <nl> + if ( value . instruction ( ) - > opcode ( ) = = HloOpcode : : kParameter & & <nl> + value . instruction ( ) - > parent ( ) = = module_ - > entry_computation ( ) ) { <nl> + aliased_buffers - > push_back ( GetBufferForValue ( value ) ) ; <nl> + } <nl> + } <nl> + <nl> void ComputeWhileAliasedBuffers ( const HloValue & value , <nl> std : : vector < BufferNumber > * aliased_buffers ) { <nl> VLOG ( 3 ) < < " Compute kWhile aliases " ; <nl> class BufferValueMap { <nl> VLOG ( 2 ) < < " Use of value " < < value . ToShortString ( ) < < " : " < < use ; <nl> } <nl> std : : vector < BufferNumber > aliased_buffers ; <nl> + ComputeInputOutputAliasedBuffers ( value , & aliased_buffers ) ; <nl> ComputeWhileAliasedBuffers ( value , & aliased_buffers ) ; <nl> ComputeConditionalAliasedBuffers ( value , & aliased_buffers ) ; <nl> / / Uniquify aliased buffers . <nl> class BufferValueMap { <nl> return aliased_buffers ; <nl> } <nl> <nl> + HloModule * module_ ; <nl> + <nl> / / Dataflow analysis used to construct the buffer map . <nl> const HloDataflowAnalysis & dataflow_ ; <nl> <nl> StatusOr < std : : unique_ptr < HloAliasAnalysis > > HloAliasAnalysis : : Run ( <nl> / * bitcast_defines_value = * / false , <nl> fusion_can_share_buffer ) ) ; <nl> <nl> - BufferValueMap buffer_map ( alias_analysis - > dataflow_analysis ( ) ) ; <nl> + BufferValueMap buffer_map ( module , alias_analysis - > dataflow_analysis ( ) ) ; <nl> buffer_map . MergeAliasedBuffers ( ) ; <nl> <nl> / / Create a vector of HloBuffers , one for each set of values in the <nl> mmm a / tensorflow / compiler / xla / service / hlo_alias_analysis_test . cc <nl> ppp b / tensorflow / compiler / xla / service / hlo_alias_analysis_test . cc <nl> TEST_F ( HloAliasAnalysisTest , NondistinctTuple ) { <nl> EXPECT_FALSE ( AnyValuesInSameBufferInterfere ( ) ) ; <nl> } <nl> <nl> + TEST_F ( HloAliasAnalysisTest , ParametersWithAliasing ) { <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + <nl> + auto negate0 = builder . 
AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte0 ) ) ; <nl> + auto negate1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateUnary ( scalar_shape_ , HloOpcode : : kNegate , gte1 ) ) ; <nl> + <nl> + auto tuple = <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { negate0 , negate1 } ) ) ; <nl> + module_ - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + <nl> + / / Cannot alias an output twice . <nl> + ASSERT_IS_NOT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + <nl> + const HloAliasAnalysis & analysis = RunAnalysis ( ) ; <nl> + <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte0 ) , <nl> + analysis . GetUniqueBufferAt ( tuple , / * index = * / { 0 } ) ) ; <nl> + <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte1 ) , <nl> + analysis . GetUniqueBufferAt ( tuple , / * index = * / { 1 } ) ) ; <nl> + } <nl> + <nl> + TEST_F ( HloAliasAnalysisTest , ParametersWithCrossAliasing ) { <nl> + / / parameter 0 aliased with output 1 and parameter 1 aliased with output 0 . <nl> + / / <nl> + / / ( p0 , p1 ) <nl> + / / \ / <nl> + / / \ / <nl> + / / alias X <nl> + / / / \ <nl> + / / / \ <nl> + / / ( p0 , p1 ) <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + auto gte0 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 0 ) ) ; <nl> + auto gte1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , param , 1 ) ) ; <nl> + auto tuple = <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { gte0 , gte1 } ) ) ; <nl> + module_ - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + <nl> + / / Cannot alias an output twice . <nl> + ASSERT_IS_NOT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + <nl> + const HloAliasAnalysis & analysis = RunAnalysis ( ) ; <nl> + <nl> + / / Every Ops in this graph are aliased with each other . <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte0 ) , <nl> + analysis . GetUniqueBufferAt ( tuple , / * index = * / { 0 } ) ) ; <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte0 ) , <nl> + analysis . GetUniqueBufferAt ( tuple , / * index = * / { 1 } ) ) ; <nl> + <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte1 ) , <nl> + analysis . 
GetUniqueBufferAt ( tuple , / * index = * / { 0 } ) ) ; <nl> + EXPECT_EQ ( analysis . GetUniqueBufferAt ( gte1 ) , <nl> + analysis . GetUniqueBufferAt ( tuple , / * index = * / { 1 } ) ) ; <nl> + } <nl> + <nl> + TEST_F ( HloAliasAnalysisTest , InputOutputAliasingWithWhile ) { <nl> + / / Test a simple single while instruction can be aliased with input and output <nl> + / / of the computation . <nl> + / / <nl> + / / body ( ( F32 [ ] , F32 [ ] ) % tuple_param ) : <nl> + / / % add = Add ( % tuple_param { 0 } , % tuple_param { 1 } ) <nl> + / / return Tuple ( % tuple_param { 0 } , % add ) <nl> + / / <nl> + / / condition ( ( F32 [ ] , F32 [ ] ) % tuple_param ) : <nl> + / / return Constant ( false ) <nl> + / / <nl> + / / entry : <nl> + / / % param1 = param1 <nl> + / / % while = While ( % param1 , body , condition ) <nl> + / / % while_1 = GTE ( % while , 0 ) <nl> + / / % while_2 = GTE ( % while , 1 ) <nl> + / / % negate_1 = Negate ( % while_1 ) <nl> + / / % negate_2 = Negate ( % while_2 ) <nl> + / / return Tuple ( negate_1 , negate_2 ) <nl> + / / <nl> + const Shape tuple_shape = <nl> + ShapeUtil : : MakeTupleShape ( { scalar_shape_ , scalar_shape_ } ) ; <nl> + <nl> + / / Element 0 passes transparently through the body . <nl> + auto body_builder = HloComputation : : Builder ( " body " ) ; <nl> + auto body_param = body_builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " param " ) ) ; <nl> + auto body_element_0 = body_builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , body_param , 0 ) ) ; <nl> + auto body_element_1 = body_builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , body_param , 1 ) ) ; <nl> + auto add = body_builder . AddInstruction ( HloInstruction : : CreateBinary ( <nl> + scalar_shape_ , HloOpcode : : kAdd , body_element_0 , body_element_1 ) ) ; <nl> + auto body_tuple = body_builder . AddInstruction ( <nl> + HloInstruction : : CreateTuple ( { body_element_0 , add } ) ) ; <nl> + HloComputation * body = module_ - > AddEmbeddedComputation ( body_builder . Build ( ) ) ; <nl> + <nl> + / / Condition computation trivially returns a constant " false " . <nl> + auto cond_builder = HloComputation : : Builder ( " condition " ) ; <nl> + auto cond_param = cond_builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " param " ) ) ; <nl> + cond_builder . AddInstruction ( <nl> + HloInstruction : : CreateConstant ( LiteralUtil : : CreateR0 < bool > ( false ) ) ) ; <nl> + HloComputation * condition = <nl> + module_ - > AddEmbeddedComputation ( cond_builder . Build ( ) ) ; <nl> + <nl> + auto builder = HloComputation : : Builder ( TestName ( ) ) ; <nl> + auto param = builder . AddInstruction ( <nl> + HloInstruction : : CreateParameter ( 0 , tuple_shape , " p0 " ) ) ; <nl> + <nl> + auto xla_while = builder . AddInstruction ( <nl> + HloInstruction : : CreateWhile ( tuple_shape , condition , body , param ) ) ; <nl> + auto while_element_1 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , xla_while , 0 ) ) ; <nl> + auto while_element_2 = builder . AddInstruction ( <nl> + HloInstruction : : CreateGetTupleElement ( scalar_shape_ , xla_while , 1 ) ) ; <nl> + auto negate_1 = builder . AddInstruction ( HloInstruction : : CreateUnary ( <nl> + scalar_shape_ , HloOpcode : : kNegate , while_element_1 ) ) ; <nl> + auto negate_2 = builder . 
AddInstruction ( HloInstruction : : CreateUnary ( <nl> + scalar_shape_ , HloOpcode : : kNegate , while_element_2 ) ) ; <nl> + auto tuple = <nl> + builder . AddInstruction ( HloInstruction : : CreateTuple ( { negate_1 , negate_2 } ) ) ; <nl> + module_ - > AddEntryComputation ( builder . Build ( ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 0 } , / * param_number = * / 0 , / * param_index = * / { 0 } ) ) ; <nl> + TF_ASSERT_OK ( module_ - > input_output_alias_config ( ) . SetUpAlias ( <nl> + / * output_index = * / { 1 } , / * param_number = * / 0 , / * param_index = * / { 1 } ) ) ; <nl> + <nl> + const HloAliasAnalysis & analysis = RunAnalysis ( ) ; <nl> + <nl> + EXPECT_THAT ( <nl> + GetValuesInBuffer ( analysis . GetUniqueBufferAt ( xla_while , / * index = * / { 1 } ) ) , <nl> + UnorderedElementsAre ( GetValueDefinedAt ( param , { 1 } ) , <nl> + GetValueDefinedAt ( xla_while , / * index = * / { 1 } ) , <nl> + GetValueDefinedAt ( body_param , { 1 } ) , <nl> + GetValueDefinedAt ( cond_param , { 1 } ) , <nl> + GetValueDefinedAt ( add ) , <nl> + GetValueDefinedAt ( negate_2 ) ) ) ; <nl> + <nl> + EXPECT_THAT ( <nl> + analysis . GetUniqueBufferAt ( xla_while , / * index = * / { 1 } ) . ComputePositions ( ) , <nl> + UnorderedElementsAre ( <nl> + HloPosition { param , { 1 } } , HloPosition { xla_while , { 1 } } , <nl> + HloPosition { while_element_2 , { } } , HloPosition { body_param , { 1 } } , <nl> + HloPosition { body_element_1 , { } } , HloPosition { add , { } } , <nl> + HloPosition { body_tuple , { 1 } } , HloPosition { tuple , { 1 } } , <nl> + HloPosition { cond_param , { 1 } } , HloPosition { negate_2 , { } } ) ) ; <nl> + <nl> + EXPECT_FALSE ( AnyValuesInSameBufferInterfere ( ) ) ; <nl> + } <nl> + <nl> TEST_F ( HloAliasAnalysisTest , SingleCall ) { <nl> / / Test a single call of a subcomputation . The subcomputation adds its two <nl> / / array - shaped parameters . <nl> mmm a / tensorflow / compiler / xla / service / hlo_dataflow_analysis . cc <nl> ppp b / tensorflow / compiler / xla / service / hlo_dataflow_analysis . cc <nl> bool HloDataflowAnalysis : : ValueIsDefinedAt ( const HloInstruction * instruction , <nl> <nl> const HloValue & HloDataflowAnalysis : : GetValueDefinedAt ( <nl> const HloInstruction * instruction , const ShapeIndex & index ) const { <nl> - CHECK ( ValueIsDefinedAt ( instruction , index ) ) ; <nl> + CHECK ( ValueIsDefinedAt ( instruction , index ) ) < < instruction - > ToString ( ) ; <nl> return GetUniqueValueAt ( instruction , index ) ; <nl> } <nl> <nl> new file mode 100644 <nl> index 0000000000000 . . 8128fad07ca0b <nl> mmm / dev / null <nl> ppp b / tensorflow / compiler / xla / service / hlo_input_output_alias_config . cc <nl> <nl> + / * Copyright 2018 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . 
<nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + <nl> + # include " tensorflow / compiler / xla / service / hlo_input_output_alias_config . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_module . h " <nl> + <nl> + namespace xla { <nl> + Status HloInputOutputAliasConfig : : SetUpAlias ( const ShapeIndex & output_index , <nl> + int64 param_number , <nl> + const ShapeIndex & param_index ) { <nl> + TF_RET_CHECK ( ShapeUtil : : IndexIsValid ( alias_ . shape ( ) , output_index ) ) <nl> + < < absl : : StrCat ( " Tring to set up alias at " , output_index . ToString ( ) , <nl> + " which is an invalid index for shape " , <nl> + ShapeUtil : : HumanString ( alias_ . shape ( ) ) ) ; <nl> + / / Output can ' t be aliased with multiple parameters . <nl> + TF_RET_CHECK ( ! alias_ . element ( output_index ) ) < < absl : : StrFormat ( <nl> + " Trying to set up output alias for param % lld at % s but failed : output " <nl> + " index % s is already aliased with param % lld at % s " , <nl> + param_number , param_index . ToString ( ) , output_index . ToString ( ) , <nl> + alias_ . element ( output_index ) - > first , <nl> + alias_ . element ( output_index ) - > second . ToString ( ) ) ; <nl> + ( * alias_ . mutable_element ( output_index ) ) = <nl> + std : : make_pair ( param_number , param_index ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + HloInputOutputAliasProto HloInputOutputAliasConfig : : ToProto ( ) const { <nl> + HloInputOutputAliasProto result ; <nl> + alias_ . ForEachElement ( <nl> + [ & ] ( const ShapeIndex & index , <nl> + const absl : : optional < std : : pair < int64 , ShapeIndex > > & data ) { <nl> + if ( data ) { <nl> + HloInputOutputAliasProto : : AliasEntryProto entry ; <nl> + for ( int64 i : index ) { <nl> + entry . add_output_shape_index ( i ) ; <nl> + } <nl> + entry . set_parameter_number ( data - > first ) ; <nl> + for ( int64 i : data - > second ) { <nl> + entry . add_parameter_shape_index ( i ) ; <nl> + } <nl> + result . add_entries ( ) - > Swap ( & entry ) ; <nl> + } <nl> + } ) ; <nl> + return result ; <nl> + } <nl> + <nl> + StatusOr < HloInputOutputAliasConfig > HloInputOutputAliasConfig : : CreateFromProto ( <nl> + const Shape & output_shape , const HloInputOutputAliasProto & proto ) { <nl> + HloInputOutputAliasConfig result ( output_shape ) ; <nl> + for ( const HloInputOutputAliasProto : : AliasEntryProto & entry : <nl> + proto . entries ( ) ) { <nl> + ShapeIndex output_index ( entry . output_shape_index ( ) . begin ( ) , <nl> + entry . output_shape_index ( ) . end ( ) ) ; <nl> + <nl> + int64 param_number = entry . parameter_number ( ) ; <nl> + ShapeIndex param_index ( entry . parameter_shape_index ( ) . begin ( ) , <nl> + entry . parameter_shape_index ( ) . end ( ) ) ; <nl> + TF_RETURN_IF_ERROR ( <nl> + result . SetUpAlias ( output_index , param_number , param_index ) ) ; <nl> + } <nl> + <nl> + return result ; <nl> + } <nl> + <nl> + string HloInputOutputAliasConfig : : ToString ( ) const { <nl> + std : : vector < string > pieces ; <nl> + pieces . push_back ( " HloInputOutputAliasConfig " ) ; <nl> + <nl> + ForEachAlias ( [ & ] ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) { <nl> + pieces . push_back ( absl : : StrFormat ( <nl> + " OutputIndex % s is aliased with parameter % lld at % s : " , <nl> + output_index . ToString ( ) , param_number , param_index . 
ToString ( ) ) ) ; <nl> + } ) ; <nl> + <nl> + return absl : : StrJoin ( pieces , " \ n " ) ; <nl> + } <nl> + <nl> + bool HloInputOutputAliasConfig : : ParameterHasAlias ( <nl> + int64 param_number , const ShapeIndex & param_index ) const { <nl> + bool output = false ; <nl> + alias_ . ForEachElement ( <nl> + [ & ] ( const xla : : ShapeIndex & , <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > alias ) { <nl> + if ( alias & & alias - > first = = param_number & & <nl> + alias - > second = = param_index ) { <nl> + output = true ; <nl> + } <nl> + } ) ; <nl> + return output ; <nl> + } <nl> + <nl> + absl : : optional < ShapeIndex > HloInputOutputAliasConfig : : GetAliasedOutput ( <nl> + int64 param_number , const ShapeIndex & param_index ) const { <nl> + absl : : optional < ShapeIndex > output ; <nl> + alias_ . ForEachElement ( <nl> + [ & ] ( const xla : : ShapeIndex & output_index , <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > alias ) { <nl> + if ( alias & & alias - > first = = param_number & & <nl> + alias - > second = = param_index ) { <nl> + output = output_index ; <nl> + } <nl> + } ) ; <nl> + return output ; <nl> + } <nl> + <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > <nl> + HloInputOutputAliasConfig : : GetAliasedParameter ( <nl> + const ShapeIndex & output_index ) const { <nl> + CHECK ( ShapeUtil : : IndexIsValid ( alias_ . shape ( ) , output_index ) ) ; <nl> + return alias_ . element ( output_index ) ; <nl> + } <nl> + <nl> + void HloInputOutputAliasConfig : : ForEachAlias ( AliasFn fn ) const { <nl> + alias_ . ForEachElement ( <nl> + [ & ] ( const ShapeIndex & output_index , <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > aliased ) { <nl> + if ( aliased ) { <nl> + fn ( output_index , aliased - > first , aliased - > second ) ; <nl> + } <nl> + } ) ; <nl> + } <nl> + <nl> + Status HloInputOutputAliasConfig : : ForEachAliasWithStatus ( <nl> + AliasFnWithStatus fn ) const { <nl> + return alias_ . ForEachElementWithStatus ( <nl> + [ & ] ( const ShapeIndex & output_index , <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > aliased ) { <nl> + if ( aliased ) { <nl> + TF_RETURN_IF_ERROR ( fn ( output_index , aliased - > first , aliased - > second ) ) ; <nl> + } <nl> + return Status : : OK ( ) ; <nl> + } ) ; <nl> + } <nl> + <nl> + Status HloInputOutputAliasConfig : : Verify ( const HloModule & module ) const { <nl> + std : : vector < ShapeTree < bool > > param_has_seen ; <nl> + const HloComputation * entry = module . entry_computation ( ) ; <nl> + for ( int64 i = 0 ; i < entry - > num_parameters ( ) ; + + i ) { <nl> + HloInstruction * param = entry - > parameter_instruction ( i ) ; <nl> + param_has_seen . emplace_back ( param - > shape ( ) ) ; <nl> + } <nl> + return ForEachAliasWithStatus ( [ & ] ( const ShapeIndex & output_index , <nl> + int64 param_number , <nl> + const ShapeIndex & param_index ) - > Status { <nl> + const HloInstruction * root = entry - > root_instruction ( ) ; <nl> + <nl> + const Shape & param_shape = <nl> + entry - > parameter_instruction ( param_number ) - > shape ( ) ; <nl> + const Shape & output_shape = root - > shape ( ) ; <nl> + TF_RET_CHECK ( entry - > num_parameters ( ) > param_number ) ; <nl> + TF_RET_CHECK ( ShapeUtil : : IndexIsValid ( param_shape , param_index ) ) ; <nl> + TF_RET_CHECK ( ShapeUtil : : IndexIsValid ( output_shape , output_index ) ) ; <nl> + <nl> + / / Check each param_number and param_index pair only show up once . 
No <nl> + / / input can be aliased with output buffers . <nl> + TF_RET_CHECK ( param_has_seen [ param_number ] . element ( param_index ) = = false ) ; <nl> + <nl> + * ( param_has_seen [ param_number ] . mutable_element ( param_index ) ) = true ; <nl> + <nl> + return Status : : OK ( ) ; <nl> + } ) ; <nl> + } <nl> + <nl> + std : : ostream & operator < < ( std : : ostream & out , <nl> + const HloInputOutputAliasConfig & config ) { <nl> + out < < config . ToString ( ) ; <nl> + return out ; <nl> + } <nl> + } / / namespace xla <nl> new file mode 100644 <nl> index 0000000000000 . . 0fae75842ba28 <nl> mmm / dev / null <nl> ppp b / tensorflow / compiler / xla / service / hlo_input_output_alias_config . h <nl> <nl> + / * Copyright 2018 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + <nl> + # ifndef TENSORFLOW_COMPILER_XLA_SERVICE_HLO_INPUT_OUTPUT_ALIAS_CONFIG_H_ <nl> + # define TENSORFLOW_COMPILER_XLA_SERVICE_HLO_INPUT_OUTPUT_ALIAS_CONFIG_H_ <nl> + <nl> + # include < utility > <nl> + <nl> + # include " absl / types / optional . h " <nl> + # include " tensorflow / compiler / xla / service / hlo . pb . h " <nl> + # include " tensorflow / compiler / xla / shape_tree . h " <nl> + # include " tensorflow / compiler / xla / shape_util . h " <nl> + <nl> + namespace xla { <nl> + <nl> + class HloModule ; <nl> + <nl> + / / This class specifies the alias map from output index to parameter number and <nl> + / / parameter index in the entry computation . <nl> + class HloInputOutputAliasConfig { <nl> + public : <nl> + HloInputOutputAliasConfig ( ) = default ; <nl> + <nl> + explicit HloInputOutputAliasConfig ( Shape shape ) : alias_ ( shape ) { } <nl> + <nl> + virtual ~ HloInputOutputAliasConfig ( ) = default ; <nl> + <nl> + / / Sets up alias config from ` output_index ` to ` param_index ` at <nl> + / / ` param_number ` . <nl> + Status SetUpAlias ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) ; <nl> + <nl> + / / Returns true if the given parameter is aliased with one of the output <nl> + / / buffers . <nl> + bool ParameterHasAlias ( int64 param_number , <nl> + const ShapeIndex & param_index ) const ; <nl> + <nl> + / / ( De ) Serializes an HloInputOutoutAliasConfig to / from an <nl> + / / HloInputOutoutAliasProto . <nl> + HloInputOutputAliasProto ToProto ( ) const ; <nl> + <nl> + static StatusOr < HloInputOutputAliasConfig > CreateFromProto ( <nl> + const Shape & output_shape , const HloInputOutputAliasProto & proto ) ; <nl> + <nl> + / / Returns the output index that the given parameter and parameter index is <nl> + / / aliased with . A nullopt is returned if there is no output that is aliased <nl> + / / with the parameter number and index . 
<nl> + absl : : optional < ShapeIndex > GetAliasedOutput ( <nl> + int64 param_number , const ShapeIndex & param_index ) const ; <nl> + <nl> + / / Returns the number of parameter and index of the parameter buffer that the <nl> + / / given output buffer index is aliased with . A nullopt is returned if there <nl> + / / is no parameter is aliased with the specific output . <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > GetAliasedParameter ( <nl> + const ShapeIndex & output_index ) const ; <nl> + <nl> + using AliasFn = <nl> + std : : function < void ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) > ; <nl> + <nl> + / / Iterates through each aliased output and input . <nl> + void ForEachAlias ( AliasFn fn ) const ; <nl> + <nl> + using AliasFnWithStatus = <nl> + std : : function < Status ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index ) > ; <nl> + <nl> + / / Verifies that the given config is valid for the given module . <nl> + / / Specifically , the config ' s input and output should be in - bound and size of <nl> + / / the aliased buffers should match . <nl> + Status Verify ( const HloModule & module ) const ; <nl> + <nl> + Status ForEachAliasWithStatus ( AliasFnWithStatus fn ) const ; <nl> + <nl> + string ToString ( ) const ; <nl> + <nl> + private : <nl> + / / A ShapeTree which indicates the list of buffers that ' s expected to be <nl> + / / aliased . The key on this shape tree represents the output index . The value <nl> + / / is a pair of parameter number and index into the buffer . If the value is <nl> + / / nullopt , it means there is no parameter aliasing for this output . <nl> + ShapeTree < absl : : optional < std : : pair < int64 , ShapeIndex > > > alias_ ; <nl> + } ; <nl> + <nl> + std : : ostream & operator < < ( std : : ostream & out , <nl> + const HloInputOutputAliasConfig & config ) ; <nl> + <nl> + } / / namespace xla <nl> + <nl> + # endif / / TENSORFLOW_COMPILER_XLA_SERVICE_HLO_INPUT_OUTPUT_ALIAS_CONFIG_H_ <nl> new file mode 100644 <nl> index 0000000000000 . . 3b61ff04e6d7e <nl> mmm / dev / null <nl> ppp b / tensorflow / compiler / xla / service / hlo_input_output_alias_config_test . cc <nl> <nl> + / * Copyright 2017 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + <nl> + # include " tensorflow / compiler / xla / service / hlo_input_output_alias_config . h " <nl> + <nl> + # include < memory > <nl> + # include < string > <nl> + <nl> + # include " absl / algorithm / container . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_computation . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_dce . 
h " <nl> + # include " tensorflow / compiler / xla / service / hlo_instruction . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_memory_scheduler . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_opcode . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_ordering . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_parser . h " <nl> + # include " tensorflow / compiler / xla / shape_util . h " <nl> + # include " tensorflow / compiler / xla / tests / hlo_test_base . h " <nl> + # include " tensorflow / compiler / xla / types . h " <nl> + # include " tensorflow / compiler / xla / xla_data . pb . h " <nl> + # include " tensorflow / core / lib / core / status_test_util . h " <nl> + <nl> + namespace xla { <nl> + namespace { <nl> + class HloInputOutputAliasConfigTest : public HloTestBase { <nl> + protected : <nl> + void expect_aliased ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index , <nl> + const HloInputOutputAliasConfig & config ) { <nl> + absl : : optional < ShapeIndex > aliased_output = <nl> + config . GetAliasedOutput ( param_number , param_index ) ; <nl> + <nl> + EXPECT_TRUE ( aliased_output ) ; <nl> + EXPECT_EQ ( aliased_output . value ( ) , output_index ) ; <nl> + <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > aliased_param = <nl> + config . GetAliasedParameter ( output_index ) ; <nl> + <nl> + EXPECT_TRUE ( aliased_param ) ; <nl> + EXPECT_EQ ( aliased_param . value ( ) , std : : make_pair ( param_number , param_index ) ) ; <nl> + } <nl> + <nl> + void expect_not_aliased ( const ShapeIndex & output_index , int64 param_number , <nl> + const ShapeIndex & param_index , <nl> + const HloInputOutputAliasConfig & config ) { <nl> + absl : : optional < ShapeIndex > aliased_output = <nl> + config . GetAliasedOutput ( param_number , param_index ) ; <nl> + <nl> + EXPECT_FALSE ( aliased_output & & aliased_output = = output_index ) ; <nl> + <nl> + absl : : optional < std : : pair < int64 , ShapeIndex > > aliased_param = <nl> + config . GetAliasedParameter ( output_index ) ; <nl> + <nl> + EXPECT_FALSE ( aliased_param & & aliased_param - > first = = param_number & & <nl> + aliased_param - > second = = param_index ) ; <nl> + } <nl> + } ; <nl> + <nl> + TEST_F ( HloInputOutputAliasConfigTest , SimpleAliasing ) { <nl> + const string module_str = R " ( <nl> + HloModule TEST <nl> + <nl> + ENTRY main { <nl> + a = f32 [ ] parameter ( 0 ) <nl> + b = f32 [ ] parameter ( 1 ) <nl> + ROOT root = ( f32 [ ] , f32 [ ] ) tuple ( % a , % b ) <nl> + } <nl> + ) " ; <nl> + TF_ASSERT_OK_AND_ASSIGN ( std : : unique_ptr < HloModule > module , <nl> + ParseHloString ( module_str ) ) ; <nl> + <nl> + HloInputOutputAliasConfig config ( <nl> + module - > entry_computation ( ) - > root_instruction ( ) - > shape ( ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . 
SetUpAlias ( / * output_index = * / { 0 } , / * param_number = * / 1 , <nl> + / * param_index = * / { } ) ) ; <nl> + <nl> + expect_aliased ( / * output_index = * / { 0 } , / * param_number = * / 1 , <nl> + / * param_index = * / { } , config ) ; <nl> + <nl> + expect_not_aliased ( / * output_index = * / { 1 } , / * param_number = * / 1 , <nl> + / * param_index = * / { } , config ) ; <nl> + <nl> + expect_not_aliased ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { } , config ) ; <nl> + } <nl> + <nl> + TEST_F ( HloInputOutputAliasConfigTest , SimpleAliasingWithTupleInput ) { <nl> + const string module_str = R " ( <nl> + HloModule TEST <nl> + <nl> + ENTRY main { <nl> + param = ( f32 [ ] , f32 [ ] ) parameter ( 0 ) <nl> + gte1 = f32 [ ] get - tuple - element ( % param ) , index = 0 <nl> + gte2 = f32 [ ] get - tuple - element ( % param ) , index = 1 <nl> + ROOT root = ( f32 [ ] , f32 [ ] ) tuple ( % gte1 , % gte2 ) <nl> + } <nl> + ) " ; <nl> + TF_ASSERT_OK_AND_ASSIGN ( std : : unique_ptr < HloModule > module , <nl> + ParseHloString ( module_str ) ) ; <nl> + <nl> + HloInputOutputAliasConfig config ( <nl> + module - > entry_computation ( ) - > root_instruction ( ) - > shape ( ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . SetUpAlias ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { 0 } ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . SetUpAlias ( / * output_index = * / { 1 } , / * param_number = * / 0 , <nl> + / * param_index = * / { 1 } ) ) ; <nl> + <nl> + expect_aliased ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { 0 } , config ) ; <nl> + <nl> + expect_aliased ( / * output_index = * / { 1 } , / * param_number = * / 0 , <nl> + / * param_index = * / { 1 } , config ) ; <nl> + <nl> + expect_not_aliased ( / * output_index = * / { 1 } , / * param_number = * / 1 , <nl> + / * param_index = * / { } , config ) ; <nl> + <nl> + expect_not_aliased ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { } , config ) ; <nl> + } <nl> + <nl> + TEST_F ( HloInputOutputAliasConfigTest , InputDoNotAliasTwice ) { <nl> + const string module_str = R " ( <nl> + HloModule TEST <nl> + <nl> + ENTRY main { <nl> + a = f32 [ ] parameter ( 0 ) <nl> + b = f32 [ ] parameter ( 1 ) <nl> + ROOT root = ( f32 [ ] , f32 [ ] ) tuple ( % a , % b ) <nl> + } <nl> + ) " ; <nl> + TF_ASSERT_OK_AND_ASSIGN ( std : : unique_ptr < HloModule > module , <nl> + ParseHloString ( module_str ) ) ; <nl> + <nl> + HloInputOutputAliasConfig config ( <nl> + module - > entry_computation ( ) - > root_instruction ( ) - > shape ( ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . SetUpAlias ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { } ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . SetUpAlias ( / * output_index = * / { 1 } , / * param_number = * / 0 , <nl> + / * param_index = * / { } ) ) ; <nl> + <nl> + ASSERT_IS_NOT_OK ( config . 
Verify ( * module ) ) ; <nl> + } <nl> + <nl> + TEST_F ( HloInputOutputAliasConfigTest , OutputDoNotAliasTwice ) { <nl> + const string module_str = R " ( <nl> + HloModule TEST <nl> + <nl> + ENTRY main { <nl> + a = f32 [ ] parameter ( 0 ) <nl> + b = f32 [ ] parameter ( 1 ) <nl> + ROOT root = ( f32 [ ] , f32 [ ] ) tuple ( % a , % b ) <nl> + } <nl> + ) " ; <nl> + TF_ASSERT_OK_AND_ASSIGN ( std : : unique_ptr < HloModule > module , <nl> + ParseHloString ( module_str ) ) ; <nl> + <nl> + HloInputOutputAliasConfig config ( <nl> + module - > entry_computation ( ) - > root_instruction ( ) - > shape ( ) ) ; <nl> + <nl> + TF_ASSERT_OK ( config . SetUpAlias ( / * output_index = * / { 0 } , / * param_number = * / 0 , <nl> + / * param_index = * / { } ) ) ; <nl> + <nl> + ASSERT_IS_NOT_OK ( config . SetUpAlias ( / * output_index = * / { 0 } , / * param_number = * / 1 , <nl> + / * param_index = * / { } ) ) ; <nl> + } <nl> + } / / namespace <nl> + } / / namespace xla <nl> mmm a / tensorflow / compiler / xla / service / hlo_module . cc <nl> ppp b / tensorflow / compiler / xla / service / hlo_module . cc <nl> HloComputation * HloModule : : AddComputationInternal ( <nl> config_ . SetDefaultComputationLayout ( <nl> entry_computation_ - > ComputeProgramShape ( ) ) ; <nl> } <nl> + input_output_alias_config_ = HloInputOutputAliasConfig ( <nl> + entry_computation_ - > root_instruction ( ) - > shape ( ) ) ; <nl> } <nl> <nl> if ( uniquify_identifiers ) { <nl> HloModuleProto HloModule : : ToProto ( ) const { <nl> if ( has_schedule ( ) ) { <nl> * proto . mutable_schedule ( ) = schedule ( ) . ToProto ( ) . ValueOrDie ( ) ; <nl> } <nl> + <nl> + * proto . mutable_input_output_alias ( ) = input_output_alias_config ( ) . ToProto ( ) ; <nl> + <nl> return proto ; <nl> } <nl> <nl> StatusOr < std : : unique_ptr < HloModule > > HloModule : : CreateFromProto ( <nl> } <nl> TF_RET_CHECK ( module - > entry_computation_ ! = nullptr ) ; <nl> <nl> + TF_ASSIGN_OR_RETURN ( module - > input_output_alias_config_ , <nl> + HloInputOutputAliasConfig : : CreateFromProto ( <nl> + result_shape , proto . input_output_alias ( ) ) ) ; <nl> + <nl> / / Because we didn ' t uniquify the names or the ids , double - check that the <nl> / / instruction and computation names and ids are unique from the proto . <nl> absl : : flat_hash_set < string > computation_names ; <nl> mmm a / tensorflow / compiler / xla / service / hlo_module . h <nl> ppp b / tensorflow / compiler / xla / service / hlo_module . h <nl> limitations under the License . <nl> # include " tensorflow / compiler / xla / service / hlo . pb . h " <nl> # include " tensorflow / compiler / xla / service / hlo_clone_context . h " <nl> # include " tensorflow / compiler / xla / service / hlo_computation . h " <nl> + # include " tensorflow / compiler / xla / service / hlo_input_output_alias_config . h " <nl> # include " tensorflow / compiler / xla / service / hlo_instruction . h " <nl> # include " tensorflow / compiler / xla / service / hlo_module_config . h " <nl> # include " tensorflow / compiler / xla / service / hlo_schedule . h " <nl> class HloModule { <nl> return result ; <nl> } <nl> <nl> + / / input_output_alias_config indicates the list of aliased buffers that are <nl> + / / expected from the module . 
<nl> + HloInputOutputAliasConfig & input_output_alias_config ( ) { <nl> + return input_output_alias_config_ ; <nl> + } <nl> + const HloInputOutputAliasConfig & input_output_alias_config ( ) const { <nl> + return input_output_alias_config_ ; <nl> + } <nl> + <nl> / / Returns an id that is unique to this module across all modules created over <nl> / / the lifetime of this process . <nl> int unique_id ( ) const { return unique_id_ ; } <nl> class HloModule { <nl> / / sequential order of instructions for each non - fusion computation in the <nl> / / module . <nl> absl : : optional < HloSchedule > schedule_ ; <nl> + <nl> + / / alias_config indicates the alias information of input / output buffers that <nl> + / / are expected from the module . <nl> + HloInputOutputAliasConfig input_output_alias_config_ ; <nl> } ; <nl> <nl> } / / namespace xla <nl> mmm a / tensorflow / compiler / xla / service / hlo_verifier . cc <nl> ppp b / tensorflow / compiler / xla / service / hlo_verifier . cc <nl> StatusOr < bool > HloVerifier : : Run ( HloModule * module ) { <nl> TF_RETURN_IF_ERROR ( module - > schedule ( ) . Verify ( ) ) ; <nl> } <nl> <nl> + TF_RETURN_IF_ERROR ( module - > input_output_alias_config ( ) . Verify ( * module ) ) ; <nl> + <nl> return false ; <nl> } <nl> <nl> mmm a / tensorflow / compiler / xla / shape_util . h <nl> ppp b / tensorflow / compiler / xla / shape_util . h <nl> class ShapeIndex { <nl> void push_back ( int64 value ) { indices_ . push_back ( value ) ; } <nl> void pop_back ( ) { indices_ . pop_back ( ) ; } <nl> <nl> - / / push_front is O ( n ^ 2 ) , but shapes don ' t usually have a ton of dimensions . <nl> + / / push_front is O ( n ) , but shapes don ' t usually have a ton of dimensions . <nl> void push_front ( int64 value ) { indices_ . insert ( indices_ . begin ( ) , value ) ; } <nl> <nl> using container_type = absl : : InlinedVector < int64 , 2 > ; <nl> | [ Resubmit ] [ XLA ] Introduce input / output alias config . | tensorflow/tensorflow | 028410c7f4b0555c5ec3b818892ff8fdac90fc25 | 2018-10-11T19:19:03Z |
mmm a / test / test_nn . py <nl> ppp b / test / test_nn . py <nl> def __init__ ( self ) : <nl> self . assertEqual ( num_params ( n ) , 2 ) <nl> self . assertEqual ( num_params ( s ) , 2 ) <nl> <nl> + def test_Sequential_getitem ( self ) : <nl> + l1 = nn . Linear ( 10 , 20 ) <nl> + l2 = nn . Linear ( 20 , 30 ) <nl> + l3 = nn . Linear ( 30 , 40 ) <nl> + l4 = nn . Linear ( 40 , 50 ) <nl> + n = nn . Sequential ( l1 , l2 , l3 , l4 ) <nl> + self . assertEqual ( n [ 0 ] , l1 ) <nl> + self . assertEqual ( n [ 1 ] , l2 ) <nl> + self . assertEqual ( n [ 2 ] , l3 ) <nl> + self . assertEqual ( n [ 3 ] , l4 ) <nl> + <nl> def test_add_module ( self ) : <nl> l = nn . Linear ( 10 , 20 ) <nl> net = nn . Container ( <nl> mmm a / torch / nn / modules / container . py <nl> ppp b / torch / nn / modules / container . py <nl> def __call__ ( self , input ) : <nl> " " " <nl> def __init__ ( self , * * kwargs ) : <nl> super ( Container , self ) . __init__ ( ) <nl> - self . modules = OrderedDict ( ) <nl> + self . _modules = OrderedDict ( ) <nl> for key , value in kwargs . items ( ) : <nl> self . add_module ( key , value ) <nl> <nl> def add_module ( self , name , module ) : <nl> if not isinstance ( module , Module ) and module is not None : <nl> raise ValueError ( " { } is not a Module subclass " . format ( <nl> torch . typename ( module ) ) ) <nl> - self . modules [ name ] = module <nl> + self . _modules [ name ] = module <nl> <nl> def __getattr__ ( self , name ) : <nl> - if ' modules ' in self . __dict__ : <nl> - modules = self . __dict__ [ ' modules ' ] <nl> + if ' _modules ' in self . __dict__ : <nl> + modules = self . __dict__ [ ' _modules ' ] <nl> if name in modules : <nl> return modules [ name ] <nl> return Module . __getattr__ ( self , name ) <nl> def parameters ( self , memo = None ) : <nl> if memo is None : <nl> memo = set ( ) <nl> super ( Container , self ) . parameters ( memo ) <nl> - for module in self . modules . values ( ) : <nl> - if module is not None : <nl> - for p in module . parameters ( memo ) : <nl> - yield p <nl> + for module in self . children ( ) : <nl> + for p in module . parameters ( memo ) : <nl> + yield p <nl> + <nl> + def children ( self ) : <nl> + memo = set ( ) <nl> + for module in self . _modules . values ( ) : <nl> + if module is not None and module not in memo : <nl> + memo . add ( module ) <nl> + yield module <nl> + <nl> + def modules ( self , memo = None ) : <nl> + if memo is None : <nl> + memo = set ( ) <nl> + if self not in memo : <nl> + super ( Container , self ) . modules ( memo ) <nl> + for module in self . children ( ) : <nl> + for m in module . modules ( memo ) : <nl> + yield m <nl> <nl> def _apply ( self , fn ) : <nl> - for module in self . modules . values ( ) : <nl> - if module is not None : <nl> - module . _apply ( fn ) <nl> + for module in self . children ( ) : <nl> + module . _apply ( fn ) <nl> return super ( Container , self ) . _apply ( fn ) <nl> <nl> <nl> def __init__ ( self , * args ) : <nl> idx + = 1 <nl> <nl> def __getitem__ ( self , idx ) : <nl> - if idx > = len ( self . modules ) : <nl> + if idx < 0 or idx > = len ( self . _modules ) : <nl> raise IndexError ( ' index { } is out of range ' . format ( idx ) ) <nl> - it = self . modules . values ( ) <nl> - for i in range ( idx - 1 ) : <nl> - it . next ( ) <nl> - return it . next ( ) <nl> + it = iter ( self . _modules . values ( ) ) <nl> + for i in range ( idx ) : <nl> + next ( it ) <nl> + return next ( it ) <nl> <nl> def forward ( self , input ) : <nl> - for module in self . modules . 
values ( ) : <nl> + for module in self . _modules . values ( ) : <nl> input = module ( input ) <nl> return input <nl> mmm a / torch / nn / modules / module . py <nl> ppp b / torch / nn / modules / module . py <nl> def parameters ( self , memo = None ) : <nl> memo . add ( p ) <nl> yield p <nl> <nl> + def children ( self ) : <nl> + if False : <nl> + yield <nl> + <nl> + def modules ( self , memo = None ) : <nl> + if memo is None : <nl> + memo = set ( ) <nl> + if self not in memo : <nl> + memo . add ( self ) <nl> + yield self <nl> + <nl> def zero_grad ( self ) : <nl> for p in self . parameters ( ) : <nl> p . grad . zero_ ( ) <nl> - <nl> mmm a / torch / nn / parallel / replicate . py <nl> ppp b / torch / nn / parallel / replicate . py <nl> def _replicate_module ( module , gpu , param_remap ) : <nl> for key , param in module . _parameters . items ( ) : <nl> replica . _parameters [ key ] = param_remap . get ( param ) <nl> if isinstance ( replica , Container ) : <nl> - replica . modules = OrderedDict ( ) <nl> - for name , child in module . modules . items ( ) : <nl> - replica . modules [ name ] = _replicate_module ( child , gpu , param_remap ) <nl> + replica . _modules = OrderedDict ( ) <nl> + for name , child in module . _modules . items ( ) : <nl> + replica . _modules [ name ] = _replicate_module ( child , gpu , param_remap ) <nl> return replica <nl> <nl> <nl> | Add Module . modules ( ) and Module . children ( ) ( ) | pytorch/pytorch | f4ebc65a122e7a73c1b559ede35c4e2733665929 | 2016-10-02T01:18:53Z |
mmm a / filament / src / PostProcessManager . cpp <nl> ppp b / filament / src / PostProcessManager . cpp <nl> FrameGraphResource PostProcessManager : : ssao ( FrameGraph & fg , RenderPass & pass , <nl> . format = TextureFormat : : R8 } ) ; <nl> <nl> data . ssao = builder . useRenderTarget ( " SSAO Target " , <nl> - { . attachments . color = data . ssao } , TargetBufferFlags : : NONE ) . color ; <nl> + { . attachments . color = data . ssao , <nl> + . attachments . depth = { data . depth , FrameGraphRenderTarget : : Attachments : : READ } <nl> + } , TargetBufferFlags : : NONE ) . color ; <nl> } , <nl> [ this , fullScreenRenderPrimitive ] ( FrameGraphPassResources const & resources , <nl> SSAOPassData const & data , DriverApi & driver ) { <nl> FrameGraphResource PostProcessManager : : ssao ( FrameGraph & fg , RenderPass & pass , <nl> PipelineState pipeline ; <nl> pipeline . program = mSSAOProgram ; <nl> pipeline . rasterState = mSSAOMaterial - > getRasterState ( ) ; <nl> + pipeline . rasterState . depthFunc = RasterState : : DepthFunc : : G ; <nl> <nl> driver . beginRenderPass ( ssao . target , ssao . params ) ; <nl> driver . draw ( pipeline , fullScreenRenderPrimitive ) ; <nl> mmm a / filament / src / materials / sao . mat <nl> ppp b / filament / src / materials / sao . mat <nl> material { <nl> ] , <nl> vertexDomain : device , <nl> depthWrite : false , <nl> - depthCulling : false , <nl> + depthCulling : true , <nl> shadingModel : unlit , <nl> variantFilter : [ skinning ] , <nl> culling : none <nl> vertex { <nl> void materialVertex ( inout MaterialVertexInputs material ) { <nl> / / far - plane in view space <nl> vec4 position = getPosition ( ) ; / / clip - space <nl> + position . z = 1 . 0 ; / / far plane <nl> material . vertex . xy = ( position . xy * 0 . 5 + 0 . 5 ) ; <nl> material . vertex . zw = position . xy ; <nl> } <nl> mmm a / filament / src / materials / ssao . mat <nl> ppp b / filament / src / materials / ssao . mat <nl> material { <nl> ] , <nl> vertexDomain : device , <nl> depthWrite : false , <nl> - depthCulling : false , <nl> + depthCulling : true , <nl> shadingModel : unlit , <nl> variantFilter : [ skinning ] , <nl> culling : none <nl> vertex { <nl> void materialVertex ( inout MaterialVertexInputs material ) { <nl> / / far - plane in view space <nl> vec4 position = getPosition ( ) ; / / clip - space <nl> + position . z = 1 . 0 ; / / far plane <nl> material . vertex . xy = ( position . xy * 0 . 5 + 0 . 5 ) ; <nl> material . vertex . zw = position . xy ; <nl> } <nl> | Use depth culling when computing SSAO | google/filament | 1b41ef78cd0975da1ab016dc850949bc53db0e4b | 2019-05-23T23:35:16Z |
mmm a / modules / planning / common / obstacle . cc <nl> ppp b / modules / planning / common / obstacle . cc <nl> Obstacle : : Obstacle ( const std : : string & id , <nl> perception_obstacle_ . theta ( ) , <nl> perception_obstacle_ . length ( ) , <nl> perception_obstacle_ . width ( ) ) { <nl> + is_caution_level_obstacle_ = ( obstacle_priority = = ObstaclePriority : : CAUTION ) ; <nl> std : : vector < common : : math : : Vec2d > polygon_points ; <nl> if ( FLAGS_use_navigation_mode | | <nl> perception_obstacle . polygon_point_size ( ) < = 2 ) { <nl> Obstacle : : Obstacle ( const std : : string & id , <nl> const ObstaclePriority : : Priority & obstacle_priority , <nl> const bool is_static ) <nl> : Obstacle ( id , perception_obstacle , obstacle_priority , is_static ) { <nl> + is_caution_level_obstacle_ = ( obstacle_priority = = ObstaclePriority : : CAUTION ) ; <nl> trajectory_ = trajectory ; <nl> auto & trajectory_points = * trajectory_ . mutable_trajectory_point ( ) ; <nl> double cumulative_s = 0 . 0 ; <nl> bool Obstacle : : BuildTrajectoryStBoundary ( const ReferenceLine & reference_line , <nl> kSkipLDistanceFactor + <nl> adc_width / 2 . 0 ; <nl> <nl> - if ( std : : fmin ( object_boundary . start_l ( ) , object_boundary . end_l ( ) ) > <nl> + if ( ! IsCautionLevelObstacle ( ) & & ( <nl> + std : : fmin ( object_boundary . start_l ( ) , object_boundary . end_l ( ) ) > <nl> skip_l_distance | | <nl> std : : fmax ( object_boundary . start_l ( ) , object_boundary . end_l ( ) ) < <nl> - - skip_l_distance ) { <nl> + - skip_l_distance ) ) { <nl> continue ; <nl> } <nl> <nl> - if ( object_boundary . end_s ( ) < 0 ) { / / skip if behind reference line <nl> + if ( ! IsCautionLevelObstacle ( ) & & object_boundary . end_s ( ) < 0 ) { <nl> + / / skip if behind reference line <nl> continue ; <nl> } <nl> constexpr double kSparseMappingS = 20 . 0 ; <nl> mmm a / modules / planning / common / obstacle . h <nl> ppp b / modules / planning / common / obstacle . h <nl> class Obstacle { <nl> <nl> static bool IsValidTrajectoryPoint ( const common : : TrajectoryPoint & point ) ; <nl> <nl> + inline bool IsCautionLevelObstacle ( ) const { <nl> + return is_caution_level_obstacle_ ; <nl> + } <nl> + <nl> / / const Obstacle * obstacle ( ) const ; <nl> <nl> / * * <nl> class Obstacle { <nl> <nl> bool is_lane_change_blocking_ = false ; <nl> <nl> + bool is_caution_level_obstacle_ = false ; <nl> + <nl> double min_radius_stop_distance_ = - 1 . 0 ; <nl> <nl> struct ObjectTagCaseHash { <nl> mmm a / modules / planning / common / reference_line_info . cc <nl> ppp b / modules / planning / common / reference_line_info . cc <nl> bool ReferenceLineInfo : : AddObstacles ( <nl> } <nl> <nl> bool ReferenceLineInfo : : IsUnrelaventObstacle ( const Obstacle * obstacle ) { <nl> + if ( obstacle - > IsCautionLevelObstacle ( ) ) { <nl> + return false ; <nl> + } <nl> / / if adc is on the road , and obstacle behind adc , ignore <nl> if ( obstacle - > PerceptionSLBoundary ( ) . end_s ( ) > reference_line_ . Length ( ) ) { <nl> return true ; <nl> mmm a / modules / planning / traffic_rules / backside_vehicle . cc <nl> ppp b / modules / planning / traffic_rules / backside_vehicle . cc <nl> void BacksideVehicle : : MakeLaneKeepingObstacleDecision ( <nl> adc_sl_boundary . end_s ( ) - adc_sl_boundary . start_s ( ) ; <nl> for ( const auto * obstacle : path_decision - > obstacles ( ) . Items ( ) ) { <nl> if ( obstacle - > PerceptionSLBoundary ( ) . end_s ( ) > = <nl> - adc_sl_boundary . end_s ( ) ) { / / don ' t ignore such vehicles . <nl> + adc_sl_boundary . 
end_s ( ) | | obstacle - > IsCautionLevelObstacle ( ) ) { <nl> + / / don ' t ignore such vehicles . <nl> continue ; <nl> } <nl> <nl> | Planning : add backside caution - level vehicle to St - boundary | ApolloAuto/apollo | 62d0997c38f38dedd66a49e8f889399cc1b7315b | 2019-03-26T04:23:29Z |
mmm a / Tests / UnitTests / ReaderTests / ImageReaderTests . cpp <nl> ppp b / Tests / UnitTests / ReaderTests / ImageReaderTests . cpp <nl> BOOST_AUTO_TEST_CASE ( ImageAndTextReaderSimple ) <nl> HelperRunReaderTest < float > ( <nl> testDataPath ( ) + " / Config / ImageAndTextReaderSimple_Config . cntk " , <nl> testDataPath ( ) + " / Control / ImageReaderSimple_Control . txt " , <nl> - testDataPath ( ) + " / Control / ImageReaderSimple_Output . txt " , <nl> + testDataPath ( ) + " / Control / ImageAndTextReaderSimple_Output . txt " , <nl> " Simple_Test " , <nl> " reader " , <nl> 4 , <nl> | Changing output file | microsoft/CNTK | f1ef5c447da66d751f35128e6a110ca39c80f33e | 2016-05-18T15:14:13Z |
mmm a / buildscripts / resmokeconfig / suites / sharding_continuous_config_stepdown . yml <nl> ppp b / buildscripts / resmokeconfig / suites / sharding_continuous_config_stepdown . yml <nl> selector : <nl> - jstests / sharding / commands_that_write_accept_wc_configRS . js <nl> - jstests / sharding / commands_that_write_accept_wc_shards . js <nl> - jstests / sharding / move_chunk_wc . js <nl> + # Test never completes because the CSRS balancer gets restarted too many times <nl> + - jstests / sharding / jumbo1 . js <nl> + <nl> executor : <nl> js_test : <nl> config : <nl> mmm a / jstests / sharding / shard3 . js <nl> ppp b / jstests / sharding / shard3 . js <nl> <nl> s . adminCommand ( { enablesharding : " test " } ) ; <nl> s . ensurePrimaryShard ( ' test ' , ' shard0001 ' ) ; <nl> s . adminCommand ( { shardcollection : " test . foo " , key : { num : 1 } } ) ; <nl> - if ( s . configRS ) { <nl> - / / Ensure that the second mongos will see the movePrimary <nl> - s . configRS . awaitLastOpCommitted ( ) ; <nl> - } <nl> + <nl> + / / Ensure that the second mongos will see the movePrimary <nl> + s . configRS . awaitLastOpCommitted ( ) ; <nl> <nl> assert ( sh . getBalancerState ( ) , " A1 " ) ; <nl> <nl> - sh . stopBalancer ( ) ; <nl> + s . stopBalancer ( ) ; <nl> assert ( ! sh . getBalancerState ( ) , " A2 " ) ; <nl> <nl> - sh . startBalancer ( ) ; <nl> + s . startBalancer ( ) ; <nl> assert ( sh . getBalancerState ( ) , " A3 " ) ; <nl> <nl> - sh . stopBalancer ( ) ; <nl> + s . stopBalancer ( ) ; <nl> assert ( ! sh . getBalancerState ( ) , " A4 " ) ; <nl> <nl> s . config . databases . find ( ) . forEach ( printjson ) ; <nl> | SERVER - 22672 shard3 . js should not stop the balancer through sh | mongodb/mongo | 9b00106b56966b334c878f36cca14deb71f6d8c7 | 2016-06-15T21:37:33Z |
mmm a / dbms / src / DataStreams / PushingToViewsBlockOutputStream . cpp <nl> ppp b / dbms / src / DataStreams / PushingToViewsBlockOutputStream . cpp <nl> void PushingToViewsBlockOutputStream : : write ( const Block & block ) <nl> <nl> / / Insert data into materialized views only after successful insert into main table <nl> bool allow_concurrent_view_processing = context . getSettingsRef ( ) . allow_concurrent_view_processing ; <nl> - if ( allow_concurrent_view_processing & & views . size ( ) > 1 ) { <nl> + if ( allow_concurrent_view_processing & & views . size ( ) > 1 ) <nl> + { <nl> / / Push to views concurrently if enabled , and more than one view is attached <nl> ThreadPool pool ( std : : min ( getNumberOfPhysicalCPUCores ( ) , views . size ( ) ) ) ; <nl> for ( size_t view_num = 0 ; view_num < views . size ( ) ; + + view_num ) <nl> { <nl> auto thread_group = CurrentThread : : getGroup ( ) ; <nl> - pool . schedule ( [ = ] ( ) { <nl> + pool . schedule ( [ = ] ( ) <nl> + { <nl> setThreadName ( " PushingToViewsBlockOutputStream " ) ; <nl> CurrentThread : : attachToIfDetached ( thread_group ) ; <nl> process ( block , view_num ) ; <nl> void PushingToViewsBlockOutputStream : : write ( const Block & block ) <nl> } <nl> / / Wait for concurrent view processing <nl> pool . wait ( ) ; <nl> - } else { <nl> + } <nl> + else <nl> + { <nl> / / Process sequentially <nl> for ( size_t view_num = 0 ; view_num < views . size ( ) ; + + view_num ) <nl> process ( block , view_num ) ; <nl> | Update PushingToViewsBlockOutputStream . cpp | ClickHouse/ClickHouse | c4939a1bec0602c4be5fce49540f072b2c00a455 | 2018-10-01T01:39:00Z |
mmm a / src / heap / spaces . h <nl> ppp b / src / heap / spaces . h <nl> class Space : public Malloced { <nl> <nl> protected : <nl> intptr_t GetNextInlineAllocationStepSize ( ) ; <nl> + bool AllocationObserversActive ( ) { <nl> + return ! allocation_observers_paused_ & & ! allocation_observers_ . empty ( ) ; <nl> + } <nl> <nl> std : : vector < AllocationObserver * > allocation_observers_ ; <nl> bool allocation_observers_paused_ ; <nl> class SpaceWithLinearArea : public Space { <nl> allocation_info_ . Reset ( nullptr , nullptr ) ; <nl> } <nl> <nl> + virtual bool SupportsInlineAllocation ( ) = 0 ; <nl> + <nl> / / Returns the allocation pointer in this space . <nl> Address top ( ) { return allocation_info_ . top ( ) ; } <nl> Address limit ( ) { return allocation_info_ . limit ( ) ; } <nl> class V8_EXPORT_PRIVATE PagedSpace <nl> } <nl> void DecreaseLimit ( Address new_limit ) ; <nl> void StartNextInlineAllocationStep ( ) override ; <nl> - bool SupportsInlineAllocation ( ) { <nl> + bool SupportsInlineAllocation ( ) override { <nl> return identity ( ) = = OLD_SPACE & & ! is_local ( ) ; <nl> } <nl> <nl> class NewSpace : public SpaceWithLinearArea { <nl> HistogramInfo * promoted_histogram_ ; <nl> <nl> bool EnsureAllocation ( int size_in_bytes , AllocationAlignment alignment ) ; <nl> - <nl> + bool SupportsInlineAllocation ( ) override { return true ; } <nl> void StartNextInlineAllocationStep ( ) override ; <nl> <nl> friend class SemiSpaceIterator ; <nl> | [ heap ] make SupportsInlineAllocation virtual in SpaceWithLinearArea | v8/v8 | eb62a4aba4b329592979d80ab89e981d59f06344 | 2017-12-08T15:13:50Z |
new file mode 100644 <nl> index 000000000000 . . 7a367ec19dfb <nl> mmm / dev / null <nl> ppp b / include / swift / AST / TypeLoc . h <nl> <nl> + / / = = = mmm TypeLoc . h - Swift Language Type Locations mmmmmmmmmmmm - - * - C + + - * - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2015 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See http : / / swift . org / LICENSE . txt for license information <nl> + / / See http : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / <nl> + / / This file defines the TypeLoc class . <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + # ifndef SWIFT_TYPELOC_H <nl> + # define SWIFT_TYPELOC_H <nl> + <nl> + # include " swift / Basic / SourceLoc . h " <nl> + <nl> + namespace swift { <nl> + class ASTContext ; <nl> + <nl> + / / / TypeLoc - Provides source location information for a parsed type . <nl> + / / / A TypeLoc * is stored in AST nodes which use an explicitly written type . <nl> + class TypeLoc { <nl> + private : <nl> + TypeLoc ( SourceRange Range ) ; <nl> + / / FIXME : Currently , there ' s only one kind of TypeLoc ; we need multiple kinds <nl> + / / for more accurate source location information . <nl> + SourceRange Range ; <nl> + <nl> + void * operator new ( size_t Bytes ) throw ( ) = delete ; <nl> + void operator delete ( void * Data ) throw ( ) = delete ; <nl> + void * operator new ( size_t Bytes , void * Mem ) throw ( ) = delete ; <nl> + void * operator new ( size_t Bytes , ASTContext & C , <nl> + unsigned Alignment = 8 ) ; <nl> + public : <nl> + SourceRange getSourceRange ( ) { <nl> + return Range ; <nl> + } <nl> + <nl> + static TypeLoc * get ( ASTContext & Context , SourceRange Range ) ; <nl> + } ; <nl> + <nl> + } / / end namespace llvm <nl> + <nl> + # endif <nl> mmm a / lib / AST / Type . cpp <nl> ppp b / lib / AST / Type . cpp <nl> <nl> # include " swift / AST / Types . h " <nl> # include " swift / AST / Decl . h " <nl> # include " swift / AST / AST . h " <nl> + # include " swift / AST / TypeLoc . h " <nl> # include " llvm / ADT / APFloat . h " <nl> # include " llvm / ADT / SmallMap . h " <nl> # include " llvm / ADT / SmallPtrSet . h " <nl> void SubstArchetypeType : : print ( raw_ostream & OS ) const { <nl> getSubstType ( ) - > print ( OS ) ; <nl> } <nl> <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / TypeLoc implementation <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + TypeLoc : : TypeLoc ( SourceRange Range ) : Range ( Range ) { } <nl> + <nl> + void * TypeLoc : : operator new ( size_t Bytes , ASTContext & C , <nl> + unsigned Alignment ) { <nl> + return C . Allocate ( Bytes , Alignment ) ; <nl> + } <nl> + <nl> + TypeLoc * TypeLoc : : get ( ASTContext & Context , SourceRange Range ) { <nl> + return new ( Context ) TypeLoc ( Range ) ; <nl> + } <nl> mmm a / lib / Parse / ParseDecl . cpp <nl> ppp b / lib / Parse / ParseDecl . cpp <nl> bool Parser : : parseInheritance ( SmallVectorImpl < Type > & Inherited ) { <nl> do { <nl> / / Parse the inherited type ( which must be a protocol ) . 
<nl> Type Ty ; <nl> - if ( parseTypeIdentifier ( Ty ) ) <nl> + TypeLoc * Loc ; <nl> + if ( parseTypeIdentifier ( Ty , Loc ) ) <nl> return true ; <nl> <nl> / / Record the type . <nl> Decl * Parser : : parseDeclExtension ( ) { <nl> SourceLoc ExtensionLoc = consumeToken ( tok : : kw_extension ) ; <nl> <nl> Type Ty ; <nl> - SourceLoc TyLoc = Tok . getLoc ( ) ; <nl> + SourceLoc TyStartLoc = Tok . getLoc ( ) ; <nl> + TypeLoc * TyLoc ; <nl> SourceLoc LBLoc , RBLoc ; <nl> - if ( parseTypeIdentifier ( Ty ) ) <nl> + if ( parseTypeIdentifier ( Ty , TyLoc ) ) <nl> return nullptr ; <nl> <nl> / / Parse optional inheritance clause . <nl> Decl * Parser : : parseDeclExtension ( ) { <nl> return nullptr ; <nl> <nl> ExtensionDecl * ED <nl> - = new ( Context ) ExtensionDecl ( ExtensionLoc , Ty , TyLoc , <nl> + = new ( Context ) ExtensionDecl ( ExtensionLoc , Ty , TyStartLoc , <nl> Context . AllocateCopy ( Inherited ) , <nl> CurDeclContext ) ; <nl> ContextChange CC ( * this , ED ) ; <nl> TypeAliasDecl * Parser : : parseDeclTypeAlias ( bool WantDefinition ) { <nl> <nl> Identifier Id ; <nl> Type Ty ; <nl> + TypeLoc * TyLoc ; <nl> SourceLoc IdLoc = Tok . getLoc ( ) ; <nl> if ( parseIdentifier ( Id , diag : : expected_identifier_in_decl , " typealias " ) ) <nl> return nullptr ; <nl> TypeAliasDecl * Parser : : parseDeclTypeAlias ( bool WantDefinition ) { <nl> <nl> if ( WantDefinition | | Tok . is ( tok : : equal ) ) { <nl> if ( parseToken ( tok : : equal , diag : : expected_equal_in_typealias ) | | <nl> - parseType ( Ty , diag : : expected_type_in_typealias ) ) <nl> + parseType ( Ty , TyLoc , diag : : expected_type_in_typealias ) ) <nl> return nullptr ; <nl> <nl> if ( ! WantDefinition ) { <nl> bool Parser : : parseGetSet ( bool HasContainerType , Pattern * Indices , <nl> <nl> / / Getter has type : ( ) - > T . <nl> Type FuncTy = ElementTy ; <nl> - if ( buildFunctionSignature ( Params , FuncTy ) ) { <nl> + TypeLoc * FuncLoc = nullptr ; <nl> + if ( buildFunctionSignature ( Params , FuncTy , FuncLoc ) ) { <nl> skipUntilDeclRBrace ( ) ; <nl> Invalid = true ; <nl> break ; <nl> bool Parser : : parseGetSet ( bool HasContainerType , Pattern * Indices , <nl> <nl> / / Getter has type : ( value : T ) - > ( ) <nl> Type FuncTy = TupleType : : getEmpty ( Context ) ; <nl> - if ( buildFunctionSignature ( Params , FuncTy ) ) { <nl> + TypeLoc * FuncLoc = nullptr ; <nl> + if ( buildFunctionSignature ( Params , FuncTy , FuncLoc ) ) { <nl> skipUntilDeclRBrace ( ) ; <nl> Invalid = true ; <nl> break ; <nl> FuncDecl * Parser : : parseDeclFunc ( bool hasContainerType ) { <nl> Params . push_back ( buildImplicitThisParameter ( ) ) ; <nl> <nl> Type FuncTy ; <nl> - if ( parseFunctionSignature ( Params , FuncTy ) ) <nl> + TypeLoc * FuncTyLoc ; <nl> + if ( parseFunctionSignature ( Params , FuncTy , FuncTyLoc ) ) <nl> return 0 ; <nl> <nl> / / Enter the arguments for the function into a new function - body scope . We <nl> bool Parser : : parseDeclOneOf ( SmallVectorImpl < Decl * > & Decls ) { <nl> SourceLoc NameLoc ; <nl> StringRef Name ; <nl> Type EltType ; <nl> + TypeLoc * EltTypeLoc ; <nl> } ; <nl> SmallVector < OneOfElementInfo , 8 > ElementInfos ; <nl> <nl> bool Parser : : parseDeclOneOf ( SmallVectorImpl < Decl * > & Decls ) { <nl> <nl> / / See if we have a type specifier for this oneof element . If so , parse it . <nl> if ( consumeIf ( tok : : colon ) & & <nl> - parseTypeAnnotation ( ElementInfo . EltType , <nl> + parseTypeAnnotation ( ElementInfo . EltType , ElementInfo . 
EltTypeLoc , <nl> diag : : expected_type_oneof_element ) ) { <nl> skipUntil ( tok : : r_brace ) ; <nl> return true ; <nl> bool Parser : : parseDeclSubscript ( bool HasContainerType , <nl> <nl> / / type <nl> Type ElementTy ; <nl> - if ( parseTypeAnnotation ( ElementTy , diag : : expected_type_subscript ) ) <nl> + TypeLoc * ElementTyLoc ; <nl> + if ( parseTypeAnnotation ( ElementTy , ElementTyLoc , <nl> + diag : : expected_type_subscript ) ) <nl> return true ; <nl> if ( checkFullyTyped ( ElementTy ) ) <nl> Invalid = true ; <nl> mmm a / lib / Parse / ParseExpr . cpp <nl> ppp b / lib / Parse / ParseExpr . cpp <nl> NullablePtr < Expr > Parser : : parseExprNew ( ) { <nl> <nl> / / FIXME : this should probably be type - simple . <nl> Type elementTy ; <nl> - if ( parseTypeIdentifier ( elementTy ) ) <nl> + TypeLoc * elementLoc ; <nl> + if ( parseTypeIdentifier ( elementTy , elementLoc ) ) <nl> return nullptr ; <nl> <nl> / / TODO : we should probably allow a tuple - expr here as an initializer . <nl> NullablePtr < Expr > Parser : : parseExprFunc ( ) { <nl> <nl> SmallVector < Pattern * , 4 > Params ; <nl> Type Ty ; <nl> + TypeLoc * Loc ; <nl> if ( Tok . is ( tok : : l_brace ) ) { <nl> / / If the func - signature isn ' t present , then this is a ( ) - > ( ) function . <nl> Params . push_back ( TuplePattern : : create ( Context , SourceLoc ( ) , <nl> NullablePtr < Expr > Parser : : parseExprFunc ( ) { <nl> } else if ( Tok . isNotAnyLParen ( ) ) { <nl> diagnose ( Tok , diag : : func_decl_without_paren ) ; <nl> return 0 ; <nl> - } else if ( parseFunctionSignature ( Params , Ty ) ) { <nl> + } else if ( parseFunctionSignature ( Params , Ty , Loc ) ) { <nl> return 0 ; <nl> } <nl> <nl> mmm a / lib / Parse / ParsePattern . cpp <nl> ppp b / lib / Parse / ParsePattern . cpp <nl> bool Parser : : checkFullyTyped ( Pattern * pattern , Type & funcTy ) { <nl> / / / func - signature - result : <nl> / / / ' - > ' type <nl> bool Parser : : parseFunctionSignature ( SmallVectorImpl < Pattern * > & params , <nl> - Type & type ) { <nl> + Type & type , TypeLoc * & loc ) { <nl> / / Parse curried function argument clauses as long as we can . <nl> do { <nl> NullablePtr < Pattern > pattern = parsePatternTuple ( ) ; <nl> bool Parser : : parseFunctionSignature ( SmallVectorImpl < Pattern * > & params , <nl> <nl> / / If there ' s a trailing arrow , parse the rest as the result type . <nl> if ( consumeIf ( tok : : arrow ) ) { <nl> - if ( parseType ( type ) ) <nl> + if ( parseType ( type , loc ) ) <nl> return true ; <nl> <nl> checkFullyTyped ( type ) ; <nl> bool Parser : : parseFunctionSignature ( SmallVectorImpl < Pattern * > & params , <nl> / / Otherwise , we implicitly return ( ) . <nl> } else { <nl> type = TupleType : : getEmpty ( Context ) ; <nl> + loc = nullptr ; <nl> } <nl> <nl> / / Now build up the function type . We require all function <nl> / / signatures to be fully - typed : that is , all top - down paths to a <nl> / / leaf pattern must pass through a TypedPattern . <nl> - return buildFunctionSignature ( params , type ) ; <nl> + return buildFunctionSignature ( params , type , loc ) ; <nl> } <nl> <nl> bool Parser : : buildFunctionSignature ( SmallVectorImpl < Pattern * > & params , <nl> - Type & type ) { <nl> + Type & type , TypeLoc * & loc ) { <nl> / / Now build up the function type . We require all function <nl> / / signatures to be fully - typed : that is , all top - down paths to a <nl> / / leaf pattern must pass through a TypedPattern . 
<nl> NullablePtr < Pattern > Parser : : parsePattern ( ) { <nl> / / Now parse an optional type annotation . <nl> if ( consumeIf ( tok : : colon ) ) { <nl> Type type ; <nl> - if ( parseTypeAnnotation ( type ) ) <nl> + TypeLoc * loc ; <nl> + if ( parseTypeAnnotation ( type , loc ) ) <nl> return nullptr ; <nl> <nl> pattern = new ( Context ) TypedPattern ( pattern . get ( ) , type ) ; <nl> mmm a / lib / Parse / ParseType . cpp <nl> ppp b / lib / Parse / ParseType . cpp <nl> <nl> <nl> # include " Parser . h " <nl> # include " swift / AST / Attr . h " <nl> + # include " swift / AST / TypeLoc . h " <nl> # include " llvm / ADT / APInt . h " <nl> # include " llvm / ADT / SmallString . h " <nl> # include " llvm / ADT / Twine . h " <nl> using namespace swift ; <nl> <nl> - bool Parser : : parseTypeAnnotation ( Type & result ) { <nl> - return parseTypeAnnotation ( result , diag : : expected_type ) ; <nl> + bool Parser : : parseTypeAnnotation ( Type & result , TypeLoc * & resultLoc ) { <nl> + return parseTypeAnnotation ( result , resultLoc , diag : : expected_type ) ; <nl> } <nl> <nl> / / / parseTypeAnnotation <nl> / / / type - annotation : <nl> / / / attribute - list type <nl> - bool Parser : : parseTypeAnnotation ( Type & result , Diag < > message ) { <nl> + bool Parser : : parseTypeAnnotation ( Type & result , TypeLoc * & resultLoc , <nl> + Diag < > message ) { <nl> / / Parse attributes . <nl> DeclAttributes attrs ; <nl> parseAttributeList ( attrs ) ; <nl> <nl> / / Parse the type . <nl> - if ( parseType ( result , message ) ) <nl> + if ( parseType ( result , resultLoc , message ) ) <nl> return true ; <nl> <nl> / / Apply those attributes that do apply . <nl> bool Parser : : parseTypeAnnotation ( Type & result , Diag < > message ) { <nl> if ( ! attrs . isByrefHeap ( ) ) quals | = LValueType : : Qual : : NonHeap ; <nl> result = LValueType : : get ( result , quals , Context ) ; <nl> attrs . Byref = false ; / / so that the empty ( ) check below works <nl> + resultLoc = TypeLoc : : get ( Context , <nl> + SourceRange ( attrs . LSquareLoc , <nl> + resultLoc - > getSourceRange ( ) . End ) ) ; <nl> } <nl> <nl> / / Handle the auto_closure attribute . <nl> bool Parser : : parseTypeAnnotation ( Type & result , Diag < > message ) { <nl> return false ; <nl> } <nl> <nl> - bool Parser : : parseType ( Type & Result ) { <nl> - return parseType ( Result , diag : : expected_type ) ; <nl> + bool Parser : : parseType ( Type & Result , TypeLoc * & ResultLoc ) { <nl> + return parseType ( Result , ResultLoc , diag : : expected_type ) ; <nl> } <nl> <nl> / / / parseType <nl> bool Parser : : parseType ( Type & Result ) { <nl> / / / type - tuple <nl> / / / type - composition <nl> / / / <nl> - bool Parser : : parseType ( Type & Result , Diag < > MessageID ) { <nl> + bool Parser : : parseType ( Type & Result , TypeLoc * & ResultLoc , Diag < > MessageID ) { <nl> / / Parse type - simple first . <nl> - SourceLoc TypeLoc = Tok . getLoc ( ) ; <nl> + SourceLoc StartLoc = Tok . getLoc ( ) ; <nl> bool isTupleType = false ; <nl> switch ( Tok . 
getKind ( ) ) { <nl> case tok : : identifier : <nl> - if ( parseTypeIdentifier ( Result ) ) <nl> + if ( parseTypeIdentifier ( Result , ResultLoc ) ) <nl> return true ; <nl> break ; <nl> case tok : : kw_protocol : <nl> - if ( parseTypeComposition ( Result ) ) <nl> + if ( parseTypeComposition ( Result , ResultLoc ) ) <nl> return true ; <nl> break ; <nl> case tok : : l_paren : <nl> case tok : : l_paren_space : { <nl> isTupleType = true ; <nl> SourceLoc LPLoc = consumeToken ( ) , RPLoc ; <nl> - if ( parseTypeTupleBody ( LPLoc , Result ) | | <nl> + if ( parseTypeTupleBody ( LPLoc , Result , ResultLoc ) | | <nl> parseMatchingToken ( tok : : r_paren , RPLoc , <nl> diag : : expected_rparen_tuple_type_list , <nl> LPLoc , diag : : opening_paren ) ) <nl> bool Parser : : parseType ( Type & Result , Diag < > MessageID ) { <nl> if ( consumeIf ( tok : : arrow ) ) { <nl> / / If the argument was not syntactically a tuple type , report an error . <nl> if ( ! isTupleType ) { <nl> - diagnose ( TypeLoc , diag : : expected_function_argument_must_be_paren ) ; <nl> + diagnose ( StartLoc , diag : : expected_function_argument_must_be_paren ) ; <nl> } <nl> <nl> Type SecondHalf ; <nl> - if ( parseType ( SecondHalf , diag : : expected_type_function_result ) ) <nl> + TypeLoc * SecondHalfLoc ; <nl> + if ( parseType ( SecondHalf , SecondHalfLoc , <nl> + diag : : expected_type_function_result ) ) <nl> return true ; <nl> Result = FunctionType : : get ( Result , SecondHalf , Context ) ; <nl> + SourceRange FnTypeRange { ResultLoc - > getSourceRange ( ) . Start , <nl> + SecondHalfLoc - > getSourceRange ( ) . End } ; <nl> + ResultLoc = TypeLoc : : get ( Context , FnTypeRange ) ; <nl> return false ; <nl> } <nl> <nl> / / If there is a square bracket without a space , we have an array . <nl> if ( Tok . is ( tok : : l_square ) ) <nl> - return parseTypeArray ( Result ) ; <nl> + return parseTypeArray ( Result , ResultLoc ) ; <nl> <nl> return false ; <nl> } <nl> bool Parser : : parseType ( Type & Result , Diag < > MessageID ) { <nl> / / / type - identifier : <nl> / / / identifier ( ' . ' identifier ) * <nl> / / / <nl> - bool Parser : : parseTypeIdentifier ( Type & Result ) { <nl> + bool Parser : : parseTypeIdentifier ( Type & Result , TypeLoc * & ResultLoc ) { <nl> + SourceLoc StartLoc = Tok . getLoc ( ) ; <nl> if ( Tok . isNot ( tok : : identifier ) ) { <nl> diagnose ( Tok . getLoc ( ) , diag : : expected_identifier_for_type ) ; <nl> return true ; <nl> bool Parser : : parseTypeIdentifier ( Type & Result ) { <nl> SmallVector < IdentifierType : : Component , 4 > Components ; <nl> Components . push_back ( IdentifierType : : Component ( Tok . getLoc ( ) , <nl> Context . getIdentifier ( Tok . getText ( ) ) ) ) ; <nl> - consumeToken ( tok : : identifier ) ; <nl> + SourceLoc EndLoc = consumeToken ( tok : : identifier ) ; <nl> <nl> while ( consumeIf ( tok : : period ) ) { <nl> SourceLoc Loc = Tok . getLoc ( ) ; <nl> bool Parser : : parseTypeIdentifier ( Type & Result ) { <nl> if ( parseIdentifier ( Name , diag : : expected_identifier_in_dotted_type ) ) <nl> return true ; <nl> Components . push_back ( IdentifierType : : Component ( Loc , Name ) ) ; <nl> + EndLoc = Loc ; <nl> } <nl> <nl> / / Lookup element # 0 through our current scope chains in case it is some thing <nl> bool Parser : : parseTypeIdentifier ( Type & Result ) { <nl> auto Ty = IdentifierType : : getNew ( Context , Components ) ; <nl> UnresolvedIdentifierTypes . 
emplace_back ( Ty , CurDeclContext ) ; <nl> Result = Ty ; <nl> + ResultLoc = TypeLoc : : get ( Context , SourceRange ( StartLoc , EndLoc ) ) ; <nl> return false ; <nl> } <nl> <nl> bool Parser : : parseTypeIdentifier ( Type & Result ) { <nl> / / / type - composition - list : <nl> / / / type - identifier ( ' , ' type - identifier ) * <nl> / / / <nl> - bool Parser : : parseTypeComposition ( Type & Result ) { <nl> + bool Parser : : parseTypeComposition ( Type & Result , TypeLoc * & ResultLoc ) { <nl> SourceLoc ProtocolLoc = consumeToken ( tok : : kw_protocol ) ; <nl> <nl> / / Check for the starting ' < ' . <nl> bool Parser : : parseTypeComposition ( Type & Result ) { <nl> do { <nl> / / Parse the type - identifier . <nl> Type Protocol ; <nl> - if ( parseTypeIdentifier ( Protocol ) ) { <nl> + TypeLoc * ProtocolLoc ; <nl> + if ( parseTypeIdentifier ( Protocol , ProtocolLoc ) ) { <nl> Invalid = true ; <nl> break ; <nl> } <nl> bool Parser : : parseTypeComposition ( Type & Result ) { <nl> } while ( true ) ; <nl> <nl> / / Check for the terminating ' > ' . <nl> + SourceLoc EndLoc = Tok . getLoc ( ) ; <nl> if ( ! startsWithGreater ( Tok ) ) { <nl> if ( ! Invalid ) { <nl> diagnose ( Tok . getLoc ( ) , diag : : expected_rangle_protocol ) ; <nl> bool Parser : : parseTypeComposition ( Type & Result ) { <nl> / / Skip until we hit the ' > ' . <nl> skipUntil ( tok : : oper ) ; <nl> if ( startsWithGreater ( Tok ) ) <nl> - consumeStartingGreater ( ) ; <nl> + EndLoc = consumeStartingGreater ( ) ; <nl> <nl> } else { <nl> - consumeStartingGreater ( ) ; <nl> + EndLoc = consumeStartingGreater ( ) ; <nl> } <nl> <nl> Result = ProtocolCompositionType : : get ( Context , ProtocolLoc , <nl> Context . AllocateCopy ( Protocols ) ) ; <nl> + ResultLoc = TypeLoc : : get ( Context , SourceRange ( ProtocolLoc , EndLoc ) ) ; <nl> return false ; <nl> } <nl> <nl> bool Parser : : parseTypeComposition ( Type & Result ) { <nl> / / / type - tuple - element : <nl> / / / identifier value - specifier <nl> / / / type - annotation <nl> - bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> + bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result , TypeLoc * & ResultLoc ) { <nl> SmallVector < TupleTypeElt , 8 > Elements ; <nl> bool HadExpr = false ; <nl> <nl> bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> <nl> NullablePtr < Expr > init ; <nl> Type type ; <nl> - if ( ( HadError = parseValueSpecifier ( type , init ) ) ) <nl> + TypeLoc * loc ; <nl> + if ( ( HadError = parseValueSpecifier ( type , loc , init ) ) ) <nl> break ; <nl> <nl> HadExpr | = init . isNonNull ( ) ; <nl> bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> <nl> / / Otherwise , this has to be a type . <nl> Type type ; <nl> - if ( ( HadError = parseTypeAnnotation ( type ) ) ) <nl> + TypeLoc * typeLoc ; <nl> + if ( ( HadError = parseTypeAnnotation ( type , typeLoc ) ) ) <nl> break ; <nl> <nl> Elements . push_back ( TupleTypeElt ( type , Identifier ( ) , nullptr ) ) ; <nl> bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> if ( Elements . size ( ) = = 1 & & ! Elements . back ( ) . hasName ( ) & & ! HadEllipsis ) { <nl> assert ( ! HadExpr & & " Only TupleTypes have default values " ) ; <nl> Result = ParenType : : get ( Context , Elements . back ( ) . getType ( ) ) ; <nl> + ResultLoc = TypeLoc : : get ( Context , SourceRange ( LPLoc , Tok . 
getLoc ( ) ) ) ; <nl> return false ; <nl> } <nl> <nl> bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> if ( HadExpr ) <nl> TypesWithDefaultValues . emplace_back ( TT , CurDeclContext ) ; <nl> Result = TT ; <nl> + ResultLoc = TypeLoc : : get ( Context , SourceRange ( LPLoc , Tok . getLoc ( ) ) ) ; <nl> return false ; <nl> } <nl> <nl> bool Parser : : parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) { <nl> / / / type - array ' [ ' ' ] ' <nl> / / / type - array ' [ ' expr ' ] ' <nl> / / / <nl> - bool Parser : : parseTypeArray ( Type & result ) { <nl> + bool Parser : : parseTypeArray ( Type & result , TypeLoc * & resultLoc ) { <nl> SourceLoc lsquareLoc = Tok . getLoc ( ) ; <nl> consumeToken ( tok : : l_square ) ; <nl> <nl> / / Handle the [ ] production , meaning an array slice . <nl> - if ( consumeIf ( tok : : r_square ) ) { <nl> + if ( Tok . is ( tok : : r_square ) ) { <nl> + SourceLoc rsquareLoc = consumeToken ( tok : : r_square ) ; <nl> + <nl> / / If we ' re starting another square - bracket clause , recurse . <nl> - if ( Tok . is ( tok : : l_square ) & & parseTypeArray ( result ) ) { <nl> + if ( Tok . is ( tok : : l_square ) & & parseTypeArray ( result , resultLoc ) ) { <nl> return true ; <nl> <nl> / / Propagate an error type out . <nl> bool Parser : : parseTypeArray ( Type & result ) { <nl> <nl> / / Just build a normal array slice type . <nl> result = ArraySliceType : : get ( result , lsquareLoc , Context ) ; <nl> + SourceRange arrayRange { resultLoc - > getSourceRange ( ) . Start , rsquareLoc } ; <nl> + resultLoc = TypeLoc : : get ( Context , arrayRange ) ; <nl> return false ; <nl> } <nl> <nl> bool Parser : : parseTypeArray ( Type & result ) { <nl> return true ; <nl> <nl> / / If we ' re starting another square - bracket clause , recurse . <nl> - if ( Tok . is ( tok : : l_square ) & & parseTypeArray ( result ) ) { <nl> + if ( Tok . is ( tok : : l_square ) & & parseTypeArray ( result , resultLoc ) ) { <nl> return true ; <nl> <nl> / / If we had a semantic error on the size or if the base type is invalid , <nl> mmm a / lib / Parse / Parser . cpp <nl> ppp b / lib / Parse / Parser . cpp <nl> bool Parser : : parseMatchingToken ( tok K , SourceLoc & TokLoc , Diag < > ErrorDiag , <nl> / / / ' : ' type - annotation <nl> / / / ' : ' type - annotation ' = ' expr <nl> / / / ' = ' expr <nl> - bool Parser : : parseValueSpecifier ( Type & Ty , NullablePtr < Expr > & Init ) { <nl> + bool Parser : : parseValueSpecifier ( Type & Ty , TypeLoc * & Loc , <nl> + NullablePtr < Expr > & Init ) { <nl> / / Diagnose when we don ' t have a type or an expression . <nl> if ( Tok . isNot ( tok : : colon ) & & Tok . isNot ( tok : : equal ) ) { <nl> diagnose ( Tok , diag : : expected_type_or_init ) ; <nl> bool Parser : : parseValueSpecifier ( Type & Ty , NullablePtr < Expr > & Init ) { <nl> <nl> / / Parse the type if present . <nl> if ( consumeIf ( tok : : colon ) & & <nl> - parseTypeAnnotation ( Ty , diag : : expected_type ) ) <nl> + parseTypeAnnotation ( Ty , Loc , diag : : expected_type ) ) <nl> return true ; <nl> <nl> / / Parse the initializer , if present . <nl> mmm a / lib / Parse / Parser . h <nl> ppp b / lib / Parse / Parser . 
h <nl> namespace swift { <nl> class DiagnosticEngine ; <nl> class Lexer ; <nl> class ScopeInfo ; <nl> + class TypeLoc ; <nl> class TupleType ; <nl> <nl> struct OneOfElementInfo ; <nl> class Parser { <nl> SourceLoc OtherLoc , Diag < > OtherNote , <nl> tok SkipToTok = tok : : unknown ) ; <nl> <nl> - bool parseValueSpecifier ( Type & Ty , NullablePtr < Expr > & Init ) ; <nl> + bool parseValueSpecifier ( Type & Ty , TypeLoc * & Loc , NullablePtr < Expr > & Init ) ; <nl> <nl> void parseBraceItemList ( SmallVectorImpl < ExprStmtOrDecl > & Decls , <nl> bool IsTopLevel ) ; <nl> class Parser { <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - = = = / / <nl> / / Type Parsing <nl> <nl> - bool parseType ( Type & Result ) ; <nl> - bool parseType ( Type & Result , Diag < > ID ) ; <nl> - bool parseTypeAnnotation ( Type & Result ) ; <nl> - bool parseTypeAnnotation ( Type & Result , Diag < > ID ) ; <nl> - bool parseTypeIdentifier ( Type & Result ) ; <nl> - bool parseTypeComposition ( Type & Result ) ; <nl> - bool parseTypeTupleBody ( SourceLoc LPLoc , Type & Result ) ; <nl> - <nl> - bool parseTypeArray ( Type & result ) ; <nl> + bool parseType ( Type & Result , TypeLoc * & ResultLoc ) ; <nl> + bool parseType ( Type & Result , TypeLoc * & ResultLoc , Diag < > ID ) ; <nl> + bool parseTypeAnnotation ( Type & Result , TypeLoc * & ResultLoc ) ; <nl> + bool parseTypeAnnotation ( Type & Result , TypeLoc * & ResultLoc , Diag < > ID ) ; <nl> + bool parseTypeIdentifier ( Type & Result , TypeLoc * & ResultLoc ) ; <nl> + bool parseTypeComposition ( Type & Result , TypeLoc * & ResultLoc ) ; <nl> + bool parseTypeTupleBody ( SourceLoc LPLoc , Type & Result , TypeLoc * & ResultLoc ) ; <nl> + bool parseTypeArray ( Type & result , TypeLoc * & ResultLoc ) ; <nl> <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - = = = / / <nl> / / Pattern Parsing <nl> <nl> - bool parseFunctionSignature ( SmallVectorImpl < Pattern * > & params , Type & type ) ; <nl> - bool buildFunctionSignature ( SmallVectorImpl < Pattern * > & params , Type & type ) ; <nl> + bool parseFunctionSignature ( SmallVectorImpl < Pattern * > & params , Type & type , <nl> + TypeLoc * & loc ) ; <nl> + bool buildFunctionSignature ( SmallVectorImpl < Pattern * > & params , Type & type , <nl> + TypeLoc * & loc ) ; <nl> NullablePtr < Pattern > parsePattern ( ) ; <nl> NullablePtr < Pattern > parsePatternTuple ( ) ; <nl> NullablePtr < Pattern > parsePatternAtom ( ) ; <nl> | Add a basic TypeLoc ; start threading it through the parser . | apple/swift | ecc56538b3fc323cb7a57a8f3a59ab96e51372f1 | 2012-06-05T00:11:59Z |
mmm a / src / rdb_protocol / serialize_datum . cc <nl> ppp b / src / rdb_protocol / serialize_datum . cc <nl> <nl> # include " rdb_protocol / serialize_datum . hpp " <nl> <nl> + # include < cmath > <nl> # include < string > <nl> # include < vector > <nl> <nl> | Add an include in serialize_datum . cc . | rethinkdb/rethinkdb | 755ea6f6203e02efd94a65d4a3a0c14625713b17 | 2014-07-01T03:28:33Z |
mmm a / Marlin / Conditionals . h <nl> ppp b / Marlin / Conditionals . h <nl> <nl> # define DEFAULT_LCD_CONTRAST 95 <nl> # elif defined ( VIKI2 ) <nl> # define DEFAULT_LCD_CONTRAST 40 <nl> - # elif defined ( ADAFRUIT_ST7565 ) <nl> - # define DEFAULT_LCD_CONTRAST 110 <nl> + # elif defined ( ADAFRUIT_ST7565 ) <nl> + # define DEFAULT_LCD_CONTRAST 110 <nl> # endif <nl> <nl> # define ENCODER_PULSES_PER_STEP 4 <nl> mmm a / Marlin / Configuration . h <nl> ppp b / Marlin / Configuration . h <nl> const bool Z_PROBE_ENDSTOP_INVERTING = false ; / / set to true to invert the logic <nl> / / # define VIKI2 <nl> / / # define miniVIKI <nl> <nl> - # define ADAFRUIT_ST7565 <nl> + / / The Adafruit ST7565 is a custom display . More info will be posted as the project files are finalized . <nl> + / / # define ADAFRUIT_ST7565 <nl> <nl> / / The RepRapDiscount Smart Controller ( white PCB ) <nl> / / http : / / reprap . org / wiki / RepRapDiscount_Smart_Controller <nl> mmm a / Marlin / dogm_lcd_implementation . h <nl> ppp b / Marlin / dogm_lcd_implementation . h <nl> <nl> / / Mini Viki and Viki 2 . 0 LCD , ST7565 controller as well <nl> U8GLIB_NHD_C12864 u8g ( DOGLCD_CS , DOGLCD_A0 ) ; <nl> # elif defined ( ADAFRUIT_ST7565 ) <nl> - U8GLIB_LM6059 u8g ( DOGLCD_CS , DOGLCD_A0 ) ; <nl> + U8GLIB_LM6059 u8g ( DOGLCD_CS , DOGLCD_A0 ) ; <nl> # else <nl> / / for regular DOGM128 display with HW - SPI <nl> U8GLIB_DOGM128 u8g ( DOGLCD_CS , DOGLCD_A0 ) ; / / HW - SPI Com : CS , A0 <nl> mmm a / Marlin / pins_RAMPS_13 . h <nl> ppp b / Marlin / pins_RAMPS_13 . h <nl> <nl> # define BTN_ENC - 1 <nl> # define LCD_SDSS 53 <nl> # define SDCARDDETECT 49 <nl> - # elif defined ( ADAFRUIT_ST7565 ) <nl> + # elif defined ( ADAFRUIT_ST7565 ) <nl> # define BTN_EN1 35 <nl> # define BTN_EN2 37 <nl> # define BTN_ENC 31 <nl> # define SDCARDDETECT 49 <nl> - # define SDCARDDETECTINVERTED <nl> - # define SDSLOW <nl> + # define SDCARDDETECTINVERTED <nl> + # define SDSLOW <nl> # define LCD_SDSS 53 <nl> # define KILL_PIN 41 <nl> # define BEEPER 23 <nl> <nl> # endif <nl> <nl> # endif <nl> - # else / / Old - style panel with shift register <nl> + # else / / Old - style panel with shift register <nl> / / Arduino pin which triggers a piezo beeper <nl> # define BEEPER 33 / / No Beeper added <nl> <nl> | Formatting Changes | MarlinFirmware/Marlin | 1999ac415cd1e0a1786a601bbfbd13e9aacf2648 | 2015-05-17T20:39:24Z |
mmm a / compute <nl> ppp b / compute <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit 520f9b35b1d41cbb34befe77b43cc0188e4e5dc1 <nl> + Subproject commit 509ebe4a9282eec8a92c65ce3bbc1925f1fdbe07 <nl> | switched to develop branch of boost compute submodule ( ) | microsoft/LightGBM | 277f7e473f17b5051ca5d88bb4be0ff9d16da120 | 2018-06-16T15:54:25Z |
mmm a / src / clustering / immediate_consistency / branch / listener . cc <nl> ppp b / src / clustering / immediate_consistency / branch / listener . cc <nl> listener_t < protocol_t > : : listener_t ( mailbox_manager_t * mm , <nl> version_range_t ( version_t ( branch_id , backfill_end_timestamp ) ) ) ; <nl> # endif <nl> <nl> - rassert ( backfill_end_point = = expected_backfill_endpoint ) ; <nl> + / / debugf_print ( " expected_backfill_endpoint " , expected_backfill_endpoint ) ; <nl> + / / debugf_print ( " backfill_end_point " , backfill_end_point ) ; <nl> <nl> - rassert ( backfill_end_timestamp > = streaming_begin_point ) ; <nl> + guarantee ( backfill_end_point = = expected_backfill_endpoint ) ; <nl> + <nl> + guarantee ( backfill_end_timestamp > = streaming_begin_point ) ; <nl> <nl> current_timestamp = backfill_end_timestamp ; <nl> backfill_done_cond . pulse ( backfill_end_timestamp ) ; <nl> mmm a / src / clustering / immediate_consistency / branch / metadata . hpp <nl> ppp b / src / clustering / immediate_consistency / branch / metadata . hpp <nl> <nl> <nl> # include < map > <nl> <nl> - # include " errors . hpp " <nl> - # include < boost / uuid / uuid . hpp > <nl> - <nl> # include " clustering / registration_metadata . hpp " <nl> # include " clustering / resource . hpp " <nl> # include " concurrency / fifo_checker . hpp " <nl> # include " concurrency / fifo_enforcer . hpp " <nl> # include " concurrency / fifo_enforcer . hpp " <nl> # include " concurrency / promise . hpp " <nl> + # include " containers / printf_buffer . hpp " <nl> + # include " containers / uuid . hpp " <nl> # include " protocol_api . hpp " <nl> # include " rpc / mailbox / typed . hpp " <nl> # include " rpc / semilattice / joins / map . hpp " <nl> class version_t { <nl> RDB_MAKE_ME_SERIALIZABLE_2 ( branch , timestamp ) ; <nl> } ; <nl> <nl> + inline void debug_print ( append_only_printf_buffer_t * buf , const version_t & v ) { <nl> + buf - > appendf ( " v { " ) ; <nl> + debug_print ( buf , v . branch ) ; <nl> + buf - > appendf ( " , " ) ; <nl> + debug_print ( buf , v . timestamp ) ; <nl> + buf - > appendf ( " } " ) ; <nl> + } <nl> + <nl> / * ` version_range_t ` is a pair of ` version_t ` s . It ' s used to keep track of <nl> backfills ; when a backfill is interrupted , the state of the individual keys is <nl> unknown and all we know is that they lie within some range of versions . * / <nl> class version_range_t { <nl> RDB_MAKE_ME_SERIALIZABLE_2 ( earliest , latest ) ; <nl> } ; <nl> <nl> + inline void debug_print ( append_only_printf_buffer_t * buf , const version_range_t & vr ) { <nl> + buf - > appendf ( " vr { earliest = " ) ; <nl> + debug_print ( buf , vr . earliest ) ; <nl> + buf - > appendf ( " , latest = " ) ; <nl> + debug_print ( buf , vr . latest ) ; <nl> + buf - > appendf ( " } " ) ; <nl> + } <nl> + <nl> / * Every ` listener_t ` constructs a ` listener_business_card_t ` and sends it to <nl> the ` broadcaster_t ` . * / <nl> <nl> mmm a / src / containers / uuid . cc <nl> ppp b / src / containers / uuid . cc <nl> <nl> # include " errors . hpp " <nl> # include < boost / uuid / uuid_generators . hpp > <nl> # include < boost / uuid / uuid_io . hpp > <nl> - # include < boost / lexical_cast . hpp > <nl> + <nl> + # include " containers / printf_buffer . hpp " <nl> <nl> # include " utils . 
hpp " <nl> <nl> boost : : uuids : : uuid nil_uuid ( ) { <nl> return boost : : uuids : : nil_generator ( ) ( ) ; <nl> } <nl> <nl> + void debug_print ( append_only_printf_buffer_t * buf , const boost : : uuids : : uuid & id ) { <nl> + buf - > appendf ( " % s " , boost : : uuids : : to_string ( id ) . c_str ( ) ) ; <nl> + } <nl> + <nl> + <nl> std : : string uuid_to_str ( boost : : uuids : : uuid id ) { <nl> / / Heh . <nl> return boost : : uuids : : to_string ( id ) ; <nl> mmm a / src / containers / uuid . hpp <nl> ppp b / src / containers / uuid . hpp <nl> <nl> # include " errors . hpp " <nl> # include < boost / uuid / uuid . hpp > <nl> <nl> + class append_only_printf_buffer_t ; <nl> <nl> <nl> / * This does the same thing as ` boost : : uuids : : random_generator ( ) ( ) ` , except that <nl> boost : : uuids : : uuid generate_uuid ( ) ; <nl> / / Returns boost : : uuids : : nil_generator ( ) ( ) . <nl> boost : : uuids : : uuid nil_uuid ( ) ; <nl> <nl> + void debug_print ( append_only_printf_buffer_t * buf , const boost : : uuids : : uuid & id ) ; <nl> + <nl> std : : string uuid_to_str ( boost : : uuids : : uuid id ) ; <nl> <nl> boost : : uuids : : uuid str_to_uuid ( const std : : string & ) ; <nl> mmm a / src / timestamps . hpp <nl> ppp b / src / timestamps . hpp <nl> <nl> # ifndef TIMESTAMPS_HPP_ <nl> # define TIMESTAMPS_HPP_ <nl> <nl> - # include < stdint . h > <nl> + # define __STDC_FORMAT_MACROS <nl> <nl> + # include < inttypes . h > <nl> + <nl> + # include " containers / printf_buffer . hpp " <nl> # include " repli_timestamp . hpp " <nl> # include " rpc / serialize_macros . hpp " <nl> <nl> + <nl> / * These are the timestamp types used by the clustering code . <nl> ` repli_timestamp_t ` , which is used internally within the btree code , is defined <nl> elsewhere . * / <nl> class state_timestamp_t { <nl> return ts ; <nl> } <nl> <nl> + friend void debug_print ( append_only_printf_buffer_t * buf , state_timestamp_t ts ) ; <nl> + <nl> private : <nl> friend class transition_timestamp_t ; <nl> uint64_t num ; <nl> RDB_MAKE_ME_SERIALIZABLE_1 ( num ) ; <nl> } ; <nl> <nl> + inline void debug_print ( append_only_printf_buffer_t * buf , state_timestamp_t ts ) { <nl> + buf - > appendf ( " st_t { " ) ; <nl> + debug_print ( buf , ts . num ) ; <nl> + buf - > appendf ( " } " ) ; <nl> + } <nl> + <nl> class transition_timestamp_t { <nl> public : <nl> bool operator = = ( transition_timestamp_t t ) const { return before = = t . before ; } <nl> class transition_timestamp_t { <nl> RDB_MAKE_ME_SERIALIZABLE_1 ( before ) ; <nl> } ; <nl> <nl> + <nl> # endif / * TIMESTAMPS_HPP_ * / <nl> | Add debug_print for version_range_t , uuid , state_timestamp_t . | rethinkdb/rethinkdb | 14c04fedd9a7b8008ef6ec31fbb08b2768882404 | 2012-06-11T23:38:44Z |
mmm a / lang / util . h <nl> ppp b / lang / util . h <nl> enum class UnaryType : int { <nl> abs , <nl> sin , <nl> cos , <nl> + inv , <nl> bit_not , <nl> undefined <nl> } ; <nl> inline std : : string unary_type_name ( UnaryType type ) { <nl> REGISTER_TYPE ( abs ) ; <nl> REGISTER_TYPE ( sin ) ; <nl> REGISTER_TYPE ( cos ) ; <nl> + REGISTER_TYPE ( inv ) ; <nl> REGISTER_TYPE ( bit_not ) ; <nl> REGISTER_TYPE ( undefined ) ; <nl> # undef REGISTER_TYPE <nl> | Closed | taichi-dev/taichi | f8fb75db22425f8702bfcba896c139b75890ddc2 | 2019-03-04T02:08:28Z |
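The taichi entry above registers the new inv unary op both in the UnaryType enum and in the unary_type_name lookup, driven by a local REGISTER_TYPE macro. The macro body is not visible in the diff, so the sketch below uses one common shape for such a macro (stringizing the enumerator) purely as an illustration, with a trimmed enumerator list.

    #include <cstdio>
    #include <string>

    enum class UnaryType : int { neg, abs, sin, cos, inv, bit_not, undefined };

    inline std::string unary_type_name(UnaryType type) {
    // One plausible macro body: compare and return the stringized enumerator name.
    #define REGISTER_TYPE(i) \
        if (type == UnaryType::i) return #i;
        REGISTER_TYPE(neg);
        REGISTER_TYPE(abs);
        REGISTER_TYPE(sin);
        REGISTER_TYPE(cos);
        REGISTER_TYPE(inv);
        REGISTER_TYPE(bit_not);
        REGISTER_TYPE(undefined);
    #undef REGISTER_TYPE
        return "unknown";
    }

    int main() {
        std::printf("%s\n", unary_type_name(UnaryType::inv).c_str());  // prints: inv
    }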
mmm a / base / common / logger_useful . h <nl> ppp b / base / common / logger_useful . h <nl> using DB : : CurrentThread ; <nl> # define LOG_FATAL ( logger , message ) LOG_IMPL ( logger , LogsLevel : : error , Message : : PRIO_FATAL , message ) <nl> <nl> <nl> - # define LOG_IMPL_FORMATTED ( logger , priority , PRIORITY , message , . . . ) do \ <nl> + # define LOG_IMPL_FORMATTED ( logger , priority , PRIORITY , . . . ) do \ <nl> { \ <nl> const bool is_clients_log = ( CurrentThread : : getGroup ( ) ! = nullptr ) & & \ <nl> ( CurrentThread : : getGroup ( ) - > client_logs_level > = ( priority ) ) ; \ <nl> if ( ( logger ) - > is ( ( PRIORITY ) ) | | is_clients_log ) \ <nl> { \ <nl> - std : : string formatted_message = fmt : : format ( message , __VA_ARGS__ ) ; \ <nl> + std : : string formatted_message = fmt : : format ( __VA_ARGS__ ) ; \ <nl> if ( auto channel = ( logger ) - > getChannel ( ) ) \ <nl> { \ <nl> std : : string file_function ; \ <nl> using DB : : CurrentThread ; <nl> } while ( false ) <nl> <nl> <nl> - # define LOG_TRACE_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : trace , Message : : PRIO_TRACE , message , __VA_ARGS__ ) <nl> - # define LOG_DEBUG_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : debug , Message : : PRIO_DEBUG , message , __VA_ARGS__ ) <nl> - # define LOG_INFO_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : information , Message : : PRIO_INFORMATION , message , __VA_ARGS__ ) <nl> - # define LOG_WARNING_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : warning , Message : : PRIO_WARNING , message , __VA_ARGS__ ) <nl> - # define LOG_ERROR_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : error , Message : : PRIO_ERROR , message , __VA_ARGS__ ) <nl> - # define LOG_FATAL_FORMATTED ( logger , message , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : error , Message : : PRIO_FATAL , message , __VA_ARGS__ ) <nl> + # define LOG_TRACE_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : trace , Message : : PRIO_TRACE , __VA_ARGS__ ) <nl> + # define LOG_DEBUG_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : debug , Message : : PRIO_DEBUG , __VA_ARGS__ ) <nl> + # define LOG_INFO_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : information , Message : : PRIO_INFORMATION , __VA_ARGS__ ) <nl> + # define LOG_WARNING_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : warning , Message : : PRIO_WARNING , __VA_ARGS__ ) <nl> + # define LOG_ERROR_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : error , Message : : PRIO_ERROR , __VA_ARGS__ ) <nl> + # define LOG_FATAL_FORMATTED ( logger , . . . ) LOG_IMPL_FORMATTED ( logger , LogsLevel : : error , Message : : PRIO_FATAL , __VA_ARGS__ ) <nl> | Fix error | ClickHouse/ClickHouse | 3c582fc96fadf3dc80521f034f2296964f6e74df | 2020-05-23T16:42:16Z |
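The ClickHouse entry above drops the separate message parameter and passes the whole __VA_ARGS__ pack straight to fmt::format, so a call that supplies only a format string no longer produces a dangling comma. A tiny standalone sketch of the same shape, assuming the fmt library is available; MY_LOG is a made-up macro, not ClickHouse's.

    #include <fmt/format.h>
    #include <iostream>

    // The format string travels inside __VA_ARGS__, so the pack is never empty
    // and no trailing comma is emitted when there are no format arguments.
    #define MY_LOG(...) \
        do { std::cout << fmt::format(__VA_ARGS__) << '\n'; } while (false)

    int main() {
        MY_LOG("plain message");                   // works: just a format string
        MY_LOG("value = {}, name = {}", 42, "x");  // works: format string plus arguments
    }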
mmm a / tensorflow / core / grappler / optimizers / generic_layout_optimizer_transposer . cc <nl> ppp b / tensorflow / core / grappler / optimizers / generic_layout_optimizer_transposer . cc <nl> bool BinaryOpTransposer : : IsNDOperateWithMD ( const utils : : MutableNodeView & node , <nl> } <nl> <nl> bool BinaryOpTransposer : : IsFaninShapeSupported ( <nl> - const utils : : MutableNodeView & node ) { <nl> - return ( IsNDOperateWithMD ( node , 4 , 0 ) | | IsNDOperateWithMD ( node , 4 , 1 ) | | <nl> - IsNDOperateWithMD ( node , 4 , 4 ) | | IsNDOperateWithMD ( node , 0 , 4 ) | | <nl> - IsNDOperateWithMD ( node , 1 , 4 ) | | IsNDOperateWithMD ( node , 5 , 0 ) | | <nl> - IsNDOperateWithMD ( node , 5 , 1 ) | | IsNDOperateWithMD ( node , 5 , 5 ) | | <nl> - IsNDOperateWithMD ( node , 0 , 5 ) | | IsNDOperateWithMD ( node , 1 , 5 ) ) ; <nl> + const utils : : MutableNodeView & node , int rank ) { <nl> + return ( IsNDOperateWithMD ( node , rank , 0 ) | | <nl> + IsNDOperateWithMD ( node , rank , 1 ) | | <nl> + IsNDOperateWithMD ( node , rank , rank ) | | <nl> + IsNDOperateWithMD ( node , 0 , rank ) | | <nl> + IsNDOperateWithMD ( node , 1 , rank ) ) ; <nl> } <nl> <nl> std : : vector < int > BinaryOpTransposer : : GetNDDataFaninPorts ( <nl> Status BinaryOpTransposer : : TransposeNode ( TransposeContext * context , <nl> context - > AssignDeviceAndDataFormats ( context - > target_device , src_format_3d , <nl> dst_format_3d ) ; <nl> } <nl> - if ( ! ShouldProcess ( * context , * node ) | | ! IsFaninShapeSupported ( * node ) | | <nl> + if ( ! ShouldProcess ( * context , * node ) | | ! IsFaninShapeSupported ( * node , rank ) | | <nl> ! IsAfterDstToSrcTransform ( * context , * node ) ) { <nl> if ( allow_5d ) { <nl> context - > AssignDeviceAndDataFormats ( context - > target_device , src_format , <nl> mmm a / tensorflow / core / grappler / optimizers / generic_layout_optimizer_transposer . h <nl> ppp b / tensorflow / core / grappler / optimizers / generic_layout_optimizer_transposer . h <nl> class BinaryOpTransposer : public LayoutAgnosticOpTransposer { <nl> <nl> private : <nl> bool IsNDOperateWithMD ( const utils : : MutableNodeView & node , int n , int m ) ; <nl> - bool IsFaninShapeSupported ( const utils : : MutableNodeView & node ) ; <nl> + bool IsFaninShapeSupported ( const utils : : MutableNodeView & node , int rank ) ; <nl> std : : vector < int > GetNDDataFaninPorts ( const utils : : MutableNodeView & node , <nl> int rank ) ; <nl> Status AddNodeShapeConst ( utils : : Mutation * mutation , <nl> | Change IsFaninShapeSupported to support rank | tensorflow/tensorflow | 42aafd91e93e1ba3f66734ae038aacf2bfe861dd | 2020-09-29T18:55:31Z |
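The transposer entry above replaces hard-coded rank-4 and rank-5 fanin checks with a single rank parameter. The standalone sketch below mirrors the supported rank pairs as a plain predicate; OperatesWith is an invented name for illustration, while the real function inspects node views.

    #include <cassert>

    // Supported (lhs, rhs) rank pairs, as in IsFaninShapeSupported(node, rank):
    // (rank,0), (rank,1), (rank,rank), (0,rank), (1,rank).
    static bool OperatesWith(int lhs_rank, int rhs_rank, int rank)
    {
        return (lhs_rank == rank && (rhs_rank == 0 || rhs_rank == 1 || rhs_rank == rank)) ||
               (rhs_rank == rank && (lhs_rank == 0 || lhs_rank == 1));
    }

    int main()
    {
        assert(OperatesWith(4, 1, 4));   // rank-4 tensor combined with a vector
        assert(OperatesWith(5, 5, 5));   // two rank-5 tensors
        assert(!OperatesWith(4, 3, 4));  // still unsupported
        return 0;
    }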
mmm a / torch / nn / modules / dropout . py <nl> ppp b / torch / nn / modules / dropout . py <nl> def __init__ ( self , p = 0 . 5 , inplace = False ) : <nl> self . inplace = inplace <nl> <nl> def extra_repr ( self ) : <nl> - inplace_str = ' , inplace ' if self . inplace else ' ' <nl> - return ' p = { } { } ' . format ( self . p , inplace_str ) <nl> + return ' p = { } , inplace = { } ' . format ( self . p , self . inplace ) <nl> <nl> <nl> @ weak_module <nl> | Make Dropout . __repr__ consistent with other modules ( ) | pytorch/pytorch | b2a39314e75b02153d0a1f3bbc98e5f082500d87 | 2019-06-24T22:27:06Z |
mmm a / snap / snapcraft . yaml <nl> ppp b / snap / snapcraft . yaml <nl> parts : <nl> build - type : ' Release ' <nl> artifacts : [ ' Telegram ' ] <nl> environment : <nl> - - CC : gcc - 7 <nl> - - CXX : g + + - 7 <nl> + - CC : gcc - 8 <nl> + - CXX : g + + - 8 <nl> organize : <nl> Telegram : bin / Telegram <nl> configflags : <nl> parts : <nl> - openal <nl> - qt <nl> - range - v3 <nl> - - gcc7 <nl> + - gcc8 <nl> <nl> telegram - launcher : <nl> plugin : dump <nl> parts : <nl> - https : / / github . com / telegramdesktop / hime . git <nl> - https : / / github . com / telegramdesktop / nimf . git <nl> environment : <nl> - - CC : gcc - 7 <nl> - - CXX : g + + - 7 <nl> - - QMAKE_CC : gcc - 7 <nl> - - QMAKE_CXX : g + + - 7 <nl> + - CC : gcc - 8 <nl> + - CXX : g + + - 8 <nl> + - QMAKE_CC : gcc - 8 <nl> + - QMAKE_CXX : g + + - 8 <nl> build - packages : <nl> - libasound2 - dev <nl> - libdbusmenu - glib - dev <nl> parts : <nl> - tests <nl> after : <nl> - libxkbcommon <nl> - - gcc7 <nl> + - gcc8 <nl> prime : [ - . / * ] <nl> <nl> breakpad : <nl> parts : <nl> prime : [ - . / * ] <nl> <nl> # Since this is supposed to be built against ubuntu 16 . 04 , we need to manually <nl> - # install gcc7 , and this is a workaround to achieve this . <nl> + # install gcc8 , and this is a workaround to achieve this . <nl> # This part can be safely removed when build . snapcraft . io will allow <nl> # to build against 18 . 04 . <nl> - gcc7 : <nl> + gcc8 : <nl> plugin : nil <nl> build - packages : <nl> - libmpc - dev <nl> parts : <nl> override - build : | <nl> set - x <nl> snapcraftctl build <nl> - sudo apt install gcc - 7 g + + - 7 - o Debug : : pkgProblemResolver = yes - - no - install - recommends - y <nl> - sudo apt - mark auto gcc - 7 g + + - 7 <nl> + sudo apt install gcc - 8 g + + - 8 - o Debug : : pkgProblemResolver = yes - - no - install - recommends - y <nl> + sudo apt - mark auto gcc - 8 g + + - 8 <nl> sudo rm - f / etc / apt / sources . list . d / ubuntu - toolchain - r . list <nl> prime : [ - . / * ] <nl> | Try building snap version with GCC 8 . | telegramdesktop/tdesktop | 1331e7089e486d73788069dafbfc54634cb3abc0 | 2019-06-04T11:23:14Z |
mmm a / tensorflow / compiler / xla / service / gpu / BUILD <nl> ppp b / tensorflow / compiler / xla / service / gpu / BUILD <nl> tf_cc_test ( <nl> ] , <nl> ) <nl> <nl> + cc_library ( <nl> + name = " nvptx_constants " , <nl> + hdrs = [ " nvptx_constants . h " ] , <nl> + ) <nl> + <nl> cc_library ( <nl> name = " gpu_transfer_manager " , <nl> srcs = [ " gpu_transfer_manager . cc " ] , <nl> hdrs = [ " gpu_transfer_manager . h " ] , <nl> deps = [ <nl> " : infeed_manager " , <nl> - " : nvptx_compiler " , <nl> + " : nvptx_constants " , <nl> " : outfeed_manager " , <nl> " / / tensorflow / compiler / xla : literal " , <nl> " / / tensorflow / compiler / xla : literal_util " , <nl> cc_library ( <nl> " : ir_emission_utils " , <nl> " : ir_emitter " , <nl> " : multi_output_fusion " , <nl> + " : nvptx_constants " , <nl> " : partition_assignment " , <nl> " : stream_assignment " , <nl> " : stream_executor_util " , <nl> mmm a / tensorflow / compiler / xla / service / gpu / gpu_transfer_manager . cc <nl> ppp b / tensorflow / compiler / xla / service / gpu / gpu_transfer_manager . cc <nl> limitations under the License . <nl> # include " llvm / IR / DataLayout . h " <nl> # include " tensorflow / compiler / xla / literal . h " <nl> # include " tensorflow / compiler / xla / literal_util . h " <nl> - # include " tensorflow / compiler / xla / service / gpu / nvptx_compiler . h " <nl> + # include " tensorflow / compiler / xla / service / gpu / nvptx_constants . h " <nl> # include " tensorflow / compiler / xla / service / gpu / outfeed_manager . h " <nl> # include " tensorflow / compiler / xla / shape_util . h " <nl> # include " tensorflow / compiler / xla / status_macros . h " <nl> Status GpuTransferManager : : TransferLiteralFromOutfeed ( <nl> static std : : unique_ptr < xla : : TransferManager > CreateNVPTXTransferManager ( ) { <nl> return absl : : make_unique < xla : : gpu : : GpuTransferManager > ( <nl> / * id = * / stream_executor : : cuda : : kCudaPlatformId , <nl> - / * pointer_size = * / llvm : : DataLayout ( xla : : gpu : : NVPTXCompiler : : kDataLayout ) <nl> + / * pointer_size = * / llvm : : DataLayout ( xla : : gpu : : kDataLayout ) <nl> . getPointerSize ( 0 / * default address space * / ) ) ; <nl> } <nl> <nl> mmm a / tensorflow / compiler / xla / service / gpu / nvptx_compiler . cc <nl> ppp b / tensorflow / compiler / xla / service / gpu / nvptx_compiler . cc <nl> limitations under the License . <nl> # include " tensorflow / compiler / xla / service / gpu / ir_emitter_unnested . h " <nl> # include " tensorflow / compiler / xla / service / gpu / llvm_gpu_backend / nvptx_backend_lib . h " <nl> # include " tensorflow / compiler / xla / service / gpu / multi_output_fusion . h " <nl> + # include " tensorflow / compiler / xla / service / gpu / nvptx_constants . h " <nl> # include " tensorflow / compiler / xla / service / gpu / partition_assignment . h " <nl> # include " tensorflow / compiler / xla / service / gpu / stream_assignment . h " <nl> # include " tensorflow / compiler / xla / service / gpu / stream_executor_util . h " <nl> limitations under the License . <nl> namespace xla { <nl> namespace gpu { <nl> <nl> - / * static * / const char * NVPTXCompiler : : kTargetTriple = " nvptx64 - nvidia - cuda " ; <nl> - / * static * / const char * NVPTXCompiler : : kDataLayout = <nl> - " e - i64 : 64 - i128 : 128 - v16 : 16 - v32 : 32 - n16 : 32 : 64 " ; <nl> - <nl> namespace { <nl> <nl> namespace tracing = tensorflow : : tracing ; <nl> mmm a / tensorflow / compiler / xla / service / gpu / nvptx_compiler . 
h <nl> ppp b / tensorflow / compiler / xla / service / gpu / nvptx_compiler . h <nl> class NVPTXCompiler : public LLVMCompiler { <nl> } ; <nl> } <nl> <nl> - / / The triple that represents our target . <nl> - static const char * kTargetTriple ; <nl> - <nl> - / / The data layout of the emitted module . Copied from computeDataLayout in <nl> - / / NVPTXTargetMachine . cpp . <nl> - static const char * kDataLayout ; <nl> - <nl> private : <nl> / / The size in bytes of a pointer . Used by ShapeSizeBytesFunction . <nl> const int64 pointer_size_ ; <nl> new file mode 100644 <nl> index 0000000000000 . . 67fa0020aa05c <nl> mmm / dev / null <nl> ppp b / tensorflow / compiler / xla / service / gpu / nvptx_constants . h <nl> <nl> + / * Copyright 2019 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + <nl> + # ifndef TENSORFLOW_COMPILER_XLA_SERVICE_GPU_NVPTX_CONSTANTS_H_ <nl> + # define TENSORFLOW_COMPILER_XLA_SERVICE_GPU_NVPTX_CONSTANTS_H_ <nl> + <nl> + namespace xla { <nl> + namespace gpu { <nl> + <nl> + / / The triple that represents our target . <nl> + constexpr char kTargetTriple [ ] = " nvptx64 - nvidia - cuda " ; <nl> + <nl> + / / The data layout of the emitted module . Copied from computeDataLayout in <nl> + / / NVPTXTargetMachine . cpp . <nl> + constexpr char kDataLayout [ ] = " e - i64 : 64 - i128 : 128 - v16 : 16 - v32 : 32 - n16 : 32 : 64 " ; <nl> + <nl> + } / / namespace gpu <nl> + } / / namespace xla <nl> + <nl> + # endif / / TENSORFLOW_COMPILER_XLA_SERVICE_GPU_NVPTX_CONSTANTS_H_ <nl> | [ XLA : GPU ] Extract constants from nvptx_compiler . | tensorflow/tensorflow | 69ef0cd72e48c2dc6a9245e6923c2f403fe63b71 | 2019-06-07T09:38:54Z |
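The XLA entry above moves kTargetTriple and kDataLayout from static NVPTXCompiler members into a small header of namespace-scope constexpr arrays, so the transfer manager no longer has to depend on the whole compiler target. A standalone sketch of that shape; the namespace name below is invented, while the string values are copied from the diff.

    #include <cstdio>

    // Header-only constants: constexpr char arrays at namespace scope can be
    // included from any translation unit without pulling in the compiler class.
    namespace nvptx {
    constexpr char kTargetTriple[] = "nvptx64-nvidia-cuda";
    constexpr char kDataLayout[] = "e-i64:64-i128:128-v16:16-v32:32-n16:32:64";
    }  // namespace nvptx

    int main()
    {
        std::printf("%s\n%s\n", nvptx::kTargetTriple, nvptx::kDataLayout);
    }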
mmm a / third_party / gpus / cuda / BUILD <nl> ppp b / third_party / gpus / cuda / BUILD <nl> genrule ( <nl> ] , <nl> cmd = if_cuda ( <nl> # Under cuda config , create all the symbolic links to the actual cuda files <nl> - " OUTPUTDIR = ` readlink - f $ ( @ D ) / . . / . . / . . ` ; cd third_party / gpus / cuda ; OUTPUTDIR = $ $ OUTPUTDIR . / cuda_config . sh - - check ; " , <nl> + " OUTPUTDIR = ` readlink - f $ ( @ D ) / . . / . . / . . ` ; cd ` dirname $ ( location : cuda_config . sh ) ` ; OUTPUTDIR = $ $ OUTPUTDIR . / cuda_config . sh - - check ; " , <nl> <nl> # Under non - cuda config , create all dummy files to make the build go through <nl> " ; " . join ( [ <nl> genrule ( <nl> ] , <nl> cmd = if_cuda ( <nl> # Under cuda config , create the symbolic link to the actual cuda . config <nl> - " ln - sf ` readlink - f third_party / gpus / cuda / cuda . config ` $ ( @ D ) / ; " , <nl> + " configfile = $ ( location : cuda . config ) ; ln - sf ` readlink - f $ $ { configfile # * / * / * / } ` $ ( @ D ) / ; " , <nl> <nl> # Under non - cuda config , create the dummy file <nl> " ; " . join ( [ <nl> | Fix genrules that didn ' t work when TensorFlow was imported as a submodule and compiled with - - config = cuda . | tensorflow/tensorflow | dbc7edb41702876251ac4f40f36f846e86692351 | 2016-04-07T23:06:47Z |
mmm a / stdlib / public / SwiftShims / CMakeLists . txt <nl> ppp b / stdlib / public / SwiftShims / CMakeLists . txt <nl> endif ( ) <nl> swift_install_in_component ( DIRECTORY " $ { clang_headers_location } / " <nl> DESTINATION " lib / swift / clang " <nl> COMPONENT clang - builtin - headers <nl> - FILES_MATCHING PATTERN " * . h " ) <nl> + PATTERN " * . h " ) <nl> <nl> if ( SWIFT_BUILD_STATIC_STDLIB ) <nl> swift_install_in_component ( DIRECTORY " $ { clang_headers_location } / " <nl> DESTINATION " lib / swift_static / clang " <nl> COMPONENT clang - builtin - headers <nl> - FILES_MATCHING PATTERN " * . h " ) <nl> + PATTERN " * . h " ) <nl> endif ( ) <nl> <nl> <nl> file ( TO_CMAKE_PATH " $ { LLVM_LIBRARY_OUTPUT_INTDIR } " <nl> swift_install_in_component ( DIRECTORY " $ { _SWIFT_SHIMS_PATH_TO_CLANG_BUILD } / lib / clang " <nl> DESTINATION " lib " <nl> COMPONENT clang - builtin - headers - in - clang - resource - dir <nl> - FILES_MATCHING PATTERN " * . h " ) <nl> + PATTERN " * . h " ) <nl> | Merge remote - tracking branch ' origin / main ' into next | apple/swift | 70cadcfcb2dfc87dadc77f319f1a7eb0e8d26a4f | 2020-10-09T21:08:36Z |
mmm a / examples / wrapper / server . cpp <nl> ppp b / examples / wrapper / server . cpp <nl> <nl> # include " wrapper / server . hpp " <nl> # include " wrapper / timer . hpp " <nl> + # include " api . h " <nl> # include < iostream > <nl> <nl> using namespace std ; <nl> using namespace swoole ; <nl> <nl> - class MyTimer : Timer <nl> - { <nl> - public : <nl> - MyTimer ( long ms , bool interval ) : <nl> - Timer ( ms , interval ) <nl> - { <nl> - <nl> - } <nl> - <nl> - MyTimer ( long ms ) : <nl> - Timer ( ms ) <nl> - { <nl> - <nl> - } <nl> - <nl> - protected : <nl> - virtual void callback ( void ) ; <nl> - int count = 0 ; <nl> - } ; <nl> - <nl> class MyServer : public Server <nl> { <nl> public : <nl> class MyServer : public Server <nl> <nl> virtual void onTask ( int task_id , int src_worker_id , const DataBuffer & data ) ; <nl> virtual void onFinish ( int task_id , const DataBuffer & data ) ; <nl> - <nl> - protected : <nl> - MyTimer * timer ; <nl> } ; <nl> <nl> void MyServer : : onReceive ( int fd , const DataBuffer & data ) <nl> void MyServer : : onReceive ( int fd , const DataBuffer & data ) <nl> <nl> void MyServer : : onPacket ( const DataBuffer & data , ClientInfo & clientInfo ) <nl> { <nl> - printf ( " recv , length = % d , str = % s , client = % s : % d \ n " , data . length , ( char * ) data . buffer , clientInfo . address , clientInfo . port ) ; <nl> + printf ( " recv , length = % ld , str = % s , client = % s : % d \ n " , data . length , ( char * ) data . buffer , clientInfo . address , clientInfo . port ) ; <nl> char resp_data [ SW_BUFFER_SIZE_STD ] ; <nl> int n = snprintf ( resp_data , SW_BUFFER_SIZE_STD , ( char * ) " Server : % * s \ n " , ( int ) data . length , ( char * ) data . buffer ) ; <nl> auto sent_data = DataBuffer ( resp_data , n ) ; <nl> void MyServer : : onStart ( ) <nl> printf ( " server start \ n " ) ; <nl> } <nl> <nl> - void MyServer : : onWorkerStart ( int worker_id ) <nl> - { <nl> - / / timer = new MyTimer ( 1000 ) ; <nl> - } <nl> - <nl> - void MyTimer : : callback ( ) <nl> + static void timer1 ( swTimer * timer , swTimer_node * tnode ) <nl> { <nl> + static int count = 0 ; <nl> printf ( " # % d \ thello world \ n " , count ) ; <nl> if ( count > 9 ) <nl> { <nl> - this - > clear ( ) ; <nl> + swoole_timer_clear ( tnode - > id ) ; <nl> } <nl> count + + ; <nl> } <nl> <nl> + void MyServer : : onWorkerStart ( int worker_id ) <nl> + { <nl> + / / swoole_timer_tick ( 1000 , timer1 , nullptr ) ; <nl> + } <nl> + <nl> int main ( int argc , char * * argv ) <nl> { <nl> if ( argc < 2 ) <nl> { <nl> - MyTimer t ( 1000 ) ; <nl> + swoole_timer_tick ( 1000 , timer1 , nullptr ) ; <nl> event_wait ( ) ; <nl> } <nl> else <nl> new file mode 100644 <nl> index 0000000000 . . dd170ad7f9 <nl> mmm / dev / null <nl> ppp b / include / api . h <nl> <nl> + / * <nl> + + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> + | Swoole | <nl> + + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> + | Copyright ( c ) 2012 - 2018 The Swoole Group | <nl> + + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> + | This source file is subject to version 2 . 0 of the Apache license , | <nl> + | that is bundled with this package in the file LICENSE , and is | <nl> + | available through the world - wide - web at the following url : | <nl> + | http : / / www . apache . org / licenses / LICENSE - 2 . 0 . html | <nl> + | If you did not receive a copy of the Apache2 . 
0 license and are unable | <nl> + | to obtain it through the world - wide - web , please send a note to | <nl> + | license @ swoole . com so we can mail you a copy immediately . | <nl> + + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> + | Author : Tianfeng Han < mikan . tenny @ gmail . com > | <nl> + + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> + * / <nl> + <nl> + # ifndef _SW_API_H_ <nl> + # define _SW_API_H_ <nl> + <nl> + # ifdef __cplusplus <nl> + extern " C " <nl> + { <nl> + # endif <nl> + <nl> + # include " swoole . h " <nl> + # include " coroutine_c_api . h " <nl> + <nl> + long swoole_timer_after ( long ms , swTimerCallback callback , void * private_data ) ; <nl> + long swoole_timer_tick ( long ms , swTimerCallback callback , void * private_data ) ; <nl> + long swoole_timer_add ( long ms , uchar persistent , swTimerCallback callback , void * private_data ) ; <nl> + uchar swoole_timer_exists ( long timer_id ) ; <nl> + uchar swoole_timer_clear ( long timer_id ) ; <nl> + <nl> + static inline uchar swoole_event_add ( int fd , int events , int fdtype ) <nl> + { <nl> + return SwooleG . main_reactor - > add ( SwooleG . main_reactor , fd , fdtype | events ) = = SW_OK ; <nl> + } <nl> + <nl> + static inline uchar swoole_event_set ( int fd , int events , int fdtype ) <nl> + { <nl> + return SwooleG . main_reactor - > set ( SwooleG . main_reactor , fd , fdtype | events ) = = SW_OK ; <nl> + } <nl> + <nl> + static inline uchar swoole_event_del ( int fd ) <nl> + { <nl> + return SwooleG . main_reactor - > del ( SwooleG . main_reactor , fd ) ; <nl> + } <nl> + <nl> + # ifdef __cplusplus <nl> + } <nl> + # endif <nl> + <nl> + # endif / * _SW_API_H_ * / <nl> mmm a / include / swoole . h <nl> ppp b / include / swoole . h <nl> static sw_inline int swReactor_event_error ( int fdtype ) <nl> <nl> static sw_inline int swReactor_fdtype ( int fdtype ) <nl> { <nl> - return fdtype & ( ~ SW_EVENT_READ ) & ( ~ SW_EVENT_WRITE ) & ( ~ SW_EVENT_ERROR ) ; <nl> + return fdtype & ( ~ SW_EVENT_READ ) & ( ~ SW_EVENT_WRITE ) & ( ~ SW_EVENT_ERROR ) & ( ~ SW_EVENT_ONCE ) ; <nl> } <nl> <nl> static sw_inline int swReactor_events ( int fdtype ) <nl> deleted file mode 100755 <nl> index 5f0ba902ad . . 0000000000 <nl> mmm a / include / wrapper / timer . hpp <nl> ppp / dev / null <nl> <nl> - / * <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | Swoole | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | This source file is subject to version 2 . 0 of the Apache license , | <nl> - | that is bundled with this package in the file LICENSE , and is | <nl> - | available through the world - wide - web at the following url : | <nl> - | http : / / www . apache . org / licenses / LICENSE - 2 . 0 . html | <nl> - | If you did not receive a copy of the Apache2 . 0 license and are unable | <nl> - | to obtain it through the world - wide - web , please send a note to | <nl> - | license @ swoole . com so we can mail you a copy immediately . | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - | Author : Tianfeng Han < mikan . tenny @ gmail . com > | <nl> - + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> - * / <nl> - <nl> - # pragma once <nl> - <nl> - # include " base . 
hpp " <nl> - # include < map > <nl> - <nl> - using namespace std ; <nl> - <nl> - namespace swoole <nl> - { <nl> - class Timer <nl> - { <nl> - public : <nl> - Timer ( long ms , bool interval ) ; <nl> - Timer ( long ms ) ; <nl> - ~ Timer ( ) <nl> - { <nl> - clear ( ) ; <nl> - } <nl> - <nl> - swTimer_node * getNode ( ) <nl> - { <nl> - return m_tnode ; <nl> - } <nl> - <nl> - void setNode ( swTimer_node * tnode ) <nl> - { <nl> - m_tnode = tnode ; <nl> - } <nl> - <nl> - void clear ( ) <nl> - { <nl> - if ( m_tnode ) <nl> - { <nl> - Timer : : del ( m_tnode ) ; <nl> - m_tnode = NULL ; <nl> - id = - 1 ; <nl> - interval = 0 ; <nl> - } <nl> - } <nl> - <nl> - static void _onAfter ( swTimer * timer , swTimer_node * tnode ) ; <nl> - static void _onTick ( swTimer * timer , swTimer_node * tnode ) ; <nl> - static void init ( int msec ) ; <nl> - <nl> - static bool clear ( long id ) ; <nl> - static bool exists ( long id ) ; <nl> - <nl> - protected : <nl> - virtual void callback ( void ) = 0 ; <nl> - static long add ( int ms , Timer * object , bool tick ) ; <nl> - static bool del ( swTimer_node * tnode ) ; <nl> - <nl> - bool interval ; <nl> - long id ; <nl> - swTimer_node * m_tnode ; <nl> - } ; <nl> - <nl> - static map < long , Timer * > timer_map ; <nl> - } <nl> mmm a / src / wrapper / timer . cc <nl> ppp b / src / wrapper / timer . cc <nl> <nl> - / * <nl> + / * * <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> | Swoole | <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - + <nl> * / <nl> <nl> - # include " wrapper / timer . hpp " <nl> + # include " api . h " <nl> + # include " wrapper / base . hpp " <nl> + # include " server . h " <nl> <nl> using namespace std ; <nl> + using namespace swoole ; <nl> <nl> - namespace swoole <nl> + long swoole_timer_add ( long ms , uchar persistent , swTimerCallback callback , void * private_data ) <nl> { <nl> - Timer : : Timer ( long ms ) <nl> - { <nl> - id = Timer : : add ( ms , this , true ) ; <nl> - interval = true ; <nl> - } <nl> - <nl> - Timer : : Timer ( long ms , bool _interval ) <nl> - { <nl> - id = Timer : : add ( ms , this , _interval ) ; <nl> - interval = _interval ; <nl> - } <nl> - <nl> - void Timer : : _onAfter ( swTimer * timer , swTimer_node * tnode ) <nl> - { <nl> - timer - > _current_id = tnode - > id ; <nl> - Timer * _this = ( Timer * ) tnode - > data ; <nl> - _this - > callback ( ) ; <nl> - timer - > _current_id = - 1 ; <nl> - Timer : : del ( tnode ) ; <nl> - } <nl> - <nl> - void Timer : : _onTick ( swTimer * timer , swTimer_node * tnode ) <nl> - { <nl> - timer - > _current_id = tnode - > id ; <nl> - Timer * _this = ( Timer * ) tnode - > data ; <nl> - _this - > callback ( ) ; <nl> - timer - > _current_id = - 1 ; <nl> - if ( tnode - > remove ) <nl> - { <nl> - Timer : : del ( tnode ) ; <nl> - } <nl> - } <nl> - <nl> - long Timer : : add ( int ms , Timer * object , bool tick ) <nl> - { <nl> - if ( SwooleG . serv & & swIsMaster ( ) ) <nl> - { <nl> - swWarn ( " cannot use timer in master process . " ) ; <nl> - return SW_ERR ; <nl> - } <nl> - if ( ms > 86400000 ) <nl> - { <nl> - swWarn ( " The given parameters is too big . " ) ; <nl> - return SW_ERR ; <nl> - } <nl> if ( ms < = 0 ) <nl> { <nl> swWarn ( " Timer must be greater than 0 " ) ; <nl> return SW_ERR ; <nl> } <nl> <nl> - if ( ! 
swIsTaskWorker ( ) ) <nl> - { <nl> - check_reactor ( ) ; <nl> - } <nl> - swTimerCallback timer_func ; <nl> - if ( tick ) <nl> - { <nl> - timer_func = Timer : : _onTick ; <nl> - } <nl> - else <nl> - { <nl> - timer_func = Timer : : _onAfter ; <nl> - } <nl> - <nl> - swTimer_node * tnode = swTimer_add ( & SwooleG . timer , ms , tick , ( void * ) object , timer_func ) ; <nl> - if ( tnode = = NULL ) <nl> + swTimer_node * tnode = swTimer_add ( & SwooleG . timer , ms , persistent , private_data , callback ) ; <nl> + if ( tnode = = nullptr ) <nl> { <nl> swWarn ( " addtimer failed . " ) ; <nl> return SW_ERR ; <nl> } <nl> else <nl> { <nl> - object - > setNode ( tnode ) ; <nl> - timer_map [ tnode - > id ] = object ; <nl> return tnode - > id ; <nl> } <nl> } <nl> <nl> - bool Timer : : del ( swTimer_node * tnode ) <nl> + long swoole_timer_after ( long ms , swTimerCallback callback , void * private_data ) <nl> { <nl> - if ( ! SwooleG . timer . set ) <nl> - { <nl> - swWarn ( " no timer " ) ; <nl> - return false ; <nl> - } <nl> - if ( timer_map . erase ( tnode - > id ) = = 0 ) <nl> - { <nl> - return false ; <nl> - } <nl> - return swTimer_del ( & SwooleG . timer , tnode ) = = SW_TRUE ; <nl> + return swoole_timer_add ( ms , SW_FALSE , callback , private_data ) ; <nl> } <nl> <nl> - bool Timer : : clear ( long id ) <nl> + long swoole_timer_tick ( long ms , swTimerCallback callback , void * private_data ) <nl> { <nl> - map < long , Timer * > : : iterator iter = timer_map . find ( id ) ; <nl> - if ( iter = = timer_map . end ( ) ) <nl> - { <nl> - return false ; <nl> - } <nl> - <nl> - swTimer_node * tnode = iter - > second - > getNode ( ) ; <nl> - if ( tnode - > id = = SwooleG . timer . _current_id ) <nl> - { <nl> - tnode - > remove = 1 ; <nl> - return true ; <nl> - } <nl> - else <nl> - { <nl> - return Timer : : del ( tnode ) ; <nl> - } <nl> + return swoole_timer_add ( ms , SW_TRUE , callback , private_data ) ; <nl> } <nl> <nl> - bool Timer : : exists ( long id ) <nl> + uchar swoole_timer_exists ( long timer_id ) <nl> { <nl> - if ( ! SwooleG . timer . set ) <nl> + if ( ! SwooleG . timer . initialized ) <nl> { <nl> swWarn ( " no timer " ) ; <nl> return false ; <nl> } <nl> - return timer_map . find ( id ) = = timer_map . end ( ) ; <nl> + auto tnode = swTimer_get ( & SwooleG . timer , timer_id ) ; <nl> + return ( tnode & & ! tnode - > remove ) ; <nl> } <nl> + <nl> + uchar swoole_timer_clear ( long timer_id ) <nl> + { <nl> + return swTimer_del ( & SwooleG . timer , swTimer_get ( & SwooleG . timer , timer_id ) ) ; <nl> } <nl> + <nl> | add api . h | swoole/swoole-src | 1fc22642031e93ffb8bdd66a5b5fe8270f1e5f47 | 2019-02-05T14:42:30Z |
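The swoole entry above replaces the C++ Timer wrapper class with a plain C API (swoole_timer_tick, swoole_timer_after, swoole_timer_clear) declared in the new api.h. The usage sketch below is pieced together from the example in the diff; it assumes the swoole source tree from this commit, and event_wait() is the helper the example in examples/wrapper/server.cpp calls.

    // Builds only against the swoole tree from this commit (api.h and the wrapper headers).
    #include "api.h"
    #include <cstdio>

    static void on_tick(swTimer *timer, swTimer_node *tnode)
    {
        (void) timer;
        static int count = 0;
        std::printf("tick #%d\n", count);
        if (++count == 5)
        {
            swoole_timer_clear(tnode->id);  // stop this periodic timer by id
        }
    }

    int main()
    {
        long id = swoole_timer_tick(1000, on_tick, nullptr);  // fire every 1000 ms
        if (id < 0)
        {
            return 1;
        }
        event_wait();  // run the reactor loop, as the example does
        return 0;
    }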
mmm a / xbmc / GUIInfoManager . cpp <nl> ppp b / xbmc / GUIInfoManager . cpp <nl> infomap fanart_labels [ ] = { { " color1 " , FANART_COLOR1 } , <nl> infomap skin_labels [ ] = { { " currenttheme " , SKIN_THEME } , <nl> { " currentcolourtheme " , SKIN_COLOUR_THEME } } ; <nl> <nl> + infomap window_bools [ ] = { { " ismedia " , WINDOW_IS_MEDIA } , <nl> + { " isactive " , WINDOW_IS_ACTIVE } , <nl> + { " istopmost " , WINDOW_IS_TOPMOST } , <nl> + { " isvisible " , WINDOW_IS_VISIBLE } , <nl> + { " previous " , WINDOW_PREVIOUS } , <nl> + { " next " , WINDOW_NEXT } } ; <nl> + <nl> + infomap control_labels [ ] = { { " hasfocus " , CONTROL_HAS_FOCUS } , <nl> + { " isvisible " , CONTROL_IS_VISIBLE } , <nl> + { " isenabled " , CONTROL_IS_ENABLED } , <nl> + { " getlabel " , CONTROL_GET_LABEL } } ; <nl> + <nl> void CGUIInfoManager : : SplitInfoString ( const CStdString & infoString , vector < pair < CStdString , CStdString > > & info ) <nl> { <nl> / / our string is of the form : <nl> int CGUIInfoManager : : TranslateSingleString ( const CStdString & strCondition ) <nl> else if ( property = = " hastheme " ) <nl> return AddMultiInfo ( GUIInfo ( SKIN_HAS_THEME , ConditionalStringParameter ( info [ 1 ] . second ) ) ) ; <nl> } <nl> + else if ( category = = " window " ) <nl> + { <nl> + if ( property = = " property " ) <nl> + { / / TODO : this doesn ' t support foo . xml <nl> + int winID = 0 ; <nl> + if ( ! info [ 0 ] . second . IsEmpty ( ) ) <nl> + winID = CButtonTranslator : : TranslateWindow ( info [ 0 ] . second ) ; <nl> + if ( winID ! = WINDOW_INVALID ) <nl> + return AddMultiInfo ( GUIInfo ( WINDOW_PROPERTY , winID , ConditionalStringParameter ( info [ 1 ] . second ) ) ) ; <nl> + } <nl> + for ( size_t i = 0 ; i < sizeof ( window_bools ) / sizeof ( infomap ) ; i + + ) <nl> + { <nl> + if ( property = = window_bools [ i ] . str ) <nl> + { / / TODO : The parameter for these should really be on the first not the second property <nl> + if ( info [ 1 ] . second . Find ( " xml " ) > = 0 ) <nl> + return AddMultiInfo ( GUIInfo ( window_bools [ i ] . val , 0 , ConditionalStringParameter ( info [ 1 ] . second ) ) ) ; <nl> + int winID = CButtonTranslator : : TranslateWindow ( info [ 1 ] . second ) ; <nl> + if ( winID ! = WINDOW_INVALID ) <nl> + return AddMultiInfo ( GUIInfo ( window_bools [ i ] . val , winID , 0 ) ) ; <nl> + return 0 ; <nl> + } <nl> + } <nl> + } <nl> + else if ( category = = " control " ) <nl> + { <nl> + for ( size_t i = 0 ; i < sizeof ( control_labels ) / sizeof ( infomap ) ; i + + ) <nl> + { <nl> + if ( property = = control_labels [ i ] . str ) <nl> + { / / TODO : The parameter for these should really be on the first not the second property <nl> + int controlID = atoi ( info [ 1 ] . second . c_str ( ) ) ; <nl> + if ( controlID ) <nl> + return AddMultiInfo ( GUIInfo ( control_labels [ i ] . val , controlID , 0 ) ) ; <nl> + return 0 ; <nl> + } <nl> + } <nl> + } <nl> + else if ( category = = " controlgroup " & & property = = " hasfocus " ) <nl> + { <nl> + int groupID = atoi ( info [ 0 ] . second . c_str ( ) ) ; <nl> + if ( groupID ) <nl> + return AddMultiInfo ( GUIInfo ( CONTROL_GROUP_HAS_FOCUS , groupID , atoi ( info [ 1 ] . second . c_str ( ) ) ) ) ; <nl> + } <nl> } <nl> else if ( info . size ( ) = = 3 ) <nl> { <nl> int CGUIInfoManager : : TranslateSingleString ( const CStdString & strCondition ) <nl> else if ( strTest . Equals ( " playlist . isrepeat " ) ) ret = PLAYLIST_ISREPEAT ; <nl> else if ( strTest . Equals ( " playlist . 
isrepeatone " ) ) ret = PLAYLIST_ISREPEATONE ; <nl> } <nl> - else if ( strCategory . Left ( 6 ) . Equals ( " window " ) ) <nl> - { <nl> - CStdString info = strTest . Mid ( strCategory . GetLength ( ) + 1 ) ; <nl> - / / special case for window . xml parameter , fails above <nl> - if ( info . Left ( 5 ) . Equals ( " xml ) . " ) ) <nl> - info = info . Mid ( 5 , info . GetLength ( ) + 1 ) ; <nl> - if ( info . Left ( 9 ) . Equals ( " property ( " ) ) <nl> - { <nl> - int winID = 0 ; <nl> - if ( strTest . Left ( 7 ) . Equals ( " window ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 7 , strTest . Find ( " ) " , 7 ) - 7 ) . ToLower ( ) ) ; <nl> - winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - } <nl> - if ( winID ! = WINDOW_INVALID ) <nl> - { <nl> - int compareString = ConditionalStringParameter ( info . Mid ( 9 , info . GetLength ( ) - 10 ) ) ; <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_PROPERTY , winID , compareString ) ) ; <nl> - } <nl> - } <nl> - else if ( info . Left ( 9 ) . Equals ( " isactive ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 16 , strTest . GetLength ( ) - 17 ) . ToLower ( ) ) ; <nl> - if ( window . Find ( " xml " ) > = 0 ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_ACTIVE , 0 , ConditionalStringParameter ( window ) ) ) ; <nl> - int winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - if ( winID ! = WINDOW_INVALID ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_ACTIVE , winID , 0 ) ) ; <nl> - } <nl> - else if ( info . Left ( 7 ) . Equals ( " ismedia " ) ) return WINDOW_IS_MEDIA ; <nl> - else if ( info . Left ( 10 ) . Equals ( " istopmost ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 17 , strTest . GetLength ( ) - 18 ) . ToLower ( ) ) ; <nl> - if ( window . Find ( " xml " ) > = 0 ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_TOPMOST , 0 , ConditionalStringParameter ( window ) ) ) ; <nl> - int winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - if ( winID ! = WINDOW_INVALID ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_TOPMOST , winID , 0 ) ) ; <nl> - } <nl> - else if ( info . Left ( 10 ) . Equals ( " isvisible ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 17 , strTest . GetLength ( ) - 18 ) . ToLower ( ) ) ; <nl> - if ( window . Find ( " xml " ) > = 0 ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_VISIBLE , 0 , ConditionalStringParameter ( window ) ) ) ; <nl> - int winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - if ( winID ! = WINDOW_INVALID ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_IS_VISIBLE , winID , 0 ) ) ; <nl> - } <nl> - else if ( info . Left ( 9 ) . Equals ( " previous ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 16 , strTest . GetLength ( ) - 17 ) . ToLower ( ) ) ; <nl> - if ( window . Find ( " xml " ) > = 0 ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_PREVIOUS , 0 , ConditionalStringParameter ( window ) ) ) ; <nl> - int winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - if ( winID ! = WINDOW_INVALID ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_PREVIOUS , winID , 0 ) ) ; <nl> - } <nl> - else if ( info . Left ( 5 ) . Equals ( " next ( " ) ) <nl> - { <nl> - CStdString window ( strTest . Mid ( 12 , strTest . GetLength ( ) - 13 ) . ToLower ( ) ) ; <nl> - if ( window . Find ( " xml " ) > = 0 ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_NEXT , 0 , ConditionalStringParameter ( window ) ) ) ; <nl> - int winID = CButtonTranslator : : TranslateWindow ( window ) ; <nl> - if ( winID ! 
= WINDOW_INVALID ) <nl> - return AddMultiInfo ( GUIInfo ( WINDOW_NEXT , winID , 0 ) ) ; <nl> - } <nl> - } <nl> - else if ( strTest . Left ( 17 ) . Equals ( " control . hasfocus ( " ) ) <nl> - { <nl> - int controlID = atoi ( strTest . Mid ( 17 , strTest . GetLength ( ) - 18 ) . c_str ( ) ) ; <nl> - if ( controlID ) <nl> - return AddMultiInfo ( GUIInfo ( CONTROL_HAS_FOCUS , controlID , 0 ) ) ; <nl> - } <nl> - else if ( strTest . Left ( 18 ) . Equals ( " control . isvisible ( " ) ) <nl> - { <nl> - int controlID = atoi ( strTest . Mid ( 18 , strTest . GetLength ( ) - 19 ) . c_str ( ) ) ; <nl> - if ( controlID ) <nl> - return AddMultiInfo ( GUIInfo ( CONTROL_IS_VISIBLE , controlID , 0 ) ) ; <nl> - } <nl> - else if ( strTest . Left ( 18 ) . Equals ( " control . isenabled ( " ) ) <nl> - { <nl> - int controlID = atoi ( strTest . Mid ( 18 , strTest . GetLength ( ) - 19 ) . c_str ( ) ) ; <nl> - if ( controlID ) <nl> - return AddMultiInfo ( GUIInfo ( CONTROL_IS_ENABLED , controlID , 0 ) ) ; <nl> - } <nl> - else if ( strTest . Left ( 17 ) . Equals ( " control . getlabel ( " ) ) <nl> - { <nl> - int controlID = atoi ( strTest . Mid ( 17 , strTest . GetLength ( ) - 18 ) . c_str ( ) ) ; <nl> - if ( controlID ) <nl> - return AddMultiInfo ( GUIInfo ( CONTROL_GET_LABEL , controlID , 0 ) ) ; <nl> - } <nl> - else if ( strTest . Left ( 13 ) . Equals ( " controlgroup ( " ) ) <nl> - { <nl> - int groupID = atoi ( strTest . Mid ( 13 ) . c_str ( ) ) ; <nl> - int controlID = 0 ; <nl> - int controlPos = strTest . Find ( " . hasfocus ( " ) ; <nl> - if ( controlPos > 0 ) <nl> - controlID = atoi ( strTest . Mid ( controlPos + 10 ) . c_str ( ) ) ; <nl> - if ( groupID ) <nl> - { <nl> - return AddMultiInfo ( GUIInfo ( CONTROL_GROUP_HAS_FOCUS , groupID , controlID ) ) ; <nl> - } <nl> - } <nl> <nl> return ret ; <nl> } <nl> | move window . * and control * to the info parser | xbmc/xbmc | a3ba58c09e7b4292bc7ef079971de953281df34b | 2011-08-02T04:25:32Z |
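The GUIInfoManager entry above swaps long if/else chains of string tests for small infomap tables (name/value pairs) scanned in a loop. A standalone sketch of that table-driven lookup; the table contents echo the diff, but lookup() itself is only an illustration, not XBMC code.

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    struct infomap { const char *str; int val; };

    enum { CONTROL_HAS_FOCUS = 1, CONTROL_IS_VISIBLE, CONTROL_IS_ENABLED, CONTROL_GET_LABEL };

    static const infomap control_labels[] = { { "hasfocus",  CONTROL_HAS_FOCUS },
                                              { "isvisible", CONTROL_IS_VISIBLE },
                                              { "isenabled", CONTROL_IS_ENABLED },
                                              { "getlabel",  CONTROL_GET_LABEL } };

    // Adding a new property becomes one table row instead of another else-if branch.
    static int lookup(const char *property)
    {
        for (size_t i = 0; i < sizeof(control_labels) / sizeof(infomap); i++)
            if (std::strcmp(property, control_labels[i].str) == 0)
                return control_labels[i].val;
        return 0;  // unknown property
    }

    int main()
    {
        std::printf("%d\n", lookup("isvisible"));  // prints 2
    }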
mmm a / examples / reflection . py <nl> ppp b / examples / reflection . py <nl> <nl> initial = scalar ( ) <nl> loss = scalar ( ) <nl> <nl> + <nl> # ti . cfg . arch = ti . cuda <nl> <nl> @ ti . layout <nl> def laplacian ( t , i , j ) : <nl> - 4 * p [ t , i , j ] + p [ t , i , j - 1 ] + p [ t , i , j + 1 ] + p [ t , i + 1 , j ] + <nl> p [ t , i - 1 , j ] ) <nl> <nl> + <nl> @ ti . func <nl> def gradient ( t , i , j ) : <nl> - return inv_dx * ti . Vector ( [ p [ t , i , j + 1 ] - p [ t , i , j - 1 ] , p [ t , i , j + 1 ] + p [ t , i , j - 1 ] ] ) <nl> + return inv_dx * ti . Vector ( <nl> + [ p [ t , i , j + 1 ] - p [ t , i , j - 1 ] , p [ t , i , j + 1 ] + p [ t , i , j - 1 ] ] ) <nl> + <nl> <nl> @ ti . kernel <nl> def initialize ( ) : <nl> def initialize ( ) : <nl> <nl> @ ti . kernel <nl> def fdtd ( t : ti . i32 ) : <nl> - for i in range ( n_grid ) : # Parallelized over GPU threads <nl> + for i in range ( n_grid ) : # Parallelized over GPU threads <nl> for j in range ( n_grid ) : <nl> laplacian_p = laplacian ( t - 2 , i , j ) <nl> laplacian_q = laplacian ( t - 1 , i , j ) <nl> def fdtd ( t : ti . i32 ) : <nl> c * c * dt * dt + c * alpha * dt ) * laplacian_q - p [ <nl> t - 2 , i , j ] - c * alpha * dt * laplacian_p <nl> <nl> + <nl> @ ti . kernel <nl> - def render ( t : ti . i32 ) : <nl> - for i in range ( n_grid ) : # Parallelized over GPU threads <nl> + def render_reflect ( t : ti . i32 ) : <nl> + for i in range ( n_grid ) : # Parallelized over GPU threads <nl> for j in range ( n_grid ) : <nl> grad = gradient ( t , i , j ) <nl> normal = ti . Vector . normalized ( ti . Vector ( [ grad [ 0 ] , 1 . 0 , grad [ 1 ] ] ) ) <nl> rendered [ i , j ] = normal [ 1 ] <nl> <nl> <nl> + @ ti . func <nl> + def pattern ( i , j ) : <nl> + return ti . cast ( ti . floor ( i / ( n_grid / 8 ) ) + ti . floor ( j / ( n_grid / 8 ) ) , <nl> + ti . i32 ) % 2 <nl> + <nl> + <nl> @ ti . kernel <nl> + def render_refract ( t : ti . i32 ) : <nl> + for i in range ( n_grid ) : # Parallelized over GPU threads <nl> + for j in range ( n_grid ) : <nl> + grad = gradient ( t , i , j ) <nl> + # normal = ti . Vector . normalized ( ti . Vector ( [ grad [ 0 ] , 1 . 0 , grad [ 1 ] ] ) ) <nl> + <nl> + scale = 2 . 0 <nl> + sample_x = i + grad [ 0 ] * scale <nl> + sample_y = j + grad [ 1 ] * scale <nl> + sample_x = ti . min ( n_grid - 1 , ti . max ( 0 , sample_x ) ) <nl> + sample_y = ti . min ( n_grid - 1 , ti . max ( 0 , sample_y ) ) <nl> + sample_xi = ti . cast ( ti . floor ( sample_x ) , ti . i32 ) <nl> + sample_yi = ti . cast ( ti . floor ( sample_y ) , ti . i32 ) <nl> + <nl> + frac_x = sample_x - sample_xi <nl> + frac_y = sample_y - sample_yi <nl> + <nl> + rendered [ i , j ] = ( 1 . 0 - frac_x ) * ( <nl> + ( 1 - frac_y ) * target [ sample_xi , sample_yi ] + frac_y * target [ <nl> + sample_xi , sample_yi + 1 ] ) + frac_x * ( <nl> + ( 1 - frac_y ) * target [ sample_xi + 1 , sample_yi ] + frac_y * target [ <nl> + sample_xi + 1 , sample_yi + 1 ] <nl> + ) <nl> + <nl> + @ ti . kernel <nl> def compute_loss ( t : ti . i32 ) : <nl> for i in range ( n_grid ) : <nl> for j in range ( n_grid ) : <nl> ti . atomic_add ( loss , dx * dx * ti . sqr ( target [ i , j ] - p [ t , i , j ] ) ) <nl> <nl> + <nl> @ ti . kernel <nl> def apply_grad ( ) : <nl> # gradient descent <nl> for i , j in initial . grad : <nl> initial [ i , j ] - = learning_rate * initial . 
grad [ i , j ] <nl> <nl> + <nl> def forward ( output = None ) : <nl> steps_mul = 1 <nl> interval = vis_interval <nl> def forward ( output = None ) : <nl> fdtd ( t ) <nl> if ( t + 1 ) % interval = = 0 : <nl> img = np . zeros ( shape = ( n_grid , n_grid ) , dtype = np . float32 ) <nl> - render ( t ) <nl> + render_refract ( t ) <nl> for i in range ( n_grid ) : <nl> for j in range ( n_grid ) : img [ i , j ] = rendered [ i , j ] <nl> img = cv2 . resize ( img , fx = 4 , fy = 4 , dsize = None ) <nl> def forward ( output = None ) : <nl> loss [ None ] = 0 <nl> compute_loss ( steps - 1 ) <nl> <nl> + <nl> def main ( ) : <nl> # initialization <nl> - target_img = cv2 . imread ( ' iclr2020 . png ' ) [ : , : , 0 ] / 255 . 0 <nl> + target_img = cv2 . imread ( ' iclr2020 . png ' ) [ : , : , 0 ] / 255 . 0 <nl> target_img - = target_img . mean ( ) <nl> cv2 . imshow ( ' target ' , target_img * amplify + 0 . 5 ) <nl> # print ( target_img . min ( ) , target_img . max ( ) ) <nl> def main ( ) : <nl> for opt in range ( 200 ) : <nl> with ti . Tape ( loss ) : <nl> forward ( ) <nl> - <nl> + <nl> print ( ' Iter ' , opt , ' Loss = ' , loss [ None ] ) <nl> <nl> apply_grad ( ) <nl> - <nl> + <nl> forward ( ' optimized ' ) <nl> <nl> + <nl> if __name__ = = ' __main__ ' : <nl> main ( ) <nl> | refact image | taichi-dev/taichi | 258f8192ba937a31073593c92bf1bd8d150c308d | 2019-09-19T15:26:45Z |
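render_refract in the taichi example above offsets each pixel by the height-field gradient and then samples the target image bilinearly, clamping the sample position to the grid. The standalone sketch below shows that interpolation step; the math is language-agnostic but is written in C++ here for consistency with the other sketches, and the base index is clamped one cell earlier so the +1 neighbour always stays in bounds.

    #include <algorithm>
    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Bilinear sample of an n x n grid at a fractional coordinate (x, y).
    static float bilinear(const std::vector<float> &grid, int n, float x, float y)
    {
        x = std::min(float(n - 1), std::max(0.0f, x));
        y = std::min(float(n - 1), std::max(0.0f, y));
        int xi = std::min(n - 2, int(std::floor(x)));
        int yi = std::min(n - 2, int(std::floor(y)));
        float fx = x - xi, fy = y - yi;
        auto at = [&](int i, int j) { return grid[i * n + j]; };
        return (1 - fx) * ((1 - fy) * at(xi, yi)     + fy * at(xi, yi + 1)) +
               fx       * ((1 - fy) * at(xi + 1, yi) + fy * at(xi + 1, yi + 1));
    }

    int main()
    {
        const int n = 4;
        std::vector<float> g(n * n);
        for (int i = 0; i < n * n; i++) g[i] = float(i);
        std::printf("%f\n", bilinear(g, n, 1.5f, 2.25f));  // blend of the 4 neighbours
    }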
mmm a / build / deps / github_hashes / facebook / fbthrift - rev . txt <nl> ppp b / build / deps / github_hashes / facebook / fbthrift - rev . txt <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit e86cad65f7ff818b930431c0085137bc9803b1f5 <nl> + Subproject commit c86b60f003d96c8b2701deab9c5121f86ec21873 <nl> mmm a / build / deps / github_hashes / facebook / folly - rev . txt <nl> ppp b / build / deps / github_hashes / facebook / folly - rev . txt <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit e56907dbd133811ccd36f6eabd6ce66d6dc35477 <nl> + Subproject commit 6da515d7e7eadc5f0048891387413eab3ec9dbdc <nl> mmm a / build / deps / github_hashes / facebook / wangle - rev . txt <nl> ppp b / build / deps / github_hashes / facebook / wangle - rev . txt <nl> @ @ - 1 + 1 @ @ <nl> - Subproject commit 316dfde9760783af9cd223bf30cbec95d599ed81 <nl> + Subproject commit 96c99afb06fee807ad46c90c6b17b2f79512f9f3 <nl> | Updating submodules | facebook/watchman | b0ee5a546fe44c5d7c7c7f4675f1742a43633780 | 2020-08-26T13:23:46Z |
mmm a / test / attr / attr_autoclosure . swift <nl> ppp b / test / attr / attr_autoclosure . swift <nl> <nl> / / RUN : % target - typecheck - verify - swift <nl> - / / RUN : not % target - swift - frontend - typecheck % s 2 > & 1 | % FileCheck % s <nl> - / / No errors at invalid locations ! <nl> - / / CHECK - NOT : < unknown > : 0 : <nl> <nl> / / Simple case . <nl> var fn : @ autoclosure ( ) - > Int = 4 / / expected - error { { @ autoclosure may only be used on parameters } } expected - error { { cannot convert value of type ' Int ' to specified type ' ( ) - > Int ' } } <nl> | [ test ] Remove ' < unknown > : 0 ' check by FileCheck from test / attr / attr_autoclosure . swift | apple/swift | 019daba95491b18d03fdbfe466e075a7951f66c8 | 2017-02-02T01:49:32Z |
mmm a / src / library_gl . js <nl> ppp b / src / library_gl . js <nl> var LibraryGL = { <nl> sizeBytes = 8 ; <nl> break ; <nl> default : <nl> + # if USE_WEBGL2 <nl> + if ( GL . currentContext . version > = 2 & & ( dataType = = 0x8368 / * GL_UNSIGNED_INT_2_10_10_10_REV * / | | dataType = = 0x8D9F / * GL_INT_2_10_10_10_REV * / ) ) { <nl> + sizeBytes = 4 ; <nl> + break ; <nl> + } else { <nl> + / / else fall through <nl> + } <nl> + # endif <nl> console . error ( ' Invalid vertex attribute data type GLenum ' + dataType + ' passed to GL function ! ' ) ; <nl> } <nl> if ( dimension = = 0x80E1 / * GL_BGRA * / ) { <nl> mmm a / tests / test_browser . py <nl> ppp b / tests / test_browser . py <nl> def test_webgl2_backwards_compatibility_emulation ( self ) : <nl> def test_webgl_with_closure ( self ) : <nl> self . btest ( path_from_root ( ' tests ' , ' webgl_with_closure . cpp ' ) , args = [ ' - O2 ' , ' - s ' , ' USE_WEBGL2 = 1 ' , ' - - closure ' , ' 1 ' , ' - lGL ' ] , expected = ' 0 ' ) <nl> <nl> + # Tests that - s GL_ASSERTIONS = 1 and glVertexAttribPointer with packed types works <nl> + @ requires_graphics_hardware <nl> + def test_webgl2_packed_types ( self ) : <nl> + self . btest ( path_from_root ( ' tests ' , ' webgl2_draw_packed_triangle . c ' ) , args = [ ' - lGL ' , ' - s ' , ' USE_WEBGL2 = 1 ' , ' - s ' , ' GL_ASSERTIONS = 1 ' ] , expected = ' 0 ' ) <nl> + <nl> def test_sdl_touch ( self ) : <nl> for opts in [ [ ] , [ ' - O2 ' , ' - g1 ' , ' - - closure ' , ' 1 ' ] ] : <nl> print ( opts ) <nl> new file mode 100644 <nl> index 00000000000 . . 12c046d88d0 <nl> mmm / dev / null <nl> ppp b / tests / webgl2_draw_packed_triangle . c <nl> <nl> + / * <nl> + * Copyright 2018 The Emscripten Authors . All rights reserved . <nl> + * Emscripten is available under two separate licenses , the MIT license and the <nl> + * University of Illinois / NCSA Open Source License . Both these licenses can be <nl> + * found in the LICENSE file . <nl> + * / <nl> + <nl> + # include < stdio . h > <nl> + # include < stdlib . h > <nl> + # include < assert . h > <nl> + # include < emscripten / emscripten . h > <nl> + # include < emscripten / html5 . h > <nl> + # include < GLES3 / gl3 . h > <nl> + <nl> + GLuint compile_shader ( GLenum shaderType , const char * src ) <nl> + { <nl> + GLuint shader = glCreateShader ( shaderType ) ; <nl> + glShaderSource ( shader , 1 , & src , NULL ) ; <nl> + glCompileShader ( shader ) ; <nl> + <nl> + GLint isCompiled = 0 ; <nl> + glGetShaderiv ( shader , GL_COMPILE_STATUS , & isCompiled ) ; <nl> + if ( ! isCompiled ) <nl> + { <nl> + GLint maxLength = 0 ; <nl> + glGetShaderiv ( shader , GL_INFO_LOG_LENGTH , & maxLength ) ; <nl> + char * buf = ( char * ) malloc ( maxLength + 1 ) ; <nl> + glGetShaderInfoLog ( shader , maxLength , & maxLength , buf ) ; <nl> + printf ( " % s \ n " , buf ) ; <nl> + free ( buf ) ; <nl> + return 0 ; <nl> + } <nl> + <nl> + return shader ; <nl> + } <nl> + <nl> + GLuint create_program ( GLuint vertexShader , GLuint fragmentShader ) <nl> + { <nl> + GLuint program = glCreateProgram ( ) ; <nl> + glAttachShader ( program , vertexShader ) ; <nl> + glAttachShader ( program , fragmentShader ) ; <nl> + glBindAttribLocation ( program , 0 , " apos " ) ; <nl> + glBindAttribLocation ( program , 1 , " acolor " ) ; <nl> + glLinkProgram ( program ) ; <nl> + return program ; <nl> + } <nl> + <nl> + int main ( ) <nl> + { <nl> + EmscriptenWebGLContextAttributes attr ; <nl> + emscripten_webgl_init_context_attributes ( & attr ) ; <nl> + # ifdef EXPLICIT_SWAP <nl> + attr . 
explicitSwapControl = 1 ; <nl> + # endif <nl> + attr . majorVersion = 2 ; <nl> + EMSCRIPTEN_WEBGL_CONTEXT_HANDLE ctx = emscripten_webgl_create_context ( " # canvas " , & attr ) ; <nl> + assert ( ctx & & " Failed to create WebGL2 context " ) ; <nl> + emscripten_webgl_make_context_current ( ctx ) ; <nl> + <nl> + static const char vertex_shader [ ] = <nl> + " # version 100 \ n " <nl> + " attribute vec4 apos ; " <nl> + " attribute vec4 acolor ; " <nl> + " varying vec4 color ; " <nl> + " void main ( ) { " <nl> + " color = acolor ; " <nl> + " gl_Position = apos ; " <nl> + " } " ; <nl> + GLuint vs = compile_shader ( GL_VERTEX_SHADER , vertex_shader ) ; <nl> + <nl> + static const char fragment_shader [ ] = <nl> + " # version 100 \ n " <nl> + " precision lowp float ; " <nl> + " varying vec4 color ; " <nl> + " void main ( ) { " <nl> + " gl_FragColor = color ; " <nl> + " } " ; <nl> + GLuint fs = compile_shader ( GL_FRAGMENT_SHADER , fragment_shader ) ; <nl> + <nl> + GLuint program = create_program ( vs , fs ) ; <nl> + glUseProgram ( program ) ; <nl> + <nl> + static const uint32_t pos_and_color [ ] = { <nl> + / / 1 , 0 , y , x , a , b , g , r <nl> + 0x400b36cd , 0xc00003ff , <nl> + 0x400b3533 , 0xc00ffc00 , <nl> + 0x4004cc00 , 0xfff00000 , <nl> + } ; <nl> + <nl> + GLuint vbo ; <nl> + glGenBuffers ( 1 , & vbo ) ; <nl> + glBindBuffer ( GL_ARRAY_BUFFER , vbo ) ; <nl> + glBufferData ( GL_ARRAY_BUFFER , sizeof ( pos_and_color ) , pos_and_color , GL_STATIC_DRAW ) ; <nl> + glVertexAttribPointer ( 0 , 4 , GL_INT_2_10_10_10_REV , GL_TRUE , 8 , 0 ) ; <nl> + assert ( glGetError ( ) = = GL_NO_ERROR & & " glVertexAttribPointer with GL_INT_2_10_10_10_REV failed " ) ; <nl> + glVertexAttribPointer ( 1 , 4 , GL_UNSIGNED_INT_2_10_10_10_REV , GL_TRUE , 8 , ( void * ) 4 ) ; <nl> + assert ( glGetError ( ) = = GL_NO_ERROR & & " glVertexAttribPointer with GL_UNSIGNED_INT_2_10_10_10_REV failed " ) ; <nl> + <nl> + glEnableVertexAttribArray ( 0 ) ; <nl> + glEnableVertexAttribArray ( 1 ) ; <nl> + <nl> + glClearColor ( 0 . 3f , 0 . 3f , 0 . 3f , 1 ) ; <nl> + glClear ( GL_COLOR_BUFFER_BIT ) ; <nl> + glDrawArrays ( GL_TRIANGLES , 0 , 3 ) ; <nl> + <nl> + # ifdef EXPLICIT_SWAP <nl> + emscripten_webgl_commit_frame ( ) ; <nl> + # endif <nl> + <nl> + # ifdef REPORT_RESULT <nl> + REPORT_RESULT ( 0 ) ; <nl> + # endif <nl> + } <nl> | Add validation for 10_10_10 packed vertices . ( ) | emscripten-core/emscripten | f251db16c5cda1dc58ffe105cbf1186a8c62984b | 2018-11-08T20:52:09Z |
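The emscripten entry above teaches the GL_ASSERTIONS size check that GL_UNSIGNED_INT_2_10_10_10_REV and GL_INT_2_10_10_10_REV attributes occupy 4 bytes under WebGL 2, and its test feeds glVertexAttribPointer hand-packed 10/10/10/2 words. The sketch below shows how such a word can be packed for the unsigned-normalized case; pack_unorm_2_10_10_10_rev is a made-up helper, not part of the GL API.

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // GL_UNSIGNED_INT_2_10_10_10_REV layout: x in bits 0-9, y in 10-19,
    // z in 20-29, w in 30-31, each component stored normalized.
    static uint32_t pack_unorm_2_10_10_10_rev(float x, float y, float z, float w)
    {
        auto q10 = [](float v) { return uint32_t(std::lround(v * 1023.0f)) & 0x3FFu; };
        auto q2  = [](float v) { return uint32_t(std::lround(v * 3.0f))    & 0x3u;  };
        return q10(x) | (q10(y) << 10) | (q10(z) << 20) | (q2(w) << 30);
    }

    int main()
    {
        // Opaque red: matches the 0xc00003ff colour word in the test's vertex data.
        std::printf("0x%08X\n", pack_unorm_2_10_10_10_rev(1.0f, 0.0f, 0.0f, 1.0f));
    }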
mmm a / bin / import_voxforge . py <nl> ppp b / bin / import_voxforge . py <nl> <nl> # ! / usr / bin / env python <nl> import sys <nl> - from glob import glob <nl> - from os import makedirs , path <nl> import urllib2 <nl> import tarfile <nl> import pandas <nl> + import re <nl> + import unicodedata <nl> + from glob import glob <nl> + from os import makedirs , path <nl> from BeautifulSoup import BeautifulSoup <nl> from tensorflow . python . platform import gfile <nl> from tensorflow . contrib . learn . python . learn . datasets import base <nl> def _download_and_preprocess_data ( data_dir ) : <nl> refs = [ l [ ' href ' ] for l in links if " . tgz " in l [ ' href ' ] ] <nl> def filename_of ( x ) : return path . split ( x ) [ 1 ] <nl> <nl> - for i , ref in enumerate ( refs ) : <nl> + for i , ref in enumerate ( refs [ : 5 ] ) : <nl> print ( ' Downloading { } / { } files ' . format ( i + 1 , len ( refs ) ) ) <nl> download_url = voxforge_url + ' / ' + ref <nl> base . maybe_download ( filename_of ( download_url ) , archive_dir , download_url ) <nl> def _generate_dataset ( data_dir , data_set ) : <nl> with open ( promts_file ) as f : <nl> for line in f : <nl> id = line . split ( ' ' ) [ 0 ] . split ( ' / ' ) [ - 1 ] <nl> - transcript = line . split ( ' ' ) [ 1 : ] <nl> - transcript = ' ' . join ( transcript ) . replace ( ' \ n ' , ' ' ) . lower ( ) <nl> + sentence = ' ' . join ( line . split ( ' ' ) [ 1 : ] ) <nl> + sentence = re . sub ( " [ ^ a - z ' ] " , " " , sentence . strip ( ) . lower ( ) ) <nl> + transcript = " " <nl> + for token in sentence . split ( " " ) : <nl> + word = token . strip ( ) <nl> + if word ! = " " and word ! = " " : <nl> + transcript + = word + " " <nl> + transcript = unicodedata . normalize ( " NFKD " , unicode ( transcript . strip ( ) ) ) \ <nl> + . encode ( " ascii " , " ignore " ) \ <nl> + . decode ( " ascii " , " ignore " ) <nl> wav_file = path . join ( promts_file [ : - 11 ] , " wav / " + id + " . wav " ) <nl> if gfile . Exists ( wav_file ) : <nl> wav_filesize = path . getsize ( wav_file ) <nl> | add importer for voxforge data | mozilla/DeepSpeech | 3f152139a713abe3a7abdba521e9450c19988159 | 2017-07-13T12:20:44Z |
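Editorial note on the importer change above: besides capping the download loop at the first five archives, it normalizes each prompt line before writing the CSV — lowercase, drop everything outside a-z and apostrophes, collapse the remaining whitespace, then NFKD-normalize and strip non-ASCII. The following C++ rendition is for illustration only (the importer itself is Python) and covers just the ASCII cleanup step, leaving the Unicode normalization aside.

    #include <cctype>
    #include <string>

    // Keeps lowercase letters and apostrophes; every other character acts as a
    // word separator, and runs of separators collapse to a single space.
    std::string cleanTranscript(const std::string &line) {
        std::string out;
        bool pendingSpace = false;
        for (unsigned char c : line) {
            c = static_cast<unsigned char>(std::tolower(c));
            if ((c >= 'a' && c <= 'z') || c == '\'') {
                if (pendingSpace && !out.empty()) out += ' ';
                pendingSpace = false;
                out += static_cast<char>(c);
            } else {
                pendingSpace = true;
            }
        }
        return out;   // e.g. "It's 5 o'clock!" -> "it's o'clock"
    }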
mmm a / xbmc / utils / StringUtils . cpp <nl> ppp b / xbmc / utils / StringUtils . cpp <nl> bool StringUtils : : IsInteger ( const CStdString & str ) <nl> return i = = str . size ( ) & & n > 0 ; <nl> } <nl> <nl> + bool StringUtils : : Test ( ) <nl> + { <nl> + bool ret = true ; <nl> + <nl> + ret | = IsNaturalNumber ( " 10 " ) ; <nl> + ret | = IsNaturalNumber ( " 10 " ) ; <nl> + ret | = IsNaturalNumber ( " 0 " ) ; <nl> + ret | = ! IsNaturalNumber ( " 1 0 " ) ; <nl> + ret | = ! IsNaturalNumber ( " 1 . 0 " ) ; <nl> + ret | = ! IsNaturalNumber ( " 1 . 1 " ) ; <nl> + ret | = ! IsNaturalNumber ( " 0x1 " ) ; <nl> + ret | = ! IsNaturalNumber ( " blah " ) ; <nl> + ret | = ! IsNaturalNumber ( " 120 h " ) ; <nl> + ret | = ! IsNaturalNumber ( " " ) ; <nl> + ret | = ! IsNaturalNumber ( " " ) ; <nl> + <nl> + ret | = IsInteger ( " 10 " ) ; <nl> + ret | = IsInteger ( " - 10 " ) ; <nl> + ret | = IsInteger ( " 0 " ) ; <nl> + ret | = ! IsInteger ( " 1 0 " ) ; <nl> + ret | = ! IsInteger ( " 1 . 0 " ) ; <nl> + ret | = ! IsInteger ( " 1 . 1 " ) ; <nl> + ret | = ! IsInteger ( " 0x1 " ) ; <nl> + ret | = ! IsInteger ( " blah " ) ; <nl> + ret | = ! IsInteger ( " 120 h " ) ; <nl> + ret | = ! IsInteger ( " " ) ; <nl> + ret | = ! IsInteger ( " " ) ; <nl> + <nl> + return ret ; <nl> + } <nl> + <nl> void StringUtils : : RemoveCRLF ( CStdString & strLine ) <nl> { <nl> while ( strLine . size ( ) & & ( strLine . Right ( 1 ) = = " \ n " | | strLine . Right ( 1 ) = = " \ r " ) ) <nl> mmm a / xbmc / utils / StringUtils . h <nl> ppp b / xbmc / utils / StringUtils . h <nl> class StringUtils <nl> * / <nl> static CStdString SecondsToTimeString ( long seconds , TIME_FORMAT format = TIME_FORMAT_GUESS ) ; <nl> <nl> + / * ! \ brief check whether a string is a natural number . <nl> + Matches [ \ t ] * [ 0 - 9 ] + [ \ t ] * <nl> + \ param str the string to check <nl> + \ return true if the string is a natural number , false otherwise . <nl> + * / <nl> static bool IsNaturalNumber ( const CStdString & str ) ; <nl> + <nl> + / * ! \ brief check whether a string is an integer . <nl> + Matches [ \ t ] * [ \ - ] * [ 0 - 9 ] + [ \ t ] * <nl> + \ param str the string to check <nl> + \ return true if the string is an integer , false otherwise . <nl> + * / <nl> static bool IsInteger ( const CStdString & str ) ; <nl> static CStdString SizeToString ( int64_t size ) ; <nl> static const CStdString EmptyString ; <nl> class StringUtils <nl> static bool ValidateUUID ( const CStdString & uuid ) ; / / NB only validates syntax <nl> static double CompareFuzzy ( const CStdString & left , const CStdString & right ) ; <nl> static int FindBestMatch ( const CStdString & str , const CStdStringArray & strings , double & matchscore ) ; <nl> + <nl> + static bool Test ( ) ; <nl> private : <nl> static CStdString m_lastUUID ; <nl> } ; <nl> | add doxy for StringUtils : : IsNaturalNumber / Integer and a unit test | xbmc/xbmc | d9b04d175403925a47e4b80a926c83fc45cf4e28 | 2012-04-01T00:18:27Z |
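Editorial note on the commit above: the new doxygen comments pin down the accepted grammar — IsNaturalNumber matches [ \t]*[0-9]+[ \t]* and IsInteger matches [ \t]*[\-]*[0-9]+[ \t]* — which the added Test() cases enumerate. As a standalone illustration of that documented grammar (not the XBMC implementation itself), a simplified checker could look like this:

    #include <cctype>
    #include <cstring>

    // Mirrors the documented IsInteger pattern [ \t]*[\-]*[0-9]+[ \t]*:
    // optional blanks, any number of minus signs, at least one digit, blanks.
    bool IsIntegerLike(const char *s) {
        std::size_t i = 0, len = std::strlen(s), digits = 0;
        while (i < len && (s[i] == ' ' || s[i] == '\t')) ++i;   // leading blanks
        while (i < len && s[i] == '-') ++i;                     // sign characters
        while (i < len && std::isdigit(static_cast<unsigned char>(s[i]))) { ++i; ++digits; }
        while (i < len && (s[i] == ' ' || s[i] == '\t')) ++i;   // trailing blanks
        return i == len && digits > 0;
    }

With this reading, "  -10 " and "0" are accepted while "1.0", "0x1" and "120 h" are rejected, matching the expectations in the new unit test.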
mmm a / hphp / runtime / base / array - provenance . cpp <nl> ppp b / hphp / runtime / base / array - provenance . cpp <nl> TypedValue tagTV ( TypedValue tv ) { <nl> return tv ; <nl> } <nl> <nl> + const ArrayData * makeEmptyArray ( const ArrayData * base , const Tag & tag ) { <nl> + assertx ( base - > empty ( ) ) ; <nl> + assertx ( base - > isStatic ( ) ) ; <nl> + assertx ( arrayWantsTag ( base ) ) ; <nl> + auto ad = base - > copy ( ) ; <nl> + arrprov : : setTag ( ad , tag ) ; <nl> + ArrayData : : GetScalarArray ( & ad ) ; <nl> + return ad ; <nl> + } <nl> + <nl> folly : : Optional < Tag > tagFromProgramCounter ( ) { <nl> auto const tag = fromLeaf ( <nl> [ & ] ( const ActRec * fp , Offset offset ) - > folly : : Optional < Tag > { <nl> mmm a / hphp / runtime / base / array - provenance . h <nl> ppp b / hphp / runtime / base / array - provenance . h <nl> void clearTag ( const ArrayData * ad ) ; <nl> * / <nl> TypedValue tagTV ( TypedValue tv ) ; <nl> <nl> + / * <nl> + * Produce a static empty array ( with the same kind and contents as ` base ` ) <nl> + * with the given provenance tag . <nl> + * <nl> + * You should be calling this with staticEmptyVecArray ( ) or <nl> + * staticEmptyDictArray ( ) <nl> + * / <nl> + const ArrayData * makeEmptyArray ( const ArrayData * base , const Tag & t ) ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> } } <nl> mmm a / hphp / runtime / base / runtime - option . h <nl> ppp b / hphp / runtime / base / runtime - option . h <nl> struct RuntimeOption { <nl> * RepoAuthoritativeMode wasn ' t built with this flag or if the \ <nl> * flag LogArrayProvenance is unset * / \ <nl> F ( bool , ArrayProvenance , false ) \ <nl> + / * Enable experimental array provenance opportunistic creation of \ <nl> + * tagged empty arrays * / \ <nl> + F ( bool , ArrayProvenancePromoteEmptyArrays , true ) \ <nl> / * Enable logging the source of vecs / dicts whose vec / dict - ness is \ <nl> * observed , e . g . through serialization * / \ <nl> F ( bool , LogArrayProvenance , false ) \ <nl> mmm a / hphp / runtime / vm / bytecode . cpp <nl> ppp b / hphp / runtime / vm / bytecode . cpp <nl> OPTBLD_INLINE void iopArray ( const ArrayData * a ) { <nl> vmStack ( ) . pushStaticArray ( a ) ; <nl> } <nl> <nl> + namespace { <nl> + <nl> + const ArrayData * makeEmptyArray ( const ArrayData * base ) { <nl> + if ( ! RuntimeOption : : EvalArrayProvenancePromoteEmptyArrays | | <nl> + ! base - > empty ( ) ) { <nl> + return base ; <nl> + } <nl> + assertx ( base - > empty ( ) ) ; <nl> + assertx ( base - > isStatic ( ) ) ; <nl> + <nl> + return arrprov : : makeEmptyArray ( base , * arrprov : : tagFromProgramCounter ( ) ) ; <nl> + } <nl> + <nl> + } <nl> + <nl> + OPTBLD_INLINE void iopVec ( const ArrayData * a ) { <nl> + assertx ( a - > isVecArray ( ) ) ; <nl> + vmStack ( ) . pushStaticVec ( makeEmptyArray ( a ) ) ; <nl> + } <nl> + <nl> OPTBLD_INLINE void iopDict ( const ArrayData * a ) { <nl> assertx ( a - > isDict ( ) ) ; <nl> - vmStack ( ) . pushStaticDict ( a ) ; <nl> + vmStack ( ) . pushStaticDict ( makeEmptyArray ( a ) ) ; <nl> } <nl> <nl> OPTBLD_INLINE void iopKeyset ( const ArrayData * a ) { <nl> OPTBLD_INLINE void iopKeyset ( const ArrayData * a ) { <nl> vmStack ( ) . pushStaticKeyset ( a ) ; <nl> } <nl> <nl> - OPTBLD_INLINE void iopVec ( const ArrayData * a ) { <nl> - assertx ( a - > isVecArray ( ) ) ; <nl> - vmStack ( ) . 
pushStaticVec ( a ) ; <nl> - } <nl> - <nl> OPTBLD_INLINE void iopNewArray ( uint32_t capacity ) { <nl> if ( capacity = = 0 ) { <nl> vmStack ( ) . pushArrayNoRc ( staticEmptyArray ( ) ) ; <nl> mmm a / hphp / runtime / vm / jit / irgen - basic . cpp <nl> ppp b / hphp / runtime / vm / jit / irgen - basic . cpp <nl> void emitArray ( IRGS & env , const ArrayData * x ) { <nl> push ( env , cns ( env , x ) ) ; <nl> } <nl> <nl> + namespace { <nl> + <nl> + const ArrayData * makeEmptyArray ( IRGS & env , const ArrayData * base ) { <nl> + if ( ! RuntimeOption : : EvalArrayProvenancePromoteEmptyArrays | | <nl> + ! base - > empty ( ) | | <nl> + curFunc ( env ) - > isProvenanceSkipFrame ( ) ) { <nl> + return base ; <nl> + } <nl> + assertx ( base - > empty ( ) ) ; <nl> + assertx ( base - > isStatic ( ) ) ; <nl> + <nl> + auto const unit = curUnit ( env ) ; <nl> + auto const filename = unit - > filepath ( ) ; <nl> + auto const line = unit - > getLineNumber ( bcOff ( env ) ) ; <nl> + <nl> + return arrprov : : makeEmptyArray ( base , arrprov : : Tag { filename , line } ) ; <nl> + } <nl> + <nl> + } <nl> + <nl> void emitVec ( IRGS & env , const ArrayData * x ) { <nl> assertx ( x - > isVecArray ( ) ) ; <nl> - push ( env , cns ( env , x ) ) ; <nl> + push ( env , cns ( env , makeEmptyArray ( env , x ) ) ) ; <nl> } <nl> <nl> void emitDict ( IRGS & env , const ArrayData * x ) { <nl> assertx ( x - > isDict ( ) ) ; <nl> - push ( env , cns ( env , x ) ) ; <nl> + push ( env , cns ( env , makeEmptyArray ( env , x ) ) ) ; <nl> } <nl> <nl> void emitKeyset ( IRGS & env , const ArrayData * x ) { <nl> mmm a / hphp / test / slow / array_provenance / ap - logging . php . expectf <nl> ppp b / hphp / test / slow / array_provenance / ap - logging . php . expectf <nl> <nl> <nl> - Notice : Observing vec in json_encode from % s / ap - logging . php : 6 in % s / ap - logging . php on line 7 <nl> + Notice : Observing vec in json_encode from % s / ap - logging . php : 5 in % s / ap - logging . php on line 7 <nl> string ( 9 ) " [ " hello " ] " <nl> <nl> - Notice : Observing vec in serialize from % s / ap - logging . php : 6 in % s / ap - logging . php on line 8 <nl> + Notice : Observing vec in serialize from % s / ap - logging . php : 5 in % s / ap - logging . php on line 8 <nl> string ( 18 ) " v : 1 : { s : 5 : " hello " ; } " <nl> <nl> - Notice : Observing vec in print_r from % s / ap - logging . php : 6 in % s / ap - logging . php on line 9 <nl> + Notice : Observing vec in print_r from % s / ap - logging . php : 5 in % s / ap - logging . php on line 9 <nl> string ( 25 ) " Vec <nl> ( <nl> [ 0 ] = > hello <nl> ) <nl> " <nl> <nl> - Notice : Observing vec in var_export from % s / ap - logging . php : 6 in % s / ap - logging . php on line 10 <nl> + Notice : Observing vec in var_export from % s / ap - logging . php : 5 in % s / ap - logging . php on line 10 <nl> string ( 18 ) " vec [ <nl> ' hello ' , <nl> ] " <nl> <nl> - Notice : Observing vec in fb_serialize from % s / ap - logging . php : 6 in % s / ap - logging . php on line 11 <nl> + Notice : Observing vec in fb_serialize from % s / ap - logging . php : 5 in % s / ap - logging . php on line 11 <nl> string ( 11 ) " hello " <nl> <nl> - Notice : Observing vec in gettype from % s / ap - logging . php : 6 in % s / ap - logging . php on line 12 <nl> + Notice : Observing vec in gettype from % s / ap - logging . php : 5 in % s / ap - logging . php on line 12 <nl> string ( 3 ) " vec " <nl> <nl> - Notice : Observing vec in is_array from % s / ap - logging . php : 6 in % s / ap - logging . 
php on line 13 <nl> + Notice : Observing vec in is_array from % s / ap - logging . php : 5 in % s / ap - logging . php on line 13 <nl> bool ( false ) <nl> \ No newline at end of file <nl> mmm a / hphp / test / slow / array_provenance / array - provenance . php . expectf <nl> ppp b / hphp / test / slow / array_provenance / array - provenance . php . expectf <nl> <nl> vec ( 1 ) { <nl> string ( 5 ) " hello " <nl> } <nl> - string ( % d ) " % s / array - provenance . php : 5 " <nl> + string ( % d ) " % s / array - provenance . php : 40 " <nl> dict ( 1 ) { <nl> [ 0 ] = > <nl> string ( 5 ) " hello " <nl> } <nl> - string ( % d ) " % s / array - provenance . php : 5 " <nl> + string ( % d ) " % s / array - provenance . php : 41 " <nl> dict ( 1 ) { <nl> [ 42 ] = > <nl> string ( 5 ) " hello " <nl> } <nl> - string ( % d ) " % s / array - provenance . php : 12 " <nl> + string ( % d ) " % s / array - provenance . php : 42 " <nl> dict ( 1 ) { <nl> [ " blargh ! " ] = > <nl> string ( 7 ) " goodbye " <nl> } <nl> - string ( % d ) " % s / array - provenance . php : 19 " <nl> + string ( % d ) " % s / array - provenance . php : 43 " <nl> dict ( 1 ) { <nl> [ 0 ] = > <nl> int ( 15 ) <nl> vec ( 1 ) { <nl> int ( 15 ) <nl> } <nl> string ( % d ) " % s / array - provenance . php : 45 " <nl> + string ( % d ) " % s / array - provenance . php : 47 " <nl> string ( % d ) " % s / array - provenance . php : 48 " <nl> - string ( % d ) " % s / array - provenance . php : 49 " <nl> string ( % d ) " % s / array - provenance . php : 53 " <nl> string ( % d ) " % s / array - provenance . php : 54 " <nl> string ( % d ) " % s / array - provenance . php : 58 " <nl> mmm a / hphp / test / slow / array_provenance / cast . php . expectf <nl> ppp b / hphp / test / slow / array_provenance / cast . php . expectf <nl> <nl> string ( % d ) " % s / cast . php : 4 " <nl> string ( % d ) " % s / cast . php : 8 " <nl> string ( % d ) " % s / cast . php : 4 " <nl> - string ( % d ) " % s / cast . php : 27 " <nl> + string ( % d ) " % s / cast . php : 26 " <nl> string ( % d ) " % s / cast . php : 8 " <nl> - string ( % d ) " % s / cast . php : 37 " <nl> + string ( % d ) " % s / cast . php : 36 " <nl> mmm a / hphp / test / slow / array_provenance / dv_cast_bug . php . expectf <nl> ppp b / hphp / test / slow / array_provenance / dv_cast_bug . php . expectf <nl> <nl> <nl> - Notice : Observing empty , static , dict in json_encode from unknown location in % s / array_provenance / dv_cast_bug . php on line 5 <nl> + Notice : Observing dict in json_encode from % s / dv_cast_bug . php : 5 in % s / array_provenance / dv_cast_bug . php on line 5 <nl> string ( 2 ) " { } " <nl> dict ( 0 ) { <nl> } <nl> <nl> - Notice : Observing empty , static , dict in json_encode from unknown location in % s / array_provenance / dv_cast_bug . php on line 7 <nl> + Notice : Observing dict in json_encode from % s / dv_cast_bug . php : 7 in % s / array_provenance / dv_cast_bug . php on line 7 <nl> string ( 2 ) " { } " <nl> | Make empty arrays special when we JIT them | facebook/hhvm | a03894dcb91f3337b764b6dc564a41f05e63acc8 | 2019-08-23T15:15:28Z |
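Editorial note on the HHVM commit above: it routes empty static vec/dict literals through the new arrprov::makeEmptyArray helper so each empty literal carries a provenance tag for its creation site, both in the interpreter (iopVec/iopDict) and in the JIT (emitVec/emitDict). A condensed sketch combining the helper and its guard — names are taken from the diff, it assumes the surrounding HHVM headers, and it is not a drop-in replacement:

    // Shared static empty arrays cannot be tagged in place, so a private copy
    // is tagged with the file:line of the literal and then re-interned as a
    // static ("scalar") array.
    const ArrayData* tagEmptyLiteral(const ArrayData* base, const arrprov::Tag& tag) {
      if (!RuntimeOption::EvalArrayProvenancePromoteEmptyArrays || !base->empty()) {
        return base;                       // feature off, or literal not empty
      }
      auto copy = base->copy();            // mutable copy of the empty array
      arrprov::setTag(copy, tag);          // attach the provenance tag
      ArrayData::GetScalarArray(&copy);    // intern the tagged copy as static
      return copy;
    }

This is also why the .expectf baselines above shift from "unknown location" to concrete file:line values: the tag is now decided where the empty literal is materialized.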
mmm a / Telegram / CMakeLists . txt <nl> ppp b / Telegram / CMakeLists . txt <nl> PRIVATE <nl> data / data_auto_download . h <nl> data / data_chat . cpp <nl> data / data_chat . h <nl> + data / data_chat_filters . cpp <nl> + data / data_chat_filters . h <nl> data / data_channel . cpp <nl> data / data_channel . h <nl> data / data_channel_admins . cpp <nl> mmm a / Telegram / SourceFiles / boxes / share_box . cpp <nl> ppp b / Telegram / SourceFiles / boxes / share_box . cpp <nl> void ShareBox : : Inner : : changeCheckState ( Chat * chat ) { <nl> const auto history = chat - > peer - > owner ( ) . history ( chat - > peer ) ; <nl> auto row = _chatsIndexed - > getRow ( history ) ; <nl> if ( ! row ) { <nl> - const auto rowsByLetter = _chatsIndexed - > addToEnd ( history ) ; <nl> - const auto it = rowsByLetter . find ( 0 ) ; <nl> - Assert ( it ! = rowsByLetter . cend ( ) ) ; <nl> - row = it - > second ; <nl> + row = _chatsIndexed - > addToEnd ( history ) . main ; <nl> } <nl> chat = getChat ( row ) ; <nl> if ( ! chat - > checkbox . checked ( ) ) { <nl> mmm a / Telegram / SourceFiles / core / application . cpp <nl> ppp b / Telegram / SourceFiles / core / application . cpp <nl> void Application : : switchDebugMode ( ) { <nl> } <nl> <nl> void Application : : switchWorkMode ( ) { <nl> - Global : : SetDialogsModeEnabled ( ! Global : : DialogsModeEnabled ( ) ) ; <nl> - Global : : SetDialogsMode ( Dialogs : : Mode : : All ) ; <nl> + Global : : SetDialogsFiltersEnabled ( ! Global : : DialogsFiltersEnabled ( ) ) ; <nl> + Global : : SetDialogsFilterId ( 0 ) ; <nl> Local : : writeUserSettings ( ) ; <nl> App : : restart ( ) ; <nl> } <nl> new file mode 100644 <nl> index 00000000000 . . b71d77cbbd4 <nl> mmm / dev / null <nl> ppp b / Telegram / SourceFiles / data / data_chat_filters . cpp <nl> <nl> + / * <nl> + This file is part of Telegram Desktop , <nl> + the official desktop application for the Telegram messaging service . <nl> + <nl> + For license and copyright information please follow this link : <nl> + https : / / github . com / telegramdesktop / tdesktop / blob / master / LEGAL <nl> + * / <nl> + # include " data / data_chat_filters . h " <nl> + <nl> + # include " history / history . h " <nl> + # include " data / data_peer . h " <nl> + # include " data / data_user . h " <nl> + # include " data / data_chat . h " <nl> + # include " data / data_channel . h " <nl> + <nl> + namespace Data { <nl> + <nl> + ChatFilter : : ChatFilter ( <nl> + const QString & title , <nl> + Flags flags , <nl> + base : : flat_set < not_null < History * > > always ) <nl> + : _title ( title ) <nl> + , _always ( std : : move ( always ) ) <nl> + , _flags ( flags ) { <nl> + } <nl> + <nl> + bool ChatFilter : : contains ( not_null < History * > history ) const { <nl> + const auto flag = [ & ] { <nl> + const auto peer = history - > peer ; <nl> + if ( const auto user = peer - > asUser ( ) ) { <nl> + return user - > isBot ( ) ? Flag : : Bots : Flag : : Users ; <nl> + } else if ( const auto chat = peer - > asChat ( ) ) { <nl> + return Flag : : PrivateGroups ; <nl> + } else if ( const auto channel = peer - > asChannel ( ) ) { <nl> + if ( channel - > isBroadcast ( ) ) { <nl> + return Flag : : Broadcasts ; <nl> + } else if ( channel - > isPublic ( ) ) { <nl> + return Flag : : PublicGroups ; <nl> + } else { <nl> + return Flag : : PrivateGroups ; <nl> + } <nl> + } else { <nl> + Unexpected ( " Peer type in ChatFilter : : contains . " ) ; <nl> + } <nl> + } ( ) ; <nl> + return false <nl> + | | ( ( _flags & flag ) <nl> + & & ( ! 
( _flags & Flag : : NoMuted ) | | ! history - > mute ( ) ) <nl> + & & ( ! ( _flags & Flag : : NoRead ) | | history - > unreadCountForBadge ( ) ) ) <nl> + | | _always . contains ( history ) ; <nl> + } <nl> + <nl> + } / / namespace Data <nl> new file mode 100644 <nl> index 00000000000 . . bc12246a0d4 <nl> mmm / dev / null <nl> ppp b / Telegram / SourceFiles / data / data_chat_filters . h <nl> <nl> + / * <nl> + This file is part of Telegram Desktop , <nl> + the official desktop application for the Telegram messaging service . <nl> + <nl> + For license and copyright information please follow this link : <nl> + https : / / github . com / telegramdesktop / tdesktop / blob / master / LEGAL <nl> + * / <nl> + # pragma once <nl> + <nl> + # include " base / flags . h " <nl> + <nl> + class History ; <nl> + <nl> + namespace Data { <nl> + <nl> + class ChatFilter final { <nl> + public : <nl> + enum class Flag : uchar { <nl> + Users = 0x01 , <nl> + PrivateGroups = 0x02 , <nl> + PublicGroups = 0x04 , <nl> + Broadcasts = 0x08 , <nl> + Bots = 0x10 , <nl> + NoMuted = 0x20 , <nl> + NoRead = 0x40 , <nl> + } ; <nl> + friend constexpr inline bool is_flag_type ( Flag ) { return true ; } ; <nl> + using Flags = base : : flags < Flag > ; <nl> + <nl> + ChatFilter ( ) = default ; <nl> + ChatFilter ( <nl> + const QString & title , <nl> + Flags flags , <nl> + base : : flat_set < not_null < History * > > always ) ; <nl> + <nl> + [ [ nodiscard ] ] bool contains ( not_null < History * > history ) const ; <nl> + <nl> + private : <nl> + QString _title ; <nl> + base : : flat_set < not_null < History * > > _always ; <nl> + Flags _flags ; <nl> + <nl> + } ; <nl> + <nl> + } / / namespace Data <nl> mmm a / Telegram / SourceFiles / data / data_folder . cpp <nl> ppp b / Telegram / SourceFiles / data / data_folder . cpp <nl> void Folder : : unreadEntryChanged ( <nl> / / return _unreadPosition . changes ( ) ; <nl> / / } <nl> <nl> - bool Folder : : toImportant ( ) const { <nl> - return false ; <nl> - } <nl> - <nl> int Folder : : fixedOnTopIndex ( ) const { <nl> return kArchiveFixOnTopIndex ; <nl> } <nl> mmm a / Telegram / SourceFiles / data / data_folder . h <nl> ppp b / Telegram / SourceFiles / data / data_folder . h <nl> class Folder final : public Dialogs : : Entry { <nl> TimeId adjustedChatListTimeId ( ) const override ; <nl> <nl> int fixedOnTopIndex ( ) const override ; <nl> - bool toImportant ( ) const override ; <nl> bool shouldBeInChatList ( ) const override ; <nl> int chatListUnreadCount ( ) const override ; <nl> bool chatListUnreadMark ( ) const override ; <nl> mmm a / Telegram / SourceFiles / data / data_session . cpp <nl> ppp b / Telegram / SourceFiles / data / data_session . cpp <nl> auto Session : : refreshChatListEntry ( Dialogs : : Key key ) <nl> auto result = RefreshChatListEntryResult ( ) ; <nl> result . changed = ! entry - > inChatList ( ) ; <nl> if ( result . changed ) { <nl> - const auto mainRow = entry - > addToChatList ( Mode : : All ) ; <nl> + const auto mainRow = entry - > addToChatList ( 0 ) ; <nl> _contactsNoChatsList . del ( key , mainRow ) ; <nl> } else { <nl> - result . moved = entry - > adjustByPosInChatList ( Mode : : All ) ; <nl> - } <nl> - if ( Global : : DialogsModeEnabled ( ) ) { <nl> - if ( entry - > toImportant ( ) ) { <nl> - result . importantChanged = ! entry - > inChatList ( Mode : : Important ) ; <nl> - if ( result . importantChanged ) { <nl> - entry - > addToChatList ( Mode : : Important ) ; <nl> - } else { <nl> - result . 
importantMoved = entry - > adjustByPosInChatList ( <nl> - Mode : : Important ) ; <nl> - } <nl> - } else if ( entry - > inChatList ( Mode : : Important ) ) { <nl> - entry - > removeFromChatList ( Mode : : Important ) ; <nl> - result . importantChanged = true ; <nl> - } <nl> - } <nl> + result . moved = entry - > adjustByPosInChatList ( 0 ) ; <nl> + } <nl> + / / if ( Global : : DialogsFiltersEnabled ( ) ) { / / # TODO filters <nl> + / / if ( entry - > toImportant ( ) ) { <nl> + / / result . importantChanged = ! entry - > inChatList ( Mode : : Important ) ; <nl> + / / if ( result . importantChanged ) { <nl> + / / entry - > addToChatList ( Mode : : Important ) ; <nl> + / / } else { <nl> + / / result . importantMoved = entry - > adjustByPosInChatList ( <nl> + / / Mode : : Important ) ; <nl> + / / } <nl> + / / } else if ( entry - > inChatList ( Mode : : Important ) ) { <nl> + / / entry - > removeFromChatList ( Mode : : Important ) ; <nl> + / / result . importantChanged = true ; <nl> + / / } <nl> + / / } <nl> return result ; <nl> } <nl> <nl> void Session : : removeChatListEntry ( Dialogs : : Key key ) { <nl> using namespace Dialogs ; <nl> <nl> const auto entry = key . entry ( ) ; <nl> - entry - > removeFromChatList ( Mode : : All ) ; <nl> - if ( Global : : DialogsModeEnabled ( ) ) { <nl> - entry - > removeFromChatList ( Mode : : Important ) ; <nl> - } <nl> + entry - > removeFromChatList ( 0 ) ; <nl> + / / if ( Global : : DialogsFiltersEnabled ( ) ) { / / # TODO filters <nl> + / / entry - > removeFromChatList ( Mode : : Important ) ; <nl> + / / } <nl> if ( _contactsList . contains ( key ) ) { <nl> if ( ! _contactsNoChatsList . contains ( key ) ) { <nl> _contactsNoChatsList . addByName ( key ) ; <nl> mmm a / Telegram / SourceFiles / data / data_types . h <nl> ppp b / Telegram / SourceFiles / data / data_types . h <nl> using UserId = int32 ; <nl> using ChatId = int32 ; <nl> using ChannelId = int32 ; <nl> using FolderId = int32 ; <nl> + using FilterId = int32 ; <nl> <nl> constexpr auto NoChannel = ChannelId ( 0 ) ; <nl> <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_entry . cpp <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_entry . cpp <nl> TimeId Entry : : adjustedChatListTimeId ( ) const { <nl> void Entry : : changedChatListPinHook ( ) { <nl> } <nl> <nl> - RowsByLetter & Entry : : chatListLinks ( Mode list ) { <nl> - return _chatListLinks [ static_cast < int > ( list ) ] ; <nl> + RowsByLetter * Entry : : chatListLinks ( FilterId filterId ) { <nl> + const auto i = _chatListLinks . find ( filterId ) ; <nl> + return ( i ! = end ( _chatListLinks ) ) ? & i - > second : nullptr ; <nl> } <nl> <nl> - const RowsByLetter & Entry : : chatListLinks ( Mode list ) const { <nl> - return _chatListLinks [ static_cast < int > ( list ) ] ; <nl> + const RowsByLetter * Entry : : chatListLinks ( FilterId filterId ) const { <nl> + const auto i = _chatListLinks . find ( filterId ) ; <nl> + return ( i ! = end ( _chatListLinks ) ) ? & i - > second : nullptr ; <nl> } <nl> <nl> - Row * Entry : : mainChatListLink ( Mode list ) const { <nl> - auto it = chatListLinks ( list ) . find ( 0 ) ; <nl> - Assert ( it ! = chatListLinks ( list ) . cend ( ) ) ; <nl> - return it - > second ; <nl> + not_null < Row * > Entry : : mainChatListLink ( FilterId filterId ) const { <nl> + const auto links = chatListLinks ( filterId ) ; <nl> + Assert ( links ! 
= nullptr ) ; <nl> + return links - > main ; <nl> } <nl> <nl> - PositionChange Entry : : adjustByPosInChatList ( Mode list ) { <nl> - const auto lnk = mainChatListLink ( list ) ; <nl> - const auto from = lnk - > pos ( ) ; <nl> - myChatsList ( list ) - > adjustByDate ( chatListLinks ( list ) ) ; <nl> - const auto to = lnk - > pos ( ) ; <nl> + Row * Entry : : maybeMainChatListLink ( FilterId filterId ) const { <nl> + const auto links = chatListLinks ( filterId ) ; <nl> + return links ? links - > main . get ( ) : nullptr ; <nl> + } <nl> + <nl> + PositionChange Entry : : adjustByPosInChatList ( FilterId filterId ) { <nl> + const auto links = chatListLinks ( filterId ) ; <nl> + Assert ( links ! = nullptr ) ; <nl> + const auto from = links - > main - > pos ( ) ; <nl> + myChatsList ( filterId ) - > adjustByDate ( * links ) ; <nl> + const auto to = links - > main - > pos ( ) ; <nl> return { from , to } ; <nl> } <nl> <nl> void Entry : : setChatListTimeId ( TimeId date ) { <nl> } <nl> } <nl> <nl> - int Entry : : posInChatList ( Dialogs : : Mode list ) const { <nl> - return mainChatListLink ( list ) - > pos ( ) ; <nl> + int Entry : : posInChatList ( FilterId filterId ) const { <nl> + return mainChatListLink ( filterId ) - > pos ( ) ; <nl> } <nl> <nl> - not_null < Row * > Entry : : addToChatList ( Mode list ) { <nl> - if ( ! inChatList ( list ) ) { <nl> - chatListLinks ( list ) = myChatsList ( list ) - > addToEnd ( _key ) ; <nl> - if ( list = = Mode : : All ) { <nl> - owner ( ) . unreadEntryChanged ( _key , true ) ; <nl> - } <nl> + not_null < Row * > Entry : : addToChatList ( FilterId filterId ) { <nl> + if ( const auto main = maybeMainChatListLink ( filterId ) ) { <nl> + return main ; <nl> } <nl> - return mainChatListLink ( list ) ; <nl> + const auto result = _chatListLinks . emplace ( <nl> + filterId , <nl> + myChatsList ( filterId ) - > addToEnd ( _key ) <nl> + ) . first - > second . main ; <nl> + if ( ! filterId ) { <nl> + owner ( ) . unreadEntryChanged ( _key , true ) ; <nl> + } <nl> + return result ; <nl> } <nl> <nl> - void Entry : : removeFromChatList ( Dialogs : : Mode list ) { <nl> - if ( inChatList ( list ) ) { <nl> - myChatsList ( list ) - > del ( _key ) ; <nl> - chatListLinks ( list ) . clear ( ) ; <nl> - if ( list = = Mode : : All ) { <nl> - owner ( ) . unreadEntryChanged ( _key , false ) ; <nl> - } <nl> + void Entry : : removeFromChatList ( FilterId filterId ) { <nl> + const auto i = _chatListLinks . find ( filterId ) ; <nl> + if ( i = = end ( _chatListLinks ) ) { <nl> + return ; <nl> + } <nl> + myChatsList ( filterId ) - > del ( _key ) ; <nl> + _chatListLinks . erase ( i ) ; <nl> + if ( ! filterId ) { <nl> + owner ( ) . unreadEntryChanged ( _key , false ) ; <nl> } <nl> } <nl> <nl> - void Entry : : removeChatListEntryByLetter ( Mode list , QChar letter ) { <nl> - Expects ( letter ! = 0 ) ; <nl> - <nl> - if ( inChatList ( list ) ) { <nl> - chatListLinks ( list ) . remove ( letter ) ; <nl> + void Entry : : removeChatListEntryByLetter ( FilterId filterId , QChar letter ) { <nl> + const auto i = _chatListLinks . find ( filterId ) ; <nl> + if ( i ! = end ( _chatListLinks ) ) { <nl> + i - > second . letters . remove ( letter ) ; <nl> } <nl> } <nl> <nl> void Entry : : addChatListEntryByLetter ( <nl> - Mode list , <nl> + FilterId filterId , <nl> QChar letter , <nl> not_null < Row * > row ) { <nl> - Expects ( letter ! = 0 ) ; <nl> - <nl> - if ( inChatList ( list ) ) { <nl> - chatListLinks ( list ) . emplace ( letter , row ) ; <nl> + const auto i = _chatListLinks . 
find ( filterId ) ; <nl> + if ( i ! = end ( _chatListLinks ) ) { <nl> + i - > second . letters . emplace ( letter , row ) ; <nl> } <nl> } <nl> <nl> void Entry : : updateChatListEntry ( ) const { <nl> if ( const auto main = App : : main ( ) ) { <nl> - if ( inChatList ( ) ) { <nl> - main - > repaintDialogRow ( <nl> - Mode : : All , <nl> - mainChatListLink ( Mode : : All ) ) ; <nl> - if ( inChatList ( Mode : : Important ) ) { <nl> - main - > repaintDialogRow ( <nl> - Mode : : Important , <nl> - mainChatListLink ( Mode : : Important ) ) ; <nl> - } <nl> + for ( const auto & [ filterId , links ] : _chatListLinks ) { <nl> + main - > repaintDialogRow ( filterId , links . main ) ; <nl> } <nl> if ( session ( ) . supportMode ( ) <nl> & & ! session ( ) . settings ( ) . supportAllSearchResults ( ) ) { <nl> void Entry : : updateChatListEntry ( ) const { <nl> } <nl> } <nl> <nl> - not_null < IndexedList * > Entry : : myChatsList ( Mode list ) const { <nl> - return owner ( ) . chatsList ( folder ( ) ) - > indexed ( list ) ; <nl> + not_null < IndexedList * > Entry : : myChatsList ( FilterId filterId ) const { <nl> + return owner ( ) . chatsList ( folder ( ) ) - > indexed ( filterId ) ; <nl> } <nl> <nl> } / / namespace Dialogs <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_entry . h <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_entry . h <nl> namespace Dialogs { <nl> <nl> class Row ; <nl> class IndexedList ; <nl> - using RowsByLetter = base : : flat_map < QChar , not_null < Row * > > ; <nl> + <nl> + struct RowsByLetter { <nl> + not_null < Row * > main ; <nl> + base : : flat_map < QChar , not_null < Row * > > letters ; <nl> + } ; <nl> <nl> enum class SortMode { <nl> Date = 0x00 , <nl> enum class SortMode { <nl> Add = 0x02 , <nl> } ; <nl> <nl> - enum class Mode { <nl> - All = 0x00 , <nl> - Important = 0x01 , <nl> - } ; <nl> - <nl> struct PositionChange { <nl> int from = - 1 ; <nl> int to = - 1 ; <nl> class Entry { <nl> Entry & operator = ( const Entry & other ) = delete ; <nl> virtual ~ Entry ( ) = default ; <nl> <nl> - Data : : Session & owner ( ) const ; <nl> - Main : : Session & session ( ) const ; <nl> + [ [ nodiscard ] ] Data : : Session & owner ( ) const ; <nl> + [ [ nodiscard ] ] Main : : Session & session ( ) const ; <nl> <nl> - PositionChange adjustByPosInChatList ( Mode list ) ; <nl> - bool inChatList ( Mode list = Mode : : All ) const { <nl> - return ! chatListLinks ( list ) . empty ( ) ; <nl> + PositionChange adjustByPosInChatList ( FilterId filterId ) ; <nl> + [ [ nodiscard ] ] bool inChatList ( FilterId filterId = 0 ) const { <nl> + return _chatListLinks . 
contains ( filterId ) ; <nl> } <nl> - int posInChatList ( Mode list ) const ; <nl> - not_null < Row * > addToChatList ( Mode list ) ; <nl> - void removeFromChatList ( Mode list ) ; <nl> - void removeChatListEntryByLetter ( Mode list , QChar letter ) ; <nl> + [ [ nodiscard ] ] int posInChatList ( FilterId filterId ) const ; <nl> + not_null < Row * > addToChatList ( FilterId filterId ) ; <nl> + void removeFromChatList ( FilterId filterId ) ; <nl> + void removeChatListEntryByLetter ( FilterId filterId , QChar letter ) ; <nl> void addChatListEntryByLetter ( <nl> - Mode list , <nl> + FilterId filterId , <nl> QChar letter , <nl> not_null < Row * > row ) ; <nl> void updateChatListEntry ( ) const ; <nl> class Entry { <nl> static constexpr auto kArchiveFixOnTopIndex = 1 ; <nl> static constexpr auto kProxyPromotionFixOnTopIndex = 2 ; <nl> <nl> - virtual bool toImportant ( ) const = 0 ; <nl> virtual bool shouldBeInChatList ( ) const = 0 ; <nl> virtual int chatListUnreadCount ( ) const = 0 ; <nl> virtual bool chatListUnreadMark ( ) const = 0 ; <nl> class Entry { <nl> void notifyUnreadStateChange ( const UnreadState & wasState ) ; <nl> <nl> void setChatListExistence ( bool exists ) ; <nl> - RowsByLetter & chatListLinks ( Mode list ) ; <nl> - const RowsByLetter & chatListLinks ( Mode list ) const ; <nl> - Row * mainChatListLink ( Mode list ) const ; <nl> + RowsByLetter * chatListLinks ( FilterId filterId ) ; <nl> + const RowsByLetter * chatListLinks ( FilterId filterId ) const ; <nl> + not_null < Row * > mainChatListLink ( FilterId filterId ) const ; <nl> + Row * maybeMainChatListLink ( FilterId filterId ) const ; <nl> <nl> - not_null < IndexedList * > myChatsList ( Mode list ) const ; <nl> + not_null < IndexedList * > myChatsList ( FilterId filterId ) const ; <nl> <nl> not_null < Data : : Session * > _owner ; <nl> Dialogs : : Key _key ; <nl> - RowsByLetter _chatListLinks [ 2 ] ; <nl> + base : : flat_map < FilterId , RowsByLetter > _chatListLinks ; <nl> uint64 _sortKeyInChatList = 0 ; <nl> int _pinnedIndex = 0 ; <nl> bool _isProxyPromoted = false ; <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_indexed_list . cpp <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_indexed_list . cpp <nl> IndexedList : : IndexedList ( SortMode sortMode ) <nl> } <nl> <nl> RowsByLetter IndexedList : : addToEnd ( Key key ) { <nl> - RowsByLetter result ; <nl> - if ( ! _list . contains ( key ) ) { <nl> - result . emplace ( 0 , _list . addToEnd ( key ) ) ; <nl> - for ( const auto ch : key . entry ( ) - > chatListFirstLetters ( ) ) { <nl> - auto j = _index . find ( ch ) ; <nl> - if ( j = = _index . cend ( ) ) { <nl> - j = _index . emplace ( ch , _sortMode ) . first ; <nl> - } <nl> - result . emplace ( ch , j - > second . addToEnd ( key ) ) ; <nl> + if ( const auto row = _list . getRow ( key ) ) { <nl> + return { row } ; <nl> + } <nl> + <nl> + auto result = RowsByLetter { _list . addToEnd ( key ) } ; <nl> + for ( const auto ch : key . entry ( ) - > chatListFirstLetters ( ) ) { <nl> + auto j = _index . find ( ch ) ; <nl> + if ( j = = _index . cend ( ) ) { <nl> + j = _index . emplace ( ch , _sortMode ) . first ; <nl> } <nl> + result . letters . emplace ( ch , j - > second . addToEnd ( key ) ) ; <nl> } <nl> return result ; <nl> } <nl> Row * IndexedList : : addByName ( Key key ) { <nl> } <nl> <nl> void IndexedList : : adjustByDate ( const RowsByLetter & links ) { <nl> - for ( const auto [ ch , row ] : links ) { <nl> - if ( ch = = QChar ( 0 ) ) { <nl> - _list . 
adjustByDate ( row ) ; <nl> - } else { <nl> - if ( auto it = _index . find ( ch ) ; it ! = _index . cend ( ) ) { <nl> - it - > second . adjustByDate ( row ) ; <nl> - } <nl> + _list . adjustByDate ( links . main ) ; <nl> + for ( const auto [ ch , row ] : links . letters ) { <nl> + if ( auto it = _index . find ( ch ) ; it ! = _index . cend ( ) ) { <nl> + it - > second . adjustByDate ( row ) ; <nl> } <nl> } <nl> } <nl> void IndexedList : : peerNameChanged ( <nl> if ( _sortMode = = SortMode : : Name ) { <nl> adjustByName ( history , oldLetters ) ; <nl> } else { <nl> - adjustNames ( Dialogs : : Mode : : All , history , oldLetters ) ; <nl> + adjustNames ( FilterId ( ) , history , oldLetters ) ; <nl> } <nl> } <nl> } <nl> <nl> void IndexedList : : peerNameChanged ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < PeerData * > peer , <nl> const base : : flat_set < QChar > & oldLetters ) { <nl> Expects ( _sortMode = = SortMode : : Date ) ; <nl> <nl> if ( const auto history = peer - > owner ( ) . historyLoaded ( peer ) ) { <nl> - adjustNames ( list , history , oldLetters ) ; <nl> + adjustNames ( filterId , history , oldLetters ) ; <nl> } <nl> } <nl> <nl> void IndexedList : : adjustByName ( <nl> } <nl> <nl> void IndexedList : : adjustNames ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < History * > history , <nl> const base : : flat_set < QChar > & oldLetters ) { <nl> const auto key = Dialogs : : Key ( history ) ; <nl> void IndexedList : : adjustNames ( <nl> } <nl> for ( auto ch : toRemove ) { <nl> if ( _sortMode = = SortMode : : Date ) { <nl> - history - > removeChatListEntryByLetter ( list , ch ) ; <nl> + history - > removeChatListEntryByLetter ( filterId , ch ) ; <nl> } <nl> if ( auto it = _index . find ( ch ) ; it ! = _index . cend ( ) ) { <nl> it - > second . del ( key , mainRow ) ; <nl> void IndexedList : : adjustNames ( <nl> } <nl> auto row = j - > second . addToEnd ( key ) ; <nl> if ( _sortMode = = SortMode : : Date ) { <nl> - history - > addChatListEntryByLetter ( list , ch , row ) ; <nl> + history - > addChatListEntryByLetter ( filterId , ch , row ) ; <nl> } <nl> } <nl> } <nl> std : : vector < not_null < Row * > > IndexedList : : filtered ( <nl> return result ; <nl> } <nl> <nl> - IndexedList : : ~ IndexedList ( ) { <nl> - clear ( ) ; <nl> - } <nl> - <nl> } / / namespace Dialogs <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_indexed_list . h <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_indexed_list . h <nl> class IndexedList { <nl> <nl> / / For sortMode = = SortMode : : Date <nl> void peerNameChanged ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < PeerData * > peer , <nl> const base : : flat_set < QChar > & oldChars ) ; <nl> <nl> class IndexedList { <nl> } <nl> std : : vector < not_null < Row * > > filtered ( const QStringList & words ) const ; <nl> <nl> - ~ IndexedList ( ) ; <nl> - <nl> / / Part of List interface is duplicated here for all ( ) list . <nl> int size ( ) const { return all ( ) . size ( ) ; } <nl> bool empty ( ) const { return all ( ) . empty ( ) ; } <nl> class IndexedList { <nl> Key key , <nl> const base : : flat_set < QChar > & oldChars ) ; <nl> void adjustNames ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < History * > history , <nl> const base : : flat_set < QChar > & oldChars ) ; <nl> <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_inner_widget . cpp <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_inner_widget . 
cpp <nl> InnerWidget : : InnerWidget ( <nl> setAttribute ( Qt : : WA_OpaquePaintEvent , true ) ; <nl> # endif / / OS_MAC_OLD <nl> <nl> - _mode = Global : : DialogsModeEnabled ( ) <nl> - ? Global : : DialogsMode ( ) <nl> - : Dialogs : : Mode : : All ; <nl> + _filterId = Global : : DialogsFiltersEnabled ( ) <nl> + ? Global : : DialogsFilterId ( ) <nl> + : 0 ; <nl> <nl> _addContactLnk - > addClickHandler ( [ ] { App : : wnd ( ) - > onShowAddContact ( ) ; } ) ; <nl> _cancelSearchInChat - > setClickedCallback ( [ = ] { cancelSearchInChat ( ) ; } ) ; <nl> void InnerWidget : : refreshWithCollapsedRows ( bool toTop ) { <nl> _collapsedSelected = - 1 ; <nl> <nl> _collapsedRows . clear ( ) ; <nl> - if ( ! _openedFolder & & Global : : DialogsModeEnabled ( ) ) { <nl> + if ( ! _openedFolder & & Global : : DialogsFiltersEnabled ( ) ) { <nl> _collapsedRows . push_back ( std : : make_unique < CollapsedRow > ( ) ) ; <nl> } <nl> const auto list = shownDialogs ( ) ; <nl> void InnerWidget : : changeOpenedFolder ( Data : : Folder * folder ) { <nl> / / const auto lastMousePosition = _lastMousePosition ; <nl> clearSelection ( ) ; <nl> _openedFolder = folder ; <nl> - _mode = _openedFolder ? Mode : : All : Global : : DialogsMode ( ) ; <nl> + _filterId = _openedFolder ? 0 : Global : : DialogsFilterId ( ) ; <nl> refreshWithCollapsedRows ( true ) ; <nl> / / This doesn ' t work , because we clear selection in leaveEvent on hide . <nl> / / if ( mouseSelection & & lastMousePosition ) { <nl> void InnerWidget : : paintCollapsedRow ( <nl> const auto narrow = ( width ( ) < = smallWidth ) ; <nl> const auto text = row - > folder <nl> ? row - > folder - > chatListName ( ) <nl> - : ( _mode = = Dialogs : : Mode : : Important ) <nl> + : _filterId / / # TODO filters <nl> ? ( narrow ? " Show " : tr : : lng_dialogs_show_all_chats ( tr : : now ) ) <nl> : ( narrow ? " Hide " : tr : : lng_dialogs_hide_muted_chats ( tr : : now ) ) ; <nl> const auto unread = row - > folder <nl> ? row - > folder - > chatListUnreadCount ( ) <nl> - : ( _mode = = Dialogs : : Mode : : Important ) <nl> + : _filterId / / # TODO filters <nl> ? session ( ) . data ( ) . unreadOnlyMutedBadge ( ) <nl> : 0 ; <nl> Layout : : PaintCollapsedRow ( <nl> void InnerWidget : : mousePressEvent ( QMouseEvent * e ) { <nl> } ) ; <nl> } else if ( base : : in_range ( _filteredPressed , 0 , _filterResults . size ( ) ) ) { <nl> const auto row = _filterResults [ _filteredPressed ] ; <nl> - const auto list = _mode ; <nl> + const auto filterId = _filterId ; <nl> row - > addRipple ( <nl> e - > pos ( ) - QPoint ( 0 , filteredOffset ( ) + _filteredPressed * st : : dialogsRowHeight ) , <nl> QSize ( width ( ) , st : : dialogsRowHeight ) , <nl> - [ = ] { repaintDialogRow ( list , row ) ; } ) ; <nl> + [ = ] { repaintDialogRow ( filterId , row ) ; } ) ; <nl> } else if ( base : : in_range ( _peerSearchPressed , 0 , _peerSearchResults . size ( ) ) ) { <nl> auto & result = _peerSearchResults [ _peerSearchPressed ] ; <nl> auto row = & result - > row ; <nl> void InnerWidget : : refreshDialog ( Key key ) { <nl> } <nl> <nl> const auto result = session ( ) . data ( ) . refreshChatListEntry ( key ) ; <nl> - const auto changed = ( _mode = = Mode : : Important ) <nl> + const auto changed = _filterId / / # TODO filters <nl> ? result . importantChanged <nl> : result . changed ; <nl> - const auto moved = ( _mode = = Mode : : Important ) <nl> + const auto moved = _filterId / / # TODO filters <nl> ? result . importantMoved <nl> : result . 
moved ; <nl> <nl> int InnerWidget : : defaultRowTop ( not_null < Row * > row ) const { <nl> } <nl> <nl> void InnerWidget : : repaintDialogRow ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < Row * > row ) { <nl> if ( _state = = WidgetState : : Default ) { <nl> - if ( _mode = = list ) { <nl> + if ( _filterId = = filterId ) { <nl> if ( const auto folder = row - > folder ( ) ) { <nl> repaintCollapsedFolderRow ( folder ) ; <nl> } <nl> update ( 0 , defaultRowTop ( row ) , width ( ) , st : : dialogsRowHeight ) ; <nl> } <nl> } else if ( _state = = WidgetState : : Filtered ) { <nl> - if ( list = = Mode : : All ) { <nl> + if ( ! filterId ) { <nl> for ( auto i = 0 , l = int ( _filterResults . size ( ) ) ; i ! = l ; + + i ) { <nl> if ( _filterResults [ i ] - > key ( ) = = row - > key ( ) ) { <nl> update ( <nl> void InnerWidget : : updateSelectedRow ( Key key ) { <nl> if ( _state = = WidgetState : : Default ) { <nl> if ( key ) { <nl> const auto entry = key . entry ( ) ; <nl> - if ( ! entry - > inChatList ( _mode ) ) { <nl> + if ( ! entry - > inChatList ( _filterId ) ) { <nl> return ; <nl> } <nl> - auto position = entry - > posInChatList ( _mode ) ; <nl> + auto position = entry - > posInChatList ( _filterId ) ; <nl> auto top = dialogsOffset ( ) ; <nl> if ( base : : in_range ( position , 0 , _pinnedRows . size ( ) ) ) { <nl> top + = qRound ( _pinnedRows [ position ] . yadd . current ( ) ) ; <nl> void InnerWidget : : updateSelectedRow ( Key key ) { <nl> } <nl> <nl> not_null < IndexedList * > InnerWidget : : shownDialogs ( ) const { <nl> - return session ( ) . data ( ) . chatsList ( _openedFolder ) - > indexed ( _mode ) ; <nl> + return session ( ) . data ( ) . chatsList ( _openedFolder ) - > indexed ( _filterId ) ; <nl> } <nl> <nl> void InnerWidget : : leaveEventHook ( QEvent * e ) { <nl> void InnerWidget : : peerSearchReceived ( <nl> } <nl> <nl> void InnerWidget : : notify_historyMuteUpdated ( History * history ) { <nl> - if ( ! Global : : DialogsModeEnabled ( ) | | ! history - > inChatList ( ) ) { <nl> + / / # TODO filters <nl> + if ( ! Global : : DialogsFiltersEnabled ( ) | | ! history - > inChatList ( ) ) { <nl> return ; <nl> } <nl> refreshDialog ( history ) ; <nl> void InnerWidget : : scrollToEntry ( const RowDescriptor & entry ) { <nl> <nl> void InnerWidget : : selectSkipPage ( int32 pixels , int32 direction ) { <nl> clearMouseSelection ( ) ; <nl> + const auto list = shownDialogs ( ) ; <nl> int toSkip = pixels / int ( st : : dialogsRowHeight ) ; <nl> if ( _state = = WidgetState : : Default ) { <nl> if ( ! _selected ) { <nl> - if ( direction > 0 & & shownDialogs ( ) - > size ( ) > _skipTopDialogs ) { <nl> - _selected = * ( shownDialogs ( ) - > cbegin ( ) + _skipTopDialogs ) ; <nl> + if ( direction > 0 & & list - > size ( ) > _skipTopDialogs ) { <nl> + _selected = * ( list - > cbegin ( ) + _skipTopDialogs ) ; <nl> _collapsedSelected = - 1 ; <nl> } else { <nl> return ; <nl> } <nl> } <nl> if ( direction > 0 ) { <nl> - for ( auto i = shownDialogs ( ) - > cfind ( _selected ) , end = shownDialogs ( ) - > cend ( ) ; i ! = end & & ( toSkip - - ) ; + + i ) { <nl> + for ( auto i = list - > cfind ( _selected ) , end = list - > cend ( ) ; i ! = end & & ( toSkip - - ) ; + + i ) { <nl> _selected = * i ; <nl> } <nl> } else { <nl> - for ( auto i = shownDialogs ( ) - > cfind ( _selected ) , b = shownDialogs ( ) - > cbegin ( ) ; i ! = b & & ( * i ) - > pos ( ) > _skipTopDialogs & & ( toSkip - - ) ; ) { <nl> + for ( auto i = list - > cfind ( _selected ) , b = list - > cbegin ( ) ; i ! 
= b & & ( * i ) - > pos ( ) > _skipTopDialogs & & ( toSkip - - ) ; ) { <nl> _selected = * ( - - i ) ; <nl> } <nl> if ( toSkip & & ! _collapsedRows . empty ( ) ) { <nl> void InnerWidget : : selectSkipPage ( int32 pixels , int32 direction ) { <nl> void InnerWidget : : loadPeerPhotos ( ) { <nl> if ( ! parentWidget ( ) ) return ; <nl> <nl> + const auto list = shownDialogs ( ) ; <nl> auto yFrom = _visibleTop ; <nl> auto yTo = _visibleTop + ( _visibleBottom - _visibleTop ) * ( PreloadHeightsCount + 1 ) ; <nl> if ( _state = = WidgetState : : Default ) { <nl> - auto otherStart = shownDialogs ( ) - > size ( ) * st : : dialogsRowHeight ; <nl> + auto otherStart = list - > size ( ) * st : : dialogsRowHeight ; <nl> if ( yFrom < otherStart ) { <nl> - for ( auto i = shownDialogs ( ) - > cfind ( yFrom , st : : dialogsRowHeight ) , end = shownDialogs ( ) - > cend ( ) ; i ! = end ; + + i ) { <nl> + for ( auto i = list - > cfind ( yFrom , st : : dialogsRowHeight ) , end = list - > cend ( ) ; i ! = end ; + + i ) { <nl> if ( ( ( * i ) - > pos ( ) * st : : dialogsRowHeight ) > = yTo ) { <nl> break ; <nl> } <nl> bool InnerWidget : : chooseCollapsedRow ( ) { <nl> <nl> void InnerWidget : : switchImportantChats ( ) { <nl> clearSelection ( ) ; <nl> - if ( Global : : DialogsMode ( ) = = Mode : : All ) { <nl> - Global : : SetDialogsMode ( Mode : : Important ) ; <nl> - } else { <nl> - Global : : SetDialogsMode ( Mode : : All ) ; <nl> - } <nl> - _mode = Global : : DialogsMode ( ) ; <nl> + / / # TODO filters <nl> + / / if ( Global : : DialogsFilterId ( ) = = 0 ) { <nl> + / / Global : : SetDialogsMode ( Mode : : Important ) ; <nl> + / / } else { <nl> + / / Global : : SetDialogsMode ( Mode : : All ) ; <nl> + / / } <nl> + _filterId = Global : : DialogsFilterId ( ) ; <nl> Local : : writeUserSettings ( ) ; <nl> refreshWithCollapsedRows ( true ) ; <nl> _collapsedSelected = 0 ; <nl> RowDescriptor InnerWidget : : chatListEntryBefore ( <nl> return RowDescriptor ( ) ; <nl> } <nl> if ( _state = = WidgetState : : Default ) { <nl> - if ( const auto row = shownDialogs ( ) - > getRow ( which . key ) ) { <nl> - const auto i = shownDialogs ( ) - > cfind ( row ) ; <nl> - if ( i ! = shownDialogs ( ) - > cbegin ( ) ) { <nl> + const auto list = shownDialogs ( ) ; <nl> + if ( const auto row = list - > getRow ( which . key ) ) { <nl> + const auto i = list - > cfind ( row ) ; <nl> + if ( i ! = list - > cbegin ( ) ) { <nl> return RowDescriptor ( <nl> ( * ( i - 1 ) ) - > key ( ) , <nl> FullMsgId ( NoChannel , ShowAtUnreadMsgId ) ) ; <nl> RowDescriptor InnerWidget : : chatListEntryAfter ( <nl> return RowDescriptor ( ) ; <nl> } <nl> if ( _state = = WidgetState : : Default ) { <nl> - if ( const auto row = shownDialogs ( ) - > getRow ( which . key ) ) { <nl> - const auto i = shownDialogs ( ) - > cfind ( row ) + 1 ; <nl> - if ( i ! = shownDialogs ( ) - > cend ( ) ) { <nl> + const auto list = shownDialogs ( ) ; <nl> + if ( const auto row = list - > getRow ( which . key ) ) { <nl> + const auto i = list - > cfind ( row ) + 1 ; <nl> + if ( i ! = list - > cend ( ) ) { <nl> return RowDescriptor ( <nl> ( * i ) - > key ( ) , <nl> FullMsgId ( NoChannel , ShowAtUnreadMsgId ) ) ; <nl> RowDescriptor InnerWidget : : chatListEntryAfter ( <nl> <nl> RowDescriptor InnerWidget : : chatListEntryFirst ( ) const { <nl> if ( _state = = WidgetState : : Default ) { <nl> - const auto i = shownDialogs ( ) - > cbegin ( ) ; <nl> - if ( i ! = shownDialogs ( ) - > cend ( ) ) { <nl> + const auto list = shownDialogs ( ) ; <nl> + const auto i = list - > cbegin ( ) ; <nl> + if ( i ! 
= list - > cend ( ) ) { <nl> return RowDescriptor ( <nl> ( * i ) - > key ( ) , <nl> FullMsgId ( NoChannel , ShowAtUnreadMsgId ) ) ; <nl> RowDescriptor InnerWidget : : chatListEntryFirst ( ) const { <nl> <nl> RowDescriptor InnerWidget : : chatListEntryLast ( ) const { <nl> if ( _state = = WidgetState : : Default ) { <nl> - const auto i = shownDialogs ( ) - > cend ( ) ; <nl> - if ( i ! = shownDialogs ( ) - > cbegin ( ) ) { <nl> + const auto list = shownDialogs ( ) ; <nl> + const auto i = list - > cend ( ) ; <nl> + if ( i ! = list - > cbegin ( ) ) { <nl> return RowDescriptor ( <nl> ( * ( i - 1 ) ) - > key ( ) , <nl> FullMsgId ( NoChannel , ShowAtUnreadMsgId ) ) ; <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_inner_widget . h <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_inner_widget . h <nl> namespace Dialogs { <nl> class Row ; <nl> class FakeRow ; <nl> class IndexedList ; <nl> - enum class Mode ; <nl> <nl> struct ChosenRow { <nl> Key key ; <nl> class InnerWidget <nl> <nl> void refreshDialog ( Key key ) ; <nl> void removeDialog ( Key key ) ; <nl> - void repaintDialogRow ( Mode list , not_null < Row * > row ) ; <nl> + void repaintDialogRow ( FilterId filterId , not_null < Row * > row ) ; <nl> void repaintDialogRow ( RowDescriptor row ) ; <nl> <nl> void dragLeft ( ) ; <nl> public slots : <nl> <nl> not_null < Window : : SessionController * > _controller ; <nl> <nl> - Mode _mode = Mode ( ) ; <nl> + FilterId _filterId = 0 ; <nl> bool _mouseSelection = false ; <nl> std : : optional < QPoint > _lastMousePosition ; <nl> Qt : : MouseButton _pressButton = Qt : : LeftButton ; <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_main_list . cpp <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_main_list . cpp <nl> namespace Dialogs { <nl> <nl> MainList : : MainList ( rpl : : producer < int > pinnedLimit ) <nl> : _all ( SortMode : : Date ) <nl> - , _important ( SortMode : : Date ) <nl> , _pinned ( 1 ) { <nl> _unreadState . known = true ; <nl> <nl> MainList : : MainList ( rpl : : producer < int > pinnedLimit ) <nl> ) | rpl : : start_with_next ( [ = ] ( const Notify : : PeerUpdate & update ) { <nl> const auto peer = update . peer ; <nl> const auto & oldLetters = update . oldNameFirstLetters ; <nl> - _all . peerNameChanged ( Mode : : All , peer , oldLetters ) ; <nl> - _important . peerNameChanged ( Mode : : Important , peer , oldLetters ) ; <nl> + _all . peerNameChanged ( FilterId ( ) , peer , oldLetters ) ; <nl> + for ( auto & [ filterId , list ] : _other ) { <nl> + list . peerNameChanged ( filterId , peer , oldLetters ) ; <nl> + } <nl> } , _lifetime ) ; <nl> } <nl> <nl> void MainList : : setLoaded ( bool loaded ) { <nl> <nl> void MainList : : clear ( ) { <nl> _all . clear ( ) ; <nl> - _important . clear ( ) ; <nl> + for ( auto & [ filterId , list ] : _other ) { / / # TODO filters _other . clear ? . . <nl> + list . clear ( ) ; <nl> + } <nl> _unreadState = UnreadState ( ) ; <nl> } <nl> <nl> UnreadState MainList : : unreadState ( ) const { <nl> return _unreadState ; <nl> } <nl> <nl> - not_null < IndexedList * > MainList : : indexed ( Mode list ) { <nl> - return ( list = = Mode : : All ) ? & _all : & _important ; <nl> + not_null < IndexedList * > MainList : : indexed ( FilterId filterId ) { <nl> + if ( ! filterId ) { <nl> + return & _all ; <nl> + } <nl> + const auto i = _other . find ( filterId ) ; <nl> + if ( i ! = end ( _other ) ) { <nl> + return & i - > second ; <nl> + } <nl> + return & _other . emplace ( filterId , SortMode : : Date ) . 
first - > second ; <nl> } <nl> <nl> - not_null < const IndexedList * > MainList : : indexed ( Mode list ) const { <nl> - return ( list = = Mode : : All ) ? & _all : & _important ; <nl> + not_null < const IndexedList * > MainList : : indexed ( ) const { <nl> + return & _all ; <nl> } <nl> <nl> not_null < PinnedList * > MainList : : pinned ( ) { <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_main_list . h <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_main_list . h <nl> class MainList final { <nl> void unreadEntryChanged ( <nl> const Dialogs : : UnreadState & state , <nl> bool added ) ; <nl> - UnreadState unreadState ( ) const ; <nl> + [ [ nodiscard ] ] UnreadState unreadState ( ) const ; <nl> <nl> - not_null < IndexedList * > indexed ( Mode list = Mode : : All ) ; <nl> - not_null < const IndexedList * > indexed ( Mode list = Mode : : All ) const ; <nl> + not_null < IndexedList * > indexed ( FilterId filterId = 0 ) ; <nl> + not_null < const IndexedList * > indexed ( ) const ; <nl> not_null < PinnedList * > pinned ( ) ; <nl> not_null < const PinnedList * > pinned ( ) const ; <nl> <nl> private : <nl> IndexedList _all ; <nl> - IndexedList _important ; <nl> + base : : flat_map < FilterId , IndexedList > _other ; <nl> PinnedList _pinned ; <nl> UnreadState _unreadState ; <nl> <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_widget . cpp <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_widget . cpp <nl> void Widget : : refreshDialog ( Key key ) { <nl> } <nl> <nl> void Widget : : repaintDialogRow ( <nl> - Mode list , <nl> + FilterId filterId , <nl> not_null < Row * > row ) { <nl> - _inner - > repaintDialogRow ( list , row ) ; <nl> + _inner - > repaintDialogRow ( filterId , row ) ; <nl> } <nl> <nl> void Widget : : repaintDialogRow ( RowDescriptor row ) { <nl> mmm a / Telegram / SourceFiles / dialogs / dialogs_widget . h <nl> ppp b / Telegram / SourceFiles / dialogs / dialogs_widget . h <nl> class ConnectionState ; <nl> <nl> namespace Dialogs { <nl> <nl> - enum class Mode ; <nl> struct RowDescriptor ; <nl> class Row ; <nl> class FakeRow ; <nl> class Widget : public Window : : AbstractSectionWidget , public RPCSender { <nl> <nl> void refreshDialog ( Key key ) ; <nl> void removeDialog ( Key key ) ; <nl> - void repaintDialogRow ( Mode list , not_null < Row * > row ) ; <nl> + void repaintDialogRow ( FilterId filterId , not_null < Row * > row ) ; <nl> void repaintDialogRow ( RowDescriptor row ) ; <nl> <nl> void jumpToTop ( ) ; <nl> mmm a / Telegram / SourceFiles / facades . cpp <nl> ppp b / Telegram / SourceFiles / facades . 
cpp <nl> struct Data { <nl> bool AdaptiveForWide = true ; <nl> base : : Observable < void > AdaptiveChanged ; <nl> <nl> - bool DialogsModeEnabled = false ; <nl> - Dialogs : : Mode DialogsMode = Dialogs : : Mode : : All ; <nl> + bool DialogsFiltersEnabled = false ; <nl> + FilterId DialogsFilterId = 0 ; <nl> bool ModerateModeEnabled = false ; <nl> <nl> bool ScreenIsLocked = false ; <nl> DefineVar ( Global , Adaptive : : ChatLayout , AdaptiveChatLayout ) ; <nl> DefineVar ( Global , bool , AdaptiveForWide ) ; <nl> DefineRefVar ( Global , base : : Observable < void > , AdaptiveChanged ) ; <nl> <nl> - DefineVar ( Global , bool , DialogsModeEnabled ) ; <nl> - DefineVar ( Global , Dialogs : : Mode , DialogsMode ) ; <nl> + DefineVar ( Global , bool , DialogsFiltersEnabled ) ; <nl> + DefineVar ( Global , FilterId , DialogsFilterId ) ; <nl> DefineVar ( Global , bool , ModerateModeEnabled ) ; <nl> <nl> DefineVar ( Global , bool , ScreenIsLocked ) ; <nl> mmm a / Telegram / SourceFiles / facades . h <nl> ppp b / Telegram / SourceFiles / facades . h <nl> namespace Data { <nl> struct FileOrigin ; <nl> } / / namespace Data <nl> <nl> - namespace Dialogs { <nl> - enum class Mode ; <nl> - } / / namespace Dialogs <nl> - <nl> namespace InlineBots { <nl> namespace Layout { <nl> class ItemBase ; <nl> DeclareVar ( Adaptive : : ChatLayout , AdaptiveChatLayout ) ; <nl> DeclareVar ( bool , AdaptiveForWide ) ; <nl> DeclareRefVar ( base : : Observable < void > , AdaptiveChanged ) ; <nl> <nl> - DeclareVar ( bool , DialogsModeEnabled ) ; <nl> - DeclareVar ( Dialogs : : Mode , DialogsMode ) ; <nl> + DeclareVar ( bool , DialogsFiltersEnabled ) ; <nl> + DeclareVar ( FilterId , DialogsFilterId ) ; <nl> DeclareVar ( bool , ModerateModeEnabled ) ; <nl> <nl> DeclareVar ( bool , ScreenIsLocked ) ; <nl> mmm a / Telegram / SourceFiles / history / history . cpp <nl> ppp b / Telegram / SourceFiles / history / history . cpp <nl> void History : : setUnreadMentionsCount ( int count ) { <nl> } <nl> _unreadMentionsCount = count ; <nl> const auto has = ( count > 0 ) ; <nl> - if ( has ! = had & & Global : : DialogsModeEnabled ( ) ) { <nl> + if ( has ! = had & & Global : : DialogsFiltersEnabled ( ) ) { / / # TODO filters <nl> Notify : : historyMuteUpdated ( this ) ; <nl> updateChatListEntry ( ) ; <nl> } <nl> void History : : clearFolder ( ) { <nl> } <nl> <nl> void History : : setFolderPointer ( Data : : Folder * folder ) { <nl> - using Mode = Dialogs : : Mode ; <nl> - <nl> if ( _folder = = folder ) { <nl> return ; <nl> } <nl> void History : : setFolderPointer ( Data : : Folder * folder ) { <nl> } <nl> const auto wasKnown = folderKnown ( ) ; <nl> const auto wasInList = inChatList ( ) ; <nl> - const auto wasInImportant = wasInList & & inChatList ( Mode : : Important ) ; <nl> - if ( wasInList ) { <nl> - removeFromChatList ( Mode : : All ) ; <nl> - if ( wasInImportant ) { <nl> - removeFromChatList ( Mode : : Important ) ; <nl> - } <nl> - } <nl> + / / # TODO filters <nl> + / / const auto wasInImportant = wasInList & & inChatList ( Mode : : Important ) ; <nl> + / / if ( wasInList ) { <nl> + / / removeFromChatList ( Mode : : All ) ; <nl> + / / if ( wasInImportant ) { <nl> + / / removeFromChatList ( Mode : : Important ) ; <nl> + / / } <nl> + / / } <nl> const auto was = _folder . 
value_or ( nullptr ) ; <nl> _folder = folder ; <nl> if ( was ) { <nl> was - > unregisterOne ( this ) ; <nl> } <nl> if ( wasInList ) { <nl> - addToChatList ( Mode : : All ) ; <nl> - if ( wasInImportant ) { <nl> - addToChatList ( Mode : : Important ) ; <nl> - } <nl> + addToChatList ( 0 ) ; <nl> + / / if ( wasInImportant ) { / / # TODO filters <nl> + / / addToChatList ( Mode : : Important ) ; <nl> + / / } <nl> owner ( ) . chatsListChanged ( was ) ; <nl> owner ( ) . chatsListChanged ( folder ) ; <nl> } else if ( ! wasKnown ) { <nl> bool History : : shouldBeInChatList ( ) const { <nl> | | ( lastMessage ( ) ! = nullptr ) ; <nl> } <nl> <nl> - bool History : : toImportant ( ) const { <nl> - return ! mute ( ) | | hasUnreadMentions ( ) ; <nl> - } <nl> - <nl> void History : : unknownMessageDeleted ( MsgId messageId ) { <nl> if ( _inboxReadBefore & & messageId > = * _inboxReadBefore ) { <nl> owner ( ) . histories ( ) . requestDialogEntry ( this ) ; <nl> mmm a / Telegram / SourceFiles / history / history . h <nl> ppp b / Telegram / SourceFiles / history / history . h <nl> class History final : public Dialogs : : Entry { <nl> int fixedOnTopIndex ( ) const override ; <nl> void updateChatListExistence ( ) override ; <nl> bool shouldBeInChatList ( ) const override ; <nl> - bool toImportant ( ) const override ; <nl> int chatListUnreadCount ( ) const override ; <nl> bool chatListUnreadMark ( ) const override ; <nl> bool chatListMutedBadge ( ) const override ; <nl> mmm a / Telegram / SourceFiles / mainwidget . cpp <nl> ppp b / Telegram / SourceFiles / mainwidget . cpp <nl> QPixmap MainWidget : : grabForShowAnimation ( const Window : : SectionSlideParams & param <nl> } <nl> <nl> void MainWidget : : repaintDialogRow ( <nl> - Dialogs : : Mode list , <nl> + FilterId filterId , <nl> not_null < Dialogs : : Row * > row ) { <nl> - _dialogs - > repaintDialogRow ( list , row ) ; <nl> + _dialogs - > repaintDialogRow ( filterId , row ) ; <nl> } <nl> <nl> void MainWidget : : repaintDialogRow ( Dialogs : : RowDescriptor row ) { <nl> mmm a / Telegram / SourceFiles / mainwidget . h <nl> ppp b / Telegram / SourceFiles / mainwidget . h <nl> struct RowDescriptor ; <nl> class Row ; <nl> class Key ; <nl> class Widget ; <nl> - enum class Mode ; <nl> } / / namespace Dialogs <nl> <nl> namespace Media { <nl> class MainWidget <nl> <nl> void refreshDialog ( Dialogs : : Key key ) ; <nl> void removeDialog ( Dialogs : : Key key ) ; <nl> - void repaintDialogRow ( Dialogs : : Mode list , not_null < Dialogs : : Row * > row ) ; <nl> + void repaintDialogRow ( FilterId filterId , not_null < Dialogs : : Row * > row ) ; <nl> void repaintDialogRow ( Dialogs : : RowDescriptor row ) ; <nl> <nl> void windowShown ( ) ; <nl> mmm a / Telegram / SourceFiles / settings / settings_codes . cpp <nl> ppp b / Telegram / SourceFiles / settings / settings_codes . cpp <nl> auto GenerateCodes ( ) { <nl> Unexpected ( " Crashed in Settings ! " ) ; <nl> } ) ; <nl> codes . emplace ( qsl ( " workmode " ) , [ ] ( : : Main : : Session * session ) { <nl> - auto text = Global : : DialogsModeEnabled ( ) ? qsl ( " Disable work mode ? " ) : qsl ( " Enable work mode ? " ) ; <nl> + auto text = Global : : DialogsFiltersEnabled ( ) ? qsl ( " Disable filters ? " ) : qsl ( " Enable filters ? " ) ; <nl> Ui : : show ( Box < ConfirmBox > ( text , [ ] { <nl> Core : : App ( ) . switchWorkMode ( ) ; <nl> } ) ) ; <nl> mmm a / Telegram / SourceFiles / storage / localstorage . cpp <nl> ppp b / Telegram / SourceFiles / storage / localstorage . 
cpp <nl> enum { <nl> dbiHiddenPinnedMessages = 0x39 , <nl> dbiRecentEmoji = 0x3a , <nl> dbiEmojiVariants = 0x3b , <nl> - dbiDialogsMode = 0x40 , <nl> + dbiDialogsFilters = 0x40 , <nl> dbiModerateMode = 0x41 , <nl> dbiVideoVolume = 0x42 , <nl> dbiStickersRecentLimit = 0x43 , <nl> bool _readSetting ( quint32 blockId , QDataStream & stream , int version , ReadSetting <nl> } <nl> } break ; <nl> <nl> - case dbiDialogsMode : { <nl> + case dbiDialogsFilters : { <nl> qint32 enabled , modeInt ; <nl> stream > > enabled > > modeInt ; <nl> if ( ! _checkStreamStatus ( stream ) ) return false ; <nl> <nl> - Global : : SetDialogsModeEnabled ( enabled = = 1 ) ; <nl> - auto mode = Dialogs : : Mode : : All ; <nl> + Global : : SetDialogsFiltersEnabled ( enabled = = 1 ) ; <nl> + auto mode = FilterId ( 0 ) ; <nl> if ( enabled ) { <nl> - mode = static_cast < Dialogs : : Mode > ( modeInt ) ; <nl> - if ( mode ! = Dialogs : : Mode : : All & & mode ! = Dialogs : : Mode : : Important ) { <nl> - mode = Dialogs : : Mode : : All ; <nl> + mode = FilterId ( modeInt ) ; <nl> + if ( mode = = 1 ) { / / # TODO filters <nl> + <nl> } <nl> } <nl> - Global : : SetDialogsMode ( mode ) ; <nl> + Global : : SetDialogsFilterId ( mode ) ; <nl> } break ; <nl> <nl> case dbiModerateMode : { <nl> void _writeUserSettings ( ) { <nl> data . stream < < quint32 ( dbiDialogLastPath ) < < cDialogLastPath ( ) ; <nl> data . stream < < quint32 ( dbiSongVolume ) < < qint32 ( qRound ( Global : : SongVolume ( ) * 1e6 ) ) ; <nl> data . stream < < quint32 ( dbiVideoVolume ) < < qint32 ( qRound ( Global : : VideoVolume ( ) * 1e6 ) ) ; <nl> - data . stream < < quint32 ( dbiDialogsMode ) < < qint32 ( Global : : DialogsModeEnabled ( ) ? 1 : 0 ) < < static_cast < qint32 > ( Global : : DialogsMode ( ) ) ; <nl> + data . stream < < quint32 ( dbiDialogsFilters ) < < qint32 ( Global : : DialogsFiltersEnabled ( ) ? 1 : 0 ) < < static_cast < qint32 > ( Global : : DialogsFilterId ( ) ) ; <nl> data . stream < < quint32 ( dbiModerateMode ) < < qint32 ( Global : : ModerateModeEnabled ( ) ? 1 : 0 ) ; <nl> data . stream < < quint32 ( dbiUseExternalVideoPlayer ) < < qint32 ( cUseExternalVideoPlayer ( ) ) ; <nl> data . stream < < quint32 ( dbiCacheSettings ) < < qint64 ( _cacheTotalSizeLimit ) < < qint32 ( _cacheTotalTimeLimit ) < < qint64 ( _cacheBigFileTotalSizeLimit ) < < qint32 ( _cacheBigFileTotalTimeLimit ) ; <nl> mmm a / Telegram / SourceFiles / window / window_session_controller . cpp <nl> ppp b / Telegram / SourceFiles / window / window_session_controller . cpp <nl> SessionController : : SessionController ( <nl> ) | rpl : : filter ( [ = ] ( Data : : Folder * folder ) { <nl> return ( folder ! = nullptr ) <nl> & & ( folder = = _openedFolder . current ( ) ) <nl> - & & folder - > chatsList ( ) - > indexed ( Global : : DialogsMode ( ) ) - > empty ( ) ; <nl> + & & folder - > chatsList ( ) - > indexed ( ) - > empty ( ) ; <nl> } ) | rpl : : start_with_next ( [ = ] ( Data : : Folder * folder ) { <nl> folder - > updateChatListSortPosition ( ) ; <nl> closeFolder ( ) ; <nl> | Replace dialogs mode with filters . | telegramdesktop/tdesktop | 71f7aae948661e57d7969a81a781c4f5f1191da5 | 2020-03-27T16:36:01Z |
mmm a / db / db_impl . h <nl> ppp b / db / db_impl . h <nl> class DBImpl : public DB { <nl> virtual Status SyncWAL ( ) override ; <nl> <nl> virtual SequenceNumber GetLatestSequenceNumber ( ) const override ; <nl> + / / REQUIRES : joined the main write queue if two_write_queues is disabled , and <nl> + / / the second write queue otherwise . <nl> virtual void SetLastPublishedSequence ( SequenceNumber seq ) ; <nl> / / Returns LastSequence in last_seq_same_as_publish_seq_ <nl> / / mode and LastAllocatedSequence otherwise . This is useful when visiblility <nl> mmm a / db / db_impl_write . cc <nl> ppp b / db / db_impl_write . cc <nl> Status DBImpl : : WriteImpl ( const WriteOptions & write_options , <nl> for ( auto * writer : * ( w . write_group ) ) { <nl> if ( ! writer - > CallbackFailed ( ) & & writer - > pre_release_callback ) { <nl> assert ( writer - > sequence ! = kMaxSequenceNumber ) ; <nl> - Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence ) ; <nl> + Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence , <nl> + disable_memtable ) ; <nl> if ( ! ws . ok ( ) ) { <nl> status = ws ; <nl> break ; <nl> Status DBImpl : : WriteImpl ( const WriteOptions & write_options , <nl> for ( auto * writer : write_group ) { <nl> if ( ! writer - > CallbackFailed ( ) & & writer - > pre_release_callback ) { <nl> assert ( writer - > sequence ! = kMaxSequenceNumber ) ; <nl> - Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence ) ; <nl> + Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence , <nl> + disable_memtable ) ; <nl> if ( ! ws . ok ( ) ) { <nl> status = ws ; <nl> break ; <nl> Status DBImpl : : WriteImplWALOnly ( const WriteOptions & write_options , <nl> for ( auto * writer : write_group ) { <nl> if ( ! writer - > CallbackFailed ( ) & & writer - > pre_release_callback ) { <nl> assert ( writer - > sequence ! = kMaxSequenceNumber ) ; <nl> - Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence ) ; <nl> + const bool DISABLE_MEMTABLE = true ; <nl> + Status ws = writer - > pre_release_callback - > Callback ( writer - > sequence , <nl> + DISABLE_MEMTABLE ) ; <nl> if ( ! ws . ok ( ) ) { <nl> status = ws ; <nl> break ; <nl> mmm a / db / pre_release_callback . h <nl> ppp b / db / pre_release_callback . h <nl> class PreReleaseCallback { <nl> / / propagated to all the writers in the write group . <nl> / / seq is the sequence number that is used for this write and will be <nl> / / released . <nl> - virtual Status Callback ( SequenceNumber seq ) = 0 ; <nl> + / / is_mem_disabled is currently used for debugging purposes to assert that <nl> + / / the callback is done from the right write queue . <nl> + virtual Status Callback ( SequenceNumber seq , const bool is_mem_disabled ) = 0 ; <nl> } ; <nl> <nl> } / / namespace rocksdb <nl> mmm a / db / write_callback_test . cc <nl> ppp b / db / write_callback_test . cc <nl> TEST_F ( WriteCallbackTest , WriteWithCallbackTest ) { <nl> public : <nl> PublishSeqCallback ( DBImpl * db_impl_in ) <nl> : db_impl_ ( db_impl_in ) { } <nl> - virtual Status Callback ( SequenceNumber last_seq ) { <nl> + virtual Status Callback ( SequenceNumber last_seq , <nl> + const bool / * not used * / ) override { <nl> db_impl_ - > SetLastPublishedSequence ( last_seq ) ; <nl> return Status : : OK ( ) ; <nl> } <nl> mmm a / utilities / transactions / write_prepared_txn . cc <nl> ppp b / utilities / transactions / write_prepared_txn . 
cc <nl> Status WritePreparedTxn : : CommitInternal ( ) { <nl> assert ( s . ok ( ) ) ; <nl> commit_batch_cnt = counter . BatchCount ( ) ; <nl> } <nl> - WritePreparedCommitEntryPreReleaseCallback update_commit_map ( <nl> - wpt_db_ , db_impl_ , prepare_seq , prepare_batch_cnt_ , commit_batch_cnt ) ; <nl> + const bool PREP_HEAP_SKIPPED = true ; <nl> const bool disable_memtable = ! includes_data ; <nl> + const bool do_one_write = <nl> + ! db_impl_ - > immutable_db_options ( ) . two_write_queues | | disable_memtable ; <nl> + const bool publish_seq = do_one_write ; <nl> + WritePreparedCommitEntryPreReleaseCallback update_commit_map ( <nl> + wpt_db_ , db_impl_ , prepare_seq , prepare_batch_cnt_ , commit_batch_cnt , <nl> + ! PREP_HEAP_SKIPPED , publish_seq ) ; <nl> uint64_t seq_used = kMaxSequenceNumber ; <nl> / / Since the prepared batch is directly written to memtable , there is already <nl> / / a connection between the memtable and its WAL , so there is no need to <nl> Status WritePreparedTxn : : CommitInternal ( ) { <nl> zero_log_number , disable_memtable , & seq_used , <nl> batch_cnt , & update_commit_map ) ; <nl> assert ( ! s . ok ( ) | | seq_used ! = kMaxSequenceNumber ) ; <nl> + if ( LIKELY ( do_one_write | | ! s . ok ( ) ) ) { <nl> + return s ; <nl> + } / / else do the 2nd write to publish seq <nl> + / / Note : the 2nd write comes with a performance penality . So if we have too <nl> + / / many of commits accompanied with ComitTimeWriteBatch and yet we cannot <nl> + / / enable use_only_the_last_commit_time_batch_for_recovery_ optimization , <nl> + / / two_write_queues should be disabled to avoid many additional writes here . <nl> + class PublishSeqPreReleaseCallback : public PreReleaseCallback { <nl> + public : <nl> + explicit PublishSeqPreReleaseCallback ( DBImpl * db_impl ) <nl> + : db_impl_ ( db_impl ) { } <nl> + virtual Status Callback ( SequenceNumber seq , bool is_mem_disabled ) override { <nl> + assert ( is_mem_disabled ) ; <nl> + assert ( db_impl_ - > immutable_db_options ( ) . two_write_queues ) ; <nl> + db_impl_ - > SetLastPublishedSequence ( seq ) ; <nl> + return Status : : OK ( ) ; <nl> + } <nl> + <nl> + private : <nl> + DBImpl * db_impl_ ; <nl> + } publish_seq_callback ( db_impl_ ) ; <nl> + WriteBatch empty_batch ; <nl> + empty_batch . PutLogData ( Slice ( ) ) ; <nl> + / / In the absence of Prepare markers , use Noop as a batch separator <nl> + WriteBatchInternal : : InsertNoop ( & empty_batch ) ; <nl> + const bool DISABLE_MEMTABLE = true ; <nl> + const size_t ONE_BATCH = 1 ; <nl> + const uint64_t NO_REF_LOG = 0 ; <nl> + s = db_impl_ - > WriteImpl ( write_options_ , & empty_batch , nullptr , nullptr , <nl> + NO_REF_LOG , DISABLE_MEMTABLE , & seq_used , ONE_BATCH , <nl> + & publish_seq_callback ) ; <nl> + assert ( ! s . ok ( ) | | seq_used ! = kMaxSequenceNumber ) ; <nl> return s ; <nl> } <nl> <nl> Status WritePreparedTxn : : RollbackInternal ( ) { <nl> WriteBatchInternal : : MarkRollback ( & rollback_batch , name_ ) ; <nl> bool do_one_write = ! db_impl_ - > immutable_db_options ( ) . 
two_write_queues ; <nl> const bool DISABLE_MEMTABLE = true ; <nl> - const uint64_t no_log_ref = 0 ; <nl> + const uint64_t NO_REF_LOG = 0 ; <nl> uint64_t seq_used = kMaxSequenceNumber ; <nl> const size_t ZERO_PREPARES = 0 ; <nl> const size_t ONE_BATCH = 1 ; <nl> WritePreparedCommitEntryPreReleaseCallback update_commit_map ( <nl> wpt_db_ , db_impl_ , kMaxSequenceNumber , ZERO_PREPARES , ONE_BATCH ) ; <nl> s = db_impl_ - > WriteImpl ( write_options_ , & rollback_batch , nullptr , nullptr , <nl> - no_log_ref , ! DISABLE_MEMTABLE , & seq_used , ONE_BATCH , <nl> + NO_REF_LOG , ! DISABLE_MEMTABLE , & seq_used , ONE_BATCH , <nl> do_one_write ? & update_commit_map : nullptr ) ; <nl> assert ( ! s . ok ( ) | | seq_used ! = kMaxSequenceNumber ) ; <nl> if ( ! s . ok ( ) ) { <nl> Status WritePreparedTxn : : RollbackInternal ( ) { <nl> / / In the absence of Prepare markers , use Noop as a batch separator <nl> WriteBatchInternal : : InsertNoop ( & empty_batch ) ; <nl> s = db_impl_ - > WriteImpl ( write_options_ , & empty_batch , nullptr , nullptr , <nl> - no_log_ref , DISABLE_MEMTABLE , & seq_used , ONE_BATCH , <nl> + NO_REF_LOG , DISABLE_MEMTABLE , & seq_used , ONE_BATCH , <nl> & update_commit_map_with_prepare ) ; <nl> assert ( ! s . ok ( ) | | seq_used ! = kMaxSequenceNumber ) ; <nl> / / Mark the txn as rolled back <nl> mmm a / utilities / transactions / write_prepared_txn_db . h <nl> ppp b / utilities / transactions / write_prepared_txn_db . h <nl> class WritePreparedCommitEntryPreReleaseCallback : public PreReleaseCallback { <nl> SequenceNumber prep_seq , <nl> size_t prep_batch_cnt , <nl> size_t data_batch_cnt = 0 , <nl> - bool prep_heap_skipped = false ) <nl> + bool prep_heap_skipped = false , <nl> + bool publish_seq = true ) <nl> : db_ ( db ) , <nl> db_impl_ ( db_impl ) , <nl> prep_seq_ ( prep_seq ) , <nl> prep_batch_cnt_ ( prep_batch_cnt ) , <nl> data_batch_cnt_ ( data_batch_cnt ) , <nl> prep_heap_skipped_ ( prep_heap_skipped ) , <nl> - includes_data_ ( data_batch_cnt_ > 0 ) { <nl> + includes_data_ ( data_batch_cnt_ > 0 ) , <nl> + publish_seq_ ( publish_seq ) { <nl> assert ( ( prep_batch_cnt_ > 0 ) ! = ( prep_seq = = kMaxSequenceNumber ) ) ; / / xor <nl> assert ( prep_batch_cnt_ > 0 | | data_batch_cnt_ > 0 ) ; <nl> } <nl> <nl> - virtual Status Callback ( SequenceNumber commit_seq ) override { <nl> + virtual Status Callback ( SequenceNumber commit_seq , <nl> + bool is_mem_disabled ) override { <nl> assert ( includes_data_ | | prep_seq_ ! = kMaxSequenceNumber ) ; <nl> const uint64_t last_commit_seq = LIKELY ( data_batch_cnt_ < = 1 ) <nl> ? commit_seq <nl> class WritePreparedCommitEntryPreReleaseCallback : public PreReleaseCallback { <nl> db_ - > AddCommitted ( commit_seq + i , last_commit_seq , PREPARE_SKIPPED ) ; <nl> } <nl> } <nl> - if ( db_impl_ - > immutable_db_options ( ) . two_write_queues ) { <nl> + if ( db_impl_ - > immutable_db_options ( ) . two_write_queues & & publish_seq_ ) { <nl> + assert ( is_mem_disabled ) ; / / implies the 2nd queue <nl> / / Publish the sequence number . We can do that here assuming the callback <nl> / / is invoked only from one write queue , which would guarantee that the <nl> / / publish sequence numbers will be in order , i . e . 
, once a seq is <nl> class WritePreparedCommitEntryPreReleaseCallback : public PreReleaseCallback { <nl> / / Either because it is commit without prepare or it has a <nl> / / CommitTimeWriteBatch <nl> bool includes_data_ ; <nl> + / / Should the callback also publishes the commit seq number <nl> + bool publish_seq_ ; <nl> } ; <nl> <nl> / / Count the number of sub - batches inside a batch . A sub - batch does not have <nl> | WritePrepared Txn : fix race condition on publishing seq | facebook/rocksdb | 7429b20e3901e0bcc7e028b37536a262cf11a448 | 2018-03-22T21:43:36Z |
mmm a / java / rocksjni . pom <nl> ppp b / java / rocksjni . pom <nl> <nl> < url > http : / / www . apache . org / licenses / LICENSE - 2 . 0 . html < / url > <nl> < distribution > repo < / distribution > <nl> < / license > <nl> + < license > <nl> + < name > GNU General Public License , version 2 < / name > <nl> + < url > http : / / www . gnu . org / licenses / gpl - 2 . 0 . html < / url > <nl> + < distribution > repo < / distribution > <nl> + < / license > <nl> < / licenses > <nl> < scm > <nl> < connection > scm : git : git : / / github . com / dropwizard / metrics . git < / connection > <nl> | Update java / rocksjni . pom | facebook/rocksdb | 6e3ee015fb1ce03e47838e9a3995410ce884c212 | 2017-07-18T20:12:23Z |
mmm a / dbms / src / Storages / StorageView . cpp <nl> ppp b / dbms / src / Storages / StorageView . cpp <nl> <nl> <nl> # include < Common / typeid_cast . h > <nl> <nl> + # include < Processors / Pipe . h > <nl> + # include < Processors / Sources / SourceFromInputStream . h > <nl> + # include < Processors / Transforms / MaterializingTransform . h > <nl> + <nl> <nl> namespace DB <nl> { <nl> StorageView : : StorageView ( <nl> } <nl> <nl> <nl> - BlockInputStreams StorageView : : read ( <nl> + Pipes StorageView : : readWithProcessors ( <nl> const Names & column_names , <nl> const SelectQueryInfo & query_info , <nl> const Context & context , <nl> BlockInputStreams StorageView : : read ( <nl> const size_t / * max_block_size * / , <nl> const unsigned / * num_streams * / ) <nl> { <nl> - BlockInputStreams res ; <nl> + Pipes pipes ; <nl> <nl> ASTPtr current_inner_query = inner_query ; <nl> <nl> BlockInputStreams StorageView : : read ( <nl> } <nl> <nl> QueryPipeline pipeline ; <nl> + InterpreterSelectWithUnionQuery interpreter ( current_inner_query , context , { } , column_names ) ; <nl> / / / FIXME res may implicitly use some objects owned be pipeline , but them will be destructed after return <nl> - res = InterpreterSelectWithUnionQuery ( current_inner_query , context , { } , column_names ) . executeWithMultipleStreams ( pipeline ) ; <nl> + if ( query_info . force_tree_shaped_pipeline ) <nl> + { <nl> + BlockInputStreams streams = interpreter . executeWithMultipleStreams ( pipeline ) ; <nl> + for ( auto & stream : streams ) <nl> + pipes . emplace_back ( std : : make_shared < SourceFromInputStream > ( std : : move ( stream ) ) ) ; <nl> + } <nl> + else <nl> + / / / TODO : support multiple streams here . Need more general interface than pipes . <nl> + pipes . emplace_back ( interpreter . executeWithProcessors ( ) . getPipe ( ) ) ; <nl> <nl> / / / It ' s expected that the columns read from storage are not constant . <nl> / / / Because method ' getSampleBlockForColumns ' is used to obtain a structure of result in InterpreterSelectQuery . <nl> - for ( auto & stream : res ) <nl> - stream = std : : make_shared < MaterializingBlockInputStream > ( stream ) ; <nl> + for ( auto & pipe : pipes ) <nl> + pipe . addSimpleTransform ( std : : make_shared < MaterializingTransform > ( pipe . getHeader ( ) ) ) ; <nl> <nl> - return res ; <nl> + return pipes ; <nl> } <nl> <nl> void StorageView : : replaceTableNameWithSubquery ( ASTSelectQuery * select_query , ASTPtr & subquery ) <nl> mmm a / dbms / src / Storages / StorageView . h <nl> ppp b / dbms / src / Storages / StorageView . h <nl> class StorageView : public ext : : shared_ptr_helper < StorageView > , public IStorage <nl> bool supportsSampling ( ) const override { return true ; } <nl> bool supportsFinal ( ) const override { return true ; } <nl> <nl> - BlockInputStreams read ( <nl> + Pipes readWithProcessors ( <nl> const Names & column_names , <nl> const SelectQueryInfo & query_info , <nl> const Context & context , <nl> class StorageView : public ext : : shared_ptr_helper < StorageView > , public IStorage <nl> size_t max_block_size , <nl> unsigned num_streams ) override ; <nl> <nl> + bool supportProcessorsPipeline ( ) const override { return true ; } <nl> + <nl> private : <nl> ASTPtr inner_query ; <nl> <nl> | Processors support for StorageView reading . | ClickHouse/ClickHouse | 728ece09f1bd9c2b24e72987ae04ce63f3e19f49 | 2020-02-17T15:27:51Z |
mmm a / src / concurrency / drain_semaphore . cc <nl> ppp b / src / concurrency / drain_semaphore . cc <nl> drain_semaphore_t : : lock_t : : ~ lock_t ( ) { <nl> parent - > release ( ) ; <nl> } <nl> <nl> + void drain_semaphore_t : : rethread ( int new_thread ) { <nl> + rassert ( refcount = = 0 ) ; <nl> + real_home_thread = new_thread ; <nl> + cond . rethread ( new_thread ) ; <nl> + } <nl> + <nl> + <nl> + <nl> void drain_semaphore_t : : drain ( ) { <nl> assert_thread ( ) ; <nl> draining = true ; <nl> mmm a / src / concurrency / drain_semaphore . hpp <nl> ppp b / src / concurrency / drain_semaphore . hpp <nl> struct drain_semaphore_t : public home_thread_mixin_t { <nl> to start . * / <nl> void drain ( ) ; <nl> <nl> + void rethread ( int new_thread ) ; <nl> + <nl> private : <nl> bool draining ; <nl> int refcount ; <nl> | Reverted 6ffcc1eb6ff , " Removed drain_semaphore_t : : rethread . " | rethinkdb/rethinkdb | 99ef70f1576b5ce51511635432c9a57d50226c7d | 2011-11-04T21:00:15Z |
mmm a / src / qt / test / apptests . cpp <nl> ppp b / src / qt / test / apptests . cpp <nl> <nl> # include < qt / networkstyle . h > <nl> # include < qt / rpcconsole . h > <nl> # include < shutdown . h > <nl> + # include < test / setup_common . h > <nl> + # include < univalue . h > <nl> # include < validation . h > <nl> <nl> # if defined ( HAVE_CONFIG_H ) <nl> <nl> # include < QtGlobal > <nl> # include < QtTest / QtTestWidgets > <nl> # include < QtTest / QtTestGui > <nl> - # include < string > <nl> - # include < univalue . h > <nl> <nl> namespace { <nl> / / ! Call getblockchaininfo RPC and check first field of JSON output . <nl> void AppTests : : appTests ( ) <nl> } <nl> # endif <nl> <nl> + BasicTestingSetup test { CBaseChainParams : : REGTEST } ; / / Create a temp data directory to backup the gui settings to <nl> ECC_Stop ( ) ; / / Already started by the common test setup , so stop it to avoid interference <nl> LogInstance ( ) . DisconnectTestLogger ( ) ; <nl> <nl> mmm a / src / qt / test / paymentservertests . cpp <nl> ppp b / src / qt / test / paymentservertests . cpp <nl> <nl> # include < random . h > <nl> # include < script / script . h > <nl> # include < script / standard . h > <nl> - # include < util / system . h > <nl> + # include < test / setup_common . h > <nl> # include < util / strencodings . h > <nl> <nl> # include < openssl / x509 . h > <nl> static SendCoinsRecipient handleRequest ( PaymentServer * server , std : : vector < unsig <nl> <nl> void PaymentServerTests : : paymentServerTests ( ) <nl> { <nl> - SelectParams ( CBaseChainParams : : MAIN ) ; <nl> + BasicTestingSetup testing_setup ( CBaseChainParams : : MAIN ) ; <nl> auto node = interfaces : : MakeNode ( ) ; <nl> OptionsModel optionsModel ( * node ) ; <nl> PaymentServer * server = new PaymentServer ( nullptr , false ) ; <nl> mmm a / src / qt / test / rpcnestedtests . cpp <nl> ppp b / src / qt / test / rpcnestedtests . cpp <nl> void RPCNestedTests : : rpcNestedTests ( ) <nl> tableRPC . appendCommand ( " rpcNestedTest " , & vRPCCommands [ 0 ] ) ; <nl> / / mempool . setSanityCheck ( 1 . 0 ) ; <nl> <nl> - ECC_Stop ( ) ; / / Already started by the common test setup , so stop it to avoid interference <nl> - LogInstance ( ) . DisconnectTestLogger ( ) ; <nl> - <nl> TestingSetup test ; <nl> <nl> if ( RPCIsInWarmup ( nullptr ) ) SetRPCWarmupFinished ( ) ; <nl> mmm a / src / qt / test / test_main . cpp <nl> ppp b / src / qt / test / test_main . cpp <nl> Q_IMPORT_PLUGIN ( QCocoaIntegrationPlugin ) ; <nl> # endif <nl> # endif <nl> <nl> - extern void noui_connect ( ) ; <nl> - <nl> / / This is all you need to run all the tests <nl> int main ( int argc , char * argv [ ] ) <nl> { <nl> - BasicTestingSetup test { CBaseChainParams : : REGTEST } ; <nl> + / / Initialize persistent globals with the testing setup state for sanity . <nl> + / / E . g . - datadir in gArgs is set to a temp directory dummy value ( instead <nl> + / / of defaulting to the default datadir ) , or globalChainParams is set to <nl> + / / regtest params . <nl> + / / <nl> + / / All tests must use their own testing setup ( if needed ) . <nl> + { <nl> + BasicTestingSetup dummy { CBaseChainParams : : REGTEST } ; <nl> + } <nl> <nl> auto node = interfaces : : MakeNode ( ) ; <nl> <nl> mmm a / src / test / setup_common . cpp <nl> ppp b / src / test / setup_common . 
cpp <nl> TestingSetup : : TestingSetup ( const std : : string & chainName ) : BasicTestingSetup ( cha <nl> const CChainParams & chainparams = Params ( ) ; <nl> / / Ideally we ' d move all the RPC tests to the functional testing framework <nl> / / instead of unit tests , but for now we need these here . <nl> - <nl> RegisterAllCoreRPCCommands ( tableRPC ) ; <nl> <nl> / / We have to run a scheduler thread to prevent ActivateBestChain <nl> | qt : test : Create at most one testing setup | bitcoin/bitcoin | faa1e0fb1712b1f94334e42794163f79988270fd | 2019-06-27T20:47:23Z |
mmm a / ios / sdk / WeexSDK / Sources / Bridge / WXBridgeContext . m <nl> ppp b / ios / sdk / WeexSDK / Sources / Bridge / WXBridgeContext . m <nl> - ( void ) createInstance : ( NSString * ) instance <nl> } else { <nl> args = @ [ instance , temp , options ? : @ { } ] ; <nl> } <nl> - <nl> + WX_MONITOR_INSTANCE_PERF_START ( WXFirstScreenJSFExecuteTime , [ WXSDKManager instanceForID : instance ] ) ; <nl> WX_MONITOR_INSTANCE_PERF_START ( WXPTJSCreateInstance , [ WXSDKManager instanceForID : instance ] ) ; <nl> [ self callJSMethod : @ " createInstance " args : args ] ; <nl> WX_MONITOR_INSTANCE_PERF_END ( WXPTJSCreateInstance , [ WXSDKManager instanceForID : instance ] ) ; <nl> - ( void ) executeJsFramework : ( NSString * ) script <nl> WXAssertParam ( script ) ; <nl> <nl> WX_MONITOR_PERF_START ( WXPTFrameworkExecute ) ; <nl> - <nl> + <nl> [ self . jsBridge executeJSFramework : script ] ; <nl> <nl> WX_MONITOR_PERF_END ( WXPTFrameworkExecute ) ; <nl> mmm a / ios / sdk / WeexSDK / Sources / Manager / WXComponentManager . m <nl> ppp b / ios / sdk / WeexSDK / Sources / Manager / WXComponentManager . m <nl> - ( void ) createRoot : ( NSDictionary * ) data <nl> <nl> [ self _initRootCSSNode ] ; <nl> __weak typeof ( self ) weakSelf = self ; <nl> + WX_MONITOR_INSTANCE_PERF_END ( WXFirstScreenJSFExecuteTime , self . weexInstance ) ; <nl> [ self _addUITask : ^ { <nl> [ WXTracingManager startTracingWithInstanceId : weakSelf . weexInstance . instanceId ref : data [ @ " ref " ] className : nil name : data [ @ " type " ] phase : WXTracingBegin functionName : @ " createBody " options : @ { @ " threadName " : WXTUIThread } ] ; <nl> __strong typeof ( self ) strongSelf = weakSelf ; <nl> mmm a / ios / sdk / WeexSDK / Sources / Module / WXPrerenderManager . m <nl> ppp b / ios / sdk / WeexSDK / Sources / Module / WXPrerenderManager . m <nl> - ( void ) prerender : ( NSURL * ) url instanceId : ( NSString * ) instanceId isCache : ( BOOL ) <nl> self . maxCacheNumber = max ; <nl> } <nl> } <nl> - <nl> - <nl> - WXSDKInstance * instance = [ [ WXSDKInstance alloc ] init ] ; <nl> - instance . needPrerender = YES ; <nl> - task . instance = instance ; <nl> - task . parentInstanceId = instanceId ; <nl> - task . url = url . absoluteString ; <nl> - task . isCache = isCache ; <nl> - <nl> if ( self . prerenderTasks & & self . prerenderTasks . count < self . maxCacheNumber ) { <nl> [ self . prerenderTasks setObject : task forKey : [ WXPrerenderManager getTaskKeyFromUrl : url . absoluteString ] ] ; <nl> WXPerformBlockOnMainThread ( ^ { <nl> - [ instance renderWithURL : url options : @ { @ " bundleUrl " : url . absoluteString } data : nil ] ; <nl> + WXSDKInstance * instance = [ [ WXSDKInstance alloc ] init ] ; <nl> + instance . needPrerender = YES ; <nl> + task . instance = instance ; <nl> + task . parentInstanceId = instanceId ; <nl> + task . url = url . absoluteString ; <nl> + task . isCache = isCache ; <nl> WXPrerenderManager * manager = [ WXPrerenderManager sharedInstance ] ; <nl> __weak typeof ( self ) weakSelf = manager ; <nl> instance . onCreate = ^ ( UIView * view ) { <nl> - ( void ) prerender : ( NSURL * ) url instanceId : ( NSString * ) instanceId isCache : ( BOOL ) <nl> [ weakSelf . prerenderTasks setObject : task forKey : [ WXPrerenderManager getTaskKeyFromUrl : url . absoluteString ] ] ; <nl> } <nl> } ; <nl> + [ instance renderWithURL : url options : @ { @ " bundleUrl " : url . absoluteString } data : nil ] ; <nl> } ) ; <nl> if ( callback ) { <nl> callback ( @ { @ " url " : url . 
absoluteString , @ " message " : MSG_PRERENDER_SUCCESS , @ " result " : @ " success " } ) ; <nl> - ( BOOL ) isTaskReady : ( NSString * ) url <nl> } <nl> <nl> + ( BOOL ) isTaskExist : ( NSString * ) url { <nl> - return [ [ WXPrerenderManager sharedInstance ] isTaskExist : url ] ; <nl> + <nl> + return [ [ WXPrerenderManager sharedInstance ] isTaskExist : url ] ; <nl> } <nl> <nl> - ( BOOL ) isTaskExist : ( NSString * ) url <nl> mmm a / ios / sdk / WeexSDK / Sources / Monitor / WXMonitor . h <nl> ppp b / ios / sdk / WeexSDK / Sources / Monitor / WXMonitor . h <nl> typedef enum : NSUInteger { <nl> / / instance <nl> WXPTJSDownload , <nl> WXPTJSCreateInstance , <nl> + WXFirstScreenJSFExecuteTime , <nl> WXPTFirstScreenRender , <nl> WXPTAllRender , <nl> WXPTBundleSize , <nl> mmm a / ios / sdk / WeexSDK / Sources / Monitor / WXMonitor . m <nl> ppp b / ios / sdk / WeexSDK / Sources / Monitor / WXMonitor . m <nl> + ( void ) commitPerformanceWithDict : ( NSMutableDictionary * ) commitDict instance : ( WX <nl> @ ( WXPTFrameworkExecute ) : JSLIBINITTIME , <nl> @ ( WXPTJSDownload ) : NETWORKTIME , <nl> @ ( WXPTJSCreateInstance ) : COMMUNICATETIME , <nl> + @ ( WXFirstScreenJSFExecuteTime ) : FIRSETSCREENJSFEXECUTETIME , <nl> @ ( WXPTFirstScreenRender ) : SCREENRENDERTIME , <nl> @ ( WXPTAllRender ) : TOTALTIME , <nl> - @ ( WXPTBundleSize ) : JSTEMPLATESIZE <nl> + @ ( WXPTBundleSize ) : JSTEMPLATESIZE , <nl> } ; <nl> } ) ; <nl> <nl> mmm a / ios / sdk / WeexSDK / Sources / Protocol / WXAppMonitorProtocol . h <nl> ppp b / ios / sdk / WeexSDK / Sources / Protocol / WXAppMonitorProtocol . h <nl> <nl> # define COMMUNICATETIME @ " communicateTime " <nl> # define SCREENRENDERTIME @ " screenRenderTime " <nl> # define TOTALTIME @ " totalTime " <nl> + # define FIRSETSCREENJSFEXECUTETIME @ " firstScreenJSFExecuteTime " <nl> <nl> # define CACHEPROCESSTIME @ " cacheProcessTime " <nl> # define CACHERATIO @ " cacheRatio " <nl> | + [ ios ] add performance for first excute js and optimize prerender | apache/incubator-weex | 3ec148430d5b452246f8836b181535597d3122fd | 2017-09-06T15:26:59Z |
mmm a / src / rdb_protocol / datum . hpp <nl> ppp b / src / rdb_protocol / datum . hpp <nl> class wire_datum_t { <nl> } ; <nl> <nl> # ifndef NDEBUG <nl> - static const int64_t WIRE_DATUM_MAP_GC_ROUNDS = 1000 ; <nl> - # else <nl> static const int64_t WIRE_DATUM_MAP_GC_ROUNDS = 2 ; <nl> + # else <nl> + static const int64_t WIRE_DATUM_MAP_GC_ROUNDS = 1000 ; <nl> # endif / / NDEBUG <nl> <nl> <nl> mmm a / src / rdb_protocol / env . cc <nl> ppp b / src / rdb_protocol / env . cc <nl> void env_checkpoint_t : : gc ( const datum_t * root ) { <nl> <nl> / / We GC more frequently ( ~ every 16 data ) in debug mode to help with testing . <nl> # ifndef NDEBUG <nl> - const int env_gc_checkpoint_t : : DEFAULT_GEN1_CUTOFF = ( 8 * 1024 * 1024 ) ; <nl> - # else <nl> const int env_gc_checkpoint_t : : DEFAULT_GEN1_CUTOFF = <nl> sizeof ( datum_t ) * ptr_bag_t : : mem_estimate_multiplier * 16 ; <nl> + # else <nl> + const int env_gc_checkpoint_t : : DEFAULT_GEN1_CUTOFF = ( 8 * 1024 * 1024 ) ; <nl> # endif / / NDEBUG <nl> const int env_gc_checkpoint_t : : DEFAULT_GEN2_SIZE_MULTIPLIER = 8 ; <nl> <nl> env_gc_checkpoint_t : : ~ env_gc_checkpoint_t ( ) { <nl> env - > merge_checkpoint ( ) ; <nl> } <nl> } <nl> + <nl> const datum_t * env_gc_checkpoint_t : : maybe_gc ( const datum_t * root ) { <nl> if ( env - > current_bag ( ) - > get_mem_estimate ( ) > gen1 ) { <nl> env - > gc ( root ) ; <nl> mmm a / src / rdb_protocol / stream_cache . hpp <nl> ppp b / src / rdb_protocol / stream_cache . hpp <nl> class stream_cache2_t { <nl> struct entry_t { <nl> ~ entry_t ( ) ; / / ` env_t ` is incomplete <nl> # ifndef NDEBUG <nl> - static const int DEFAULT_MAX_CHUNK_SIZE = 1000 ; <nl> - # else <nl> static const int DEFAULT_MAX_CHUNK_SIZE = 5 ; <nl> + # else <nl> + static const int DEFAULT_MAX_CHUNK_SIZE = 1000 ; <nl> # endif / / NDEBUG <nl> static const time_t DEFAULT_MAX_AGE = 0 ; / / 0 = never evict <nl> entry_t ( time_t _last_activity , scoped_ptr_t < env_t > * env_ptr , <nl> mmm a / src / rdb_protocol / transform_visitors . cc <nl> ppp b / src / rdb_protocol / transform_visitors . cc <nl> terminal_visitor_t : : terminal_visitor_t ( boost : : shared_ptr < scoped_cJSON_t > _json , <nl> scopes ( _scopes ) , backtrace ( _backtrace ) , out ( _out ) <nl> { } <nl> <nl> - void terminal_initializer_visitor_t : : operator ( ) ( <nl> - UNUSED const ql : : gmr_wire_func_t & f ) const { <nl> - * out = ql : : wire_datum_map_t ( ) ; <nl> - } <nl> / / All of this logic is analogous to the eager logic in datum_stream . cc . This <nl> / / code duplication needs to go away , but I ' m not 100 % sure how to do it ( there <nl> / / are sometimes minor differences between the lazy and eager evaluations ) and <nl> void terminal_visitor_t : : operator ( ) ( ql : : gmr_wire_func_t & func / * NOLINT * / ) const { <nl> } <nl> } <nl> <nl> - void terminal_initializer_visitor_t : : operator ( ) ( <nl> - UNUSED const ql : : count_wire_func_t & f ) const { <nl> - * out = ql : : wire_datum_t ( ql_env - > add_ptr ( new ql : : datum_t ( 0 . 0 ) ) ) ; <nl> - } <nl> - <nl> void terminal_visitor_t : : operator ( ) ( UNUSED const ql : : count_wire_func_t & func ) const { <nl> / / TODO : just pass an int around <nl> ql : : wire_datum_t * d = boost : : get < ql : : wire_datum_t > ( out ) ; <nl> d - > reset ( ql_env - > add_ptr ( new ql : : datum_t ( d - > get ( ) - > as_int ( ) + 1 . 
0 ) ) ) ; <nl> } <nl> <nl> - void terminal_initializer_visitor_t : : operator ( ) ( <nl> - UNUSED const ql : : reduce_wire_func_t & f ) const { <nl> - * out = rget_read_response_t : : empty_t ( ) ; <nl> - } <nl> - <nl> void terminal_visitor_t : : operator ( ) ( ql : : reduce_wire_func_t & func / * NOLINT * / ) const { <nl> ql : : wire_datum_t * d = boost : : get < ql : : wire_datum_t > ( out ) ; <nl> const ql : : datum_t * rhs = ql_env - > add_ptr ( new ql : : datum_t ( json , ql_env ) ) ; <nl> mmm a / src / rdb_protocol / transform_visitors . hpp <nl> ppp b / src / rdb_protocol / transform_visitors . hpp <nl> class terminal_initializer_visitor_t : public boost : : static_visitor < void > { <nl> const scopes_t & _scopes , <nl> const backtrace_t & _backtrace ) ; <nl> <nl> - void operator ( ) ( const ql : : gmr_wire_func_t & ) const ; <nl> - void operator ( ) ( const ql : : count_wire_func_t & ) const ; <nl> - void operator ( ) ( const ql : : reduce_wire_func_t & ) const ; <nl> + void operator ( ) ( ql : : gmr_wire_func_t & f ) const { <nl> + ql : : func_t * group = f . compile_group ( ql_env ) ; <nl> + ql : : func_t * map = f . compile_map ( ql_env ) ; <nl> + ql : : func_t * reduce = f . compile_reduce ( ql_env ) ; <nl> + guarantee ( group ! = NULL & & map ! = NULL & & reduce ! = NULL ) ; <nl> + * out = ql : : wire_datum_map_t ( ) ; <nl> + } <nl> + <nl> + void operator ( ) ( const ql : : count_wire_func_t & ) const { <nl> + * out = ql : : wire_datum_t ( ql_env - > add_ptr ( new ql : : datum_t ( 0 . 0 ) ) ) ; <nl> + } <nl> + <nl> + void operator ( ) ( ql : : reduce_wire_func_t & f ) const { <nl> + ql : : func_t * reduce = f . compile ( ql_env ) ; <nl> + guarantee ( reduce ! = NULL ) ; <nl> + * out = rget_read_response_t : : empty_t ( ) ; <nl> + } <nl> + <nl> private : <nl> rget_read_response_t : : result_t * out ; <nl> ql : : env_t * ql_env ; <nl> | fixed generation issue | rethinkdb/rethinkdb | 0ae73614dd08d45d5ddc11baa7f108fb42c809d0 | 2013-04-06T02:25:09Z |
mmm a / android / playground / app / build . gradle <nl> ppp b / android / playground / app / build . gradle <nl> dependencies { <nl> compile ' com . jakewharton . scalpel : scalpel : 1 . 1 . 2 ' <nl> compile ' com . taobao . android . weex_inspection : urlconnection_interceptor : 1 . 0 . 0 ' <nl> compile ' com . android . support . test . espresso : espresso - idling - resource : 2 . 2 . 2 ' <nl> - compile ' com . taobao . android : weex_inspector : 0 . 18 . 10 ' <nl> + compile ' com . taobao . android : weex_inspector : 0 . 24 . 2 . 4 ' <nl> / / compile project ( " : weex_inspector " ) <nl> <nl> / / Bind actions to effects . See https : / / alibaba . github . io / bindingx / <nl> | * [ Android ] Update devtool version ( ) | apache/incubator-weex | 5fec7c5f0dfd1a1c4fa91cb43dcbc3cf2ec23620 | 2019-04-22T07:27:25Z |