diff (string, lengths 41–2.03M) | msg (string, lengths 1–1.5k, nullable) | repo (string, lengths 5–40) | sha (string, length 40) | time (string, length 20) |
---|---|---|---|---|
mmm a / src / layer / arm / convolution_1x1_pack4 . h <nl> ppp b / src / layer / arm / convolution_1x1_pack4 . h <nl> static void conv1x1s1_sgemm_pack4_neon ( const Mat & bottom_blob , Mat & top_blob , co <nl> / / } <nl> / / } <nl> } <nl> + <nl> + static void conv1x1s2_pack4_neon ( const Mat & bottom_blob , Mat & top_blob , const Mat & kernel , const Mat & _bias , const Option & opt ) <nl> + { <nl> + int w = bottom_blob . w ; <nl> + int channels = bottom_blob . c ; <nl> + size_t elemsize = bottom_blob . elemsize ; <nl> + int elempack = bottom_blob . elempack ; <nl> + <nl> + int outw = top_blob . w ; <nl> + int outh = top_blob . h ; <nl> + <nl> + const int tailstep = ( w - 2 * outw + w ) * 4 ; <nl> + <nl> + Mat bottom_blob_shrinked ; <nl> + bottom_blob_shrinked . create ( outw , outh , channels , elemsize , elempack , opt . workspace_allocator ) ; <nl> + <nl> + # pragma omp parallel for num_threads ( opt . num_threads ) <nl> + for ( int p = 0 ; p < channels ; p + + ) <nl> + { <nl> + const float * r0 = bottom_blob . channel ( p ) ; <nl> + float * outptr = bottom_blob_shrinked . channel ( p ) ; <nl> + <nl> + for ( int i = 0 ; i < outh ; i + + ) <nl> + { <nl> + for ( int j = 0 ; j < outw ; j + + ) <nl> + { <nl> + float32x4_t _v = vld1q_f32 ( r0 ) ; <nl> + vst1q_f32 ( outptr , _v ) ; <nl> + <nl> + r0 + = 8 ; <nl> + outptr + = 4 ; <nl> + } <nl> + <nl> + r0 + = tailstep ; <nl> + } <nl> + } <nl> + <nl> + conv1x1s1_sgemm_pack4_neon ( bottom_blob_shrinked , top_blob , kernel , _bias , opt ) ; <nl> + } <nl> mmm a / src / layer / arm / convolution_arm . cpp <nl> ppp b / src / layer / arm / convolution_arm . cpp <nl> int Convolution_arm : : forward ( const Mat & bottom_blob , Mat & top_blob , const Option <nl> return 0 ; <nl> } <nl> <nl> + if ( kernel_w = = 1 & & kernel_h = = 1 & & stride_w = = 2 & & stride_h = = 2 & & dilation_w = = 1 & & dilation_h = = 1 ) <nl> + { <nl> + conv1x1s2_pack4_neon ( bottom_blob_bordered , top_blob , weight_data_pack4 , bias_data , opt ) ; <nl> + <nl> + if ( activation ) <nl> + { <nl> + activation - > forward_inplace ( top_blob , opt ) ; <nl> + } <nl> + <nl> + return 0 ; <nl> + } <nl> + <nl> if ( kernel_w = = 3 & & kernel_h = = 3 & & stride_w = = 1 & & stride_h = = 1 & & dilation_w = = 1 & & dilation_h = = 1 ) <nl> { <nl> conv3x3s1_winograd64_pack4_neon ( bottom_blob_bordered , top_blob , weight_3x3_winograd64_data_pack4 , bias_data , opt ) ; <nl> mmm a / src / layer / arm / innerproduct_arm . cpp <nl> ppp b / src / layer / arm / innerproduct_arm . cpp <nl> int InnerProduct_arm : : forward ( const Mat & bottom_blob , Mat & top_blob , const Optio <nl> # endif / / __aarch64__ <nl> # endif / / __ARM_NEON <nl> <nl> - if ( activation_type = = 1 ) <nl> - { <nl> - sum = std : : max ( sum , 0 . f ) ; <nl> - } <nl> - else if ( activation_type = = 2 ) <nl> - { <nl> - float slope = activation_params [ 0 ] ; <nl> - sum = sum > 0 . f ? sum : sum * slope ; <nl> - } <nl> - else if ( activation_type = = 3 ) <nl> - { <nl> - float min = activation_params [ 0 ] ; <nl> - float max = activation_params [ 1 ] ; <nl> - if ( sum < min ) <nl> - sum = min ; <nl> - if ( sum > max ) <nl> - sum = max ; <nl> - } <nl> - else if ( activation_type = = 4 ) <nl> - { <nl> - sum = 1 . f / ( 1 . f + exp ( - sum ) ) ; <nl> - } <nl> + sum = activation_ss ( sum , activation_type , activation_params ) ; <nl> <nl> top_blob [ p ] = sum ; <nl> } <nl> | arm neon optimization for conv1x1s2 pack4 | Tencent/ncnn | 9e529354fbb52eddf1eb7be9a7b07de0c5609560 | 2019-10-11T15:41:42Z |
mmm a / include / swift / AST / AnyRequest . h <nl> ppp b / include / swift / AST / AnyRequest . h <nl> class AnyRequest { <nl> AnyRequest ( const AnyRequest & & other ) <nl> : storageKind ( other . storageKind ) , stored ( other . stored ) { } <nl> <nl> + / / Create a local template typename ` ValueType ` in the template specialization <nl> + / / so that we can refer to it in the SFINAE condition as well as the body of <nl> + / / the template itself . The SFINAE condition allows us to remove this <nl> + / / constructor from candidacy when evaluating explicit construction with an <nl> + / / instance of ` AnyRequest ` . If we do not do so , we will find ourselves with <nl> + / / ambiguity with this constructor and the defined move constructor above . <nl> / / / Construct a new instance with the given value . <nl> - template < typename T > <nl> - explicit AnyRequest ( T & & value ) : storageKind ( StorageKind : : Normal ) { <nl> - using ValueType = <nl> - typename std : : remove_cv < typename std : : remove_reference < T > : : type > : : type ; <nl> + template < typename T , <nl> + typename ValueType = typename std : : remove_cv < <nl> + typename std : : remove_reference < T > : : type > : : type , <nl> + typename = typename std : : enable_if < <nl> + ! std : : is_same < ValueType , AnyRequest > : : value > : : type > <nl> + explicit AnyRequest ( T & & value ) : storageKind ( StorageKind : : Normal ) { <nl> stored = llvm : : IntrusiveRefCntPtr < HolderBase > ( <nl> new Holder < ValueType > ( std : : forward < T > ( value ) ) ) ; <nl> } <nl> | AST : SFINAE away template ' d constructor | apple/swift | 4be44bf3ee0462a108954feae94b4fe1c1b68cd8 | 2018-09-15T05:01:51Z |
mmm a / test / mozilla / mozilla . status <nl> ppp b / test / mozilla / mozilla . status <nl> js1_5 / Regress / regress - 98901 : PASS | | FAIL <nl> js1_5 / extensions / regress - 371636 : PASS | | FAIL <nl> <nl> <nl> - # Test depends on GC timings . Inherently flaky . <nl> + # Tests depend on GC timings . Inherently flaky . <nl> js1_5 / GC / regress - 383269 - 01 : PASS | | FAIL <nl> + js1_5 / GC / regress - 383269 - 02 : PASS | | FAIL <nl> + js1_5 / Regress / regress - 404755 . js : PASS | | FAIL <nl> <nl> <nl> # # # # # # # # # # # # # # # # # # # # # INCOMPATIBLE TESTS # # # # # # # # # # # # # # # # # # # # # <nl> | Add two more Mozilla tests that depend on GC timings to the list | v8/v8 | 1970a38a9ead7ebca06967108a1da406d2becf27 | 2008-09-08T12:37:10Z |
mmm a / tensorflow / python / ops / control_flow_ops . py <nl> ppp b / tensorflow / python / ops / control_flow_ops . py <nl> def _init_from_proto ( self , context_def , import_scope = None ) : <nl> self . _pivot = g . as_graph_element ( <nl> ops . prepend_name_scope ( context_def . pivot_name , import_scope ) ) <nl> self . _branch = context_def . branch <nl> - super ( CondContext , self ) . __init__ ( <nl> - values_def = context_def . values_def , import_scope = import_scope ) <nl> + super ( CondContext , self ) . __init__ ( values_def = context_def . values_def , <nl> + import_scope = import_scope ) <nl> + # The predicate and pivot ops appear in self . _values , but don ' t have self <nl> + # set as their control context . The __init__ call above will set self for <nl> + # all values , so manually override the predicate and pivot contexts here . <nl> + # pylint : disable = protected - access <nl> + self . _pred . op . _set_control_flow_context ( self . outer_context ) <nl> + self . _pivot . op . _set_control_flow_context ( self . outer_context ) <nl> + # pylint : enable = protected - access <nl> <nl> @ property <nl> def pred ( self ) : <nl> mmm a / tensorflow / python / training / saver_test . py <nl> ppp b / tensorflow / python / training / saver_test . py <nl> def testGraphExtension ( self ) : <nl> self . _testGraphExtensionRestore ( test_dir ) <nl> self . _testRestoreFromTrainGraphWithControlContext ( test_dir ) <nl> <nl> - def testNestedWhileLoops ( self ) : <nl> - test_dir = self . _get_test_dir ( " nested_whiles " ) <nl> + def _testWhileLoopAndGradientSerDes ( self , outer_body_fn ) : <nl> + # Build a while loop with ` outer_body_fn ` , export it , and verify that it can <nl> + # be imported and the gradient can be built and run correctly . <nl> + <nl> + test_dir = self . _get_test_dir ( " nested_control_flow " ) <nl> filename = os . path . join ( test_dir , " metafile " ) <nl> saver_ckpt = os . path . join ( test_dir , " saver . ckpt " ) <nl> <nl> - # Create two simple nested while loops . <nl> + # Create while loop using ` outer_body_fn ` . <nl> with ops_lib . Graph ( ) . as_default ( ) : <nl> - def body ( i , x ) : <nl> - _ , r = control_flow_ops . while_loop ( lambda j , y : j < 3 , <nl> - lambda j , y : ( j + 1 , y + x ) , <nl> - [ 0 , 0 ] ) <nl> - return i + 1 , x + r <nl> - <nl> var = variables . Variable ( 0 ) <nl> var_name = var . name <nl> - <nl> - _ , output = control_flow_ops . while_loop ( lambda i , x : i < 5 , body , <nl> + _ , output = control_flow_ops . while_loop ( lambda i , x : i < 5 , outer_body_fn , <nl> [ 0 , var ] ) <nl> output_name = output . name <nl> - <nl> init_op = variables . global_variables_initializer ( ) <nl> <nl> - # Generate a MetaGraphDef containing the nested loops . <nl> + # Generate a MetaGraphDef containing the while loop . <nl> with session . Session ( ) as sess : <nl> sess . run ( init_op ) <nl> sess . run ( output ) <nl> def body ( i , x ) : <nl> saver . save ( sess , saver_ckpt ) <nl> saver . export_meta_graph ( filename ) <nl> <nl> - # Build and run the gradients of the nested while loop . We use this below <nl> - # to verify that the gradients are correct with an imported MetaGraphDef . <nl> + # Build and run the gradients of the while loop . We use this below to <nl> + # verify that the gradients are correct with an imported MetaGraphDef . <nl> grad = gradients_impl . gradients ( [ output ] , [ var ] ) <nl> with session . Session ( ) as sess : <nl> sess . run ( init_op ) <nl> def body ( i , x ) : <nl> actual_grad_value = sess . 
run ( grad ) <nl> self . assertEqual ( expected_grad_value , actual_grad_value ) <nl> <nl> + def testNestedWhileLoopsSerDes ( self ) : <nl> + # Test two simple nested while loops . <nl> + def body ( i , x ) : <nl> + _ , r = control_flow_ops . while_loop ( lambda j , y : j < 3 , <nl> + lambda j , y : ( j + 1 , y + x ) , <nl> + [ 0 , 0 ] ) <nl> + return i + 1 , x + r <nl> + self . _testWhileLoopAndGradientSerDes ( body ) <nl> + <nl> + def testNestedControlFlowSerDes ( self ) : <nl> + # Test while loop in a cond in a while loop . <nl> + # pylint : disable = g - long - lambda <nl> + def body ( i , x ) : <nl> + cond_result = control_flow_ops . cond ( <nl> + i > 0 , <nl> + lambda : control_flow_ops . while_loop ( <nl> + lambda j , y : j < 3 , <nl> + lambda j , y : ( j + 1 , y + x ) , <nl> + [ 0 , 0 ] ) [ 1 ] , <nl> + lambda : x ) <nl> + return i + 1 , cond_result <nl> + # pylint : enable = g - long - lambda <nl> + self . _testWhileLoopAndGradientSerDes ( body ) <nl> + <nl> def testStrippedOpListDef ( self ) : <nl> with self . test_session ( ) : <nl> # Creates a graph . <nl> | Fix bug in deserializing CondContexts . | tensorflow/tensorflow | 6825af46c53e6ad0b1260e5a96a4ef46b7703e46 | 2018-02-27T04:06:24Z |
mmm a / src / Functions / moduloOrZero . cpp <nl> ppp b / src / Functions / moduloOrZero . cpp <nl> struct ModuloOrZeroImpl <nl> { <nl> if constexpr ( std : : is_floating_point_v < ResultType > ) <nl> { <nl> + / / / This computation is similar to ` fmod ` but the latter is not inlined and has 40 times worse performance . <nl> return ResultType ( a ) - trunc ( ResultType ( a ) / ResultType ( b ) ) * ResultType ( b ) ; <nl> } <nl> else <nl> | Update moduloOrZero . cpp | ClickHouse/ClickHouse | 72280ee2a399043b18b1abce36998a1ea056ed1d | 2020-07-21T14:07:09Z |
mmm a / Kodi . xcodeproj / project . pbxproj <nl> ppp b / Kodi . xcodeproj / project . pbxproj <nl> <nl> 7C5608C70F1754930056433A / * ExternalPlayer . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C5608C40F1754930056433A / * ExternalPlayer . cpp * / ; } ; <nl> 7C62F24210505BC7002AD2C1 / * Bookmark . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C62F24010505BC7002AD2C1 / * Bookmark . cpp * / ; } ; <nl> 7C62F45E1057A62D002AD2C1 / * DirectoryNodeSingles . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C62F45C1057A62D002AD2C1 / * DirectoryNodeSingles . cpp * / ; } ; <nl> + 7C68401B1D87C6D400C55360 / * ViewModeSettings . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C6840191D87C6D400C55360 / * ViewModeSettings . cpp * / ; } ; <nl> + 7C68401C1D87C6D400C55360 / * ViewModeSettings . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C6840191D87C6D400C55360 / * ViewModeSettings . cpp * / ; } ; <nl> 7C6EB330155BD1D40080368A / * ImageFile . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C6EB32E155BD1D40080368A / * ImageFile . cpp * / ; } ; <nl> 7C6EB6FA155F32C30080368A / * HTTPImageHandler . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C6EB6F8155F32C30080368A / * HTTPImageHandler . cpp * / ; } ; <nl> 7C779E3A104A57E500F444C4 / * RenderSystem . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7C779E1F104A57E500F444C4 / * RenderSystem . cpp * / ; } ; <nl> <nl> 7C62F24110505BC7002AD2C1 / * Bookmark . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = Bookmark . h ; sourceTree = " < group > " ; } ; <nl> 7C62F45C1057A62D002AD2C1 / * DirectoryNodeSingles . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = DirectoryNodeSingles . cpp ; sourceTree = " < group > " ; } ; <nl> 7C62F45D1057A62D002AD2C1 / * DirectoryNodeSingles . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = DirectoryNodeSingles . h ; sourceTree = " < group > " ; } ; <nl> + 7C6840191D87C6D400C55360 / * ViewModeSettings . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = ViewModeSettings . cpp ; sourceTree = " < group > " ; } ; <nl> + 7C68401A1D87C6D400C55360 / * ViewModeSettings . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = ViewModeSettings . h ; sourceTree = " < group > " ; } ; <nl> 7C6EB32E155BD1D40080368A / * ImageFile . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = ImageFile . cpp ; sourceTree = " < group > " ; } ; <nl> 7C6EB32F155BD1D40080368A / * ImageFile . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = ImageFile . h ; sourceTree = " < group > " ; } ; <nl> 7C6EB6F8155F32C30080368A / * HTTPImageHandler . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = HTTPImageHandler . cpp ; sourceTree = " < group > " ; } ; <nl> <nl> F59876BE0FBA351D008EF4FB / * VideoReferenceClock . h * / , <nl> 7CC30DBE16291C2C003E7579 / * VideoThumbLoader . cpp * / , <nl> 7CC30DBF16291C2C003E7579 / * VideoThumbLoader . h * / , <nl> + 7C6840191D87C6D400C55360 / * ViewModeSettings . cpp * / , <nl> + 7C68401A1D87C6D400C55360 / * ViewModeSettings . 
h * / , <nl> ) ; <nl> path = video ; <nl> sourceTree = " < group > " ; <nl> <nl> E38A06CE0D95AA5500FF8227 / * GUIDialogKaiToast . cpp in Sources * / , <nl> 7C8E023D1BA35D0B0072E8B2 / * ProfileBuiltins . cpp in Sources * / , <nl> E3B53E7C0D97B08100021A96 / * DVDSubtitleParserMicroDVD . cpp in Sources * / , <nl> + 7C68401B1D87C6D400C55360 / * ViewModeSettings . cpp in Sources * / , <nl> E36C29DF0DA72429001F0C9D / * Artist . cpp in Sources * / , <nl> E36C29E00DA72429001F0C9D / * Album . cpp in Sources * / , <nl> E36C29E60DA72442001F0C9D / * DVDSubtitleParserSami . cpp in Sources * / , <nl> <nl> E49913AE174E5F3300741B6D / * UPnPPlayer . cpp in Sources * / , <nl> E49913AF174E5F3300741B6D / * UPnPRenderer . cpp in Sources * / , <nl> E49913B0174E5F3300741B6D / * UPnPServer . cpp in Sources * / , <nl> + 7C68401C1D87C6D400C55360 / * ViewModeSettings . cpp in Sources * / , <nl> E49913B1174E5F3300741B6D / * UPnPSettings . cpp in Sources * / , <nl> E49913B2174E5F3700741B6D / * WebSocket . cpp in Sources * / , <nl> E49913B3174E5F3700741B6D / * WebSocketManager . cpp in Sources * / , <nl> mmm a / addons / resource . language . en_gb / resources / strings . po <nl> ppp b / addons / resource . language . en_gb / resources / strings . po <nl> msgstr " " <nl> msgctxt " # 39007 " <nl> msgid " This provides access to where picture sources can be added and otherwise managed . " <nl> msgstr " " <nl> + <nl> + # : xbmc / video / dialogs / GUIDialogVideoSettings . cpp <nl> + msgctxt " # 39008 " <nl> + msgid " Zoom - 120 % width " <nl> + msgstr " " <nl> + <nl> + # : xbmc / video / dialogs / GUIDialogVideoSettings . cpp <nl> + msgctxt " # 39009 " <nl> + msgid " Zoom - 110 % width " <nl> + msgstr " " <nl> mmm a / xbmc / cores / IPlayer . h <nl> ppp b / xbmc / cores / IPlayer . h <nl> enum ViewMode { <nl> ViewModeStretch16x9 , <nl> ViewModeOriginal , <nl> ViewModeCustom , <nl> - ViewModeStretch16x9Nonlin <nl> + ViewModeStretch16x9Nonlin , <nl> + ViewModeZoom120Width , <nl> + ViewModeZoom110Width <nl> } ; <nl> <nl> class IPlayer <nl> mmm a / xbmc / cores / VideoPlayer / VideoRenderers / BaseRenderer . cpp <nl> ppp b / xbmc / cores / VideoPlayer / VideoRenderers / BaseRenderer . cpp <nl> void CBaseRenderer : : ManageRenderArea ( ) <nl> <nl> void CBaseRenderer : : SetViewMode ( int viewMode ) <nl> { <nl> - if ( viewMode < ViewModeNormal | | viewMode > ViewModeStretch16x9Nonlin ) <nl> + if ( viewMode < ViewModeNormal | | viewMode > ViewModeZoom110Width ) <nl> viewMode = ViewModeNormal ; <nl> <nl> CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_ViewMode = viewMode ; <nl> void CBaseRenderer : : SetViewMode ( int viewMode ) <nl> CDisplaySettings : : GetInstance ( ) . SetNonLinearStretched ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_CustomNonLinStretch ) ; <nl> CDisplaySettings : : GetInstance ( ) . SetVerticalShift ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_CustomVerticalShift ) ; <nl> } <nl> + else if ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_ViewMode = = ViewModeZoom120Width ) <nl> + { <nl> + float fitHeightZoom = sourceFrameRatio * screenHeight / ( info . fPixelRatio * screenWidth ) ; <nl> + CDisplaySettings : : GetInstance ( ) . SetPixelRatio ( 1 . 0f ) ; <nl> + CDisplaySettings : : GetInstance ( ) . SetZoomAmount ( fitHeightZoom < 1 . 0f ? 1 . 0f : ( fitHeightZoom > 1 . 2f ? 1 . 2f : fitHeightZoom ) ) ; <nl> + } <nl> + else if ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . 
m_ViewMode = = ViewModeZoom110Width ) <nl> + { <nl> + float fitHeightZoom = sourceFrameRatio * screenHeight / ( info . fPixelRatio * screenWidth ) ; <nl> + CDisplaySettings : : GetInstance ( ) . SetPixelRatio ( 1 . 0f ) ; <nl> + CDisplaySettings : : GetInstance ( ) . SetZoomAmount ( fitHeightZoom < 1 . 0f ? 1 . 0f : ( fitHeightZoom > 1 . 1f ? 1 . 1f : fitHeightZoom ) ) ; <nl> + } <nl> else / / if ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_ViewMode = = ViewModeNormal ) <nl> { <nl> CDisplaySettings : : GetInstance ( ) . SetPixelRatio ( 1 . 0 ) ; <nl> mmm a / xbmc / settings / MediaSettings . cpp <nl> ppp b / xbmc / settings / MediaSettings . cpp <nl> bool CMediaSettings : : Load ( const TiXmlNode * settings ) <nl> scalingMethod = ( int ) VS_SCALINGMETHOD_LINEAR ; <nl> m_defaultVideoSettings . m_ScalingMethod = ( ESCALINGMETHOD ) scalingMethod ; <nl> <nl> - XMLUtils : : GetInt ( pElement , " viewmode " , m_defaultVideoSettings . m_ViewMode , ViewModeNormal , ViewModeCustom ) ; <nl> + XMLUtils : : GetInt ( pElement , " viewmode " , m_defaultVideoSettings . m_ViewMode , ViewModeNormal , ViewModeZoom110Width ) ; <nl> if ( ! XMLUtils : : GetFloat ( pElement , " zoomamount " , m_defaultVideoSettings . m_CustomZoomAmount , 0 . 5f , 2 . 0f ) ) <nl> m_defaultVideoSettings . m_CustomZoomAmount = 1 . 0f ; <nl> if ( ! XMLUtils : : GetFloat ( pElement , " pixelratio " , m_defaultVideoSettings . m_CustomPixelRatio , 0 . 5f , 2 . 0f ) ) <nl> mmm a / xbmc / video / CMakeLists . txt <nl> ppp b / xbmc / video / CMakeLists . txt <nl> set ( SOURCES Bookmark . cpp <nl> VideoInfoTag . cpp <nl> VideoLibraryQueue . cpp <nl> VideoReferenceClock . cpp <nl> - VideoThumbLoader . cpp ) <nl> + VideoThumbLoader . cpp <nl> + ViewModeSettings . cpp ) <nl> <nl> set ( HEADERS Bookmark . h <nl> ContextMenus . h <nl> mmm a / xbmc / video / Makefile <nl> ppp b / xbmc / video / Makefile <nl> SRCS = Bookmark . cpp \ <nl> VideoLibraryQueue . cpp \ <nl> VideoReferenceClock . cpp \ <nl> VideoThumbLoader . cpp \ <nl> + ViewModeSettings . cpp \ <nl> <nl> LIB = video . a <nl> <nl> new file mode 100644 <nl> index 000000000000 . . bb885df7bcab <nl> mmm / dev / null <nl> ppp b / xbmc / video / ViewModeSettings . cpp <nl> <nl> + / * <nl> + * Copyright ( C ) 2016 Team Kodi <nl> + * http : / / kodi . tv <nl> + * <nl> + * This Program is free software ; you can redistribute it and / or modify <nl> + * it under the terms of the GNU General Public License as published by <nl> + * the Free Software Foundation ; either version 2 , or ( at your option ) <nl> + * any later version . <nl> + * <nl> + * This Program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU General Public License <nl> + * along with XBMC ; see the file COPYING . If not , see <nl> + * < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * / <nl> + <nl> + # include " ViewModeSettings . h " <nl> + <nl> + # include " cores / IPlayer . h " <nl> + # include " guilib / LocalizeStrings . h " <nl> + # include " settings / VideoSettings . 
h " <nl> + <nl> + struct ViewModeProperties <nl> + { <nl> + int stringIndex ; <nl> + int viewMode ; <nl> + bool hideFromQuickCycle ; <nl> + bool hideFromList ; <nl> + } ; <nl> + <nl> + # define HIDE_ITEM true <nl> + <nl> + / * * The list of all the view modes along with their properties <nl> + * / <nl> + static const ViewModeProperties viewModes [ ] = <nl> + { <nl> + { 630 , ViewModeNormal } , <nl> + { 631 , ViewModeZoom } , <nl> + { 39008 , ViewModeZoom120Width } , <nl> + { 39009 , ViewModeZoom110Width } , <nl> + { 632 , ViewModeStretch4x3 } , <nl> + { 633 , ViewModeWideZoom } , <nl> + { 634 , ViewModeStretch16x9 } , <nl> + { 644 , ViewModeStretch16x9Nonlin , HIDE_ITEM , HIDE_ITEM } , <nl> + { 635 , ViewModeOriginal } , <nl> + { 636 , ViewModeCustom , HIDE_ITEM , HIDE_ITEM } <nl> + } ; <nl> + <nl> + # define NUMBER_OF_VIEW_MODES ( sizeof ( viewModes ) / sizeof ( viewModes [ 0 ] ) ) <nl> + <nl> + / * * Gets the index of a view mode <nl> + * <nl> + * @ param viewMode The view mode <nl> + * @ return The index of the view mode in the viewModes array <nl> + * / <nl> + static int GetViewModeIndex ( int viewMode ) <nl> + { <nl> + size_t i ; <nl> + <nl> + / / Find the current view mode <nl> + for ( i = 0 ; i < NUMBER_OF_VIEW_MODES ; i + + ) <nl> + { <nl> + if ( viewModes [ i ] . viewMode = = viewMode ) <nl> + return i ; <nl> + } <nl> + <nl> + return 0 ; / / An invalid view mode will always return the first view mode <nl> + } <nl> + <nl> + / * * Gets the next view mode for quick cycling through the modes <nl> + * <nl> + * @ param viewMode The current view mode <nl> + * @ return The next view mode <nl> + * / <nl> + int CViewModeSettings : : GetNextQuickCycleViewMode ( int viewMode ) <nl> + { <nl> + / / Find the next quick cycle view mode <nl> + for ( size_t i = GetViewModeIndex ( viewMode ) + 1 ; i < NUMBER_OF_VIEW_MODES ; i + + ) <nl> + { <nl> + if ( ! viewModes [ i ] . hideFromQuickCycle ) <nl> + return viewModes [ i ] . viewMode ; <nl> + } <nl> + <nl> + return ViewModeNormal ; <nl> + } <nl> + <nl> + / * * Gets the string index for the view mode <nl> + * <nl> + * @ param viewMode The current view mode <nl> + * @ return The string index <nl> + * / <nl> + int CViewModeSettings : : GetViewModeStringIndex ( int viewMode ) <nl> + { <nl> + return viewModes [ GetViewModeIndex ( viewMode ) ] . stringIndex ; <nl> + } <nl> + <nl> + / * * Fills the list with all visible view modes <nl> + * / <nl> + void CViewModeSettings : : ViewModesFiller ( const CSetting * setting , std : : vector < std : : pair < std : : string , int > > & list , int & current , void * data ) <nl> + { <nl> + / / Add all appropriate view modes to the list control <nl> + for ( const auto & item : viewModes ) <nl> + { <nl> + if ( ! item . hideFromList ) <nl> + list . push_back ( make_pair ( g_localizeStrings . Get ( item . stringIndex ) , item . viewMode ) ) ; <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 000000000000 . . a5c182e83074 <nl> mmm / dev / null <nl> ppp b / xbmc / video / ViewModeSettings . h <nl> <nl> + <nl> + # pragma once <nl> + <nl> + / * <nl> + * Copyright ( C ) 2016 Team Kodi <nl> + * http : / / kodi . tv <nl> + * <nl> + * This Program is free software ; you can redistribute it and / or modify <nl> + * it under the terms of the GNU General Public License as published by <nl> + * the Free Software Foundation ; either version 2 , or ( at your option ) <nl> + * any later version . 
<nl> + * <nl> + * This Program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU General Public License <nl> + * along with XBMC ; see the file COPYING . If not , see <nl> + * < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * / <nl> + <nl> + # include < string > <nl> + # include < utility > <nl> + # include < vector > <nl> + <nl> + # include " settings / lib / Setting . h " <nl> + <nl> + class CViewModeSettings <nl> + { <nl> + private : <nl> + CViewModeSettings ( ) ; <nl> + ~ CViewModeSettings ( ) { } ; <nl> + <nl> + public : <nl> + / * * Gets the next view mode for quick cycling through the modes <nl> + * <nl> + * @ param viewMode The current view mode <nl> + * @ return The next view mode <nl> + * / <nl> + static int GetNextQuickCycleViewMode ( int viewMode ) ; <nl> + <nl> + / * * Gets the string index for the view mode <nl> + * <nl> + * @ param viewMode The current view mode <nl> + * @ return The string index <nl> + * / <nl> + static int GetViewModeStringIndex ( int viewMode ) ; <nl> + <nl> + / * * Fills the list with all visible view modes <nl> + * / <nl> + static void ViewModesFiller ( const CSetting * setting , std : : vector < std : : pair < std : : string , int > > & list , int & current , void * data ) ; <nl> + <nl> + } ; <nl> mmm a / xbmc / video / dialogs / GUIDialogVideoSettings . cpp <nl> ppp b / xbmc / video / dialogs / GUIDialogVideoSettings . cpp <nl> <nl> # include " Application . h " <nl> # include " utils / LangCodeExpander . h " <nl> # include " utils / StringUtils . h " <nl> + # include " video / ViewModeSettings . h " <nl> <nl> # define SETTING_VIDEO_VIEW_MODE " video . viewmode " <nl> # define SETTING_VIDEO_ZOOM " video . zoom " <nl> void CGUIDialogVideoSettings : : InitializeSettings ( ) <nl> <nl> if ( g_application . m_pPlayer - > Supports ( RENDERFEATURE_STRETCH ) | | g_application . m_pPlayer - > Supports ( RENDERFEATURE_PIXEL_RATIO ) ) <nl> { <nl> - entries . clear ( ) ; <nl> - for ( int i = 0 ; i < 7 ; + + i ) <nl> - entries . push_back ( std : : make_pair ( 630 + i , i ) ) ; <nl> - AddSpinner ( groupVideo , SETTING_VIDEO_VIEW_MODE , 629 , 0 , videoSettings . m_ViewMode , entries ) ; <nl> + AddList ( groupVideo , SETTING_VIDEO_VIEW_MODE , 629 , 0 , videoSettings . m_ViewMode , CViewModeSettings : : ViewModesFiller , 629 ) ; <nl> } <nl> if ( g_application . m_pPlayer - > Supports ( RENDERFEATURE_ZOOM ) ) <nl> AddSlider ( groupVideo , SETTING_VIDEO_ZOOM , 216 , 0 , videoSettings . m_CustomZoomAmount , " % 2 . 2f " , 0 . 5f , 0 . 01f , 2 . 0f , 216 , usePopup ) ; <nl> void CGUIDialogVideoSettings : : VideoStreamsOptionFiller ( const CSetting * setting , <nl> current = - 1 ; <nl> } <nl> } <nl> - <nl> mmm a / xbmc / video / windows / GUIWindowFullScreen . cpp <nl> ppp b / xbmc / video / windows / GUIWindowFullScreen . cpp <nl> <nl> # include " windowing / WindowingFactory . h " <nl> # include " cores / IPlayer . h " <nl> # include " guiinfo / GUIInfoLabels . h " <nl> + # include " video / ViewModeSettings . h " <nl> <nl> # include < stdio . h > <nl> # include < algorithm > <nl> bool CGUIWindowFullScreen : : OnAction ( const CAction & action ) <nl> if ( m_bShowViewModeInfo ) <nl> { <nl> # ifdef HAS_VIDEO_PLAYBACK <nl> - g_application . 
m_pPlayer - > SetRenderViewMode ( + + CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_ViewMode ) ; <nl> + g_application . m_pPlayer - > SetRenderViewMode ( CViewModeSettings : : GetNextQuickCycleViewMode ( CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) . m_ViewMode ) ) ; <nl> # endif <nl> } <nl> m_bShowViewModeInfo = true ; <nl> void CGUIWindowFullScreen : : FrameMove ( ) <nl> / / get the " View Mode " string <nl> std : : string strTitle = g_localizeStrings . Get ( 629 ) ; <nl> const auto & settings = CMediaSettings : : GetInstance ( ) . GetCurrentVideoSettings ( ) ; <nl> - int sId = settings . m_ViewMode = = ViewModeStretch16x9Nonlin ? 644 : 630 + settings . m_ViewMode ; <nl> + int sId = CViewModeSettings : : GetViewModeStringIndex ( settings . m_ViewMode ) ; <nl> std : : string strMode = g_localizeStrings . Get ( sId ) ; <nl> std : : string strInfo = StringUtils : : Format ( " % s : % s " , strTitle . c_str ( ) , strMode . c_str ( ) ) ; <nl> CGUIMessage msg ( GUI_MSG_LABEL_SET , GetID ( ) , LABEL_ROW1 ) ; <nl> | Added 2 view modes with zoom that depends on AR | xbmc/xbmc | 5a3a7097a1f35e5550dfc12951d62e45bf32bb37 | 2016-09-14T10:21:35Z |
mmm a / arangod / RocksDBEngine / RocksDBCollection . cpp <nl> ppp b / arangod / RocksDBEngine / RocksDBCollection . cpp <nl> int RocksDBCollection : : insertDocument ( arangodb : : transaction : : Methods * trx , <nl> bool & waitForSync ) { <nl> / / Coordinator doesn ' t know index internals <nl> TRI_ASSERT ( ! ServerState : : instance ( ) - > isCoordinator ( ) ) ; <nl> + TRI_ASSERT ( trx - > state ( ) - > isRunning ( ) ) ; <nl> <nl> RocksDBKey key ( RocksDBKey : : Document ( _objectId , revisionId ) ) ; <nl> RocksDBValue value ( RocksDBValue : : Document ( doc ) ) ; <nl> int RocksDBCollection : : removeDocument ( arangodb : : transaction : : Methods * trx , <nl> bool & waitForSync ) { <nl> / / Coordinator doesn ' t know index internals <nl> TRI_ASSERT ( ! ServerState : : instance ( ) - > isCoordinator ( ) ) ; <nl> + TRI_ASSERT ( trx - > state ( ) - > isRunning ( ) ) ; <nl> <nl> auto key = RocksDBKey : : Document ( _objectId , revisionId ) ; <nl> <nl> int RocksDBCollection : : updateDocument ( transaction : : Methods * trx , <nl> VPackSlice const & newDoc , <nl> bool & waitForSync ) { <nl> / / Coordinator doesn ' t know index internals <nl> + TRI_ASSERT ( trx - > state ( ) - > isRunning ( ) ) ; <nl> TRI_ASSERT ( ! ServerState : : instance ( ) - > isCoordinator ( ) ) ; <nl> <nl> rocksdb : : Transaction * rtrx = rocksTransaction ( trx ) ; <nl> Result RocksDBCollection : : lookupDocumentToken ( transaction : : Methods * trx , <nl> arangodb : : Result RocksDBCollection : : lookupRevisionVPack ( <nl> TRI_voc_rid_t revisionId , transaction : : Methods * trx , <nl> arangodb : : ManagedDocumentResult & mdr ) { <nl> + TRI_ASSERT ( trx - > state ( ) - > isRunning ( ) ) ; <nl> + <nl> auto key = RocksDBKey : : Document ( _objectId , revisionId ) ; <nl> std : : string value ; <nl> TRI_ASSERT ( value . data ( ) ) ; <nl> mmm a / arangod / RocksDBEngine / RocksDBTransactionState . h <nl> ppp b / arangod / RocksDBEngine / RocksDBTransactionState . h <nl> class RocksDBTransactionState final : public TransactionState { <nl> int addOperation ( TRI_voc_rid_t , RocksDBDocumentOperation & , RocksDBWalMarker const * marker , bool & ) ; <nl> <nl> rocksdb : : Transaction * rocksTransaction ( ) { <nl> + TRI_ASSERT ( _rocksTransaction ! = nullptr ) ; <nl> return _rocksTransaction . get ( ) ; <nl> } <nl> <nl> mmm a / arangod / StorageEngine / TransactionState . h <nl> ppp b / arangod / StorageEngine / TransactionState . h <nl> class TransactionState { <nl> TRI_vocbase_t * vocbase ( ) const { return _vocbase ; } <nl> TRI_voc_tid_t id ( ) const { return _id ; } <nl> transaction : : Status status ( ) const { return _status ; } <nl> + bool isRunning ( ) const { return _status = = transaction : : Status : : RUNNING ; } <nl> <nl> int increaseNesting ( ) { return + + _nestingLevel ; } <nl> int decreaseNesting ( ) { <nl> mmm a / arangod / Utils / OperationCursor . h <nl> ppp b / arangod / Utils / OperationCursor . 
h <nl> struct OperationCursor { <nl> bool getMore ( <nl> std : : function < void ( DocumentIdentifierToken const & token ) > const & callback , <nl> uint64_t batchSize ) ; <nl> + <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief convenience function to retrieve all results <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + void getAll ( <nl> + std : : function < void ( DocumentIdentifierToken const & token ) > const & callback ) { <nl> + while ( getMore ( callback , 1000 ) ) { } <nl> + } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief Skip the next toSkip many elements . <nl> mmm a / arangod / V8Server / v8 - query . cpp <nl> ppp b / arangod / V8Server / v8 - query . cpp <nl> static void JS_AllQuery ( v8 : : FunctionCallbackInfo < v8 : : Value > const & args ) { <nl> } <nl> <nl> OperationResult countResult = trx . count ( collectionName , true ) ; <nl> - res = trx . finish ( countResult . code ) ; <nl> <nl> if ( countResult . failed ( ) ) { <nl> TRI_V8_THROW_EXCEPTION ( countResult . code ) ; <nl> } <nl> <nl> - if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> - TRI_V8_THROW_EXCEPTION ( res ) ; <nl> - } <nl> - <nl> VPackSlice count = countResult . slice ( ) ; <nl> TRI_ASSERT ( count . isNumber ( ) ) ; <nl> <nl> static void JS_AllQuery ( v8 : : FunctionCallbackInfo < v8 : : Value > const & args ) { <nl> ManagedDocumentResult mmdr ; <nl> VPackBuilder resultBuilder ; <nl> resultBuilder . openArray ( ) ; <nl> - auto cb = [ & resultBuilder , & mmdr , & trx , & collection ] ( DocumentIdentifierToken const & tkn ) { <nl> + <nl> + opCursor - > getAll ( [ & resultBuilder , & mmdr , & trx , & collection ] ( DocumentIdentifierToken const & tkn ) { <nl> if ( collection - > readDocument ( & trx , tkn , mmdr ) ) { <nl> resultBuilder . add ( VPackSlice ( mmdr . vpack ( ) ) ) ; <nl> } <nl> - } ; <nl> - while ( opCursor - > getMore ( cb , 1000 ) ) { <nl> - / / Noop all done in cb <nl> - } <nl> + } ) ; <nl> + <nl> resultBuilder . close ( ) ; <nl> + <nl> + res = trx . finish ( countResult . code ) ; <nl> + if ( res ! = TRI_ERROR_NO_ERROR ) { <nl> + TRI_V8_THROW_EXCEPTION ( res ) ; <nl> + } <nl> <nl> VPackSlice docs = resultBuilder . slice ( ) ; <nl> TRI_ASSERT ( docs . isArray ( ) ) ; <nl> | fix toArray ( ) | arangodb/arangodb | 704c746c0d97bdc53dd1d02f3edf67a24a92bcc8 | 2017-03-30T15:02:54Z |
mmm a / xbmc / cores / AudioEngine / Sinks / osx / AEDeviceEnumerationOSX . cpp <nl> ppp b / xbmc / cores / AudioEngine / Sinks / osx / AEDeviceEnumerationOSX . cpp <nl> float AEDeviceEnumerationOSX : : ScoreFormat ( const AudioStreamBasicDescription & for <nl> if ( formatDesc . mChannelsPerFrame ! = format . m_channelLayout . Count ( ) ) <nl> return score ; <nl> score + = 5 ; <nl> + <nl> + if ( formatDesc . mFormatID = = kAudioFormat60958AC3 | | <nl> + formatDesc . mFormatID = = ' IAC3 ' | | <nl> + formatDesc . mFormatID = = kAudioFormatAC3 ) <nl> + { <nl> + score + = 1 ; <nl> + } <nl> } <nl> / / non - passthrough , whatever works is fine <nl> else if ( formatDesc . mFormatID = = kAudioFormatLinearPCM ) <nl> | [ ae / osx ] - restore old behavior for scoring pass through formats by prefering dedicated streams - fixes unit tests | xbmc/xbmc | f59fcda5cc30f7234a4ccad209b82ed3e7f1b85e | 2015-12-17T15:49:07Z |
mmm a / src / rdb_protocol / changefeed . cc <nl> ppp b / src / rdb_protocol / changefeed . cc <nl> class point_sub_t : public flat_sub_t { <nl> env - > interruptor ) ; <nl> auto * res = boost : : get < changefeed_point_stamp_response_t > ( & read_resp . response ) ; <nl> guarantee ( res ! = nullptr ) ; <nl> - if ( ! res - > resp ) { <nl> - rfail_datum ( base_exc_t : : OP_FAILED , <nl> - " % s " , " Changefeed aborted ( did you just reshard ? ) . " ) ; <nl> - } <nl> + rcheck_datum ( res - > resp , base_exc_t : : OP_FAILED , <nl> + " Changefeed aborted . ( Did you just reshard ? ) " ) ; <nl> auto * resp = & * res - > resp ; <nl> uint64_t start_stamp = resp - > stamp . second ; <nl> initial_val = change_val_t ( <nl> class range_sub_t : public flat_sub_t { <nl> profile_bool_t : : DONT_PROFILE , <nl> read_mode_t : : SINGLE ) , <nl> & read_resp , order_token_t : : ignore , outer_env - > interruptor ) ; <nl> - auto resp = boost : : get < changefeed_stamp_response_t > ( & read_resp . response ) ; <nl> - guarantee ( resp ! = NULL ) ; <nl> - start_stamps = std : : move ( resp - > stamps ) ; <nl> - rcheck_datum ( base_exc_t : : OP_FAILED , start_stamps . size ( ) ! = 0 , " % s " , <nl> + auto * resp = boost : : get < changefeed_stamp_response_t > ( & read_resp . response ) ; <nl> + guarantee ( resp ! = nullptr ) ; <nl> + rcheck_datum ( resp - > stamps , base_exc_t : : OP_FAILED , <nl> + " Unable to retrieve the start stamps . Did you just reshard ? " ) ; <nl> + start_stamps = std : : move ( * resp - > stamps ) ; <nl> + rcheck_datum ( start_stamps . size ( ) ! = 0 , base_exc_t : : OP_FAILED , <nl> " Unable to retrieve the start stamps . Did you just reshard ? " ) ; <nl> <nl> env = make_env ( outer_env ) ; <nl> mmm a / src / rdb_protocol / datum_stream . cc <nl> ppp b / src / rdb_protocol / datum_stream . cc <nl> rget_reader_t : : do_range_read ( env_t * env , const read_t & read ) { <nl> r_sanity_check ( static_cast < bool > ( stamp ) = = static_cast < bool > ( rr - > stamp ) ) ; <nl> if ( stamp ) { <nl> r_sanity_check ( res . stamp_response ) ; <nl> - for ( const auto & pair : ( * res . stamp_response ) . stamps ) { <nl> + rcheck_datum ( res . stamp_response - > stamps , base_exc_t : : OP_FAILED , <nl> + " Changefeed aborted . ( Did you just reshard ? ) " ) ; <nl> + for ( const auto & pair : * res . stamp_response - > stamps ) { <nl> / / It ' s OK to blow away old values . <nl> shard_stamps [ pair . first ] = pair . second ; <nl> } <nl> mmm a / src / rdb_protocol / protocol . cc <nl> ppp b / src / rdb_protocol / protocol . cc <nl> void rdb_r_unshard_visitor_t : : operator ( ) ( const changefeed_limit_subscribe_t & ) { <nl> <nl> void unshard_stamps ( const std : : vector < changefeed_stamp_response_t * > & resps , <nl> changefeed_stamp_response_t * out ) { <nl> + out - > stamps = std : : map < uuid_u , uint64_t > ( ) ; <nl> for ( auto & & resp : resps ) { <nl> - for ( auto & & stamp : resp - > stamps ) { <nl> + / / In the error case abort early . <nl> + if ( ! resp - > stamps ) { <nl> + out - > stamps = boost : : none ; <nl> + return ; <nl> + } <nl> + for ( auto & & stamp : * resp - > stamps ) { <nl> / / Previously conflicts were resolved with ` it_out - > second = <nl> / / std : : max ( it - > second , it_out - > second ) ` , but I don ' t think that <nl> / / should ever happen and it isn ' t correct for <nl> / / ` include_initial_vals ` changefeeds . <nl> - auto pair = out - > stamps . insert ( std : : make_pair ( stamp . first , stamp . 
second ) ) ; <nl> + auto pair = out - > stamps - > insert ( std : : make_pair ( stamp . first , stamp . second ) ) ; <nl> guarantee ( pair . second ) ; <nl> } <nl> } <nl> mmm a / src / rdb_protocol / protocol . hpp <nl> ppp b / src / rdb_protocol / protocol . hpp <nl> struct changefeed_stamp_response_t { <nl> changefeed_stamp_response_t ( ) { } <nl> / / The ` uuid_u ` below is the uuid of the changefeed ` server_t ` . ( We have <nl> / / different timestamps for each ` server_t ` because they ' re on different <nl> - / / servers and don ' t synchronize with each other . ) <nl> + / / servers and don ' t synchronize with each other . ) If this is empty it <nl> + / / means the feed was aborted . <nl> boost : : optional < std : : map < uuid_u , uint64_t > > stamps ; <nl> } ; <nl> RDB_DECLARE_SERIALIZABLE_FOR_CLUSTER ( changefeed_stamp_response_t ) ; <nl> struct changefeed_point_stamp_response_t { <nl> ql : : datum_t initial_val ; <nl> RDB_DECLARE_ME_SERIALIZABLE ( valid_response_t ) ; <nl> } ; <nl> + / / If this is empty it means the feed was aborted . <nl> boost : : optional < valid_response_t > resp ; <nl> } ; <nl> <nl> mmm a / src / rdb_protocol / store . hpp <nl> ppp b / src / rdb_protocol / store . hpp <nl> class outdated_index_report_t { <nl> const std : : map < std : : string , std : : string > & name_changes ) = 0 ; <nl> } ; <nl> <nl> - enum class create_if_none_t { NO , YES } ; <nl> - <nl> class store_t final : public store_view_t { <nl> public : <nl> using home_thread_mixin_t : : assert_thread ; <nl> class store_t final : public store_view_t { <nl> } <nl> ql : : changefeed : : server_t * make_changefeed_server ( const region_t & region ) { <nl> guarantee ( ctx & & ctx - > manager ) ; <nl> - it = changefeed_servers . insert ( <nl> + auto it = changefeed_servers . insert ( <nl> std : : make_pair ( <nl> region_t ( region ) , <nl> make_scoped < ql : : changefeed : : server_t > ( ctx - > manager , this ) ) ) . first ; <nl> | Improved error handling . | rethinkdb/rethinkdb | 364213893dd054244110c6a7862ba1b81f163adb | 2015-07-23T01:49:54Z |
mmm a / test / LLVMPasses / basic . ll <nl> ppp b / test / LLVMPasses / basic . ll <nl> declare void @ swift_bridgeObjectRelease ( % swift . bridge * ) <nl> declare void @ swift_retainUnowned ( % swift . refcounted * ) <nl> <nl> declare void @ user ( % swift . refcounted * ) nounwind <nl> + declare void @ user_objc ( % objc_object * ) nounwind <nl> declare void @ unknown_func ( ) <nl> <nl> + ; CHECK - LABEL : @ trivial_objc_canonicalization ( <nl> + ; CHECK - NEXT : entry : <nl> + ; CHECK - NEXT : [ [ RET0 : % . + ] ] = bitcast i8 * % O to % objc_object * <nl> + ; CHECK - NEXT : [ [ RET1 : % . + ] ] = tail call % objc_object * @ objc_retain ( % objc_object * [ [ RET0 : % . + ] ] ) <nl> + ; CHECK - NEXT : call void @ user_objc ( % objc_object * [ [ RET0 : % . + ] ] ) <nl> + ; CHECK - NEXT : ret void <nl> + <nl> + define void @ trivial_objc_canonicalization ( i8 * % O ) { <nl> + entry : <nl> + % 0 = bitcast i8 * % O to % objc_object * <nl> + % 1 = tail call % objc_object * @ objc_retain ( % objc_object * % 0 ) <nl> + call void @ user_objc ( % objc_object * % 1 ) nounwind <nl> + ret void <nl> + } <nl> + <nl> ; CHECK - LABEL : @ trivial_retain_release ( <nl> ; CHECK - NEXT : entry : <nl> ; CHECK - NEXT : call void @ user <nl> | Add a test case for r32148 | apple/swift | 9c87df9407288664eb6f9259ab152774a2171c76 | 2015-09-22T16:59:04Z |
mmm a / caffe2 / contrib / fakelowp / test / test_op_nnpi_fp16 . py <nl> ppp b / caffe2 / contrib / fakelowp / test / test_op_nnpi_fp16 . py <nl> def _test_binary_op_graph ( self , name , seed ) : <nl> dims = np . concatenate ( ( np . array ( [ 1 ] ) , np . random . randint ( 1 , 20 , size = 3 ) ) ) <nl> A = np . random . uniform ( low = - 100 . 0 , high = 100 . 0 , size = dims ) . astype ( np . float32 ) <nl> B = np . random . uniform ( low = - 100 . 0 , high = 100 . 0 , size = dims ) . astype ( np . float32 ) <nl> + # Avoid dividing by 0 <nl> + B [ np . abs ( B ) < 1e - 3 ] = 1e - 3 <nl> print ( A . shape , B . shape ) <nl> pred_net = caffe2_pb2 . NetDef ( ) <nl> pred_net . name = " pred " <nl> | avoid dividing by 0 in div unit test ( ) | pytorch/pytorch | 4c5a808d375022f83e1e6b82530b22a6faba2b47 | 2020-06-09T23:39:19Z |
mmm a / tensorflow / core / kernels / BUILD <nl> ppp b / tensorflow / core / kernels / BUILD <nl> tf_cuda_cc_test ( <nl> srcs = [ " collective_nccl_test . cc " ] , <nl> tags = tf_cuda_tests_tags ( ) + [ <nl> " guitar " , <nl> - " manual " , <nl> " multi_gpu " , <nl> " no_oss " , <nl> " notap " , <nl> mmm a / tensorflow / core / kernels / collective_nccl_test . cc <nl> ppp b / tensorflow / core / kernels / collective_nccl_test . cc <nl> class NcclTestBase : public : : testing : : Test { <nl> / / Run the all - reduce . <nl> string exec_key = <nl> strings : : StrCat ( col_params_ . instance . instance_key , " : 0 : 0 " ) ; <nl> - NcclReducer reducer ; <nl> + auto * reducer = new NcclReducer ( ) ; <nl> auto col_ctx = std : : make_shared < CollectiveContext > ( <nl> parent_ - > col_exec_ , parent_ - > dev_mgr_ . get ( ) , <nl> / * OpKernelContext = * / & ctx , & op_params , col_params_ , exec_key , kStepId , <nl> / * input = * / & input_ , / * output = * / & input_ ) ; <nl> - TF_CHECK_OK ( reducer . InitializeCollectiveContext ( col_ctx ) ) ; <nl> + TF_CHECK_OK ( reducer - > InitializeCollectiveContext ( col_ctx ) ) ; <nl> Notification note ; <nl> - reducer . Run ( [ this , & note ] ( Status s ) { <nl> + reducer - > Run ( [ this , & note ] ( Status s ) { <nl> status_ = s ; <nl> note . Notify ( ) ; <nl> } ) ; <nl> class NcclTestBase : public : : testing : : Test { <nl> CHECK ( output_ . CopyFrom ( * ctx . mutable_output ( 0 ) , input_ . shape ( ) ) ) ; <nl> } <nl> <nl> + reducer - > Unref ( ) ; <nl> op_params . op_device_context - > Unref ( ) ; <nl> } <nl> <nl> class NcclTestBase : public : : testing : : Test { <nl> / / Run broadcast . <nl> string exec_key = <nl> strings : : StrCat ( col_params_ . instance . instance_key , " : 0 : 0 " ) ; <nl> - NcclBroadcaster broadcaster ; <nl> + auto * broadcaster = new NcclBroadcaster ( ) ; <nl> auto col_ctx = std : : make_shared < CollectiveContext > ( <nl> parent_ - > col_exec_ , parent_ - > dev_mgr_ . get ( ) , <nl> / * OpKernelContext = * / & ctx , & op_params , col_params_ , exec_key , kStepId , <nl> / * input = * / col_params_ . is_source ? & input_ : nullptr , <nl> / * output = * / & input_ ) ; <nl> - TF_CHECK_OK ( broadcaster . InitializeCollectiveContext ( col_ctx ) ) ; <nl> + TF_CHECK_OK ( broadcaster - > InitializeCollectiveContext ( col_ctx ) ) ; <nl> Notification note ; <nl> - broadcaster . Run ( [ this , & note ] ( Status s ) { <nl> + broadcaster - > Run ( [ this , & note ] ( Status s ) { <nl> status_ = s ; <nl> note . Notify ( ) ; <nl> } ) ; <nl> class NcclTestBase : public : : testing : : Test { <nl> CHECK ( output_ . CopyFrom ( input_ , input_ . shape ( ) ) ) ; <nl> } <nl> <nl> + broadcaster - > Unref ( ) ; <nl> op_params . op_device_context - > Unref ( ) ; <nl> } <nl> <nl> class NcclTestBase : public : : testing : : Test { <nl> / / Run gather . <nl> string exec_key = <nl> strings : : StrCat ( col_params_ . instance . instance_key , " : 0 : 0 " ) ; <nl> - NcclGatherer gatherer ; <nl> + auto * gatherer = new NcclGatherer ( ) ; <nl> auto col_ctx = std : : make_shared < CollectiveContext > ( <nl> parent_ - > col_exec_ , parent_ - > dev_mgr_ . get ( ) , <nl> / * OpKernelContext = * / & ctx , & op_params , col_params_ , exec_key , kStepId , <nl> / * input = * / & input_ , <nl> / * output = * / & output_ ) ; <nl> - TF_CHECK_OK ( gatherer . InitializeCollectiveContext ( col_ctx ) ) ; <nl> + TF_CHECK_OK ( gatherer - > InitializeCollectiveContext ( col_ctx ) ) ; <nl> Notification note ; <nl> - gatherer . 
Run ( [ this , & note ] ( Status s ) { <nl> + gatherer - > Run ( [ this , & note ] ( Status s ) { <nl> status_ = s ; <nl> note . Notify ( ) ; <nl> } ) ; <nl> note . WaitForNotification ( ) ; <nl> <nl> + gatherer - > Unref ( ) ; <nl> op_params . op_device_context - > Unref ( ) ; <nl> } <nl> <nl> | Fix collective_nccl_test | tensorflow/tensorflow | 5ce52adcdf24165cedd1f6fccd5eb7e206d5be9e | 2020-08-24T20:03:39Z |
mmm a / third_party / mlir / test / lib / TestDialect / TestDialect . cpp <nl> ppp b / third_party / mlir / test / lib / TestDialect / TestDialect . cpp <nl> OpFoldResult TestOpWithRegionFold : : fold ( ArrayRef < Attribute > operands ) { <nl> return operand ( ) ; <nl> } <nl> <nl> + LogicalResult TestOpWithVariadicResultsAndFolder : : fold ( <nl> + ArrayRef < Attribute > operands , SmallVectorImpl < OpFoldResult > & results ) { <nl> + for ( Value * input : this - > operands ( ) ) { <nl> + results . push_back ( input ) ; <nl> + } <nl> + return success ( ) ; <nl> + } <nl> + <nl> SmallVector < Type , 2 > mlir : : OpWithInferTypeInterfaceOp : : inferReturnTypes ( <nl> llvm : : Optional < Location > location , ArrayRef < Value * > operands , <nl> ArrayRef < NamedAttribute > attributes , ArrayRef < Region > regions ) { <nl> mmm a / third_party / mlir / test / lib / TestDialect / TestOps . td <nl> ppp b / third_party / mlir / test / lib / TestDialect / TestOps . td <nl> def TestOpWithRegionFold : TEST_Op < " op_with_region_fold " > { <nl> let hasFolder = 1 ; <nl> } <nl> <nl> + def TestOpWithVariadicResultsAndFolder : TEST_Op < " op_with_variadic_results_and_folder " > { <nl> + let arguments = ( ins Variadic < I32 > : $ operands ) ; <nl> + let results = ( outs Variadic < I32 > ) ; <nl> + let hasFolder = 1 ; <nl> + } <nl> + <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / Test Patterns ( Symbol Binding ) <nl> <nl> | Add test for fix to tablegen for custom folders for ops that return a single | tensorflow/tensorflow | 36b8c3eb1f10104302d46a4725565e139054cfe3 | 2019-10-10T07:38:47Z |
mmm a / aten / src / TH / THTensorDimApply . h <nl> ppp b / aten / src / TH / THTensorDimApply . h <nl> <nl> # define TH_TENSOR_DIM_APPLY3_SIZE_EQ_EXCEPT_DIM ( TENSOR1 , TENSOR2 , TENSOR3 , DIMENSION ) \ <nl> { \ <nl> int shape_check_flag = 0 ; \ <nl> - for ( TH_TENSOR_DIM_APPLY_i = 0 ; TH_TENSOR_DIM_APPLY_i < TENSOR1 - > dim ( ) ; TH_TENSOR_DIM_APPLY_i + + ) \ <nl> + for ( TH_TENSOR_DIM_APPLY_i = 0 ; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ; TH_TENSOR_DIM_APPLY_i + + ) \ <nl> { \ <nl> if ( TH_TENSOR_DIM_APPLY_i = = DIMENSION ) \ <nl> continue ; \ <nl> <nl> if ( ( DIMENSION < 0 ) | | ( DIMENSION > = THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ) ) \ <nl> THError ( " invalid dimension % d ( expected to be 0 < = dim < % d ) " , DIMENSION , THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ) ; \ <nl> int same_dims = 1 ; \ <nl> - if ( TENSOR1 - > dim ( ) ! = TENSOR2 - > dim ( ) ) { \ <nl> + if ( THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ! = THTensor_nDimensionLegacyNoScalars ( TENSOR2 ) ) { \ <nl> same_dims = 0 ; \ <nl> } \ <nl> - if ( TENSOR1 - > dim ( ) ! = TENSOR3 - > dim ( ) ) { \ <nl> + if ( THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ! = THTensor_nDimensionLegacyNoScalars ( TENSOR3 ) ) { \ <nl> same_dims = 0 ; \ <nl> } \ <nl> if ( same_dims = = 0 ) { \ <nl> <nl> \ <nl> if ( ( DIMENSION < 0 ) | | ( DIMENSION > = THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ) ) \ <nl> THError ( " invalid dimension % d ( expected to be 0 < = dim < % d ) " , DIMENSION , THTensor_nDimensionLegacyAll ( TENSOR1 ) ) ; \ <nl> - if ( TENSOR1 - > dim ( ) ! = TENSOR2 - > dim ( ) ) { \ <nl> + if ( THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ! = THTensor_nDimensionLegacyNoScalars ( TENSOR2 ) ) { \ <nl> AT_ERROR ( " inconsistent tensor size , expected " , # TENSOR1 , " " , TENSOR1 - > sizes ( ) , " and " , # TENSOR2 , " " , TENSOR2 - > sizes ( ) , " to have the same number of dimensions " ) ; \ <nl> } \ <nl> TH_UNUSED int shape_check_flag = 0 ; \ <nl> <nl> if ( THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) = = 1 ) \ <nl> break ; \ <nl> \ <nl> - for ( TH_TENSOR_DIM_APPLY_i = 0 ; TH_TENSOR_DIM_APPLY_i < TENSOR1 - > dim ( ) ; TH_TENSOR_DIM_APPLY_i + + ) \ <nl> + for ( TH_TENSOR_DIM_APPLY_i = 0 ; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars ( TENSOR1 ) ; TH_TENSOR_DIM_APPLY_i + + ) \ <nl> { \ <nl> if ( TH_TENSOR_DIM_APPLY_i = = DIMENSION ) \ <nl> { \ <nl> | Use nDimensionLegacyNoScalars in THTensorDimApply . ( ) | pytorch/pytorch | 1e3e26e3e84c743c6c9a57e498bc29752bd08346 | 2018-08-10T15:55:28Z |
mmm a / docs / source / nn . rst <nl> ppp b / docs / source / nn . rst <nl> Non - linear activations ( other ) <nl> . . autoclass : : LogSoftmax <nl> : members : <nl> <nl> + : hidden : ` AdaptiveLogSoftmaxWithLoss ` <nl> + ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ <nl> + <nl> + . . autoclass : : AdaptiveLogSoftmaxWithLoss <nl> + : members : <nl> + <nl> Normalization layers <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> mmm a / test / test_nn . py <nl> ppp b / test / test_nn . py <nl> def _backward_criterion ( self , criterion , input , target , gradOutput = None ) : <nl> return input . grad . data <nl> <nl> def _zero_grad_parameters ( self , module ) : <nl> - if hasattr ( module , ' weight ' ) and module . weight is not None : <nl> - if module . weight . grad is not None : <nl> - module . weight . grad . data . zero_ ( ) <nl> - module . weight . grad . detach_ ( ) <nl> - if hasattr ( module , ' bias ' ) and module . bias is not None : <nl> - if module . bias . grad is not None : <nl> - module . bias . grad . data . zero_ ( ) <nl> - module . bias . grad . detach_ ( ) <nl> + for p in module . parameters ( ) : <nl> + if p . grad is not None : <nl> + p . grad . data . zero_ ( ) <nl> + p . grad . detach_ ( ) <nl> <nl> def _get_parameters ( self , module ) : <nl> params = [ ] <nl> def test_grad_conv3d_input ( self ) : <nl> def test_grad_conv3d_weight ( self ) : <nl> self . run_grad_conv_test ( F . conv3d , F . grad . conv3d_weight , 3 , ' weight ' ) <nl> <nl> + def test_adaptive_log_softmax ( self ) : <nl> + # args validation <nl> + with self . assertRaises ( ValueError ) : <nl> + _ = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 15 , 15 ] , div_value = 2 . ) <nl> + <nl> + with self . assertRaises ( ValueError ) : <nl> + _ = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 15 , 10 ] , div_value = 2 . ) <nl> + <nl> + with self . assertRaises ( ValueError ) : <nl> + _ = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 10 , 25 ] , div_value = 2 . ) <nl> + <nl> + # input shapes <nl> + with self . assertRaisesRegex ( RuntimeError , " Input and target should have the same size " ) : <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 10 , 15 ] , div_value = 2 . ) <nl> + x = torch . randn ( 2 , 16 ) <nl> + y = torch . tensor ( [ 0 , 5 , 10 ] ) <nl> + asfm ( x , y ) <nl> + <nl> + # out - of - bound targets <nl> + with self . assertRaisesRegex ( RuntimeError , " Target values should be in " ) : <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 10 , 15 ] , div_value = 2 . ) <nl> + x = torch . randn ( 2 , 16 ) <nl> + y = torch . tensor ( [ 0 , 20 ] ) <nl> + asfm ( x , y ) <nl> + <nl> + # cluster sizes <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 16 , 20 , [ 5 , 10 , 15 ] , div_value = 2 . ) <nl> + x = torch . randn ( 2 , 16 ) <nl> + y = torch . tensor ( [ 0 , 17 ] ) <nl> + <nl> + self . assertEqual ( asfm . head . weight . size ( ) , ( 5 + 3 , 16 ) ) # 5 targets in head , 3 clusters , dimensionality 16 <nl> + self . assertEqual ( asfm . tail [ 0 ] [ 1 ] . weight . size ( ) , ( 5 , 8 ) ) # 5 targets in this cluster , dimensionality 8 <nl> + self . assertEqual ( asfm . tail [ 1 ] [ 1 ] . weight . size ( ) , ( 5 , 4 ) ) <nl> + self . assertEqual ( asfm . tail [ 2 ] [ 1 ] . weight . size ( ) , ( 5 , 2 ) ) <nl> + <nl> + self . assertEqual ( asfm ( x , y ) . output . size ( ) , ( 2 , ) ) <nl> + <nl> + # log_probs actually returns log_proba <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 8 , 4 , [ 2 ] , div_value = 2 . ) <nl> + x = torch . 
randn ( 4 , 8 ) <nl> + logprob_out = asfm . log_prob ( x ) <nl> + <nl> + self . assertEqual ( torch . exp ( logprob_out ) . data . sum ( 1 ) , torch . ones ( 4 ) ) <nl> + <nl> + # forward returns the same thing as log_probs <nl> + for v in [ 0 , 1 , 2 , 3 ] : <nl> + y = torch . full ( ( 4 , ) , v , dtype = torch . long ) <nl> + out , loss = asfm ( x , y ) <nl> + <nl> + self . assertEqual ( out , logprob_out . gather ( 1 , y . unsqueeze ( 1 ) ) . squeeze ( ) ) <nl> + self . assertEqual ( loss , F . nll_loss ( logprob_out , y ) ) <nl> + <nl> + # predict <nl> + x = torch . randn ( 64 , 8 ) . abs_ ( ) <nl> + <nl> + # argmax in shortlist <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 8 , 10 , [ 4 , 8 ] , div_value = 2 . , head_bias = True ) <nl> + asfm . head . weight . data . abs_ ( ) <nl> + asfm . head . bias . data . abs_ ( ) <nl> + asfm . head . weight . data [ asfm . shortlist_size : , : ] . zero_ ( ) <nl> + <nl> + out = asfm . predict ( x ) <nl> + self . assertEqual ( out , asfm . log_prob ( x ) . argmax ( dim = 1 ) ) <nl> + <nl> + # argmax outside of shortlist <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 8 , 10 , [ 4 , 8 ] , div_value = 2 . , head_bias = True ) <nl> + asfm . head . weight . data . abs_ ( ) <nl> + asfm . head . bias . data . abs_ ( ) <nl> + asfm . head . weight . data [ : asfm . shortlist_size , : ] . zero_ ( ) <nl> + <nl> + out = asfm . predict ( x ) <nl> + self . assertEqual ( out , asfm . log_prob ( x ) . argmax ( dim = 1 ) ) <nl> + <nl> + # half of the argmax in shortlist , half in clusters <nl> + asfm = nn . AdaptiveLogSoftmaxWithLoss ( 8 , 10 , [ 4 , 8 ] , div_value = 2 . , head_bias = True ) <nl> + asfm . head . weight . data . abs_ ( ) <nl> + asfm . head . bias . data . abs_ ( ) <nl> + <nl> + x [ : 32 , : asfm . shortlist_size ] . zero_ ( ) <nl> + x [ 32 : , asfm . shortlist_size : ] . zero_ ( ) <nl> + <nl> + asfm . head . weight . data [ : asfm . shortlist_size , asfm . shortlist_size : ] . zero_ ( ) <nl> + asfm . head . weight . data [ asfm . shortlist_size : , : asfm . shortlist_size ] . zero_ ( ) <nl> + <nl> + out = asfm . predict ( x ) <nl> + self . assertEqual ( out , asfm . log_prob ( x ) . argmax ( dim = 1 ) ) <nl> + <nl> <nl> class TestNNInit ( TestCase ) : <nl> def setUp ( self ) : <nl> def forward ( self , input ) : <nl> fullname = ' MaxUnpool3d_net ' , <nl> check_gradgrad = False , ) ) <nl> <nl> + <nl> + class _AdaptiveLogSoftmaxWithLoss ( nn . AdaptiveLogSoftmaxWithLoss ) : <nl> + def __call__ ( self , input ) : <nl> + t = torch . tensor ( [ 0 , 1 , 4 , 8 ] ) . to ( input . device ) <nl> + return nn . AdaptiveLogSoftmaxWithLoss . __call__ ( self , input , t ) . output <nl> + <nl> + <nl> + add_test ( NewModuleTest ( <nl> + constructor = lambda : _AdaptiveLogSoftmaxWithLoss ( 16 , 10 , [ 2 , 6 ] ) , <nl> + input_size = ( 4 , 16 ) , <nl> + fullname = ' AdaptiveLogSoftmax ' ) ) <nl> + <nl> + <nl> if __name__ = = ' __main__ ' : <nl> run_tests ( ) <nl> mmm a / torch / nn / modules / __init__ . py <nl> ppp b / torch / nn / modules / __init__ . py <nl> <nl> from . upsampling import UpsamplingNearest2d , UpsamplingBilinear2d , Upsample <nl> from . distance import PairwiseDistance , CosineSimilarity <nl> from . fold import Fold , Unfold <nl> + from . 
adaptive import AdaptiveLogSoftmaxWithLoss <nl> <nl> __all__ = [ <nl> ' Module ' , ' Linear ' , ' Conv1d ' , ' Conv2d ' , ' Conv3d ' , ' ConvTranspose1d ' , <nl> <nl> ' AdaptiveMaxPool1d ' , ' AdaptiveMaxPool2d ' , ' AdaptiveMaxPool3d ' , ' AdaptiveAvgPool1d ' , ' AdaptiveAvgPool2d ' , <nl> ' AdaptiveAvgPool3d ' , ' TripletMarginLoss ' , ' ZeroPad2d ' , ' ConstantPad1d ' , ' ConstantPad2d ' , <nl> ' ConstantPad3d ' , ' Bilinear ' , ' CosineSimilarity ' , ' Unfold ' , ' Fold ' , <nl> + ' AdaptiveLogSoftmaxWithLoss ' , <nl> ] <nl> new file mode 100644 <nl> index 000000000000 . . d82b32f4f6aa <nl> mmm / dev / null <nl> ppp b / torch / nn / modules / adaptive . py <nl> <nl> + from collections import namedtuple <nl> + <nl> + import torch <nl> + <nl> + from . import Sequential , ModuleList , Linear <nl> + from . module import Module <nl> + from . . functional import log_softmax <nl> + <nl> + <nl> + _ASMoutput = namedtuple ( ' ASMoutput ' , [ ' output ' , ' loss ' ] ) <nl> + <nl> + <nl> + class AdaptiveLogSoftmaxWithLoss ( Module ) : <nl> + r " " " Efficient softmax approximation as described in <nl> + ` Efficient softmax approximation for GPUs ` _ by Edouard Grave , Armand Joulin , <nl> + Moustapha Cissé , David Grangier , and Hervé Jégou . <nl> + <nl> + Adaptive softmax is an approximate strategy for training models with large <nl> + output spaces . It is most effective when the label distribution is highly <nl> + imbalanced , for example in natural language modelling , where the word <nl> + frequency distribution approximately follows the ` Zipf ' s law ` _ . <nl> + <nl> + Adaptive softmax partitions the labels into several clusters , according to <nl> + their frequency . These clusters may contain different number of targets <nl> + each . <nl> + Additionally , clusters containig less frequent labels assign lower <nl> + dimensional embeddings to those labels , which speeds up the computation . <nl> + For each minibatch , only clusters for which at least one target is <nl> + present are evaluated . <nl> + <nl> + The idea is that the clusters which are accessed frequently <nl> + ( like the first one , containing most frequent labels ) , should also be cheap <nl> + to compute - - that is , contain a small number of assigned labels . <nl> + <nl> + We highly recommend taking a look at the original paper for more details . <nl> + <nl> + * : attr : ` cutoffs ` should be an ordered Sequence of integers sorted <nl> + in the increasing order . <nl> + It controls number of clusters and the partitioning of targets into <nl> + clusters . For example setting ` ` cutoffs = [ 10 , 100 , 1000 ] ` ` <nl> + means that first ` 10 ` targets will be assigned <nl> + to the ' head ' of the adaptive softmax , targets ` 11 , 12 , . . . , 100 ` will be <nl> + assigned to the first cluster , and targets ` 101 , 102 , . . . , 1000 ` will be <nl> + assigned to the second cluster , while targets <nl> + ` 1001 , 1002 , . . . , n_classes - 1 ` will be assigned <nl> + to the last , third cluster <nl> + <nl> + * : attr : ` div_value ` is used to compute the size of each additional cluster , <nl> + which is given as <nl> + : math : ` \ left \ lfloor \ frac { in \ _features } { div \ _value ^ { idx } } \ right \ rfloor ` , <nl> + where : math : ` idx ` is the cluster index ( with clusters <nl> + for less frequent words having larger indices , <nl> + and indices starting from : math : ` 1 ` ) . <nl> + <nl> + * : attr : ` head_bias ` if set to True , adds a bias term to the ' head ' of the <nl> + adaptive softmax . 
See paper for details . Set to False in the official <nl> + implementation . <nl> + <nl> + . . warning : : <nl> + Labels passed as inputs to this module should be sorted accoridng to <nl> + their frequency . This means that the most frequent label should be <nl> + represented by the index ` 0 ` , and the least frequent <nl> + label should be represented by the index ` n_classes - 1 ` . <nl> + <nl> + . . note : : <nl> + This module returns a ` ` NamedTuple ` ` with ` ` output ` ` <nl> + and ` ` loss ` ` fields . See further documentation for details . <nl> + <nl> + . . note : : <nl> + To compute log - probabilities for all classes , the ` ` log_prob ` ` <nl> + method can be used . <nl> + <nl> + Args : <nl> + in_features ( int ) : Number of features in the input tensor <nl> + n_classes ( int ) : Number of classes in the dataset . <nl> + cutoffs ( Sequence ) : Cutoffs used to assign targets to their buckets . <nl> + div_value ( float , optional ) : value used as an exponent to compute sizes <nl> + of the clusters . Default : 4 . 0 <nl> + <nl> + Returns : <nl> + ` ` NamedTuple ` ` with ` ` output ` ` and ` ` loss ` ` fields : <nl> + * * * output * * is a Tensor of size ` ` N ` ` containing computed target <nl> + log probabilities for each example <nl> + * * * loss * * is a Scalar representing the computed negative <nl> + log likelihood loss <nl> + <nl> + Shape : <nl> + - input : : math : ` ( N , in \ _features ) ` <nl> + - target : : math : ` ( N ) ` where each value satisfies : math : ` 0 < = target [ i ] < = n \ _classes ` <nl> + - output : : math : ` ( N ) ` <nl> + - loss : ` ` Scalar ` ` <nl> + <nl> + <nl> + . . _Efficient softmax approximation for GPUs : <nl> + https : / / arxiv . org / abs / 1609 . 04309 <nl> + <nl> + . . _Zipf ' s law : <nl> + https : / / en . wikipedia . org / wiki / Zipf % 27s_law <nl> + " " " <nl> + <nl> + def __init__ ( self , in_features , n_classes , cutoffs , div_value = 4 . , head_bias = False ) : <nl> + super ( AdaptiveLogSoftmaxWithLoss , self ) . __init__ ( ) <nl> + <nl> + cutoffs = list ( cutoffs ) <nl> + <nl> + if ( cutoffs ! = sorted ( cutoffs ) ) \ <nl> + or ( min ( cutoffs ) < = 0 ) \ <nl> + or ( max ( cutoffs ) > = ( n_classes - 1 ) ) \ <nl> + or ( len ( set ( cutoffs ) ) ! = len ( cutoffs ) ) \ <nl> + or any ( [ int ( c ) ! = c for c in cutoffs ] ) : <nl> + <nl> + raise ValueError ( " cutoffs should be a sequence of unique , positive " <nl> + " integers sorted in an increasing order , where " <nl> + " each value is between 1 and n_classes - 1 " ) <nl> + <nl> + self . in_features = in_features <nl> + self . n_classes = n_classes <nl> + self . cutoffs = cutoffs + [ n_classes ] <nl> + self . div_value = div_value <nl> + self . head_bias = head_bias <nl> + <nl> + self . shortlist_size = self . cutoffs [ 0 ] <nl> + self . n_clusters = len ( self . cutoffs ) - 1 <nl> + self . head_size = self . shortlist_size + self . n_clusters <nl> + <nl> + self . head = Linear ( self . in_features , self . head_size , bias = self . head_bias ) <nl> + self . tail = ModuleList ( ) <nl> + <nl> + for i in range ( self . n_clusters ) : <nl> + <nl> + hsz = int ( self . in_features / / ( self . div_value * * ( i + 1 ) ) ) <nl> + osz = self . cutoffs [ i + 1 ] - self . cutoffs [ i ] <nl> + <nl> + projection = Sequential ( <nl> + Linear ( self . in_features , hsz , bias = False ) , <nl> + Linear ( hsz , osz , bias = False ) <nl> + ) <nl> + <nl> + self . tail . append ( projection ) <nl> + <nl> + def reset_parameters ( self ) : <nl> + self . head . 
reset_parameters ( ) <nl> + for i2h , h2o in self . tail : <nl> + i2h . reset_parameters ( ) <nl> + h2o . reset_parameters ( ) <nl> + <nl> + def forward ( self , input , target ) : <nl> + if input . size ( 0 ) ! = target . size ( 0 ) : <nl> + raise RuntimeError ( ' Input and target should have the same size ' <nl> + ' in the batch dimension . ' ) <nl> + <nl> + used_rows = 0 <nl> + batch_size = target . size ( 0 ) <nl> + <nl> + output = input . new_zeros ( batch_size ) <nl> + gather_inds = target . new_empty ( batch_size ) <nl> + <nl> + cutoff_values = [ 0 ] + self . cutoffs <nl> + for i in range ( len ( cutoff_values ) - 1 ) : <nl> + <nl> + low_idx = cutoff_values [ i ] <nl> + high_idx = cutoff_values [ i + 1 ] <nl> + <nl> + target_mask = ( target > = low_idx ) & ( target < high_idx ) <nl> + row_indices = target_mask . nonzero ( ) . squeeze ( ) <nl> + <nl> + if row_indices . numel ( ) = = 0 : <nl> + continue <nl> + <nl> + if i = = 0 : <nl> + gather_inds . index_copy_ ( 0 , row_indices , target [ target_mask ] ) <nl> + <nl> + else : <nl> + relative_target = target [ target_mask ] - low_idx <nl> + input_subset = input . index_select ( 0 , row_indices ) <nl> + <nl> + cluster_output = self . tail [ i - 1 ] ( input_subset ) <nl> + cluster_index = self . shortlist_size + i - 1 <nl> + <nl> + gather_inds . index_fill_ ( 0 , row_indices , cluster_index ) <nl> + <nl> + cluster_logprob = log_softmax ( cluster_output , dim = 1 ) <nl> + local_logprob = cluster_logprob . gather ( 1 , relative_target . unsqueeze ( 1 ) ) <nl> + output . index_copy_ ( 0 , row_indices , local_logprob . squeeze ( 1 ) ) <nl> + <nl> + used_rows + = row_indices . numel ( ) <nl> + <nl> + if used_rows ! = batch_size : <nl> + raise RuntimeError ( " Target values should be in [ 0 , { } ] , " <nl> + " but values in range [ { } , { } ] " <nl> + " were found . " . format ( self . n_classes - 1 , <nl> + target . min ( ) . item ( ) , <nl> + target . max ( ) . item ( ) ) ) <nl> + <nl> + head_output = self . head ( input ) <nl> + head_logprob = log_softmax ( head_output , dim = 1 ) <nl> + output + = head_logprob . gather ( 1 , gather_inds . unsqueeze ( 1 ) ) . squeeze ( ) <nl> + loss = ( - output ) . mean ( ) <nl> + <nl> + return _ASMoutput ( output , loss ) <nl> + <nl> + def _get_full_log_prob ( self , input , head_output ) : <nl> + " " " Given input tensor , and output of ` self . head ` , <nl> + compute the log of the full distribution " " " <nl> + <nl> + out = input . new_empty ( ( head_output . size ( 0 ) , self . n_classes ) ) <nl> + head_logprob = log_softmax ( head_output , dim = 1 ) <nl> + <nl> + out [ : , : self . shortlist_size ] = head_logprob [ : , : self . shortlist_size ] <nl> + <nl> + for i , ( start_idx , stop_idx ) in enumerate ( zip ( self . cutoffs , self . cutoffs [ 1 : ] ) ) : <nl> + cluster_output = self . tail [ i ] ( input ) <nl> + cluster_logprob = log_softmax ( cluster_output , dim = 1 ) <nl> + output_logprob = cluster_logprob + head_logprob [ : , self . shortlist_size + i ] . 
unsqueeze ( 1 ) <nl> + <nl> + out [ : , start_idx : stop_idx ] = output_logprob <nl> + <nl> + return out <nl> + <nl> + def log_prob ( self , input ) : <nl> + " " " Computes log probabilities for all : math : ` n \ _classes ` <nl> + <nl> + Args : <nl> + input ( Tensor ) : a minibatch of examples <nl> + <nl> + Returns : <nl> + log - probabilities of for each class : math : ` c ` <nl> + in range : math : ` 0 < = c < = n \ _classes ` , where : math : ` n \ _classes ` is a <nl> + parameter passed to ` ` AdaptiveLogSoftmaxWithLoss ` ` constructor . <nl> + <nl> + Shape : <nl> + - Input : : math : ` ( N , in \ _features ) ` <nl> + - Output : : math : ` ( N , n \ _classes ) ` <nl> + <nl> + " " " <nl> + <nl> + head_output = self . head ( input ) <nl> + return self . _get_full_log_prob ( input , head_output ) <nl> + <nl> + def predict ( self , input ) : <nl> + " " " This is equivalent to ` self . log_pob ( input ) . argmax ( dim = 1 ) ` , <nl> + but is more efficient in some cases . <nl> + <nl> + Args : <nl> + input ( Tensor ) : a minibatch of examples <nl> + <nl> + Returns : <nl> + output ( Tensor ) : a class with the highest probability for each example <nl> + <nl> + Shape : <nl> + - Input : : math : ` ( N , in \ _features ) ` <nl> + - Output : : math : ` ( N ) ` <nl> + " " " <nl> + <nl> + head_output = self . head ( input ) <nl> + output = torch . argmax ( head_output , dim = 1 ) <nl> + not_in_shortlist = ( output > = self . shortlist_size ) <nl> + all_in_shortlist = not ( not_in_shortlist . any ( ) ) <nl> + <nl> + if all_in_shortlist : <nl> + return output <nl> + <nl> + elif not_in_shortlist . all ( ) : <nl> + log_prob = self . _get_full_log_prob ( input , head_output ) <nl> + return torch . argmax ( log_prob , dim = 1 ) <nl> + <nl> + else : <nl> + log_prob = self . _get_full_log_prob ( input [ not_in_shortlist ] , <nl> + head_output [ not_in_shortlist ] ) <nl> + output [ not_in_shortlist ] = torch . argmax ( log_prob , dim = 1 ) <nl> + return output <nl> | Implement adaptive softmax ( ) | pytorch/pytorch | c2046c1e5eb4d80d2d8b6848ec61548aa2f6b580 | 2018-06-04T16:12:03Z |
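A quick worked instance of the `div_value` formula from the docstring above, using the same numbers the `test_adaptive_log_softmax` test asserts on (`in_features = 16`, `n_classes = 20`, `cutoffs = [5, 10, 15]`, `div_value = 2.`): the head holds the 5 most frequent targets plus one logit per tail cluster, so `head.weight` has shape `(5 + 3, 16)`, and the three tail clusters each cover 5 targets projected through hidden sizes

$$\left\lfloor \frac{16}{2^{1}} \right\rfloor = 8,\qquad \left\lfloor \frac{16}{2^{2}} \right\rfloor = 4,\qquad \left\lfloor \frac{16}{2^{3}} \right\rfloor = 2,$$

which is exactly what the `asfm.tail[i][1].weight.size()` assertions in the diff check.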
mmm a / tensorflow / lite / micro / examples / hello_world / README . md <nl> ppp b / tensorflow / lite / micro / examples / hello_world / README . md <nl> get it started . <nl> * Plug in the microSD card into the J11 connector . <nl> * Push the RST button . If a red LED is lit beside RST button , push the CFG <nl> button . <nl> + * Type or copy next commands one - by - another into serial terminal : <nl> + ` ` ` <nl> + setenv loadaddr 0x10800000 <nl> + setenv bootfile app . elf <nl> + setenv bootdelay 1 <nl> + setenv bootcmd fatload mmc 0 \ $ \ { loadaddr \ } \ $ \ { bootfile \ } \ & \ & bootelf <nl> + saveenv <nl> + ` ` ` <nl> + * Push the RST button . <nl> <nl> 6 . If you have the MetaWare Debugger installed in your environment : <nl> <nl> mmm a / tensorflow / lite / micro / examples / micro_speech / README . md <nl> ppp b / tensorflow / lite / micro / examples / micro_speech / README . md <nl> get it started . <nl> * Plug in the microSD card into the J11 connector . <nl> * Push the RST button . If a red LED is lit beside RST button , push the CFG <nl> button . <nl> + * Type or copy next commands one - by - another into serial terminal : <nl> + ` ` ` <nl> + setenv loadaddr 0x10800000 <nl> + setenv bootfile app . elf <nl> + setenv bootdelay 1 <nl> + setenv bootcmd fatload mmc 0 \ $ \ { loadaddr \ } \ $ \ { bootfile \ } \ & \ & bootelf <nl> + saveenv <nl> + ` ` ` <nl> + * Push the RST button . <nl> <nl> 6 . If you have the MetaWare Debugger installed in your environment : <nl> <nl> mmm a / tensorflow / lite / micro / examples / micro_speech / arc_emsdp / Makefile . inc <nl> ppp b / tensorflow / lite / micro / examples / micro_speech / arc_emsdp / Makefile . inc <nl> ifneq ( $ ( filter $ ( ALL_TAGS ) , reduce_codesize ) , ) <nl> # with high probability may not be acceptable for other graphs and will need <nl> # to be adjusted by the user <nl> <nl> - @ sed - E - i ' s # mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32 \ ( # \ <nl> - mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32_generic \ ( # ' $ ( word 3 , $ ^ ) <nl> + @ sed - E - i ' s # mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32 \ ( # \ <nl> + mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32_generic \ ( # g ' $ ( word 3 , $ ^ ) <nl> @ echo $ ( word 3 , $ ^ ) : Use generic function > > $ @ <nl> endif <nl> <nl> mmm a / tensorflow / lite / micro / examples / person_detection / README . md <nl> ppp b / tensorflow / lite / micro / examples / person_detection / README . md <nl> get it started . <nl> * Plug in the microSD card into the J11 connector . <nl> * Push the RST button . If a red LED is lit beside RST button , push the CFG <nl> button . <nl> + * Type or copy next commands one - by - another into serial terminal : <nl> + ` ` ` <nl> + setenv loadaddr 0x10800000 <nl> + setenv bootfile app . elf <nl> + setenv bootdelay 1 <nl> + setenv bootcmd fatload mmc 0 \ $ \ { loadaddr \ } \ $ \ { bootfile \ } \ & \ & bootelf <nl> + saveenv <nl> + ` ` ` <nl> + * Push the RST button . <nl> <nl> 6 . If you have the MetaWare Debugger installed in your environment : <nl> <nl> mmm a / tensorflow / lite / micro / examples / person_detection_experimental / README . md <nl> ppp b / tensorflow / lite / micro / examples / person_detection_experimental / README . md <nl> get it started . <nl> * Plug in the microSD card into the J11 connector . <nl> * Push the RST button . If a red LED is lit beside RST button , push the CFG <nl> button . 
<nl> + * Type or copy next commands one - by - another into serial terminal : <nl> + ` ` ` <nl> + setenv loadaddr 0x10800000 <nl> + setenv bootfile app . elf <nl> + setenv bootdelay 1 <nl> + setenv bootcmd fatload mmc 0 \ $ \ { loadaddr \ } \ $ \ { bootfile \ } \ & \ & bootelf <nl> + saveenv <nl> + ` ` ` <nl> + * Push the RST button . <nl> <nl> 6 . If you have the MetaWare Debugger installed in your environment : <nl> <nl> mmm a / tensorflow / lite / micro / examples / person_detection_experimental / arc_emsdp / Makefile . inc <nl> ppp b / tensorflow / lite / micro / examples / person_detection_experimental / arc_emsdp / Makefile . inc <nl> ifneq ( $ ( filter $ ( ALL_TAGS ) , reduce_codesize ) , ) <nl> @ sed - E - i ' s # mli_krn_conv2d_nhwc_sa8_sa8_sa32 # mli_krn_conv2d_nhwc_sa8_sa8_sa32_k1x1_nopad # ' $ ( word 3 , $ ^ ) <nl> @ sed - E - i ' s # mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32 # mli_krn_depthwise_conv2d_hwcn_sa8_sa8_sa32_k3x3_krnpad # ' $ ( word 4 , $ ^ ) <nl> @ sed - E - i ' s # mli_krn_avepool_hwc_sa8 # mli_krn_avepool_hwc_sa8_k3x3_nopad # ' $ ( word 5 , $ ^ ) <nl> - @ sed - E - i ' s # mli_krn_maxpool_hwc_sa8 \ ( in_ptr , \ & cfg , out_ptr \ ) ; # { \ <nl> - TF_LITE_KERNEL_LOG ( context , \ " Max pooling is removed from the application \ " ) ; \ <nl> - return kTfLiteError ; \ <nl> - } # ' $ ( word 5 , $ ^ ) <nl> + @ sed - E - i ' s # mli_krn_maxpool_hwc_sa8 \ ( in_ptr , \ & cfg , out_ptr \ ) ; # return kTfLiteError ; # ' $ ( word 5 , $ ^ ) <nl> @ echo $ ( word 3 , $ ^ ) : Use specialization > > $ @ <nl> @ echo $ ( word 4 , $ ^ ) : Use specialization > > $ @ <nl> @ echo $ ( word 5 , $ ^ ) : Use specialization and remove max pooling > > $ @ <nl> mmm a / tensorflow / lite / micro / kernels / arc_mli / conv . cc <nl> ppp b / tensorflow / lite / micro / kernels / arc_mli / conv . cc <nl> void EvalQuantizedPerChannel ( TfLiteContext * context , TfLiteNode * node , <nl> reference_integer_ops : : ConvPerChannel ( <nl> op_params , data . per_channel_output_multiplier , <nl> data . per_channel_output_shift , GetTensorShape ( input ) , <nl> - GetTensorData < int8 > ( input ) , GetTensorShape ( filter ) , <nl> - GetTensorData < int8 > ( filter ) , GetTensorShape ( bias ) , <nl> - GetTensorData < int32 > ( bias ) , GetTensorShape ( output ) , <nl> - GetTensorData < int8 > ( output ) ) ; <nl> + GetTensorData < int8_t > ( input ) , GetTensorShape ( filter ) , <nl> + GetTensorData < int8_t > ( filter ) , GetTensorShape ( bias ) , <nl> + GetTensorData < int32_t > ( bias ) , GetTensorShape ( output ) , <nl> + GetTensorData < int8_t > ( output ) ) ; <nl> # else <nl> TF_LITE_KERNEL_LOG ( context , <nl> " Node configuration is not supported by ARC MLI Library . " ) ; <nl> mmm a / tensorflow / lite / micro / kernels / arc_mli / depthwise_conv . cc <nl> ppp b / tensorflow / lite / micro / kernels / arc_mli / depthwise_conv . cc <nl> void EvalQuantizedPerChannel ( TfLiteContext * context , TfLiteNode * node , <nl> reference_integer_ops : : DepthwiseConvPerChannel ( <nl> op_params , data . per_channel_output_multiplier , <nl> data . 
per_channel_output_shift , GetTensorShape ( input ) , <nl> - GetTensorData < int8 > ( input ) , GetTensorShape ( filter ) , <nl> - GetTensorData < int8 > ( filter ) , GetTensorShape ( bias ) , <nl> - GetTensorData < int32 > ( bias ) , GetTensorShape ( output ) , <nl> - GetTensorData < int8 > ( output ) ) ; <nl> + GetTensorData < int8_t > ( input ) , GetTensorShape ( filter ) , <nl> + GetTensorData < int8_t > ( filter ) , GetTensorShape ( bias ) , <nl> + GetTensorData < int32_t > ( bias ) , GetTensorShape ( output ) , <nl> + GetTensorData < int8_t > ( output ) ) ; <nl> # else <nl> TF_LITE_KERNEL_LOG ( context , <nl> " Node configuration is not supported by ARC MLI Library . " ) ; <nl> mmm a / tensorflow / lite / micro / tools / make / targets / arc / README . md <nl> ppp b / tensorflow / lite / micro / tools / make / targets / arc / README . md <nl> In both cases you will see the application output in the serial terminal . <nl> 1 . Use the following command in the same command shell you used for building <nl> the application , as described in the previous step <nl> <nl> + ` ` ` <nl> make flash <nl> + ` ` ` <nl> <nl> 2 . Copy the content of the created * . / bin * folder into the root of microSD <nl> card . Note that the card must be formatted as FAT32 with default cluster <nl> In both cases you will see the application output in the serial terminal . <nl> 4 . Push the RST button . If a red LED is lit beside RST button , push the CFG <nl> button . <nl> <nl> - You will see the application output in the serial terminal . <nl> + 5 . Using serial terminal , create uboot environment file to automatically run <nl> + the application on start - up . Type or copy next sequence of commands into <nl> + serial terminal one - by - another : <nl> + <nl> + ` ` ` <nl> + setenv loadaddr 0x10800000 <nl> + setenv bootfile app . elf <nl> + setenv bootdelay 1 <nl> + setenv bootcmd fatload mmc 0 \ $ \ { loadaddr \ } \ $ \ { bootfile \ } \ & \ & bootelf <nl> + saveenv <nl> + ` ` ` <nl> + <nl> + 6 . Reset the board ( see step 4 above ) <nl> + <nl> + You will see the application output in the serial terminal . <nl> <nl> # # Custom ARC EM / HS Platform <nl> <nl> deleted file mode 100644 <nl> index c336b6c8733f9 . . 0000000000000 <nl> Binary files a / tensorflow / lite / micro / tools / make / targets / arc / emsdp / uboot . env and / dev / null differ <nl> mmm a / tensorflow / lite / micro / tools / make / targets / arc_emsdp_makefile . inc <nl> ppp b / tensorflow / lite / micro / tools / make / targets / arc_emsdp_makefile . inc <nl> endif <nl> <nl> TCF_FILE = $ ( PWD ) / $ ( MAKEFILE_DIR ) / downloads / $ ( MLI_LIB_DIR ) / hw / emsdp_em11d_em9d_dfss . tcf <nl> LCF_FILE = $ ( PWD ) / $ ( MAKEFILE_DIR ) / targets / arc / emsdp / emsdp . lcf <nl> - UBOOT_FILE : = $ ( PWD ) / $ ( MAKEFILE_DIR ) / targets / arc / emsdp / uboot . env <nl> - UBOOT_FILE_NAME : = $ ( notdir $ ( UBOOT_FILE ) ) <nl> - <nl> + <nl> <nl> include $ ( MAKEFILE_DIR ) / targets / arc / arc_common . inc <nl> <nl> include $ ( MAKEFILE_DIR ) / targets / arc / arc_common . 
inc <nl> ARC_EXTRA_APP_RULES = \ <nl> $ ( DLR ) \ ( BIN_FILE \ ) : $ ( DLR ) \ ( BIN_DIR \ ) $ ( DLR ) \ ( OUT_NAME \ ) \ <nl> \ n \ t \ @ $ ( DLR ) \ ( CP \ ) $ ( DLR ) \ ( OUT_NAME \ ) $ ( DLR ) \ ( BIN_FILE \ ) \ <nl> - \ n \ t \ @ $ ( DLR ) \ ( CP \ ) $ ( UBOOT_FILE_NAME ) $ ( DLR ) \ ( BIN_DIR \ ) $ ( DLR ) \ ( PS \ ) $ ( UBOOT_FILE_NAME ) \ <nl> \ n \ <nl> \ n $ ( DLR ) \ ( BIN_DIR \ ) : \ <nl> \ n \ t \ @ $ ( DLR ) \ ( MKDIR \ ) $ ( DLR ) \ ( BIN_DIR \ ) \ <nl> include $ ( MAKEFILE_DIR ) / targets / arc / arc_common . inc <nl> ARC_APP_DEBUG_CMD = mdb - OK - digilent - nooptions $ ( DLR ) \ ( DBG_ARGS \ ) <nl> ARC_EXTRA_EXECUTE_RULES = <nl> <nl> - MAKE_PROJECT_FILES + = $ ( UBOOT_FILE_NAME ) <nl> - ARC_TARGET_COPY_FILES + = $ ( notdir $ ( UBOOT_FILE ) ) ! $ ( UBOOT_FILE ) <nl> - <nl> MAKE_PROJECT_FILES : = $ ( filter - out README_MAKE . md , $ ( MAKE_PROJECT_FILES ) ) README_ARC_EMSDP . md <nl> <nl> # for default EMSDP configuration we can use em9d_va rt libs <nl> | Remove uboot . env and add instructions to generate it . Update project patching of TFLM examples for newer sed . Use native types in arc conv and dw_conv | tensorflow/tensorflow | 4987375d02aff5051b92c2b0b652702ad48c52fa | 2020-08-13T11:15:05Z |
mmm a / arangod / MMFiles / MMFilesLogfileManager . cpp <nl> ppp b / arangod / MMFiles / MMFilesLogfileManager . cpp <nl> MMFilesLogfileManager : : MMFilesLogfileManager ( ApplicationServer * server ) <nl> _idLock ( ) , <nl> _writeThrottled ( false ) , <nl> _shutdown ( 0 ) { <nl> - LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " creating WAL logfile manager " ; <nl> TRI_ASSERT ( ! _allowWrites ) ; <nl> <nl> setOptional ( true ) ; <nl> MMFilesLogfileManager : : MMFilesLogfileManager ( ApplicationServer * server ) <nl> <nl> / / destroy the logfile manager <nl> MMFilesLogfileManager : : ~ MMFilesLogfileManager ( ) { <nl> - LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " shutting down WAL logfile manager " ; <nl> - <nl> for ( auto & it : _barriers ) { <nl> delete it . second ; <nl> } <nl> MMFilesLogfileManager : : ~ MMFilesLogfileManager ( ) { <nl> <nl> void MMFilesLogfileManager : : collectOptions ( std : : shared_ptr < ProgramOptions > options ) { <nl> options - > addSection ( <nl> - Section ( " wal " , " Configure the WAL " , " wal " , false , false ) ) ; <nl> + Section ( " wal " , " Configure the WAL of the MMFiles engine " , " wal " , false , false ) ) ; <nl> <nl> options - > addHiddenOption ( <nl> " - - wal . allow - oversize - entries " , <nl> bool MMFilesLogfileManager : : open ( ) { <nl> <nl> return true ; <nl> } <nl> - <nl> + <nl> + void MMFilesLogfileManager : : beginShutdown ( ) { <nl> + if ( ! isEnabled ( ) ) { <nl> + return ; <nl> + } <nl> + throttleWhenPending ( 0 ) ; / / deactivate write - throttling on shutdown <nl> + } <nl> + <nl> void MMFilesLogfileManager : : stop ( ) { <nl> + if ( ! isEnabled ( ) ) { <nl> + return ; <nl> + } <nl> / / deactivate write - throttling ( again ) on shutdown in case it was set again <nl> / / after beginShutdown <nl> throttleWhenPending ( 0 ) ; <nl> } <nl> <nl> - void MMFilesLogfileManager : : beginShutdown ( ) { <nl> - throttleWhenPending ( 0 ) ; / / deactivate write - throttling on shutdown <nl> - } <nl> - <nl> void MMFilesLogfileManager : : unprepare ( ) { <nl> if ( ! isEnabled ( ) ) { <nl> return ; <nl> void MMFilesLogfileManager : : unprepare ( ) { <nl> _removerThread = nullptr ; <nl> } <nl> <nl> - if ( _collectorThread ! = nullptr ) { <nl> - LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " stopping collector thread " ; <nl> - while ( _collectorThread - > isRunning ( ) ) { <nl> - usleep ( 10000 ) ; <nl> + { <nl> + WRITE_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread ! = nullptr ) { <nl> + LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " stopping collector thread " ; <nl> + while ( _collectorThread - > isRunning ( ) ) { <nl> + usleep ( 10000 ) ; <nl> + } <nl> + delete _collectorThread ; <nl> + _collectorThread = nullptr ; <nl> } <nl> - delete _collectorThread ; <nl> - _collectorThread = nullptr ; <nl> } <nl> <nl> if ( _synchronizerThread ! = nullptr ) { <nl> MMFilesWalSlotInfoCopy MMFilesLogfileManager : : writeSlot ( MMFilesWalSlotInfo & slot <nl> int MMFilesLogfileManager : : waitForCollectorQueue ( TRI_voc_cid_t cid , double timeout ) { <nl> double const end = TRI_microtime ( ) + timeout ; <nl> <nl> - while ( _collectorThread - > hasQueuedOperations ( cid ) ) { <nl> + while ( true ) { <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread = = nullptr ) { <nl> + break ; <nl> + } <nl> + <nl> + if ( ! 
_collectorThread - > hasQueuedOperations ( cid ) ) { <nl> + break ; <nl> + } <nl> + <nl> + / / sleep without holding the lock <nl> + locker . unlock ( ) ; <nl> usleep ( 10000 ) ; <nl> <nl> if ( TRI_microtime ( ) > end ) { <nl> void MMFilesLogfileManager : : setCollectionRequested ( MMFilesWalLogfile * logfile ) { <nl> <nl> if ( ! _inRecovery ) { <nl> / / to start collection <nl> - _collectorThread - > signal ( ) ; <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread ! = nullptr ) { <nl> + _collectorThread - > signal ( ) ; <nl> + } <nl> } <nl> } <nl> <nl> void MMFilesLogfileManager : : setCollectionDone ( MMFilesWalLogfile * logfile ) { <nl> <nl> if ( ! _inRecovery ) { <nl> / / to start removal of unneeded datafiles <nl> - _collectorThread - > signal ( ) ; <nl> + { <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + if ( _collectorThread ! = nullptr ) { <nl> + _collectorThread - > signal ( ) ; <nl> + } <nl> + } <nl> writeShutdownInfo ( false ) ; <nl> } <nl> } <nl> void MMFilesLogfileManager : : removeLogfile ( MMFilesWalLogfile * logfile ) { <nl> } <nl> <nl> void MMFilesLogfileManager : : waitForCollector ( ) { <nl> - if ( _collectorThread = = nullptr ) { <nl> - return ; <nl> - } <nl> + while ( true ) { <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread = = nullptr ) { <nl> + return ; <nl> + } <nl> + <nl> + if ( ! _collectorThread - > hasQueuedOperations ( ) ) { <nl> + return ; <nl> + } <nl> + <nl> + locker . unlock ( ) ; <nl> <nl> - while ( _collectorThread - > hasQueuedOperations ( ) ) { <nl> LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " waiting for WAL collector " ; <nl> usleep ( 50000 ) ; <nl> } <nl> void MMFilesLogfileManager : : waitForCollector ( ) { <nl> / / queued . This is used in the DatabaseManagerThread when dropping <nl> / / a database to avoid existence of ditches of type DOCUMENT . <nl> bool MMFilesLogfileManager : : executeWhileNothingQueued ( std : : function < void ( ) > const & cb ) { <nl> - if ( _collectorThread = = nullptr ) { <nl> - return true ; <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread ! = nullptr ) { <nl> + return _collectorThread - > executeWhileNothingQueued ( cb ) ; <nl> } <nl> - return _collectorThread - > executeWhileNothingQueued ( cb ) ; <nl> + <nl> + locker . unlock ( ) ; <nl> + <nl> + cb ( ) ; <nl> + return true ; <nl> } <nl> <nl> / / wait until a specific logfile has been collected <nl> int MMFilesLogfileManager : : waitForCollector ( MMFilesWalLogfile : : IdType logfileId , <nl> return TRI_ERROR_NO_ERROR ; <nl> } <nl> <nl> + READ_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> + if ( _collectorThread = = nullptr ) { <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> int res = _collectorThread - > waitForResult ( 50 * 1000 ) ; <nl> <nl> + locker . unlock ( ) ; <nl> + <nl> / / LOG_TOPIC ( TRACE , arangodb : : Logger : : FIXME ) < < " still waiting for collector . logfileId : " < < logfileId < < <nl> / / " lastCollected : " < < _lastCollectedId < < " , result : " < < res ; <nl> <nl> void MMFilesLogfileManager : : stopMMFilesAllocatorThread ( ) { <nl> <nl> / / start the collector thread <nl> int MMFilesLogfileManager : : startMMFilesCollectorThread ( ) { <nl> + WRITE_LOCKER ( locker , _collectorThreadLock ) ; <nl> + <nl> _collectorThread = new MMFilesCollectorThread ( this ) ; <nl> <nl> if ( ! _collectorThread - > start ( ) ) { <nl> mmm a / arangod / MMFiles / MMFilesLogfileManager . 
h <nl> ppp b / arangod / MMFiles / MMFilesLogfileManager . h <nl> class MMFilesLogfileManager final : public application_features : : ApplicationFeat <nl> <nl> / / the collector thread <nl> MMFilesCollectorThread * _collectorThread ; <nl> + <nl> + / / lock protecting the destruction of the collector thread <nl> + basics : : ReadWriteLock _collectorThreadLock ; <nl> <nl> / / the logfile remover thread <nl> MMFilesRemoverThread * _removerThread ; <nl> mmm a / arangod / RestServer / DatabaseFeature . cpp <nl> ppp b / arangod / RestServer / DatabaseFeature . cpp <nl> void DatabaseFeature : : start ( ) { <nl> FATAL_ERROR_EXIT ( ) ; <nl> } <nl> <nl> - / / TODO : handle _upgrade and _checkVersion here <nl> - <nl> / / activate deadlock detection in case we ' re not running in cluster mode <nl> if ( ! arangodb : : ServerState : : instance ( ) - > isRunningInCluster ( ) ) { <nl> enableDeadlockDetection ( ) ; <nl> | fix a race on shutdown ( ) | arangodb/arangodb | beea9033d36c050d4fc42a60a2a721265324e3f3 | 2017-07-29T08:30:24Z |
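The shutdown-race fix above follows one pattern throughout: every access to `_collectorThread` is wrapped in a read lock, creation and destruction take the write lock, and the read lock is explicitly released before any `usleep` so the polling loops never starve the shutdown path. Below is a minimal sketch of that pattern using only standard C++ (`std::shared_mutex`); the `Worker`/`Manager` names are illustrative stand-ins, not ArangoDB's actual `MMFilesCollectorThread` or `READ_LOCKER`/`WRITE_LOCKER` machinery.

```cpp
// Minimal sketch of the locking pattern in the fix above, written with
// standard C++ primitives rather than ArangoDB's lock macros; the class
// and method names are hypothetical.
#include <chrono>
#include <memory>
#include <mutex>
#include <shared_mutex>
#include <thread>

struct Worker {
  bool hasQueuedOperations() const { return false; }  // stand-in for the real work check
};

class Manager {
 public:
  // Reader: poll the worker without blocking its destruction for long.
  void waitForWorker() {
    while (true) {
      std::shared_lock<std::shared_mutex> lock(_workerLock);
      if (_worker == nullptr) return;               // already torn down
      if (!_worker->hasQueuedOperations()) return;  // nothing left to wait for
      lock.unlock();                                // never sleep while holding the lock
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }

  // Writer: exclusive access while destroying the worker.
  void shutdownWorker() {
    std::unique_lock<std::shared_mutex> lock(_workerLock);
    _worker.reset();
  }

 private:
  std::shared_mutex _workerLock;
  std::unique_ptr<Worker> _worker = std::make_unique<Worker>();
};
```

The important detail mirrored from the diff is the `lock.unlock()` before sleeping: the poll loop holds the shared lock only long enough to test the pointer, so a concurrent `shutdownWorker()` can take the exclusive lock between iterations.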
mmm a / tensorflow / python / distribute / mirrored_run . py <nl> ppp b / tensorflow / python / distribute / mirrored_run . py <nl> def _merge_call ( self , fn , args , kwargs ) : <nl> <nl> This pauses the current replica thread and passes ` fn ` and its arguments to <nl> the main thread . The main thread will wait until all replicas pause , then <nl> - invoke ` fn ` with grouped arugments . The current replica thread will continue <nl> + invoke ` fn ` with grouped arguments . The current replica thread will continue <nl> after ` fn ` completes . <nl> <nl> See ` _call_for_each_replica ` for the logic in the main thread . <nl> | Fix typo in mirrored_run . py . | tensorflow/tensorflow | e580b59f5aee49269fa48626cdec44b6ae2b2176 | 2020-04-23T05:00:01Z |
mmm a / hphp / runtime / base / runtime - option . cpp <nl> ppp b / hphp / runtime / base / runtime - option . cpp <nl> static std : : vector < std : : string > getTierOverwrites ( IniSetting : : Map & ini , <nl> return messages ; <nl> } <nl> <nl> + void RuntimeOption : : ReadSatelliteInfo ( <nl> + const IniSettingMap & ini , <nl> + const Hdf & hdf , <nl> + std : : vector < std : : shared_ptr < SatelliteServerInfo > > & infos , <nl> + std : : string & xboxPassword , <nl> + std : : set < std : : string > & xboxPasswords ) { <nl> + auto ss_callback = [ & ] ( const IniSettingMap & ini_ss , const Hdf & hdf_ss , <nl> + const std : : string & ini_ss_key ) { <nl> + auto satellite = std : : make_shared < SatelliteServerInfo > ( ini_ss , hdf_ss , <nl> + ini_ss_key ) ; <nl> + infos . push_back ( satellite ) ; <nl> + if ( satellite - > getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) { <nl> + xboxPassword = satellite - > getPassword ( ) ; <nl> + xboxPasswords = satellite - > getPasswords ( ) ; <nl> + } <nl> + } ; <nl> + Config : : Iterate ( ss_callback , ini , hdf , " Satellites " ) ; <nl> + } <nl> <nl> void RuntimeOption : : Load ( <nl> IniSetting : : Map & ini , Hdf & config , <nl> void RuntimeOption : : Load ( <nl> IpBlocks = std : : make_shared < IpBlockMap > ( ini , config ) ; <nl> } <nl> { <nl> - if ( config [ " Satellites " ] . exists ( ) ) { <nl> - for ( Hdf hdf = config [ " Satellites " ] . firstChild ( ) ; hdf . exists ( ) ; <nl> - hdf = hdf . next ( ) ) { <nl> - auto satellite = std : : make_shared < SatelliteServerInfo > ( ini , hdf ) ; <nl> - SatelliteServerInfos . push_back ( satellite ) ; <nl> - if ( satellite - > getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) { <nl> - XboxPassword = satellite - > getPassword ( ) ; <nl> - XboxPasswords = satellite - > getPasswords ( ) ; <nl> - } <nl> - } <nl> - } <nl> + ReadSatelliteInfo ( ini , config , SatelliteServerInfos , <nl> + XboxPassword , XboxPasswords ) ; <nl> } <nl> { <nl> / / Xbox <nl> mmm a / hphp / runtime / base / runtime - option . h <nl> ppp b / hphp / runtime / base / runtime - option . h <nl> class RuntimeOption { <nl> return strcmp ( ExecutionMode , " cli " ) = = 0 ; <nl> } <nl> <nl> + static void ReadSatelliteInfo ( <nl> + const IniSettingMap & ini , <nl> + const Hdf & hdf , <nl> + std : : vector < std : : shared_ptr < SatelliteServerInfo > > & infos , <nl> + std : : string & xboxPassword , <nl> + std : : set < std : : string > & xboxPasswords <nl> + ) ; <nl> + <nl> static const char * ExecutionMode ; <nl> static std : : string BuildId ; <nl> static std : : string InstanceId ; <nl> mmm a / hphp / runtime / server / satellite - server . cpp <nl> ppp b / hphp / runtime / server / satellite - server . cpp <nl> namespace HPHP { <nl> std : : set < std : : string > SatelliteServerInfo : : InternalURLs ; <nl> int SatelliteServerInfo : : DanglingServerPort = 0 ; <nl> <nl> - SatelliteServerInfo : : SatelliteServerInfo ( const IniSetting : : Map & ini , Hdf hdf ) { <nl> + SatelliteServerInfo : : SatelliteServerInfo ( const IniSetting : : Map & ini , <nl> + const Hdf & hdf , <nl> + const std : : string & ini_key / * = " " * / <nl> + ) { <nl> + m_name = hdf . exists ( ) & & ! hdf . isEmpty ( ) ? hdf . getName ( ) : ini_key ; <nl> m_name = hdf . getName ( ) ; <nl> m_port = Config : : GetUInt16 ( ini , hdf , " Port " , 0 , false ) ; <nl> m_threadCount = Config : : GetInt32 ( ini , hdf , " ThreadCount " , 5 , false ) ; <nl> mmm a / hphp / runtime / server / satellite - server . 
h <nl> ppp b / hphp / runtime / server / satellite - server . h <nl> class SatelliteServerInfo { <nl> static bool checkMainURL ( const std : : string & path ) ; <nl> <nl> public : <nl> - explicit SatelliteServerInfo ( const IniSetting : : Map & ini , Hdf hdf ) ; <nl> + SatelliteServerInfo ( const IniSetting : : Map & ini , const Hdf & hdf , <nl> + const std : : string & ini_key = " " ) ; <nl> <nl> const std : : string & getName ( ) const { return m_name ; } <nl> SatelliteServer : : Type getType ( ) const { return m_type ; } <nl> mmm a / hphp / test / ext / test_cpp_base . cpp <nl> ppp b / hphp / test / ext / test_cpp_base . cpp <nl> bool TestCppBase : : RunTests ( const std : : string & which ) { <nl> RUN_TEST ( TestIpBlockMap ) ; <nl> RUN_TEST ( TestIpBlockMapIni ) ; <nl> RUN_TEST ( TestSatelliteServer ) ; <nl> + RUN_TEST ( TestSatelliteServerIni ) ; <nl> RUN_TEST ( TestVirtualHost ) ; <nl> RUN_TEST ( TestVirtualHostIni ) ; <nl> RUN_TEST ( TestCollectionHdf ) ; <nl> bool TestCppBase : : TestSatelliteServer ( ) { <nl> ) ; <nl> <nl> <nl> - std : : vector < SatelliteServerInfo > infos ; <nl> - if ( hdf [ " Satellites " ] . exists ( ) ) { <nl> - for ( Hdf c = hdf [ " Satellites " ] . firstChild ( ) ; c . exists ( ) ; c = c . next ( ) ) { <nl> - auto satellite = SatelliteServerInfo ( ini , hdf ) ; <nl> - infos . push_back ( satellite ) ; <nl> - if ( satellite . getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) { <nl> - RuntimeOption : : XboxPassword = satellite . getPassword ( ) ; <nl> - RuntimeOption : : XboxPasswords = satellite . getPasswords ( ) ; <nl> - } <nl> + std : : vector < std : : shared_ptr < SatelliteServerInfo > > infos ; <nl> + RuntimeOption : : ReadSatelliteInfo ( ini , hdf , infos , <nl> + RuntimeOption : : XboxPassword , <nl> + RuntimeOption : : XboxPasswords ) ; <nl> + for ( auto & info_ptr : infos ) { <nl> + auto info = info_ptr . get ( ) ; <nl> + auto name = info - > getName ( ) ; <nl> + if ( name = = " rpc " ) { <nl> + VERIFY ( info - > getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) ; <nl> + VERIFY ( info - > getPort ( ) = = 9999 ) ; <nl> + VERIFY ( info - > getThreadCount ( ) = = 5 ) ; <nl> + VERIFY ( info - > getTimeoutSeconds ( ) = = <nl> + std : : chrono : : seconds ( RuntimeOption : : RequestTimeoutSeconds ) ) ; <nl> + VERIFY ( info - > getURLs ( ) . size ( ) = = 0 ) ; <nl> + VERIFY ( info - > getMaxRequest ( ) = = 500 ) ; <nl> + VERIFY ( info - > getMaxDuration ( ) = = 120 ) ; <nl> + VERIFY ( info - > getReqInitFunc ( ) = = " init_me " ) ; <nl> + VERIFY ( info - > getReqInitDoc ( ) = = " my / rpc / rpc . php " ) ; <nl> + VERIFY ( info - > getPassword ( ) = = " abcd0987 " ) ; <nl> + VERIFY ( info - > getPasswords ( ) . size ( ) = = 1 ) ; <nl> + VERIFY ( info - > getPasswords ( ) . find ( " abcd0987 " ) ! = <nl> + info - > getPasswords ( ) . end ( ) ) ; <nl> + VERIFY ( info - > alwaysReset ( ) = = false ) ; <nl> + VERIFY ( RuntimeOption : : XboxPassword = = " abcd0987 " ) ; <nl> + } else if ( name = = " ips " ) { <nl> + VERIFY ( info - > getType ( ) = = <nl> + SatelliteServer : : Type : : KindOfInternalPageServer ) ; <nl> + VERIFY ( info - > getURLs ( ) . size ( ) = = 0 ) ; <nl> } <nl> } <nl> + return Count ( true ) ; <nl> + } <nl> <nl> - for ( auto & info : infos ) { <nl> - auto name = info . getName ( ) ; <nl> + bool TestCppBase : : TestSatelliteServerIni ( ) { <nl> + std : : string iniStr = <nl> + " hhvm . satellites [ rpc ] [ type ] = RPCServer \ n " <nl> + " hhvm . satellites [ rpc ] [ port ] = 9999 \ n " <nl> + " hhvm . 
satellites [ rpc ] [ request_init_document ] = my / rpc / rpc . php \ n " <nl> + " hhvm . satellites [ rpc ] [ request_init_function ] = init_me \ n " <nl> + " hhvm . satellites [ rpc ] [ password ] = abcd0987 \ n " <nl> + " hhvm . satellites [ rpc ] [ passwords ] [ ] = abcd0987 \ n " <nl> + " hhvm . satellites [ ips ] [ type ] = InternalPageServer \ n " <nl> + " hhvm . satellites [ ips ] [ block_main_server ] = false \ n " ; <nl> + <nl> + IniSettingMap ini = IniSetting : : Map : : object ; <nl> + Hdf empty ; <nl> + Config : : ParseIniString ( iniStr , ini ) ; <nl> + <nl> + std : : vector < std : : shared_ptr < SatelliteServerInfo > > infos ; <nl> + RuntimeOption : : ReadSatelliteInfo ( ini , empty , infos , <nl> + RuntimeOption : : XboxPassword , <nl> + RuntimeOption : : XboxPasswords ) ; <nl> + for ( auto & info_ptr : infos ) { <nl> + auto info = info_ptr . get ( ) ; <nl> + auto name = info - > getName ( ) ; <nl> if ( name = = " rpc " ) { <nl> - VERIFY ( info . getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) ; <nl> - VERIFY ( info . getPort ( ) = = 9999 ) ; <nl> - VERIFY ( info . getThreadCount ( ) = = 5 ) ; <nl> - VERIFY ( info . getTimeoutSeconds ( ) = = <nl> + VERIFY ( info - > getType ( ) = = SatelliteServer : : Type : : KindOfRPCServer ) ; <nl> + VERIFY ( info - > getPort ( ) = = 9999 ) ; <nl> + VERIFY ( info - > getThreadCount ( ) = = 5 ) ; <nl> + VERIFY ( info - > getTimeoutSeconds ( ) = = <nl> std : : chrono : : seconds ( RuntimeOption : : RequestTimeoutSeconds ) ) ; <nl> - VERIFY ( info . getURLs ( ) . size ( ) = = 0 ) ; <nl> - VERIFY ( info . getMaxRequest ( ) = = 500 ) ; <nl> - VERIFY ( info . getMaxDuration ( ) = = 120 ) ; <nl> - VERIFY ( info . getReqInitFunc ( ) = = " init_me " ) ; <nl> - VERIFY ( info . getReqInitDoc ( ) = = " my / rpc / rpc . php " ) ; <nl> - VERIFY ( info . getPassword ( ) = = " abcd0987 " ) ; <nl> - VERIFY ( info . getPasswords ( ) . size ( ) = = 1 ) ; <nl> - VERIFY ( info . getPasswords ( ) . find ( " abcd0987 " ) ! = info . getPasswords ( ) . end ( ) ) ; <nl> - VERIFY ( info . alwaysReset ( ) = = false ) ; <nl> + VERIFY ( info - > getURLs ( ) . size ( ) = = 0 ) ; <nl> + VERIFY ( info - > getMaxRequest ( ) = = 500 ) ; <nl> + VERIFY ( info - > getMaxDuration ( ) = = 120 ) ; <nl> + VERIFY ( info - > getReqInitFunc ( ) = = " init_me " ) ; <nl> + VERIFY ( info - > getReqInitDoc ( ) = = " my / rpc / rpc . php " ) ; <nl> + VERIFY ( info - > getPassword ( ) = = " abcd0987 " ) ; <nl> + VERIFY ( info - > getPasswords ( ) . size ( ) = = 1 ) ; <nl> + VERIFY ( info - > getPasswords ( ) . find ( " abcd0987 " ) ! = <nl> + info - > getPasswords ( ) . end ( ) ) ; <nl> + VERIFY ( info - > alwaysReset ( ) = = false ) ; <nl> VERIFY ( RuntimeOption : : XboxPassword = = " abcd0987 " ) ; <nl> } else if ( name = = " ips " ) { <nl> - VERIFY ( info . getType ( ) = = SatelliteServer : : Type : : KindOfInternalPageServer ) ; <nl> - VERIFY ( info . getURLs ( ) . size ( ) = = 0 ) ; <nl> + VERIFY ( info - > getType ( ) = = <nl> + SatelliteServer : : Type : : KindOfInternalPageServer ) ; <nl> + VERIFY ( info - > getURLs ( ) . size ( ) = = 0 ) ; <nl> } <nl> } <nl> return Count ( true ) ; <nl> mmm a / hphp / test / ext / test_cpp_base . h <nl> ppp b / hphp / test / ext / test_cpp_base . 
h <nl> class TestCppBase : public TestBase { <nl> bool TestIpBlockMap ( ) ; <nl> bool TestIpBlockMapIni ( ) ; <nl> bool TestSatelliteServer ( ) ; <nl> + bool TestSatelliteServerIni ( ) ; <nl> bool TestVirtualHost ( ) ; <nl> bool TestVirtualHostIni ( ) ; <nl> bool TestCollectionHdf ( ) ; <nl> | Make SatelliteServer ini - aware | facebook/hhvm | ebb75b2cf901545422ec9a68c47bd07cda57bbc4 | 2015-08-17T04:30:25Z |
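The HHVM change above routes satellite parsing through `Config::Iterate`, which invokes one callback per entry under `Satellites` regardless of whether the entry came from an Hdf tree or an ini map, passing the ini key so the satellite keeps its name (`rpc`, `ips`) in both cases. The sketch below shows that callback-iteration shape with plain standard-library types; `Entry`, `Section`, and `iterateSection` are illustrative stand-ins, not HHVM's real `Config`, `Hdf`, or `IniSettingMap` API.

```cpp
// Generic sketch of the "iterate a config section, hand each entry to a
// callback" shape that ReadSatelliteInfo relies on. Types are plain
// standard-library stand-ins, not HHVM's configuration classes.
#include <functional>
#include <iostream>
#include <map>
#include <string>

using Entry   = std::map<std::string, std::string>;  // one satellite's settings
using Section = std::map<std::string, Entry>;        // everything under "Satellites"

// Walk a section and invoke the callback with (settings, key) per entry,
// the same pair the lambda in ReadSatelliteInfo receives.
void iterateSection(const Section& section,
                    const std::function<void(const Entry&, const std::string&)>& cb) {
  for (const auto& [key, entry] : section) {
    cb(entry, key);
  }
}

int main() {
  Section satellites = {
      {"rpc", {{"type", "RPCServer"}, {"port", "9999"}, {"password", "abcd0987"}}},
      {"ips", {{"type", "InternalPageServer"}, {"block_main_server", "false"}}},
  };

  iterateSection(satellites, [](const Entry& entry, const std::string& key) {
    // In the real code this would construct a SatelliteServerInfo(ini, hdf, key);
    // here we only show that the key supplies the satellite's name.
    std::cout << key << " -> " << entry.at("type") << "\n";
  });
}
```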
mmm a / Kodi . xcodeproj / project . pbxproj <nl> ppp b / Kodi . xcodeproj / project . pbxproj <nl> <nl> 7CE3FB911C9D40EA00366A4C / * ServiceBroker . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CE3FB8E1C9D40EA00366A4C / * ServiceBroker . cpp * / ; } ; <nl> 7CE514AA1CD5154A0046BC5C / * GUIDialogKeyboardTouch . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CE514A81CD5154A0046BC5C / * GUIDialogKeyboardTouch . cpp * / ; } ; <nl> 7CE514AB1CD5154A0046BC5C / * GUIDialogKeyboardTouch . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CE514A81CD5154A0046BC5C / * GUIDialogKeyboardTouch . cpp * / ; } ; <nl> + 7CE5D0B41D37EB6900211428 / * ActiveAEFilter . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CE5D0B21D37EB6900211428 / * ActiveAEFilter . cpp * / ; } ; <nl> + 7CE5D0B51D37EB6900211428 / * ActiveAEFilter . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CE5D0B21D37EB6900211428 / * ActiveAEFilter . cpp * / ; } ; <nl> 7CEBD8A80F33A0D800CAF6AD / * SpecialProtocolDirectory . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 7CEBD8A60F33A0D800CAF6AD / * SpecialProtocolDirectory . cpp * / ; } ; <nl> 7CED59391CD340460093F573 / * VideoToolbox . framework in Frameworks * / = { isa = PBXBuildFile ; fileRef = 7CED59381CD340460093F573 / * VideoToolbox . framework * / ; } ; <nl> 7CED593A1CD340460093F573 / * VideoToolbox . framework in Frameworks * / = { isa = PBXBuildFile ; fileRef = 7CED59381CD340460093F573 / * VideoToolbox . framework * / ; } ; <nl> <nl> 7CE3FB8F1C9D40EA00366A4C / * ServiceBroker . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = ServiceBroker . h ; sourceTree = " < group > " ; } ; <nl> 7CE514A81CD5154A0046BC5C / * GUIDialogKeyboardTouch . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = GUIDialogKeyboardTouch . cpp ; sourceTree = " < group > " ; } ; <nl> 7CE514A91CD5154A0046BC5C / * GUIDialogKeyboardTouch . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = GUIDialogKeyboardTouch . h ; sourceTree = " < group > " ; } ; <nl> + 7CE5D0B21D37EB6900211428 / * ActiveAEFilter . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = ActiveAEFilter . cpp ; sourceTree = " < group > " ; } ; <nl> + 7CE5D0B31D37EB6900211428 / * ActiveAEFilter . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = ActiveAEFilter . h ; sourceTree = " < group > " ; } ; <nl> 7CEBD8A60F33A0D800CAF6AD / * SpecialProtocolDirectory . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = SpecialProtocolDirectory . cpp ; sourceTree = " < group > " ; } ; <nl> 7CEBD8A70F33A0D800CAF6AD / * SpecialProtocolDirectory . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = SpecialProtocolDirectory . h ; sourceTree = " < group > " ; } ; <nl> 7CED59381CD340460093F573 / * VideoToolbox . framework * / = { isa = PBXFileReference ; lastKnownFileType = wrapper . framework ; name = VideoToolbox . framework ; path = System / Library / Frameworks / VideoToolbox . framework ; sourceTree = SDKROOT ; } ; <nl> <nl> F5CC22D41814FF3B006B5E91 / * ActiveAE . h * / , <nl> F5CC22D51814FF3B006B5E91 / * ActiveAEBuffer . cpp * / , <nl> F5CC22D61814FF3B006B5E91 / * ActiveAEBuffer . h * / , <nl> + 7CE5D0B21D37EB6900211428 / * ActiveAEFilter . 
cpp * / , <nl> + 7CE5D0B31D37EB6900211428 / * ActiveAEFilter . h * / , <nl> DF32466019E931A8005E8CFB / * ActiveAEResampleFFMPEG . cpp * / , <nl> DF32466119E931A8005E8CFB / * ActiveAEResampleFFMPEG . h * / , <nl> F5CC22D91814FF3B006B5E91 / * ActiveAESink . cpp * / , <nl> <nl> C84828DE156CFCD8005A996F / * GUIWindowPVRBase . cpp in Sources * / , <nl> C84828DF156CFCD8005A996F / * GUIWindowPVRChannels . cpp in Sources * / , <nl> C84828E1156CFCD8005A996F / * GUIWindowPVRGuide . cpp in Sources * / , <nl> + 7CE5D0B41D37EB6900211428 / * ActiveAEFilter . cpp in Sources * / , <nl> C84828E2156CFCD8005A996F / * GUIWindowPVRRecordings . cpp in Sources * / , <nl> C84828E3156CFCD8005A996F / * GUIWindowPVRSearch . cpp in Sources * / , <nl> C84828E4156CFCD8005A996F / * GUIWindowPVRTimers . cpp in Sources * / , <nl> <nl> E49913F7174E5FB000741B6D / * PVRChannelGroups . cpp in Sources * / , <nl> E49913F8174E5FB000741B6D / * PVRChannelGroupsContainer . cpp in Sources * / , <nl> E49913F9174E5FB000741B6D / * GUIDialogPVRChannelManager . cpp in Sources * / , <nl> + 7CE5D0B51D37EB6900211428 / * ActiveAEFilter . cpp in Sources * / , <nl> E49913FA174E5FB000741B6D / * GUIDialogPVRChannelsOSD . cpp in Sources * / , <nl> E49913FD174E5FB000741B6D / * GUIDialogPVRGroupManager . cpp in Sources * / , <nl> E49913FE174E5FB000741B6D / * GUIDialogPVRGuideInfo . cpp in Sources * / , <nl> mmm a / project / VS2010Express / XBMC . vcxproj <nl> ppp b / project / VS2010Express / XBMC . vcxproj <nl> copy " . . \ Win32BuildSetup \ dependencies \ python27 . dll " " $ ( TargetDir ) " < / Command > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Encoders \ AEEncoderFFmpeg . cpp " / > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAE . cpp " / > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEBuffer . cpp " / > <nl> + < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEFilter . cpp " / > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEResampleFFMPEG . cpp " / > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAESink . cpp " / > <nl> < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAESound . cpp " / > <nl> copy " . . \ Win32BuildSetup \ dependencies \ python27 . dll " " $ ( TargetDir ) " < / Command > <nl> < ClInclude Include = " . . \ . . \ xbmc \ contrib \ kissfft \ kiss_fft . h " / > <nl> < ClInclude Include = " . . \ . . \ xbmc \ contrib \ kissfft \ kiss_fftr . h " / > <nl> < ClInclude Include = " . . \ . . \ xbmc \ contrib \ kissfft \ _kiss_fft_guts . h " / > <nl> + < ClInclude Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEFilter . h " / > <nl> < ClInclude Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ AudioDSPAddons \ ActiveAEDSP . h " / > <nl> < ClInclude Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ AudioDSPAddons \ ActiveAEDSPAddon . h " / > <nl> < ClInclude Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ AudioDSPAddons \ ActiveAEDSPDatabase . h " / > <nl> mmm a / project / VS2010Express / XBMC . vcxproj . filters <nl> ppp b / project / VS2010Express / XBMC . vcxproj . filters <nl> <nl> < ClCompile Include = " . . \ . . \ xbmc \ dialogs \ GUIDialogPlayerProcessInfo . 
cpp " > <nl> < Filter > dialogs < / Filter > <nl> < / ClCompile > <nl> + < ClCompile Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEFilter . cpp " > <nl> + < Filter > cores \ AudioEngine \ Engines \ ActiveAE < / Filter > <nl> + < / ClCompile > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> < ClCompile Include = " . . \ . . \ xbmc \ media \ MediaType . cpp " > <nl> <nl> < / ClInclude > <nl> < ClInclude Include = " . . \ . . \ xbmc \ dialogs \ GUIDialogPlayerProcessInfo . h " > <nl> < Filter > dialogs < / Filter > <nl> + < / ClCompile > <nl> + < ClInclude Include = " . . \ . . \ xbmc \ cores \ AudioEngine \ Engines \ ActiveAE \ ActiveAEFilter . h " > <nl> + < Filter > cores \ AudioEngine \ Engines \ ActiveAE < / Filter > <nl> < / ClInclude > <nl> < / ItemGroup > <nl> < ItemGroup > <nl> <nl> < Filter > shaders < / Filter > <nl> < / FxCompile > <nl> < / ItemGroup > <nl> - < / Project > <nl> \ No newline at end of file <nl> + < / Project > <nl> mmm a / xbmc / cores / AudioEngine / CMakeLists . txt <nl> ppp b / xbmc / cores / AudioEngine / CMakeLists . txt <nl> set ( SOURCES AEFactory . cpp <nl> Encoders / AEEncoderFFmpeg . cpp <nl> Engines / ActiveAE / ActiveAE . cpp <nl> Engines / ActiveAE / ActiveAEBuffer . cpp <nl> + Engines / ActiveAE / ActiveAEFilter . cpp <nl> Engines / ActiveAE / ActiveAESink . cpp <nl> Engines / ActiveAE / ActiveAEStream . cpp <nl> Engines / ActiveAE / ActiveAESound . cpp <nl> set ( HEADERS AEFactory . h <nl> Encoders / AEEncoderFFmpeg . h <nl> Engines / ActiveAE / ActiveAE . h <nl> Engines / ActiveAE / ActiveAEBuffer . h <nl> + Engines / ActiveAE / ActiveAEFilter . h <nl> Engines / ActiveAE / ActiveAESink . h <nl> Engines / ActiveAE / ActiveAESound . h <nl> Engines / ActiveAE / ActiveAEStream . h <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAE . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAE . cpp <nl> void CActiveAE : : Configure ( AEAudioFormat * desiredFmt ) <nl> } <nl> if ( initSink & & ( * it ) - > m_processingBuffers ) <nl> { <nl> + ( * it ) - > m_processingBuffers - > Flush ( ) ; <nl> m_discardBufferPools . push_back ( ( * it ) - > m_processingBuffers - > GetResampleBuffers ( ) ) ; <nl> + m_discardBufferPools . push_back ( ( * it ) - > m_processingBuffers - > GetAtempoBuffers ( ) ) ; <nl> delete ( * it ) - > m_processingBuffers ; <nl> ( * it ) - > m_processingBuffers = nullptr ; <nl> } <nl> void CActiveAE : : DiscardStream ( CActiveAEStream * stream ) <nl> if ( ( * it ) - > m_inputBuffers ) <nl> m_discardBufferPools . push_back ( ( * it ) - > m_inputBuffers ) ; <nl> if ( ( * it ) - > m_processingBuffers ) <nl> + { <nl> + ( * it ) - > m_processingBuffers - > Flush ( ) ; <nl> m_discardBufferPools . push_back ( ( * it ) - > m_processingBuffers - > GetResampleBuffers ( ) ) ; <nl> + m_discardBufferPools . push_back ( ( * it ) - > m_processingBuffers - > GetAtempoBuffers ( ) ) ; <nl> + } <nl> delete ( * it ) - > m_processingBuffers ; <nl> CLog : : Log ( LOGDEBUG , " CActiveAE : : DiscardStream - audio stream deleted " ) ; <nl> m_stats . RemoveStream ( ( * it ) - > m_id ) ; <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . cpp <nl> <nl> * / <nl> <nl> # include " ActiveAEBuffer . h " <nl> + # include " ActiveAEFilter . h " <nl> # include " cores / AudioEngine / AEFactory . h " <nl> # include " cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSPProcess . 
h " <nl> # include " cores / AudioEngine / Engines / ActiveAE / ActiveAE . h " <nl> bool CActiveAEBufferPool : : Create ( unsigned int totaltime ) <nl> return true ; <nl> } <nl> <nl> - / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + / / Resample <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> <nl> CActiveAEBufferPoolResample : : CActiveAEBufferPoolResample ( AEAudioFormat inputFormat , AEAudioFormat outputFormat , AEQuality quality ) <nl> : CActiveAEBufferPool ( outputFormat ) <nl> CActiveAEBufferPoolResample : : CActiveAEBufferPoolResample ( AEAudioFormat inputForm <nl> <nl> CActiveAEBufferPoolResample : : ~ CActiveAEBufferPoolResample ( ) <nl> { <nl> + Flush ( ) ; <nl> + <nl> delete m_resampler ; <nl> + <nl> if ( m_useDSP ) <nl> CServiceBroker : : GetADSP ( ) . DestroyDSPs ( m_streamId ) ; <nl> if ( m_dspBuffer ) <nl> void CActiveAEBufferPoolResample : : SetDSPConfig ( bool usedsp , bool bypassdsp ) <nl> m_useDSP = usedsp ; <nl> m_bypassDSP = bypassdsp ; <nl> } <nl> + <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + / / Atempo <nl> + / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> + <nl> + CActiveAEBufferPoolAtempo : : CActiveAEBufferPoolAtempo ( AEAudioFormat format ) : CActiveAEBufferPool ( format ) <nl> + { <nl> + m_drain = false ; <nl> + m_empty = true ; <nl> + m_tempo = 1 . 0 ; <nl> + m_changeFilter = false ; <nl> + m_procSample = nullptr ; <nl> + } <nl> + <nl> + CActiveAEBufferPoolAtempo : : ~ CActiveAEBufferPoolAtempo ( ) <nl> + { <nl> + Flush ( ) ; <nl> + } <nl> + <nl> + bool CActiveAEBufferPoolAtempo : : Create ( unsigned int totaltime ) <nl> + { <nl> + CActiveAEBufferPool : : Create ( totaltime ) ; <nl> + <nl> + m_pTempoFilter . reset ( new CActiveAEFilter ( ) ) ; <nl> + m_pTempoFilter - > Init ( CAEUtil : : GetAVSampleFormat ( m_format . m_dataFormat ) , m_format . m_sampleRate , CAEUtil : : GetAVChannelLayout ( m_format . m_channelLayout ) ) ; <nl> + <nl> + return true ; <nl> + } <nl> + <nl> + void CActiveAEBufferPoolAtempo : : ChangeFilter ( ) <nl> + { <nl> + m_pTempoFilter - > SetTempo ( m_tempo ) ; <nl> + m_changeFilter = false ; <nl> + } <nl> + <nl> + bool CActiveAEBufferPoolAtempo : : ProcessBuffers ( ) <nl> + { <nl> + bool busy = false ; <nl> + CSampleBuffer * in ; <nl> + <nl> + if ( ! m_pTempoFilter - > IsActive ( ) ) <nl> + { <nl> + if ( m_changeFilter ) <nl> + { <nl> + if ( m_changeFilter ) <nl> + ChangeFilter ( ) ; <nl> + return true ; <nl> + } <nl> + while ( ! m_inputSamples . empty ( ) ) <nl> + { <nl> + in = m_inputSamples . front ( ) ; <nl> + m_inputSamples . pop_front ( ) ; <nl> + m_outputSamples . push_back ( in ) ; <nl> + busy = true ; <nl> + } <nl> + } <nl> + else if ( m_procSample | | ! m_freeSamples . empty ( ) ) <nl> + { <nl> + int free_samples ; <nl> + if ( m_procSample ) <nl> + free_samples = m_procSample - > pkt - > max_nb_samples - m_procSample - > pkt - > nb_samples ; <nl> + else <nl> + free_samples = m_format . m_frames ; <nl> + <nl> + bool skipInput = false ; <nl> + <nl> + / / avoid that bufferscr grows too large <nl> + if ( ! m_pTempoFilter - > NeedData ( ) ) <nl> + skipInput = true ; <nl> + <nl> + bool hasInput = ! m_inputSamples . empty ( ) ; <nl> + <nl> + if ( hasInput | | skipInput | | m_drain | | m_changeFilter ) <nl> + { <nl> + if ( ! 
m_procSample ) <nl> + { <nl> + m_procSample = GetFreeBuffer ( ) ; <nl> + } <nl> + <nl> + if ( hasInput & & ! skipInput & & ! m_changeFilter ) <nl> + { <nl> + in = m_inputSamples . front ( ) ; <nl> + m_inputSamples . pop_front ( ) ; <nl> + } <nl> + else <nl> + in = nullptr ; <nl> + <nl> + int start = m_procSample - > pkt - > nb_samples * <nl> + m_procSample - > pkt - > bytes_per_sample * <nl> + m_procSample - > pkt - > config . channels / <nl> + m_procSample - > pkt - > planes ; <nl> + <nl> + for ( int i = 0 ; i < m_procSample - > pkt - > planes ; i + + ) <nl> + { <nl> + m_planes [ i ] = m_procSample - > pkt - > data [ i ] + start ; <nl> + } <nl> + <nl> + int out_samples = m_pTempoFilter - > ProcessFilter ( m_planes , <nl> + m_procSample - > pkt - > max_nb_samples - m_procSample - > pkt - > nb_samples , <nl> + in ? in - > pkt - > data : nullptr , <nl> + in ? in - > pkt - > nb_samples : 0 , <nl> + in ? in - > pkt - > linesize * in - > pkt - > planes : 0 ) ; <nl> + <nl> + / / in case of error , trigger re - create of filter <nl> + if ( out_samples < 0 ) <nl> + { <nl> + out_samples = 0 ; <nl> + m_changeFilter = true ; <nl> + } <nl> + <nl> + m_procSample - > pkt - > nb_samples + = out_samples ; <nl> + busy = true ; <nl> + m_empty = m_pTempoFilter - > IsEof ( ) ; <nl> + <nl> + if ( in ) <nl> + { <nl> + if ( in - > timestamp ) <nl> + m_lastSamplePts = in - > timestamp ; <nl> + else <nl> + in - > pkt_start_offset = 0 ; <nl> + <nl> + / / pts of last sample we added to the buffer <nl> + m_lastSamplePts + = ( in - > pkt - > nb_samples - in - > pkt_start_offset ) * 1000 / m_format . m_sampleRate ; <nl> + } <nl> + <nl> + / / calculate pts for last sample in m_procSample <nl> + int bufferedSamples = m_pTempoFilter - > GetBufferedSamples ( ) ; <nl> + m_procSample - > pkt_start_offset = m_procSample - > pkt - > nb_samples ; <nl> + m_procSample - > timestamp = m_lastSamplePts - bufferedSamples * 1000 / m_format . m_sampleRate ; <nl> + <nl> + if ( ( m_drain | | m_changeFilter ) & & m_empty ) <nl> + { <nl> + if ( m_fillPackets & & m_procSample - > pkt - > nb_samples ! = 0 ) <nl> + { <nl> + / / pad with zero <nl> + start = m_procSample - > pkt - > nb_samples * <nl> + m_procSample - > pkt - > bytes_per_sample * <nl> + m_procSample - > pkt - > config . channels / <nl> + m_procSample - > pkt - > planes ; <nl> + for ( int i = 0 ; i < m_procSample - > pkt - > planes ; i + + ) <nl> + { <nl> + memset ( m_procSample - > pkt - > data [ i ] + start , 0 , m_procSample - > pkt - > linesize - start ) ; <nl> + } <nl> + } <nl> + <nl> + / / check if draining is finished <nl> + if ( m_drain & & m_procSample - > pkt - > nb_samples = = 0 ) <nl> + { <nl> + m_procSample - > Return ( ) ; <nl> + busy = false ; <nl> + } <nl> + else <nl> + m_outputSamples . push_back ( m_procSample ) ; <nl> + <nl> + m_procSample = nullptr ; <nl> + <nl> + if ( m_changeFilter ) <nl> + { <nl> + ChangeFilter ( ) ; <nl> + } <nl> + } <nl> + / / some methods like encode require completely filled packets <nl> + else if ( ! m_fillPackets | | ( m_procSample - > pkt - > nb_samples = = m_procSample - > pkt - > max_nb_samples ) ) <nl> + { <nl> + m_outputSamples . push_back ( m_procSample ) ; <nl> + m_procSample = nullptr ; <nl> + } <nl> + <nl> + if ( in ) <nl> + in - > Return ( ) ; <nl> + } <nl> + } <nl> + return busy ; <nl> + } <nl> + <nl> + void CActiveAEBufferPoolAtempo : : Flush ( ) <nl> + { <nl> + if ( m_procSample ) <nl> + { <nl> + m_procSample - > Return ( ) ; <nl> + m_procSample = nullptr ; <nl> + } <nl> + while ( ! m_inputSamples . 
empty ( ) ) <nl> + { <nl> + m_inputSamples . front ( ) - > Return ( ) ; <nl> + m_inputSamples . pop_front ( ) ; <nl> + } <nl> + while ( ! m_outputSamples . empty ( ) ) <nl> + { <nl> + m_outputSamples . front ( ) - > Return ( ) ; <nl> + m_outputSamples . pop_front ( ) ; <nl> + } <nl> + if ( m_pTempoFilter ) <nl> + ChangeFilter ( ) ; <nl> + } <nl> + <nl> + float CActiveAEBufferPoolAtempo : : GetDelay ( ) <nl> + { <nl> + float delay = 0 ; <nl> + <nl> + if ( m_procSample ) <nl> + delay + = ( float ) m_procSample - > pkt - > nb_samples / m_procSample - > pkt - > config . sample_rate ; <nl> + <nl> + for ( auto & buf : m_inputSamples ) <nl> + { <nl> + delay + = ( float ) buf - > pkt - > nb_samples / buf - > pkt - > config . sample_rate ; <nl> + } <nl> + <nl> + for ( auto & buf : m_outputSamples ) <nl> + { <nl> + delay + = ( float ) buf - > pkt - > nb_samples / buf - > pkt - > config . sample_rate ; <nl> + } <nl> + <nl> + if ( m_pTempoFilter - > IsActive ( ) ) <nl> + { <nl> + int samples = m_pTempoFilter - > GetBufferedSamples ( ) ; <nl> + delay + = ( float ) samples / m_format . m_sampleRate ; <nl> + } <nl> + <nl> + return delay ; <nl> + } <nl> + <nl> + void CActiveAEBufferPoolAtempo : : SetTempo ( float tempo ) <nl> + { <nl> + if ( tempo > 2 . 0 ) <nl> + tempo = 2 . 0 ; <nl> + else if ( tempo < 0 . 5 ) <nl> + tempo = 0 . 5 ; <nl> + <nl> + if ( tempo ! = m_tempo ) <nl> + m_changeFilter = true ; <nl> + <nl> + m_tempo = tempo ; <nl> + } <nl> + <nl> + float CActiveAEBufferPoolAtempo : : GetTempo ( ) <nl> + { <nl> + return m_tempo ; <nl> + } <nl> + <nl> + void CActiveAEBufferPoolAtempo : : FillBuffer ( ) <nl> + { <nl> + m_fillPackets = true ; <nl> + } <nl> + <nl> + void CActiveAEBufferPoolAtempo : : SetDrain ( bool drain ) <nl> + { <nl> + m_drain = drain ; <nl> + if ( ! m_drain ) <nl> + m_changeFilter = true ; <nl> + } <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . h <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEBuffer . h <nl> <nl> # include " cores / AudioEngine / Interfaces / AE . h " <nl> # include " cores / AudioEngine / Engines / ActiveAE / AudioDSPAddons / ActiveAEDSP . h " <nl> # include < deque > <nl> + # include < memory > <nl> <nl> extern " C " { <nl> # include " libavutil / avutil . h " <nl> class CActiveAEBufferPoolResample : public CActiveAEBufferPool <nl> bool m_bypassDSP ; <nl> } ; <nl> <nl> + class CActiveAEFilter ; <nl> + <nl> + class CActiveAEBufferPoolAtempo : public CActiveAEBufferPool <nl> + { <nl> + public : <nl> + CActiveAEBufferPoolAtempo ( AEAudioFormat format ) ; <nl> + virtual ~ CActiveAEBufferPoolAtempo ( ) ; <nl> + bool Create ( unsigned int totaltime ) override ; <nl> + bool ProcessBuffers ( ) ; <nl> + float GetDelay ( ) ; <nl> + void Flush ( ) ; <nl> + void SetTempo ( float tempo ) ; <nl> + float GetTempo ( ) ; <nl> + void FillBuffer ( ) ; <nl> + void SetDrain ( bool drain ) ; <nl> + std : : deque < CSampleBuffer * > m_inputSamples ; <nl> + std : : deque < CSampleBuffer * > m_outputSamples ; <nl> + <nl> + protected : <nl> + void ChangeFilter ( ) ; <nl> + std : : unique_ptr < CActiveAEFilter > m_pTempoFilter ; <nl> + uint8_t * m_planes [ 16 ] ; <nl> + CSampleBuffer * m_procSample ; <nl> + bool m_empty ; <nl> + bool m_drain ; <nl> + bool m_changeFilter ; <nl> + float m_tempo ; <nl> + int64_t m_lastSamplePts ; <nl> + bool m_fillPackets ; <nl> + } ; <nl> + <nl> } <nl> new file mode 100644 <nl> index 000000000000 . . 
e721effbd5a8 <nl> mmm / dev / null <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEFilter . cpp <nl> <nl> + / * <nl> + * Copyright ( C ) 2010 - 2016 Team Kodi <nl> + * http : / / xbmc . org <nl> + * <nl> + * This Program is free software ; you can redistribute it and / or modify <nl> + * it under the terms of the GNU General Public License as published by <nl> + * the Free Software Foundation ; either version 2 , or ( at your option ) <nl> + * any later version . <nl> + * <nl> + * This Program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU General Public License <nl> + * along with XBMC ; see the file COPYING . If not , see <nl> + * < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * / <nl> + <nl> + # include " ActiveAEFilter . h " <nl> + # include " utils / log . h " <nl> + # include " utils / StringUtils . h " <nl> + # include < algorithm > <nl> + <nl> + extern " C " { <nl> + # include " libavfilter / avfilter . h " <nl> + # include " libavfilter / buffersink . h " <nl> + # include " libavfilter / buffersrc . h " <nl> + # include " libswresample / swresample . h " <nl> + } <nl> + <nl> + using namespace ActiveAE ; <nl> + <nl> + CActiveAEFilter : : CActiveAEFilter ( ) <nl> + { <nl> + m_pFilterGraph = nullptr ; <nl> + m_pFilterCtxIn = nullptr ; <nl> + m_pFilterCtxOut = nullptr ; <nl> + m_pOutFrame = nullptr ; <nl> + m_pConvertCtx = nullptr ; <nl> + m_pConvertFrame = nullptr ; <nl> + m_needConvert = false ; <nl> + } <nl> + <nl> + CActiveAEFilter : : ~ CActiveAEFilter ( ) <nl> + { <nl> + CloseFilter ( ) ; <nl> + } <nl> + <nl> + void CActiveAEFilter : : Init ( AVSampleFormat fmt , int sampleRate , uint64_t channelLayout ) <nl> + { <nl> + m_sampleFormat = fmt ; <nl> + m_sampleRate = sampleRate ; <nl> + m_channelLayout = channelLayout ; <nl> + m_tempo = 1 . 0 ; <nl> + m_bufferedSamples = 0 ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : SetTempo ( float tempo ) <nl> + { <nl> + m_tempo = tempo ; <nl> + if ( m_tempo = = 1 . 0 ) <nl> + { <nl> + CloseFilter ( ) ; <nl> + return true ; <nl> + } <nl> + <nl> + if ( ! CreateFilterGraph ( ) ) <nl> + return false ; <nl> + <nl> + if ( ! CreateAtempoFilter ( ) ) <nl> + { <nl> + CloseFilter ( ) ; <nl> + return false ; <nl> + } <nl> + <nl> + m_bufferedSamples = 0 ; <nl> + return true ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : CreateFilterGraph ( ) <nl> + { <nl> + CloseFilter ( ) ; <nl> + <nl> + m_pFilterGraph = avfilter_graph_alloc ( ) ; <nl> + if ( ! m_pFilterGraph ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateFilterGraph - unable to alloc filter graph " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + AVFilter * srcFilter = avfilter_get_by_name ( " abuffer " ) ; <nl> + AVFilter * outFilter = avfilter_get_by_name ( " abuffersink " ) ; <nl> + <nl> + std : : string args = StringUtils : : Format ( " time_base = 1 / % d : sample_rate = % d : sample_fmt = % s : channel_layout = 0x % " PRIx64 , <nl> + m_sampleRate , <nl> + m_sampleRate , <nl> + av_get_sample_fmt_name ( m_sampleFormat ) , <nl> + m_channelLayout ) ; <nl> + <nl> + int ret = avfilter_graph_create_filter ( & m_pFilterCtxIn , srcFilter , " in " , args . 
c_str ( ) , NULL , m_pFilterGraph ) ; <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateFilterGraph - avfilter_graph_create_filter : src " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + ret = avfilter_graph_create_filter ( & m_pFilterCtxOut , outFilter , " out " , NULL , NULL , m_pFilterGraph ) ; <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateFilterGraph - avfilter_graph_create_filter : out " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + m_pOutFrame = av_frame_alloc ( ) ; <nl> + <nl> + return true ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : CreateAtempoFilter ( ) <nl> + { <nl> + AVFilter * atempo ; <nl> + <nl> + atempo = avfilter_get_by_name ( " atempo " ) ; <nl> + m_pFilterCtxAtempo = avfilter_graph_alloc_filter ( m_pFilterGraph , atempo , " atempo " ) ; <nl> + std : : string args = StringUtils : : Format ( " tempo = % f " , m_tempo ) ; <nl> + int ret = avfilter_init_str ( m_pFilterCtxAtempo , args . c_str ( ) ) ; <nl> + <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateAtempoFilter - avfilter_init_str failed " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + ret = avfilter_link ( m_pFilterCtxIn , 0 , m_pFilterCtxAtempo , 0 ) ; <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateAtempoFilter - avfilter_link failed for in filter " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + ret = avfilter_link ( m_pFilterCtxAtempo , 0 , m_pFilterCtxOut , 0 ) ; <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateAtempoFilter - avfilter_link failed for out filter " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + ret = avfilter_graph_config ( m_pFilterGraph , NULL ) ; <nl> + if ( ret < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : CreateAtempoFilter - avfilter_graph_config failed " ) ; <nl> + return false ; <nl> + } <nl> + <nl> + m_needConvert = false ; <nl> + if ( m_pFilterCtxAtempo - > outputs [ 0 ] - > format ! = m_sampleFormat ) <nl> + { <nl> + m_needConvert = true ; <nl> + m_pConvertCtx = swr_alloc ( ) ; <nl> + m_pConvertFrame = av_frame_alloc ( ) ; <nl> + } <nl> + <nl> + m_hasData = false ; <nl> + m_needData = true ; <nl> + m_filterEof = false ; <nl> + <nl> + return true ; <nl> + } <nl> + <nl> + void CActiveAEFilter : : CloseFilter ( ) <nl> + { <nl> + if ( m_pFilterGraph ) <nl> + { <nl> + avfilter_graph_free ( & m_pFilterGraph ) ; <nl> + <nl> + m_pFilterCtxIn = nullptr ; <nl> + m_pFilterCtxOut = nullptr ; <nl> + } <nl> + <nl> + if ( m_pOutFrame ) <nl> + av_frame_free ( & m_pOutFrame ) ; <nl> + <nl> + if ( m_pConvertFrame ) <nl> + av_frame_free ( & m_pConvertFrame ) ; <nl> + <nl> + if ( m_pConvertCtx ) <nl> + swr_free ( & m_pConvertCtx ) ; <nl> + <nl> + m_bufferedSamples = 0 ; <nl> + } <nl> + <nl> + int CActiveAEFilter : : ProcessFilter ( uint8_t * * dst_buffer , int dst_samples , uint8_t * * src_buffer , int src_samples , int src_bufsize ) <nl> + { <nl> + int result ; <nl> + <nl> + if ( src_samples ) <nl> + { <nl> + m_bufferedSamples + = src_samples ; <nl> + <nl> + AVFrame * frame = av_frame_alloc ( ) ; <nl> + if ( ! 
frame ) <nl> + return - 1 ; <nl> + <nl> + int channels = av_get_channel_layout_nb_channels ( m_channelLayout ) ; <nl> + <nl> + av_frame_set_channel_layout ( frame , m_channelLayout ) ; <nl> + av_frame_set_channels ( frame , channels ) ; <nl> + av_frame_set_sample_rate ( frame , m_sampleRate ) ; <nl> + frame - > nb_samples = src_samples ; <nl> + frame - > format = m_sampleFormat ; <nl> + <nl> + result = avcodec_fill_audio_frame ( frame , channels , m_sampleFormat , <nl> + src_buffer [ 0 ] , src_bufsize , 16 ) ; <nl> + if ( result < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : ProcessFilter - avcodec_fill_audio_frame failed " ) ; <nl> + return - 1 ; <nl> + } <nl> + <nl> + result = av_buffersrc_write_frame ( m_pFilterCtxIn , frame ) ; <nl> + av_frame_free ( & frame ) ; <nl> + if ( result < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : ProcessFilter - av_buffersrc_add_frame failed " ) ; <nl> + return - 1 ; <nl> + } <nl> + } <nl> + else if ( ! m_filterEof & & m_needData ) <nl> + { <nl> + result = av_buffersrc_write_frame ( m_pFilterCtxIn , nullptr ) ; <nl> + if ( result < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : ProcessFilter - av_buffersrc_add_frame " ) ; <nl> + return - 1 ; <nl> + } <nl> + } <nl> + <nl> + if ( ! m_hasData ) <nl> + { <nl> + m_needData = false ; <nl> + AVFrame * outFrame = m_needConvert ? m_pConvertFrame : m_pOutFrame ; <nl> + <nl> + result = av_buffersink_get_frame ( m_pFilterCtxOut , outFrame ) ; <nl> + <nl> + if ( result = = AVERROR ( EAGAIN ) ) <nl> + { <nl> + m_needData = true ; <nl> + return 0 ; <nl> + } <nl> + else if ( result = = AVERROR_EOF ) <nl> + { <nl> + result = av_buffersink_get_frame ( m_pFilterCtxOut , outFrame ) ; <nl> + m_filterEof = true ; <nl> + if ( result < 0 ) <nl> + return 0 ; <nl> + } <nl> + else if ( result < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : ProcessFilter - av_buffersink_get_frame " ) ; <nl> + return - 1 ; <nl> + } <nl> + <nl> + if ( m_needConvert ) <nl> + { <nl> + av_frame_unref ( m_pOutFrame ) ; <nl> + m_pOutFrame - > format = m_sampleFormat ; <nl> + av_frame_set_channel_layout ( m_pOutFrame , m_channelLayout ) ; <nl> + av_frame_set_sample_rate ( m_pOutFrame , m_sampleRate ) ; <nl> + result = swr_convert_frame ( m_pConvertCtx , m_pOutFrame , m_pConvertFrame ) ; <nl> + if ( result < 0 ) <nl> + { <nl> + CLog : : Log ( LOGERROR , " CActiveAEFilter : : ProcessFilter - swr_convert_frame failed " ) ; <nl> + return - 1 ; <nl> + } <nl> + } <nl> + <nl> + m_hasData = true ; <nl> + m_sampleOffset = 0 ; <nl> + } <nl> + <nl> + if ( m_hasData ) <nl> + { <nl> + int channels = av_get_channel_layout_nb_channels ( m_channelLayout ) ; <nl> + int planes = av_sample_fmt_is_planar ( m_sampleFormat ) ? 
channels : 1 ; <nl> + int samples = std : : min ( dst_samples , m_pOutFrame - > nb_samples - m_sampleOffset ) ; <nl> + int bytes = samples * av_get_bytes_per_sample ( m_sampleFormat ) * channels / planes ; <nl> + int bytesOffset = m_sampleOffset * av_get_bytes_per_sample ( m_sampleFormat ) * channels / planes ; <nl> + for ( int i = 0 ; i < planes ; i + + ) <nl> + { <nl> + memcpy ( dst_buffer [ i ] , m_pOutFrame - > extended_data [ i ] + bytesOffset , bytes ) ; <nl> + } <nl> + m_sampleOffset + = samples ; <nl> + <nl> + if ( m_sampleOffset > = m_pOutFrame - > nb_samples ) <nl> + { <nl> + av_frame_unref ( m_pOutFrame ) ; <nl> + m_hasData = false ; <nl> + } <nl> + <nl> + m_bufferedSamples - = samples * m_tempo ; <nl> + if ( m_bufferedSamples < 0 ) <nl> + m_bufferedSamples = 0 ; <nl> + return samples ; <nl> + } <nl> + <nl> + return 0 ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : IsEof ( ) <nl> + { <nl> + return m_filterEof ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : NeedData ( ) <nl> + { <nl> + return m_needData ; <nl> + } <nl> + <nl> + bool CActiveAEFilter : : IsActive ( ) <nl> + { <nl> + if ( m_pFilterGraph ) <nl> + return true ; <nl> + else <nl> + return false ; <nl> + } <nl> + <nl> + int CActiveAEFilter : : GetBufferedSamples ( ) <nl> + { <nl> + return m_bufferedSamples ; <nl> + } <nl> new file mode 100644 <nl> index 000000000000 . . 5eb9a5231fbf <nl> mmm / dev / null <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEFilter . h <nl> <nl> + # pragma once <nl> + / * <nl> + * Copyright ( C ) 2010 - 2016 Team Kodi <nl> + * http : / / xbmc . org <nl> + * <nl> + * This Program is free software ; you can redistribute it and / or modify <nl> + * it under the terms of the GNU General Public License as published by <nl> + * the Free Software Foundation ; either version 2 , or ( at your option ) <nl> + * any later version . <nl> + * <nl> + * This Program is distributed in the hope that it will be useful , <nl> + * but WITHOUT ANY WARRANTY ; without even the implied warranty of <nl> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the <nl> + * GNU General Public License for more details . <nl> + * <nl> + * You should have received a copy of the GNU General Public License <nl> + * along with XBMC ; see the file COPYING . If not , see <nl> + * < http : / / www . gnu . org / licenses / > . <nl> + * <nl> + * / <nl> + <nl> + extern " C " { <nl> + # include " libavfilter / avfilter . h " <nl> + # include " libavutil / frame . 
h " <nl> + } <nl> + <nl> + struct SwrContext ; <nl> + <nl> + namespace ActiveAE <nl> + { <nl> + <nl> + class CActiveAEFilter <nl> + { <nl> + public : <nl> + CActiveAEFilter ( ) ; <nl> + virtual ~ CActiveAEFilter ( ) ; <nl> + void Init ( AVSampleFormat fmt , int sampleRate , uint64_t channelLayout ) ; <nl> + int ProcessFilter ( uint8_t * * dst_buffer , int dst_samples , uint8_t * * src_buffer , int src_samples , int src_bufsize ) ; <nl> + bool SetTempo ( float tempo ) ; <nl> + bool NeedData ( ) ; <nl> + bool IsEof ( ) ; <nl> + bool IsActive ( ) ; <nl> + int GetBufferedSamples ( ) ; <nl> + <nl> + protected : <nl> + bool CreateFilterGraph ( ) ; <nl> + bool CreateAtempoFilter ( ) ; <nl> + void CloseFilter ( ) ; <nl> + <nl> + AVSampleFormat m_sampleFormat ; <nl> + int m_sampleRate ; <nl> + uint64_t m_channelLayout ; <nl> + AVFilterGraph * m_pFilterGraph ; <nl> + AVFilterContext * m_pFilterCtxIn ; <nl> + AVFilterContext * m_pFilterCtxOut ; <nl> + AVFilterContext * m_pFilterCtxAtempo ; <nl> + AVFrame * m_pOutFrame ; <nl> + SwrContext * m_pConvertCtx ; <nl> + AVFrame * m_pConvertFrame ; <nl> + bool m_needConvert ; <nl> + float m_tempo ; <nl> + bool m_filterEof ; <nl> + bool m_hasData ; <nl> + bool m_needData ; <nl> + int m_sampleOffset ; <nl> + int m_bufferedSamples ; <nl> + } ; <nl> + <nl> + } <nl> \ No newline at end of file <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEStream . cpp <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEStream . cpp <nl> CActiveAEStreamBuffers : : CActiveAEStreamBuffers ( AEAudioFormat inputFormat , AEAudi <nl> { <nl> m_inputFormat = inputFormat ; <nl> m_resampleBuffers = new CActiveAEBufferPoolResample ( inputFormat , outputFormat , quality ) ; <nl> + m_atempoBuffers = new CActiveAEBufferPoolAtempo ( outputFormat ) ; <nl> } <nl> <nl> CActiveAEStreamBuffers : : ~ CActiveAEStreamBuffers ( ) <nl> { <nl> delete m_resampleBuffers ; <nl> + delete m_atempoBuffers ; <nl> } <nl> <nl> bool CActiveAEStreamBuffers : : HasInputLevel ( int level ) <nl> { <nl> - if ( m_inputSamples . size ( ) > m_resampleBuffers - > m_allSamples . size ( ) * 100 / level ) <nl> + if ( ( m_inputSamples . size ( ) + m_resampleBuffers - > m_inputSamples . size ( ) ) > <nl> + ( m_resampleBuffers - > m_allSamples . size ( ) * level / 100 ) ) <nl> return true ; <nl> else <nl> return false ; <nl> bool CActiveAEStreamBuffers : : HasInputLevel ( int level ) <nl> <nl> bool CActiveAEStreamBuffers : : Create ( unsigned int totaltime , bool remap , bool upmix , bool normalize , bool useDSP ) <nl> { <nl> - return m_resampleBuffers - > Create ( totaltime , remap , upmix , normalize , useDSP ) ; <nl> + if ( ! m_resampleBuffers - > Create ( totaltime , remap , upmix , normalize , useDSP ) ) <nl> + return false ; <nl> + <nl> + if ( ! m_atempoBuffers - > Create ( totaltime ) ) <nl> + return false ; <nl> + <nl> + return true ; <nl> } <nl> <nl> void CActiveAEStreamBuffers : : SetExtraData ( int profile , enum AVMatrixEncoding matrix_encoding , enum AVAudioServiceType audio_service_type ) <nl> bool CActiveAEStreamBuffers : : ProcessBuffers ( ) <nl> { <nl> buf = m_resampleBuffers - > m_outputSamples . front ( ) ; <nl> m_resampleBuffers - > m_outputSamples . pop_front ( ) ; <nl> + m_atempoBuffers - > m_inputSamples . push_back ( buf ) ; <nl> + busy = true ; <nl> + } <nl> + <nl> + busy | = m_atempoBuffers - > ProcessBuffers ( ) ; <nl> + <nl> + while ( ! m_atempoBuffers - > m_outputSamples . empty ( ) ) <nl> + { <nl> + buf = m_atempoBuffers - > m_outputSamples . 
front ( ) ; <nl> + m_atempoBuffers - > m_outputSamples . pop_front ( ) ; <nl> m_outputSamples . push_back ( buf ) ; <nl> busy = true ; <nl> } <nl> void CActiveAEStreamBuffers : : ConfigureResampler ( bool normalizelevels , bool dspen <nl> <nl> float CActiveAEStreamBuffers : : GetDelay ( ) <nl> { <nl> - return m_resampleBuffers - > GetDelay ( ) ; <nl> + float delay = 0 ; <nl> + <nl> + for ( auto & buf : m_inputSamples ) <nl> + { <nl> + delay + = ( float ) buf - > pkt - > nb_samples / buf - > pkt - > config . sample_rate ; <nl> + } <nl> + <nl> + delay + = m_resampleBuffers - > GetDelay ( ) ; <nl> + delay + = m_atempoBuffers - > GetDelay ( ) ; <nl> + <nl> + for ( auto & buf : m_outputSamples ) <nl> + { <nl> + delay + = ( float ) buf - > pkt - > nb_samples / buf - > pkt - > config . sample_rate ; <nl> + } <nl> + <nl> + return delay ; <nl> } <nl> <nl> void CActiveAEStreamBuffers : : Flush ( ) <nl> { <nl> m_resampleBuffers - > Flush ( ) ; <nl> + m_atempoBuffers - > Flush ( ) ; <nl> + <nl> while ( ! m_inputSamples . empty ( ) ) <nl> { <nl> m_inputSamples . front ( ) - > Return ( ) ; <nl> void CActiveAEStreamBuffers : : Flush ( ) <nl> void CActiveAEStreamBuffers : : SetDrain ( bool drain ) <nl> { <nl> m_resampleBuffers - > SetDrain ( drain ) ; <nl> + m_atempoBuffers - > SetDrain ( drain ) ; <nl> } <nl> <nl> bool CActiveAEStreamBuffers : : IsDrained ( ) <nl> { <nl> if ( m_resampleBuffers - > m_inputSamples . empty ( ) & & <nl> m_resampleBuffers - > m_outputSamples . empty ( ) & & <nl> + m_atempoBuffers - > m_inputSamples . empty ( ) & & <nl> + m_atempoBuffers - > m_outputSamples . empty ( ) & & <nl> m_inputSamples . empty ( ) & & <nl> m_outputSamples . empty ( ) ) <nl> return true ; <nl> bool CActiveAEStreamBuffers : : IsDrained ( ) <nl> <nl> void CActiveAEStreamBuffers : : SetRR ( double rr ) <nl> { <nl> - m_resampleBuffers - > SetRR ( rr ) ; <nl> + if ( rr < 1 . 02 & & rr > 0 . 98 ) <nl> + { <nl> + m_resampleBuffers - > SetRR ( rr ) ; <nl> + m_atempoBuffers - > SetTempo ( 1 . 0 ) ; <nl> + } <nl> + else <nl> + { <nl> + m_resampleBuffers - > SetRR ( 1 . 0 ) ; <nl> + m_atempoBuffers - > SetTempo ( 1 . 0 / rr ) ; <nl> + } <nl> } <nl> <nl> double CActiveAEStreamBuffers : : GetRR ( ) <nl> { <nl> - return m_resampleBuffers - > GetRR ( ) ; <nl> + double tempo = m_resampleBuffers - > GetRR ( ) ; <nl> + tempo / = m_atempoBuffers - > GetTempo ( ) ; <nl> + return tempo ; <nl> } <nl> <nl> void CActiveAEStreamBuffers : : FillBuffer ( ) <nl> { <nl> m_resampleBuffers - > FillBuffer ( ) ; <nl> + m_atempoBuffers - > FillBuffer ( ) ; <nl> } <nl> <nl> bool CActiveAEStreamBuffers : : DoesNormalize ( ) <nl> CActiveAEBufferPool * CActiveAEStreamBuffers : : GetResampleBuffers ( ) <nl> return ret ; <nl> } <nl> <nl> + CActiveAEBufferPool * CActiveAEStreamBuffers : : GetAtempoBuffers ( ) <nl> + { <nl> + CActiveAEBufferPool * ret = m_atempoBuffers ; <nl> + m_atempoBuffers = nullptr ; <nl> + return ret ; <nl> + } <nl> + <nl> bool CActiveAEStreamBuffers : : HasWork ( ) <nl> { <nl> if ( ! m_inputSamples . empty ( ) ) <nl> bool CActiveAEStreamBuffers : : HasWork ( ) <nl> return true ; <nl> if ( ! m_resampleBuffers - > m_outputSamples . empty ( ) ) <nl> return true ; <nl> + if ( ! m_atempoBuffers - > m_inputSamples . empty ( ) ) <nl> + return true ; <nl> + if ( ! m_atempoBuffers - > m_outputSamples . empty ( ) ) <nl> + return true ; <nl> <nl> return false ; <nl> } <nl> mmm a / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEStream . h <nl> ppp b / xbmc / cores / AudioEngine / Engines / ActiveAE / ActiveAEStream . 
h <nl> class CActiveAEStreamBuffers <nl> void SetDSPConfig ( bool usedsp , bool bypassdsp ) ; <nl> bool HasWork ( ) ; <nl> CActiveAEBufferPool * GetResampleBuffers ( ) ; <nl> + CActiveAEBufferPool * GetAtempoBuffers ( ) ; <nl> + <nl> AEAudioFormat m_inputFormat ; <nl> std : : deque < CSampleBuffer * > m_outputSamples ; <nl> std : : deque < CSampleBuffer * > m_inputSamples ; <nl> <nl> protected : <nl> CActiveAEBufferPoolResample * m_resampleBuffers ; <nl> + CActiveAEBufferPoolAtempo * m_atempoBuffers ; <nl> } ; <nl> <nl> class CActiveAEStream : public IAEStream <nl> mmm a / xbmc / cores / AudioEngine / Makefile . in <nl> ppp b / xbmc / cores / AudioEngine / Makefile . in <nl> SRCS + = Engines / ActiveAE / ActiveAESound . cpp <nl> SRCS + = Engines / ActiveAE / ActiveAEResampleFFMPEG . cpp <nl> SRCS + = Engines / ActiveAE / ActiveAEResamplePi . cpp <nl> SRCS + = Engines / ActiveAE / ActiveAEBuffer . cpp <nl> + SRCS + = Engines / ActiveAE / ActiveAEFilter . cpp <nl> <nl> ifeq ( @ USE_ANDROID @ , 1 ) <nl> SRCS + = Sinks / AESinkAUDIOTRACK . cpp <nl> | AE : add ffmpeg atempo filter | xbmc/xbmc | 3acccef70908fbcc947307789e91987cfa981395 | 2016-07-30T11:41:36Z |
mmm a / src / compiler / access - info . cc <nl> ppp b / src / compiler / access - info . cc <nl> PropertyAccessInfo PropertyAccessInfo : : DataConstant ( <nl> Zone * zone , Handle < Map > receiver_map , <nl> ZoneVector < CompilationDependencies : : Dependency const * > & & dependencies , <nl> FieldIndex field_index , Representation field_representation , <nl> - Type field_type , MaybeHandle < Map > field_map , MaybeHandle < JSObject > holder , <nl> - MaybeHandle < Map > transition_map ) { <nl> - return PropertyAccessInfo ( kDataConstant , holder , transition_map , field_index , <nl> - field_representation , field_type , field_map , <nl> - { { receiver_map } , zone } , std : : move ( dependencies ) ) ; <nl> + Type field_type , MaybeHandle < Map > field_map , MaybeHandle < JSObject > holder ) { <nl> + return PropertyAccessInfo ( kDataConstant , holder , MaybeHandle < Map > ( ) , <nl> + field_index , field_representation , field_type , <nl> + field_map , { { receiver_map } , zone } , <nl> + std : : move ( dependencies ) ) ; <nl> } <nl> <nl> / / static <nl> PropertyAccessInfo AccessInfoFactory : : LookupTransition ( <nl> unrecorded_dependencies . push_back ( <nl> dependencies ( ) - > TransitionDependencyOffTheRecord ( <nl> MapRef ( broker ( ) , transition_map ) ) ) ; <nl> - / / Transitioning stores * may * store to const fields . The resulting <nl> - / / DataConstant access infos can be distinguished from later , i . e . redundant , <nl> - / / stores to the same constant field by the presence of a transition map . <nl> - switch ( details . constness ( ) ) { <nl> - case PropertyConstness : : kMutable : <nl> - return PropertyAccessInfo : : DataField ( <nl> - zone ( ) , map , std : : move ( unrecorded_dependencies ) , field_index , <nl> - details_representation , field_type , field_map , holder , <nl> - transition_map ) ; <nl> - case PropertyConstness : : kConst : <nl> - return PropertyAccessInfo : : DataConstant ( <nl> - zone ( ) , map , std : : move ( unrecorded_dependencies ) , field_index , <nl> - details_representation , field_type , field_map , holder , <nl> - transition_map ) ; <nl> - } <nl> - UNREACHABLE ( ) ; <nl> + / / Transitioning stores are never stores to constant fields . <nl> + return PropertyAccessInfo : : DataField ( <nl> + zone ( ) , map , std : : move ( unrecorded_dependencies ) , field_index , <nl> + details_representation , field_type , field_map , holder , transition_map ) ; <nl> } <nl> <nl> } / / namespace compiler <nl> mmm a / src / compiler / access - info . h <nl> ppp b / src / compiler / access - info . h <nl> class PropertyAccessInfo final { <nl> ZoneVector < CompilationDependencies : : Dependency const * > & & <nl> unrecorded_dependencies , <nl> FieldIndex field_index , Representation field_representation , <nl> - Type field_type , MaybeHandle < Map > field_map , MaybeHandle < JSObject > holder , <nl> - MaybeHandle < Map > transition_map = MaybeHandle < Map > ( ) ) ; <nl> + Type field_type , MaybeHandle < Map > field_map , <nl> + MaybeHandle < JSObject > holder ) ; <nl> static PropertyAccessInfo AccessorConstant ( Zone * zone , <nl> Handle < Map > receiver_map , <nl> Handle < Object > constant , <nl> mmm a / src / compiler / js - create - lowering . cc <nl> ppp b / src / compiler / js - create - lowering . cc <nl> Node * JSCreateLowering : : AllocateFastLiteral ( Node * effect , Node * control , <nl> DCHECK_EQ ( kData , property_details . kind ( ) ) ; <nl> NameRef property_name = boilerplate_map . GetPropertyKey ( i ) ; <nl> FieldIndex index = boilerplate_map . 
GetFieldIndexFor ( i ) ; <nl> - FieldAccess access = { kTaggedBase , <nl> - index . offset ( ) , <nl> - property_name . object ( ) , <nl> - MaybeHandle < Map > ( ) , <nl> - Type : : Any ( ) , <nl> - MachineType : : TypeCompressedTagged ( ) , <nl> - kFullWriteBarrier , <nl> - LoadSensitivity : : kUnsafe , <nl> - property_details . constness ( ) } ; <nl> + FieldAccess access = { <nl> + kTaggedBase , index . offset ( ) , property_name . object ( ) , <nl> + MaybeHandle < Map > ( ) , Type : : Any ( ) , MachineType : : TypeCompressedTagged ( ) , <nl> + kFullWriteBarrier } ; <nl> Node * value ; <nl> if ( boilerplate_map . IsUnboxedDoubleField ( i ) ) { <nl> access . machine_type = MachineType : : Float64 ( ) ; <nl> access . type = Type : : Number ( ) ; <nl> - uint64_t value_bits = boilerplate . RawFastDoublePropertyAsBitsAt ( index ) ; <nl> - if ( value_bits = = kHoleNanInt64 ) { <nl> - / / This special case is analogous to is_uninitialized being true in the <nl> - / / non - unboxed - double case below . The store of the hole NaN value here <nl> - / / will always be followed by another store that actually initializes <nl> - / / the field . The hole NaN should therefore be unobservable . <nl> - / / Load elimination expects there to be at most one const store to any <nl> - / / given field , so we always mark the unobservable ones as mutable . <nl> - access . constness = PropertyConstness : : kMutable ; <nl> - } <nl> - value = jsgraph ( ) - > Constant ( bit_cast < double > ( value_bits ) ) ; <nl> + value = jsgraph ( ) - > Constant ( boilerplate . RawFastDoublePropertyAt ( index ) ) ; <nl> } else { <nl> ObjectRef boilerplate_value = boilerplate . RawFastPropertyAt ( index ) ; <nl> - bool is_uninitialized = <nl> - boilerplate_value . IsHeapObject ( ) & & <nl> - boilerplate_value . AsHeapObject ( ) . map ( ) . oddball_type ( ) = = <nl> - OddballType : : kUninitialized ; <nl> - if ( is_uninitialized ) { <nl> - access . constness = PropertyConstness : : kMutable ; <nl> - } <nl> if ( boilerplate_value . IsJSObject ( ) ) { <nl> JSObjectRef boilerplate_object = boilerplate_value . AsJSObject ( ) ; <nl> value = effect = AllocateFastLiteral ( effect , control , <nl> Node * JSCreateLowering : : AllocateFastLiteral ( Node * effect , Node * control , <nl> value = effect = builder . Finish ( ) ; <nl> } else if ( property_details . representation ( ) . IsSmi ( ) ) { <nl> / / Ensure that value is stored as smi . <nl> + bool is_uninitialized = <nl> + boilerplate_value . IsHeapObject ( ) & & <nl> + boilerplate_value . AsHeapObject ( ) . map ( ) . oddball_type ( ) = = <nl> + OddballType : : kUninitialized ; <nl> value = is_uninitialized <nl> ? jsgraph ( ) - > ZeroConstant ( ) <nl> : jsgraph ( ) - > Constant ( boilerplate_value . AsSmi ( ) ) ; <nl> mmm a / src / compiler / js - heap - broker . cc <nl> ppp b / src / compiler / js - heap - broker . cc <nl> void CallHandlerInfoData : : Serialize ( JSHeapBroker * broker ) { <nl> class JSObjectField { <nl> public : <nl> bool IsDouble ( ) const { return object_ = = nullptr ; } <nl> - uint64_t AsBitsOfDouble ( ) const { <nl> - CHECK ( IsDouble ( ) ) ; <nl> - return number_bits_ ; <nl> - } <nl> double AsDouble ( ) const { <nl> CHECK ( IsDouble ( ) ) ; <nl> - return bit_cast < double > ( number_bits_ ) ; <nl> + return number_ ; <nl> } <nl> <nl> bool IsObject ( ) const { return object_ ! 
= nullptr ; } <nl> class JSObjectField { <nl> return object_ ; <nl> } <nl> <nl> - explicit JSObjectField ( uint64_t value_bits ) : number_bits_ ( value_bits ) { } <nl> + explicit JSObjectField ( double value ) : number_ ( value ) { } <nl> explicit JSObjectField ( ObjectData * value ) : object_ ( value ) { } <nl> <nl> private : <nl> ObjectData * object_ = nullptr ; <nl> - uint64_t number_bits_ = 0 ; <nl> + double number_ = 0 ; <nl> } ; <nl> <nl> class JSObjectData : public HeapObjectData { <nl> void JSObjectData : : SerializeRecursive ( JSHeapBroker * broker , int depth ) { <nl> DCHECK_EQ ( field_index . property_index ( ) , <nl> static_cast < int > ( inobject_fields_ . size ( ) ) ) ; <nl> if ( boilerplate - > IsUnboxedDoubleField ( field_index ) ) { <nl> - uint64_t value_bits = <nl> - boilerplate - > RawFastDoublePropertyAsBitsAt ( field_index ) ; <nl> - inobject_fields_ . push_back ( JSObjectField { value_bits } ) ; <nl> + double value = boilerplate - > RawFastDoublePropertyAt ( field_index ) ; <nl> + inobject_fields_ . push_back ( JSObjectField { value } ) ; <nl> } else { <nl> Handle < Object > value ( boilerplate - > RawFastPropertyAt ( field_index ) , <nl> isolate ) ; <nl> double JSObjectRef : : RawFastDoublePropertyAt ( FieldIndex index ) const { <nl> return object_data - > GetInobjectField ( index . property_index ( ) ) . AsDouble ( ) ; <nl> } <nl> <nl> - uint64_t JSObjectRef : : RawFastDoublePropertyAsBitsAt ( FieldIndex index ) const { <nl> - if ( broker ( ) - > mode ( ) = = JSHeapBroker : : kDisabled ) { <nl> - AllowHandleDereference handle_dereference ; <nl> - return object ( ) - > RawFastDoublePropertyAsBitsAt ( index ) ; <nl> - } <nl> - JSObjectData * object_data = data ( ) - > AsJSObject ( ) ; <nl> - CHECK ( index . is_inobject ( ) ) ; <nl> - return object_data - > GetInobjectField ( index . property_index ( ) ) . AsBitsOfDouble ( ) ; <nl> - } <nl> - <nl> ObjectRef JSObjectRef : : RawFastPropertyAt ( FieldIndex index ) const { <nl> if ( broker ( ) - > mode ( ) = = JSHeapBroker : : kDisabled ) { <nl> AllowHandleAllocation handle_allocation ; <nl> mmm a / src / compiler / js - heap - broker . h <nl> ppp b / src / compiler / js - heap - broker . h <nl> class JSObjectRef : public HeapObjectRef { <nl> using HeapObjectRef : : HeapObjectRef ; <nl> Handle < JSObject > object ( ) const ; <nl> <nl> - uint64_t RawFastDoublePropertyAsBitsAt ( FieldIndex index ) const ; <nl> double RawFastDoublePropertyAt ( FieldIndex index ) const ; <nl> ObjectRef RawFastPropertyAt ( FieldIndex index ) const ; <nl> <nl> mmm a / src / compiler / js - native - context - specialization . cc <nl> ppp b / src / compiler / js - native - context - specialization . cc <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> & control , if_exceptions , access_info ) ; <nl> } else { <nl> DCHECK ( access_info . IsDataField ( ) | | access_info . IsDataConstant ( ) ) ; <nl> - DCHECK ( access_mode = = AccessMode : : kStore | | <nl> - access_mode = = AccessMode : : kStoreInLiteral ) ; <nl> FieldIndex const field_index = access_info . field_index ( ) ; <nl> Type const field_type = access_info . field_type ( ) ; <nl> MachineRepresentation const field_representation = <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> simplified ( ) - > LoadField ( AccessBuilder : : ForJSObjectPropertiesOrHash ( ) ) , <nl> storage , effect , control ) ; <nl> } <nl> - PropertyConstness constness = access_info . IsDataConstant ( ) <nl> - ? 
PropertyConstness : : kConst <nl> - : PropertyConstness : : kMutable ; <nl> - bool store_to_existing_constant_field = access_info . IsDataConstant ( ) & & <nl> - access_mode = = AccessMode : : kStore & & <nl> - ! access_info . HasTransitionMap ( ) ; <nl> FieldAccess field_access = { <nl> kTaggedBase , <nl> field_index . offset ( ) , <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> MaybeHandle < Map > ( ) , <nl> field_type , <nl> MachineType : : TypeForRepresentation ( field_representation ) , <nl> - kFullWriteBarrier , <nl> - LoadSensitivity : : kUnsafe , <nl> - constness } ; <nl> + kFullWriteBarrier } ; <nl> + bool store_to_constant_field = <nl> + ( access_mode = = AccessMode : : kStore ) & & access_info . IsDataConstant ( ) ; <nl> <nl> + DCHECK ( access_mode = = AccessMode : : kStore | | <nl> + access_mode = = AccessMode : : kStoreInLiteral ) ; <nl> switch ( field_representation ) { <nl> case MachineRepresentation : : kFloat64 : { <nl> value = effect = <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> Type : : OtherInternal ( ) ) ; <nl> a . Store ( AccessBuilder : : ForMap ( ) , <nl> factory ( ) - > mutable_heap_number_map ( ) ) ; <nl> - FieldAccess value_field_access = <nl> - AccessBuilder : : ForHeapNumberValue ( ) ; <nl> - value_field_access . constness = field_access . constness ; <nl> - a . Store ( value_field_access , value ) ; <nl> + a . Store ( AccessBuilder : : ForHeapNumberValue ( ) , value ) ; <nl> value = effect = a . Finish ( ) ; <nl> <nl> field_access . type = Type : : Any ( ) ; <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> MaybeHandle < Map > ( ) , <nl> Type : : OtherInternal ( ) , <nl> MachineType : : TypeCompressedTaggedPointer ( ) , <nl> - kPointerWriteBarrier , <nl> - LoadSensitivity : : kUnsafe , <nl> - constness } ; <nl> + kPointerWriteBarrier } ; <nl> storage = effect = <nl> graph ( ) - > NewNode ( simplified ( ) - > LoadField ( storage_access ) , <nl> storage , effect , control ) ; <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> field_access . machine_type = MachineType : : Float64 ( ) ; <nl> } <nl> } <nl> - if ( store_to_existing_constant_field ) { <nl> + if ( store_to_constant_field ) { <nl> DCHECK ( ! access_info . HasTransitionMap ( ) ) ; <nl> / / If the field is constant check that the value we are going <nl> / / to store matches current value . <nl> JSNativeContextSpecialization : : BuildPropertyStore ( <nl> case MachineRepresentation : : kCompressedSigned : <nl> case MachineRepresentation : : kCompressedPointer : <nl> case MachineRepresentation : : kCompressed : <nl> - if ( store_to_existing_constant_field ) { <nl> + if ( store_to_constant_field ) { <nl> DCHECK ( ! access_info . HasTransitionMap ( ) ) ; <nl> / / If the field is constant check that the value we are going <nl> / / to store matches current value . <nl> mmm a / src / compiler / load - elimination . cc <nl> ppp b / src / compiler / load - elimination . cc <nl> Node * LoadElimination : : AbstractState : : LookupField ( <nl> if ( AbstractField const * this_field = fields [ index ] ) { <nl> return this_field - > Lookup ( object ) ; <nl> } <nl> + if ( constness = = PropertyConstness : : kConst ) { <nl> + return LookupField ( object , index , PropertyConstness : : kMutable ) ; <nl> + } <nl> return nullptr ; <nl> } <nl> <nl> Reduction LoadElimination : : ReduceLoadField ( Node * node , <nl> } else { <nl> int field_index = FieldIndexOf ( access ) ; <nl> if ( field_index > = 0 ) { <nl> - PropertyConstness constness = access . 
constness ; <nl> - Node * replacement = state - > LookupField ( object , field_index , constness ) ; <nl> - if ( ! replacement & & constness = = PropertyConstness : : kConst ) { <nl> - replacement = state - > LookupField ( object , field_index , <nl> - PropertyConstness : : kMutable ) ; <nl> - } <nl> - if ( replacement ) { <nl> + if ( Node * replacement = <nl> + state - > LookupField ( object , field_index , access . constness ) ) { <nl> / / Make sure we don ' t resurrect dead { replacement } nodes . <nl> if ( ! replacement - > IsDead ( ) ) { <nl> / / Introduce a TypeGuard if the type of the { replacement } node is not <nl> Reduction LoadElimination : : ReduceLoadField ( Node * node , <nl> return Replace ( replacement ) ; <nl> } <nl> } <nl> - state = state - > AddField ( object , field_index , node , access . name , constness , <nl> - zone ( ) ) ; <nl> + state = state - > AddField ( object , field_index , node , access . name , <nl> + access . constness , zone ( ) ) ; <nl> } <nl> } <nl> Handle < Map > field_map ; <nl> Reduction LoadElimination : : ReduceStoreField ( Node * node , <nl> } else { <nl> int field_index = FieldIndexOf ( access ) ; <nl> if ( field_index > = 0 ) { <nl> - PropertyConstness constness = access . constness ; <nl> Node * const old_value = <nl> - state - > LookupField ( object , field_index , constness ) ; <nl> - <nl> - if ( constness = = PropertyConstness : : kConst & & old_value ) { <nl> - / / At runtime , we should never see two consecutive const stores , i . e . , <nl> - / / DCHECK_NULL ( old_value ) <nl> - / / ought to hold , but we might see such ( unreachable ) code statically . <nl> - Node * control = NodeProperties : : GetControlInput ( node ) ; <nl> - Node * unreachable = <nl> - graph ( ) - > NewNode ( common ( ) - > Unreachable ( ) , effect , control ) ; <nl> - return Replace ( unreachable ) ; <nl> - } <nl> - <nl> + state - > LookupField ( object , field_index , access . constness ) ; <nl> if ( old_value = = new_value ) { <nl> / / This store is fully redundant . <nl> return Replace ( effect ) ; <nl> } <nl> - <nl> / / Kill all potentially aliasing fields and record the new value . <nl> state = state - > KillField ( object , field_index , access . name , zone ( ) ) ; <nl> state = state - > AddField ( object , field_index , new_value , access . name , <nl> - PropertyConstness : : kMutable , zone ( ) ) ; <nl> - if ( constness = = PropertyConstness : : kConst ) { <nl> - / / For const stores , we track information in both the const and the <nl> - / / mutable world to guard against field accesses that should have <nl> - / / been marked const , but were not . <nl> - state = state - > AddField ( object , field_index , new_value , access . name , <nl> - constness , zone ( ) ) ; <nl> - } <nl> + access . constness , zone ( ) ) ; <nl> } else { <nl> / / Unsupported StoreField operator . <nl> state = state - > KillFields ( object , access . name , zone ( ) ) ; <nl> LoadElimination : : AbstractState const * LoadElimination : : ComputeLoopState ( <nl> MaybeHandle < Name > ( ) , zone ( ) ) ; <nl> break ; <nl> } <nl> - case IrOpcode : : kStoreField : { <nl> - FieldAccess access = FieldAccessOf ( current - > op ( ) ) ; <nl> - if ( access . 
constness = = PropertyConstness : : kMutable ) { <nl> - state = ComputeLoopStateForStoreField ( current , state , access ) ; <nl> - } <nl> + case IrOpcode : : kStoreField : <nl> + state = ComputeLoopStateForStoreField ( current , state , <nl> + FieldAccessOf ( current - > op ( ) ) ) ; <nl> break ; <nl> - } <nl> case IrOpcode : : kStoreElement : { <nl> Node * const object = NodeProperties : : GetValueInput ( current , 0 ) ; <nl> Node * const index = NodeProperties : : GetValueInput ( current , 1 ) ; <nl> mmm a / src / compiler / simplified - operator . cc <nl> ppp b / src / compiler / simplified - operator . cc <nl> std : : ostream & operator < < ( std : : ostream & os , FieldAccess const & access ) { <nl> } <nl> # endif <nl> os < < access . type < < " , " < < access . machine_type < < " , " <nl> - < < access . write_barrier_kind < < " , " < < access . constness ; <nl> + < < access . write_barrier_kind ; <nl> if ( FLAG_untrusted_code_mitigations ) { <nl> os < < " , " < < access . load_sensitivity ; <nl> } <nl> mmm a / src / objects / property - details . h <nl> ppp b / src / objects / property - details . h <nl> inline PropertyConstness GeneralizeConstness ( PropertyConstness a , <nl> <nl> V8_EXPORT_PRIVATE std : : ostream & operator < < ( <nl> std : : ostream & os , const PropertyAttributes & attributes ) ; <nl> - V8_EXPORT_PRIVATE std : : ostream & operator < < ( std : : ostream & os , <nl> - PropertyConstness constness ) ; <nl> } / / namespace internal <nl> } / / namespace v8 <nl> <nl> mmm a / src / objects / property . cc <nl> ppp b / src / objects / property . cc <nl> std : : ostream & operator < < ( std : : ostream & os , <nl> return os ; <nl> } <nl> <nl> - std : : ostream & operator < < ( std : : ostream & os , PropertyConstness constness ) { <nl> - switch ( constness ) { <nl> - case PropertyConstness : : kMutable : <nl> - return os < < " mutable " ; <nl> - case PropertyConstness : : kConst : <nl> - return os < < " const " ; <nl> - } <nl> - UNREACHABLE ( ) ; <nl> - } <nl> - <nl> Descriptor : : Descriptor ( ) : details_ ( Smi : : zero ( ) ) { } <nl> <nl> Descriptor : : Descriptor ( Handle < Name > key , const MaybeObjectHandle & value , <nl> mmm a / test / mjsunit / compiler / load - elimination - const - field . js <nl> ppp b / test / mjsunit / compiler / load - elimination - const - field . js <nl> <nl> ( function ( ) { <nl> function maybe_sideeffect ( b ) { return 42 ; } <nl> <nl> - % NeverOptimizeFunction ( maybe_sideeffect ) ; <nl> - <nl> - class B { <nl> - constructor ( x ) { <nl> - this . value = x ; <nl> - } <nl> - } <nl> - % EnsureFeedbackVectorForFunction ( B ) ; <nl> - <nl> - <nl> - function lit_const_smi ( ) { <nl> - let b = { value : 123 } ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , 123 ) ) ; <nl> - } <nl> - <nl> - lit_const_smi ( ) ; lit_const_smi ( ) ; <nl> - % OptimizeFunctionOnNextCall ( lit_const_smi ) ; lit_const_smi ( ) ; <nl> - <nl> - <nl> - function lit_const_object ( ) { <nl> - let o = { x : 123 } ; <nl> - let b = { value : o } ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . 
is ( v2 , o ) ) ; <nl> - } <nl> - <nl> - lit_const_object ( ) ; lit_const_object ( ) ; <nl> - % OptimizeFunctionOnNextCall ( lit_const_object ) ; lit_const_object ( ) ; <nl> - <nl> - <nl> - function lit_computed_smi ( k ) { <nl> - let kk = 2 * k ; <nl> - let b = { value : kk } ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , kk ) ) ; <nl> - } <nl> - <nl> - lit_computed_smi ( 1 ) ; lit_computed_smi ( 2 ) ; <nl> - % OptimizeFunctionOnNextCall ( lit_computed_smi ) ; lit_computed_smi ( 3 ) ; <nl> - <nl> - / / TODO ( bmeurer ) : Fix const tracking for double fields in object literals <nl> - / / lit_computed_smi ( 1 . 1 ) ; lit_computed_smi ( 2 . 2 ) ; <nl> - / / % OptimizeFunctionOnNextCall ( lit_computed_smi ) ; lit_computed_smi ( 3 . 3 ) ; <nl> - <nl> - <nl> - function lit_param_object ( k ) { <nl> + function f ( k ) { <nl> let b = { value : k } ; <nl> maybe_sideeffect ( b ) ; <nl> let v1 = b . value ; <nl> maybe_sideeffect ( b ) ; <nl> let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , k ) ) ; <nl> + % TurbofanStaticAssert ( v1 = = v2 ) ; <nl> + / / TODO ( gsps ) : Improve analysis to also propagate stored value <nl> + / / Eventually , this should also work : <nl> + / / % TurbofanStaticAssert ( v2 = = k ) ; <nl> } <nl> <nl> - lit_param_object ( { x : 1 } ) ; lit_param_object ( { x : 2 } ) ; <nl> - % OptimizeFunctionOnNextCall ( lit_param_object ) ; lit_param_object ( { x : 3 } ) ; <nl> - <nl> - <nl> - function nested_lit_param ( k ) { <nl> - let b = { x : { value : k } } ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . x . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . x . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , k ) ) ; <nl> - } <nl> - <nl> - nested_lit_param ( 1 ) ; nested_lit_param ( 2 ) ; <nl> - % OptimizeFunctionOnNextCall ( nested_lit_param ) ; nested_lit_param ( 3 ) ; <nl> - <nl> - / / TODO ( bmeurer ) : Fix const tracking for double fields in object literals <nl> - / / nested_lit_param ( 1 . 1 ) ; nested_lit_param ( 2 . 2 ) ; <nl> - / / % OptimizeFunctionOnNextCall ( nested_lit_param ) ; nested_lit_param ( 3 . 3 ) ; <nl> - <nl> - <nl> - function nested_lit_param_object ( k ) { <nl> - let b = { x : { value : k } } ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . x . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . x . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , k ) ) ; <nl> - } <nl> - <nl> - nested_lit_param_object ( { x : 1 } ) ; nested_lit_param_object ( { x : 2 } ) ; <nl> - % OptimizeFunctionOnNextCall ( nested_lit_param_object ) ; <nl> - nested_lit_param_object ( { x : 3 } ) ; <nl> - <nl> - <nl> - % EnsureFeedbackVectorForFunction ( inst_param ) ; <nl> - function inst_param ( k ) { <nl> - let b = new B ( k ) ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . 
is ( v2 , k ) ) ; <nl> - } <nl> - <nl> - inst_param ( 1 ) ; inst_param ( 2 ) ; <nl> - % OptimizeFunctionOnNextCall ( inst_param ) ; inst_param ( 3 ) ; <nl> - <nl> - / / TODO ( gsps ) : Reenable once we fully support const field information <nl> - / / tracking in the presence of pointer compression . <nl> - / / inst_param ( 1 . 1 ) ; inst_param ( 2 . 2 ) ; <nl> - / / % OptimizeFunctionOnNextCall ( inst_param ) ; inst_param ( 3 . 3 ) ; <nl> - <nl> - inst_param ( { x : 1 } ) ; inst_param ( { x : 2 } ) ; <nl> - % OptimizeFunctionOnNextCall ( inst_param ) ; inst_param ( { x : 3 } ) ; <nl> - <nl> - <nl> - % EnsureFeedbackVectorForFunction ( inst_computed ) ; <nl> - function inst_computed ( k ) { <nl> - let kk = 2 * k ; <nl> - let b = new B ( kk ) ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v1 = b . value ; <nl> - maybe_sideeffect ( b ) ; <nl> - let v2 = b . value ; <nl> - % TurbofanStaticAssert ( Object . is ( v1 , v2 ) ) ; <nl> - % TurbofanStaticAssert ( Object . is ( v2 , kk ) ) ; <nl> - } <nl> - <nl> - inst_computed ( 1 ) ; inst_computed ( 2 ) ; <nl> - % OptimizeFunctionOnNextCall ( inst_computed ) ; inst_computed ( 3 ) ; <nl> - <nl> - inst_computed ( 1 . 1 ) ; inst_computed ( 2 . 2 ) ; <nl> - % OptimizeFunctionOnNextCall ( inst_computed ) ; inst_computed ( 3 . 3 ) ; <nl> + % NeverOptimizeFunction ( maybe_sideeffect ) ; <nl> + f ( 1 ) ; <nl> + f ( 2 ) ; <nl> + % OptimizeFunctionOnNextCall ( f ) ; <nl> + f ( 3 ) ; <nl> } ) ( ) ; <nl> | Revert " Make LoadElimination aware of const fields ( Part 2 ; stores ) " | v8/v8 | b851d753c94151da67f9805cf30e4aa12800bf97 | 2019-05-24T06:28:17Z |
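Editor's note on the record above: the reverted change concerns V8's load elimination, which keeps an abstract state mapping (object, field) to the last known value so a repeated load can be replaced and a store of an already-known value dropped; the const-field variant additionally assumed such a field is written at most once. The following is a toy version of that bookkeeping only — it is not V8's IR or data structures, and all names are invented.

// Toy field-level load elimination over straight-line code.
#include <cstdio>
#include <map>
#include <optional>
#include <string>
#include <utility>

using FieldKey = std::pair<int, std::string>;  // (object id, field name)

class AbstractState {
 public:
  // A load is redundant if the field value is already known.
  std::optional<int> Lookup(const FieldKey& key) const {
    auto it = fields_.find(key);
    return it == fields_.end() ? std::nullopt : std::optional<int>(it->second);
  }
  // A store records the new value; storing the value already known is a no-op.
  bool Store(const FieldKey& key, int value) {
    auto it = fields_.find(key);
    if (it != fields_.end() && it->second == value) return false;  // fully redundant
    fields_[key] = value;
    return true;
  }
  // Anything that may clobber the field (e.g. an opaque call, for mutable fields)
  // kills the cached entry.
  void Kill(const FieldKey& key) { fields_.erase(key); }

 private:
  std::map<FieldKey, int> fields_;
};

int main() {
  AbstractState state;
  FieldKey f{1, "value"};
  state.Store(f, 42);
  std::printf("load #1 known value: %d\n", *state.Lookup(f));  // load eliminated
  state.Kill(f);  // a call that may write the field invalidates the entry
  std::printf("load #2 known: %s\n", state.Lookup(f) ? "yes" : "no");
}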
mmm a / tools / bindings_generator . py <nl> ppp b / tools / bindings_generator . py <nl> def path_from_root ( * pathelems ) : <nl> def generate_class ( generating_cname , cname , clazz ) : <nl> inherited = generating_cname ! = cname <nl> <nl> - # Nothing to generate for pure virtual classes <nl> - if len ( clazz . get_all_pure_virtual_methods ( ) . keys ( ) ) > 0 : return <nl> - for method in clazz [ ' methods ' ] [ ' public ' ] : <nl> - if method [ ' pure_virtual ' ] : return <nl> - <nl> for method in clazz [ ' methods ' ] [ ' public ' ] : <nl> mname = method [ ' name ' ] <nl> + # print " zz generating : " , generating_cname , cname , mname <nl> if cname + ' : : ' + mname in ignored : continue <nl> <nl> args = method [ ' parameters ' ] <nl> def generate_class ( generating_cname , cname , clazz ) : <nl> for cname , clazz in classes . iteritems ( ) : <nl> if cname in ignored : continue <nl> <nl> + # Nothing to generate for pure virtual classes <nl> + <nl> + def check_pure_virtual ( clazz , progeny ) : <nl> + if any ( [ check_pure_virtual ( classes [ parent [ ' class ' ] ] , [ clazz ] + progeny ) for parent in clazz [ ' inherits ' ] ] ) : return True <nl> + <nl> + def dirtied ( mname ) : <nl> + for progen in progeny : <nl> + for method in clazz [ ' methods ' ] [ ' public ' ] : <nl> + if method [ ' name ' ] = = mname and not method [ ' pure_virtual ' ] : return True <nl> + return False <nl> + <nl> + for method in clazz [ ' methods ' ] [ ' public ' ] : <nl> + if method [ ' pure_virtual ' ] and not dirtied ( method [ ' name ' ] ) : return True <nl> + <nl> + if check_pure_virtual ( clazz , [ ] ) : continue <nl> + <nl> + # Add a constructor if none exist <nl> + has_constructor = False <nl> + for method in clazz [ ' methods ' ] [ ' public ' ] : <nl> + mname = method [ ' name ' ] <nl> + has_constructor = has_constructor or ( cname = = mname ) <nl> + <nl> + if not has_constructor : <nl> + clazz [ ' methods ' ] [ ' public ' ] = [ { <nl> + ' name ' : cname , <nl> + ' parameters ' : [ ] , <nl> + ' pure_virtual ' : False , <nl> + ' destructor ' : False , <nl> + } ] + clazz [ ' methods ' ] [ ' public ' ] <nl> + <nl> generate_class ( cname , cname , clazz ) <nl> <nl> # In addition , generate all methods of parent classes . We do not inherit in JS ( how would we do multiple inheritance etc . ? ) <nl> | better support for abstract base classes and constructorless classes in bindings generator | emscripten-core/emscripten | 51505880de541576c79ece8b93e73690290c6c5e | 2011-07-15T04:25:40Z |
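Editor's note on the record above: the generator change encodes two C++ rules — a class with a pure virtual method that no descendant overrides is abstract and cannot be constructed from bindings, while a class that declares no constructor still has an implicit default one worth exposing. In plain C++ terms (the example classes below are made up for illustration):

#include <cstdio>

struct Shape {                      // abstract: carries an unoverridden pure virtual
  virtual ~Shape() = default;
  virtual double Area() const = 0;
};

struct Circle : Shape {             // overrides the pure virtual, so it is instantiable
  explicit Circle(double r) : r_(r) {}
  double Area() const override { return 3.14159265358979 * r_ * r_; }
  double r_;
};

struct Marker {};                   // no declared constructor; the implicit default one exists

int main() {
  // Shape s;                       // would not compile: Shape is abstract
  Circle c(2.0);
  Marker m;                         // usable precisely because of the implicit constructor
  (void)m;
  std::printf("area=%f\n", c.Area());
}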
mmm a / build / fbcode_builder / manifests / googletest <nl> ppp b / build / fbcode_builder / manifests / googletest <nl> <nl> name = googletest <nl> <nl> [ download ] <nl> - url = https : / / github . com / google / googletest / archive / release - 1 . 8 . 1 . tar . gz <nl> - sha256 = 9bf1fe5182a604b4135edc1a425ae356c9ad15e9b23f9f12a02e80184c3a249c <nl> + url = https : / / github . com / google / googletest / archive / release - 1 . 10 . 0 . tar . gz <nl> + sha256 = 9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb <nl> <nl> [ build ] <nl> builder = cmake <nl> - subdir = googletest - release - 1 . 8 . 1 <nl> + subdir = googletest - release - 1 . 10 . 0 <nl> <nl> [ cmake . defines ] <nl> # Everything else defaults to the shared runtime , so tell gtest that <nl> | getdeps : upgrade googletest from 1 . 8 . 1 to 1 . 10 . 0 | facebook/watchman | b49b454c2ddd543893b244afe1002b8457ed4a98 | 2020-04-30T19:20:39Z |
mmm a / R - package / demo / 00Index <nl> ppp b / R - package / demo / 00Index <nl> cross_validation Cross validation <nl> create_sparse_matrix Create Sparse Matrix <nl> predict_leaf_indices Predicting the corresponding leaves <nl> early_Stopping Early Stop in training <nl> + poisson_regression Poisson Regression on count data <nl> new file mode 100644 <nl> index 0000000000 . . f9dc4ac621 <nl> mmm / dev / null <nl> ppp b / R - package / demo / poisson_regression . R <nl> <nl> + data ( mtcars ) <nl> + head ( mtcars ) <nl> + bst = xgboost ( data = as . matrix ( mtcars [ , - 11 ] ) , label = mtcars [ , 11 ] , <nl> + objective = ' count : poisson ' , nrounds = 5 ) <nl> + pred = predict ( bst , as . matrix ( mtcars [ , - 11 ] ) ) <nl> + sqrt ( mean ( ( pred - mtcars [ , 11 ] ) ^ 2 ) ) <nl> + <nl> mmm a / R - package / demo / runall . R <nl> ppp b / R - package / demo / runall . R <nl> demo ( generalized_linear_model ) <nl> demo ( cross_validation ) <nl> demo ( create_sparse_matrix ) <nl> demo ( predict_leaf_indices ) <nl> + demo ( early_Stopping ) <nl> + demo ( poisson_regression ) <nl> | add poisson demo | dmlc/xgboost | 60d307c44513a803069bfa705eebed3f28c7b972 | 2015-05-11T22:21:54Z |
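Editor's note on the record above: the demo relies on the count:poisson objective, which boosts the log of the expected count — with raw score f and observed count y the per-row loss is exp(f) - y*f, giving gradient exp(f) - y and hessian exp(f). The sketch below is the standard Poisson-with-log-link math written out, not code taken from xgboost itself.

// Gradient/hessian statistics of the Poisson negative log-likelihood with a log link,
// the quantities a gradient-boosting objective of this kind feeds to the trees.
#include <cmath>
#include <cstdio>
#include <utility>
#include <vector>

struct GradPair { double grad, hess; };

GradPair PoissonGradHess(double pred /* raw score f */, double label /* count y */) {
  double mu = std::exp(pred);   // expected count under the log link
  return {mu - label, mu};      // d/df and d^2/df^2 of exp(f) - y*f
}

int main() {
  std::vector<std::pair<double, double>> rows = {{0.0, 1.0}, {1.0, 4.0}, {2.0, 3.0}};
  for (auto [f, y] : rows) {
    GradPair g = PoissonGradHess(f, y);
    std::printf("f=%.1f y=%.1f grad=%.3f hess=%.3f\n", f, y, g.grad, g.hess);
  }
}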
mmm a / xbmc / video / VideoDatabase . cpp <nl> ppp b / xbmc / video / VideoDatabase . cpp <nl> class CShowLink <nl> <nl> void CVideoDatabase : : UpdateTables ( int iVersion ) <nl> { <nl> + / / Important : DO NOT use CREATE TABLE [ . . . ] AS SELECT [ . . . ] - it does not work on MySQL with GTID consistency enforced <nl> + <nl> if ( iVersion < 76 ) <nl> { <nl> m_pDS - > exec ( " ALTER TABLE settings ADD StereoMode integer " ) ; <nl> void CVideoDatabase : : UpdateTables ( int iVersion ) <nl> <nl> if ( iVersion < 109 ) <nl> { <nl> - m_pDS - > exec ( " CREATE TABLE settingsnew AS SELECT idFile , Deinterlace , ViewMode , ZoomAmount , PixelRatio , VerticalShift , AudioStream , SubtitleStream , SubtitleDelay , SubtitlesOn , Brightness , Contrast , Gamma , VolumeAmplification , AudioDelay , ResumeTime , Sharpness , NoiseReduction , NonLinStretch , PostProcess , ScalingMethod , DeinterlaceMode , StereoMode , StereoInvert , VideoStream FROM settings " ) ; <nl> - m_pDS - > exec ( " DROP TABLE settings " ) ; <nl> - m_pDS - > exec ( " ALTER TABLE settingsnew RENAME TO settings " ) ; <nl> + m_pDS - > exec ( " ALTER TABLE settings RENAME TO settingsold " ) ; <nl> + m_pDS - > exec ( " CREATE TABLE settings ( idFile integer , Deinterlace bool , " <nl> + " ViewMode integer , ZoomAmount float , PixelRatio float , VerticalShift float , AudioStream integer , SubtitleStream integer , " <nl> + " SubtitleDelay float , SubtitlesOn bool , Brightness float , Contrast float , Gamma float , " <nl> + " VolumeAmplification float , AudioDelay float , ResumeTime integer , " <nl> + " Sharpness float , NoiseReduction float , NonLinStretch bool , PostProcess bool , " <nl> + " ScalingMethod integer , DeinterlaceMode integer , StereoMode integer , StereoInvert bool , VideoStream integer ) " ) ; <nl> + m_pDS - > exec ( " INSERT INTO settings SELECT idFile , Deinterlace , ViewMode , ZoomAmount , PixelRatio , VerticalShift , AudioStream , SubtitleStream , SubtitleDelay , SubtitlesOn , Brightness , Contrast , Gamma , VolumeAmplification , AudioDelay , ResumeTime , Sharpness , NoiseReduction , NonLinStretch , PostProcess , ScalingMethod , DeinterlaceMode , StereoMode , StereoInvert , VideoStream FROM settingsold " ) ; <nl> + m_pDS - > exec ( " DROP TABLE settingsold " ) ; <nl> } <nl> <nl> if ( iVersion < 110 ) <nl> mmm a / xbmc / view / ViewDatabase . cpp <nl> ppp b / xbmc / view / ViewDatabase . cpp <nl> void CViewDatabase : : UpdateTables ( int version ) <nl> if ( version < 6 ) <nl> { <nl> / / convert the " path " table <nl> - m_pDS - > exec ( " CREATE TABLE tmp_view AS SELECT * FROM view " ) ; <nl> - m_pDS - > exec ( " DROP TABLE view " ) ; <nl> + m_pDS - > exec ( " ALTER TABLE view RENAME TO tmp_view " ) ; <nl> <nl> m_pDS - > exec ( " CREATE TABLE view ( " <nl> " idView integer primary key , " <nl> | Merge pull request from pkerling / no - create - select | xbmc/xbmc | 86b0a998504678638bbebf09f27189e1a6298fe2 | 2019-01-03T16:03:10Z |
mmm a / hphp / hack / src / typing / typing . ml <nl> ppp b / hphp / hack / src / typing / typing . ml <nl> and stmt env = function <nl> | Throw ( _ , e ) - > <nl> let p = fst e in <nl> let env , ty = expr env e in <nl> - let exn_ty = Reason . Rthrow p , Tapply ( ( p , SN . Classes . cException ) , [ ] ) in <nl> - Type . sub_type p ( Reason . URthrow ) env exn_ty ty <nl> + exception_ty p env ty <nl> | Continue _ <nl> | Break _ - > env <nl> <nl> and catch parent_lenv after_try env ( ety , exn , b ) = <nl> let env = { env with Env . lenv = after_try } in <nl> let env = LEnv . fully_integrate env parent_lenv in <nl> let cid = CI ety in <nl> - let env = instantiable_cid ( fst ety ) env cid in <nl> - let env , ety = static_class_id ( fst ety ) env cid in <nl> + let ety_p = ( fst ety ) in <nl> + let env = instantiable_cid ety_p env cid in <nl> + let env , ety = static_class_id ety_p env cid in <nl> + let env = exception_ty ety_p env ety in <nl> let env = Env . set_local env ( snd exn ) ety in <nl> let env = block env b in <nl> ( * Only keep the local bindings if this catch is non - terminal * ) <nl> and instantiable_cid p env cid = <nl> Errors . uninstantiable_class pos class_ . tc_pos name ; env <nl> | None | Some _ - > env ) <nl> <nl> + and exception_ty pos env ty = <nl> + let exn_ty = Reason . Rthrow pos , Tapply ( ( pos , SN . Classes . cException ) , [ ] ) in <nl> + Type . sub_type pos ( Reason . URthrow ) env exn_ty ty <nl> + <nl> ( * While converting code from PHP to Hack , some arrays are used <nl> * as tuples . Example : array ( ' ' , 0 ) . Since the elements have <nl> * incompatible types , it should be a tuple . However , while migrating <nl> new file mode 100644 <nl> index 00000000000 . . 9eadc1220cc <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / catch_exception . php <nl> <nl> + < ? hh / / strict <nl> + <nl> + class MyException extends Exception { <nl> + public function __construct ( ) { <nl> + parent : : __construct ( ' dummy ' ) ; <nl> + } <nl> + } <nl> + <nl> + function f ( ) : void { } <nl> + <nl> + function g ( ) : void { <nl> + try { <nl> + } catch ( MyException $ m ) { <nl> + } catch ( Exception $ e ) { <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / catch_exception . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl> new file mode 100644 <nl> index 00000000000 . . bb2f3cd95f9 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / catch_non_exception . php <nl> <nl> + < ? hh / / strict <nl> + <nl> + class NonException { } <nl> + <nl> + function f ( ) : void { <nl> + try { <nl> + } catch ( NonException $ m ) { <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 20cbb2f60b1 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / catch_non_exception . php . exp <nl> <nl> + File " catch_non_exception . php " , line 7 , characters 12 - 23 : <nl> + Invalid exception ( Typing [ 4110 ] ) <nl> + File " catch_non_exception . php " , line 7 , characters 12 - 23 : <nl> + This is an object of type Exception because it is used as an exception <nl> + File " catch_non_exception . php " , line 7 , characters 12 - 23 : <nl> + It is incompatible with an object of type NonException <nl> mmm a / hphp / hack / test / typecheck / test_to_sort_exn . php <nl> ppp b / hphp / hack / test / typecheck / test_to_sort_exn . 
php <nl> <nl> * <nl> * / <nl> <nl> - class A { <nl> + class A extends \ Exception { <nl> public function f ( ) : void { } <nl> } <nl> <nl> new file mode 100644 <nl> index 00000000000 . . 8ef4fd914f0 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / throw_exception . php <nl> <nl> + < ? hh / / strict <nl> + <nl> + class MyException extends Exception { <nl> + public function __construct ( ) { <nl> + parent : : __construct ( ' dummy ' ) ; <nl> + } <nl> + } <nl> + <nl> + function f ( ) : void { <nl> + throw new MyException ( ) ; <nl> + } <nl> + <nl> + function g ( ) : void { <nl> + throw new Exception ( ' msg ' ) ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 4269126fceb <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / throw_exception . php . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + No errors <nl> new file mode 100644 <nl> index 00000000000 . . dc8458b23db <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / throw_non_exception . php <nl> <nl> + < ? hh / / strict <nl> + <nl> + class NonException { } <nl> + <nl> + function f ( ) : void { <nl> + throw new NonException ( ) ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . 431f3e274f7 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / typecheck / throw_non_exception . php . exp <nl> <nl> + File " throw_non_exception . php " , line 6 , characters 9 - 26 : <nl> + Invalid exception ( Typing [ 4110 ] ) <nl> + File " throw_non_exception . php " , line 6 , characters 9 - 26 : <nl> + This is an object of type Exception because it is used as an exception <nl> + File " throw_non_exception . php " , line 6 , characters 9 - 26 : <nl> + It is incompatible with an object of type NonException <nl> | typechecker : error on catch of non - Exception . | facebook/hhvm | 4994066c167d916a272f22dc9866c701147a3119 | 2015-04-15T23:30:52Z |
mmm a / docker / bare / prepare <nl> ppp b / docker / bare / prepare <nl> set - e <nl> SRC_DIR = . . / . . <nl> BUILD_DIR = $ { SRC_DIR } / build <nl> <nl> - # BTW , . so files are acceptable from any Linux distribution for the last 12 years at least . <nl> + # BTW , . so files are acceptable from any Linux distribution for the last 12 years ( at least ) . <nl> + # See https : / / presentations . clickhouse . tech / cpp_russia_2020 / for the details . <nl> <nl> mkdir root <nl> pushd root <nl> | Added a comment | ClickHouse/ClickHouse | a41e5740a95347f7d69e747f032dce831615a5d1 | 2020-07-03T22:52:19Z |
mmm a / dbms / src / Common / Macros . cpp <nl> ppp b / dbms / src / Common / Macros . cpp <nl> String Macros : : expand ( const String & s , size_t level , const String & database_na <nl> throw Exception ( " Unbalanced { and } in string with macros : ' " + s + " ' " , ErrorCodes : : SYNTAX_ERROR ) ; <nl> <nl> String macro_name = s . substr ( begin , end - begin ) ; <nl> + auto it = macros . find ( macro_name ) ; <nl> <nl> - if ( macro_name = = " database " ) <nl> + / / / Prefer explicit macros over implicit . <nl> + if ( it ! = macros . end ( ) ) <nl> + res + = it - > second ; <nl> + else if ( macro_name = = " database " & & ! database_name . empty ( ) ) <nl> res + = database_name ; <nl> - else if ( macro_name = = " table " ) <nl> + else if ( macro_name = = " table " & & ! table_name . empty ( ) ) <nl> res + = table_name ; <nl> else <nl> - { <nl> - auto it = macros . find ( macro_name ) ; <nl> - if ( it = = macros . end ( ) ) <nl> - throw Exception ( " No macro " + macro_name + " in config " , ErrorCodes : : SYNTAX_ERROR ) ; <nl> - <nl> - res + = it - > second ; <nl> - } <nl> + throw Exception ( " No macro " + macro_name + " in config " , ErrorCodes : : SYNTAX_ERROR ) ; <nl> <nl> pos = end + 1 ; <nl> } <nl> String Macros : : expand ( const String & s , size_t level , const String & database_na <nl> return expand ( res , level + 1 , database_name , table_name ) ; <nl> } <nl> <nl> + String Macros : : expand ( const String & s , const String & database_name , const String & table_name ) const <nl> + { <nl> + return expand ( s , 0 , database_name , table_name ) ; <nl> + } <nl> + <nl> Names Macros : : expand ( const Names & source_names , size_t level ) const <nl> { <nl> Names result_names ; <nl> mmm a / dbms / src / Common / Macros . h <nl> ppp b / dbms / src / Common / Macros . h <nl> class Macros <nl> Macros ( const Poco : : Util : : AbstractConfiguration & config , const String & key ) ; <nl> <nl> / * * Replace the substring of the form { macro_name } with the value for macro_name , obtained from the config file . <nl> + * If { database } and { table } macros aren ` t defined explicitly , expand them as database_name and table_name respectively . <nl> * level - the level of recursion . <nl> * / <nl> String expand ( const String & s , size_t level = 0 , const String & database_name = " " , const String & table_name = " " ) const ; <nl> <nl> + String expand ( const String & s , const String & database_name , const String & table_name ) const ; <nl> + <nl> + <nl> / * * Apply expand for the list . <nl> * / <nl> Names expand ( const Names & source_names , size_t level = 0 ) const ; <nl> mmm a / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> ppp b / dbms / src / Storages / StorageReplicatedMergeTree . cpp <nl> StorageReplicatedMergeTree : : StorageReplicatedMergeTree ( <nl> : context ( context_ ) , <nl> database_name ( database_name_ ) , <nl> table_name ( name_ ) , full_path ( path_ + escapeForFileName ( table_name ) + ' / ' ) , <nl> - zookeeper_path ( context . getMacros ( ) - > expand ( zookeeper_path_ , 0 , database_name , table_name ) ) , <nl> - replica_name ( context . getMacros ( ) - > expand ( replica_name_ ) ) , <nl> + zookeeper_path ( context . getMacros ( ) - > expand ( zookeeper_path_ , database_name , table_name ) ) , <nl> + replica_name ( context . 
getMacros ( ) - > expand ( replica_name_ , database_name , table_name ) ) , <nl> data ( database_name , table_name , <nl> full_path , columns_ , <nl> context_ , primary_expr_ast_ , secondary_sorting_expr_list_ , date_column_name , partition_expr_ast_ , <nl> mmm a / dbms / tests / integration / test_distributed_ddl / test . py <nl> ppp b / dbms / tests / integration / test_distributed_ddl / test . py <nl> def test_macro ( started_cluster ) : <nl> ddl_check_query ( instance , " DROP TABLE IF EXISTS distr ON CLUSTER ' { cluster } ' " ) <nl> ddl_check_query ( instance , " DROP TABLE IF EXISTS tab ON CLUSTER ' { cluster } ' " ) <nl> <nl> + <nl> + def test_implicit_macros ( started_cluster ) : <nl> # Temporarily disable random ZK packet drops , they might broke creation if ReplicatedMergeTree replicas <nl> firewall_drops_rules = cluster . pm_random_drops . pop_rules ( ) <nl> <nl> + instance = cluster . instances [ ' ch2 ' ] <nl> + <nl> ddl_check_query ( instance , " DROP DATABASE IF EXISTS test_db ON CLUSTER ' { cluster } ' " ) <nl> - ddl_check_query ( instance , " DROP TABLE IF EXISTS test_db . test_macro ON CLUSTER ' { cluster } ' " ) <nl> ddl_check_query ( instance , " CREATE DATABASE IF NOT EXISTS test_db ON CLUSTER ' { cluster } ' " ) <nl> <nl> ddl_check_query ( instance , " " " <nl> | fix implicit macros | ClickHouse/ClickHouse | ed77e40dfea80761178a0eeb98ec35fe1c5c35be | 2018-10-01T09:01:50Z |
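The ClickHouse row above reworks macro expansion so that an explicitly configured macro always wins, the implicit `{database}`/`{table}` fallbacks only apply when a non-empty name is supplied, and anything else is an error. The sketch below is a rough Python model of that precedence rule, not the C++ implementation (the real code also re-expands the result recursively).

```python
import re

def expand(s, macros, database_name="", table_name=""):
    # Precedence modelled on the patch: explicit macro first, then the implicit
    # {database}/{table} names when non-empty, otherwise raise an error.
    def repl(match):
        name = match.group(1)
        if name in macros:
            return macros[name]
        if name == "database" and database_name:
            return database_name
        if name == "table" and table_name:
            return table_name
        raise ValueError(f"No macro {name} in config")
    return re.sub(r"\{([^{}]+)\}", repl, s)

# An explicit 'table' macro overrides the implicit table name:
print(expand("/clickhouse/tables/{shard}/{table}",
             {"shard": "01", "table": "custom"},
             database_name="db", table_name="hits"))   # /clickhouse/tables/01/custom
# Without it, the implicit table name is used:
print(expand("/clickhouse/tables/{shard}/{table}",
             {"shard": "01"},
             database_name="db", table_name="hits"))   # /clickhouse/tables/01/hits
```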
mmm a / js / common / tests / shell / shell - general - graph . js <nl> ppp b / js / common / tests / shell / shell - general - graph . js <nl> function EdgesAndVerticesSuite ( ) { <nl> var vertexId2 = vertex2 . _id ; <nl> var edge = g [ ec1 ] . save ( vertexId1 , vertexId2 , { } ) ; <nl> var edgeId1 = edge . _id ; <nl> - edge = g [ ec1 ] . replace ( edgeId1 , { label : " knows " } ) ; <nl> + edge = g [ ec1 ] . replace ( edgeId1 , { _from : vertexId1 , _to : vertexId2 , label : " knows " } ) ; <nl> assertFalse ( edge . error ) ; <nl> var edgeObj = g [ ec1 ] . document ( edgeId1 ) ; <nl> assertEqual ( edgeObj . label , " knows " ) ; <nl> function EdgesAndVerticesSuite ( ) { <nl> var vertexId2 = vertex2 . _id ; <nl> var edge = g [ ec1 ] . save ( vertexId1 , vertexId2 , { } ) ; <nl> var edgeId1 = edge . _id ; <nl> - edge = g [ ec1 ] . replace ( edgeId1 , { label : " knows " } ) ; <nl> + edge = g [ ec1 ] . replace ( edgeId1 , { _from : vertexId1 , _to : vertexId2 , label : " knows " } ) ; <nl> edge = g [ ec1 ] . update ( edgeId1 , { blub : " blub " } ) ; <nl> assertFalse ( edge . error ) ; <nl> var edgeObj = g [ ec1 ] . document ( edgeId1 ) ; <nl> | fixed test | arangodb/arangodb | 007e0c6e5c8318b8de66ec1e7c4a5b75d8838ff8 | 2016-03-24T12:14:59Z |
mmm a / api / bazel / repository_locations . bzl <nl> ppp b / api / bazel / repository_locations . bzl <nl> <nl> - BAZEL_SKYLIB_RELEASE = " 1 . 0 . 3 " <nl> - BAZEL_SKYLIB_SHA256 = " 1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c " <nl> - <nl> - OPENCENSUS_PROTO_RELEASE = " 0 . 3 . 0 " <nl> - OPENCENSUS_PROTO_SHA256 = " b7e13f0b4259e80c3070b583c2f39e53153085a6918718b1c710caf7037572b0 " <nl> - <nl> - PGV_GIT_SHA = " 278964a8052f96a2f514add0298098f63fb7f47f " # June 9 , 2020 <nl> - PGV_SHA256 = " e368733c9fb7f8489591ffaf269170d7658cc0cd1ee322b601512b769446d3c8 " <nl> - <nl> - GOOGLEAPIS_GIT_SHA = " 82944da21578a53b74e547774cf62ed31a05b841 " # Dec 2 , 2019 <nl> - GOOGLEAPIS_SHA = " a45019af4d3290f02eaeb1ce10990166978c807cb33a9692141a076ba46d1405 " <nl> - <nl> - PROMETHEUS_GIT_SHA = " 60555c9708c786597e6b07bf846d0dc5c2a46f54 " # Jun 23 , 2020 <nl> - PROMETHEUS_SHA = " 6748b42f6879ad4d045c71019d2512c94be3dd86f60965e9e31e44a3f464323e " <nl> - <nl> - UDPA_RELEASE = " 0 . 0 . 1 " <nl> - UDPA_SHA256 = " 83a7dcc316d741031f34c0409021432b74a39c4811845a177133f02f948fe2d8 " <nl> - <nl> - ZIPKINAPI_RELEASE = " 0 . 2 . 2 " <nl> - ZIPKINAPI_SHA256 = " 688c4fe170821dd589f36ec45aaadc03a618a40283bc1f97da8fa11686fc816b " <nl> - <nl> - RULES_PROTO_GIT_SHA = " 40298556293ae502c66579620a7ce867d5f57311 " # Aug 17 , 2020 <nl> - RULES_PROTO_SHA256 = " aa1ee19226f707d44bee44c720915199c20c84a23318bb0597ed4e5c873ccbd5 " <nl> - <nl> - REPOSITORY_LOCATIONS = dict ( <nl> + DEPENDENCY_REPOSITORIES_SPEC = dict ( <nl> bazel_skylib = dict ( <nl> - sha256 = BAZEL_SKYLIB_SHA256 , <nl> - urls = [ " https : / / github . com / bazelbuild / bazel - skylib / releases / download / " + BAZEL_SKYLIB_RELEASE + " / bazel - skylib - " + BAZEL_SKYLIB_RELEASE + " . tar . gz " ] , <nl> + project_name = " bazel - skylib " , <nl> + project_desc = " Common useful functions and rules for Bazel " , <nl> + project_url = " https : / / github . com / bazelbuild / bazel - skylib " , <nl> + version = " 1 . 0 . 3 " , <nl> + sha256 = " 1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c " , <nl> + urls = [ " https : / / github . com / bazelbuild / bazel - skylib / releases / download / { version } / bazel - skylib - { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 08 - 27 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> com_envoyproxy_protoc_gen_validate = dict ( <nl> - sha256 = PGV_SHA256 , <nl> - strip_prefix = " protoc - gen - validate - " + PGV_GIT_SHA , <nl> - urls = [ " https : / / github . com / envoyproxy / protoc - gen - validate / archive / " + PGV_GIT_SHA + " . tar . gz " ] , <nl> + project_name = " protoc - gen - validate ( PGV ) " , <nl> + project_desc = " protoc plugin to generate polyglot message validators " , <nl> + project_url = " https : / / github . com / envoyproxy / protoc - gen - validate " , <nl> + version = " 278964a8052f96a2f514add0298098f63fb7f47f " , <nl> + sha256 = " e368733c9fb7f8489591ffaf269170d7658cc0cd1ee322b601512b769446d3c8 " , <nl> + strip_prefix = " protoc - gen - validate - { version } " , <nl> + urls = [ " https : / / github . com / envoyproxy / protoc - gen - validate / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 09 " , <nl> + use_category = [ " api " ] , <nl> + ) , <nl> + com_github_cncf_udpa = dict ( <nl> + project_name = " Universal Data Plane API " , <nl> + project_desc = " Universal Data Plane API Working Group ( UDPA - WG ) " , <nl> + project_url = " https : / / github . com / cncf / udpa " , <nl> + version = " 0 . 0 . 
1 " , <nl> + sha256 = " 83a7dcc316d741031f34c0409021432b74a39c4811845a177133f02f948fe2d8 " , <nl> + strip_prefix = " udpa - { version } " , <nl> + urls = [ " https : / / github . com / cncf / udpa / archive / v { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 09 - 23 " , <nl> + use_category = [ " api " ] , <nl> + ) , <nl> + com_github_openzipkin_zipkinapi = dict ( <nl> + project_name = " Zipkin API " , <nl> + project_desc = " Zipkin ' s language independent model and HTTP Api Definitions " , <nl> + project_url = " https : / / github . com / openzipkin / zipkin - api " , <nl> + version = " 0 . 2 . 2 " , <nl> + sha256 = " 688c4fe170821dd589f36ec45aaadc03a618a40283bc1f97da8fa11686fc816b " , <nl> + strip_prefix = " zipkin - api - { version } " , <nl> + urls = [ " https : / / github . com / openzipkin / zipkin - api / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 09 - 23 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> com_google_googleapis = dict ( <nl> # TODO ( dio ) : Consider writing a Starlark macro for importing Google API proto . <nl> - sha256 = GOOGLEAPIS_SHA , <nl> - strip_prefix = " googleapis - " + GOOGLEAPIS_GIT_SHA , <nl> - urls = [ " https : / / github . com / googleapis / googleapis / archive / " + GOOGLEAPIS_GIT_SHA + " . tar . gz " ] , <nl> + project_name = " Google APIs " , <nl> + project_desc = " Public interface definitions of Google APIs " , <nl> + project_url = " https : / / github . com / googleapis / googleapis " , <nl> + version = " 82944da21578a53b74e547774cf62ed31a05b841 " , <nl> + sha256 = " a45019af4d3290f02eaeb1ce10990166978c807cb33a9692141a076ba46d1405 " , <nl> + strip_prefix = " googleapis - { version } " , <nl> + urls = [ " https : / / github . com / googleapis / googleapis / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2019 - 12 - 02 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> - com_github_cncf_udpa = dict ( <nl> - sha256 = UDPA_SHA256 , <nl> - strip_prefix = " udpa - " + UDPA_RELEASE , <nl> - urls = [ " https : / / github . com / cncf / udpa / archive / v " + UDPA_RELEASE + " . tar . gz " ] , <nl> + opencensus_proto = dict ( <nl> + project_name = " OpenCensus Proto " , <nl> + project_desc = " Language Independent Interface Types For OpenCensus " , <nl> + project_url = " https : / / github . com / census - instrumentation / opencensus - proto " , <nl> + version = " 0 . 3 . 0 " , <nl> + sha256 = " b7e13f0b4259e80c3070b583c2f39e53153085a6918718b1c710caf7037572b0 " , <nl> + strip_prefix = " opencensus - proto - { version } / src " , <nl> + urls = [ " https : / / github . com / census - instrumentation / opencensus - proto / archive / v { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 20 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> prometheus_metrics_model = dict ( <nl> - sha256 = PROMETHEUS_SHA , <nl> - strip_prefix = " client_model - " + PROMETHEUS_GIT_SHA , <nl> - urls = [ " https : / / github . com / prometheus / client_model / archive / " + PROMETHEUS_GIT_SHA + " . tar . gz " ] , <nl> - ) , <nl> - opencensus_proto = dict ( <nl> - sha256 = OPENCENSUS_PROTO_SHA256 , <nl> - strip_prefix = " opencensus - proto - " + OPENCENSUS_PROTO_RELEASE + " / src " , <nl> - urls = [ " https : / / github . com / census - instrumentation / opencensus - proto / archive / v " + OPENCENSUS_PROTO_RELEASE + " . tar . gz " ] , <nl> + project_name = " Prometheus client model " , <nl> + project_desc = " Data model artifacts for Prometheus " , <nl> + project_url = " https : / / github . 
com / prometheus / client_model " , <nl> + version = " 60555c9708c786597e6b07bf846d0dc5c2a46f54 " , <nl> + sha256 = " 6748b42f6879ad4d045c71019d2512c94be3dd86f60965e9e31e44a3f464323e " , <nl> + strip_prefix = " client_model - { version } " , <nl> + urls = [ " https : / / github . com / prometheus / client_model / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 23 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> rules_proto = dict ( <nl> - sha256 = RULES_PROTO_SHA256 , <nl> - strip_prefix = " rules_proto - " + RULES_PROTO_GIT_SHA + " " , <nl> - urls = [ " https : / / github . com / bazelbuild / rules_proto / archive / " + RULES_PROTO_GIT_SHA + " . tar . gz " ] , <nl> - ) , <nl> - com_github_openzipkin_zipkinapi = dict ( <nl> - sha256 = ZIPKINAPI_SHA256 , <nl> - strip_prefix = " zipkin - api - " + ZIPKINAPI_RELEASE , <nl> - urls = [ " https : / / github . com / openzipkin / zipkin - api / archive / " + ZIPKINAPI_RELEASE + " . tar . gz " ] , <nl> + project_name = " Protobuf Rules for Bazel " , <nl> + project_desc = " Protocol buffer rules for Bazel " , <nl> + project_url = " https : / / github . com / bazelbuild / rules_proto " , <nl> + version = " 40298556293ae502c66579620a7ce867d5f57311 " , <nl> + sha256 = " aa1ee19226f707d44bee44c720915199c20c84a23318bb0597ed4e5c873ccbd5 " , <nl> + strip_prefix = " rules_proto - { version } " , <nl> + urls = [ " https : / / github . com / bazelbuild / rules_proto / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 08 - 17 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> ) <nl> + <nl> + def _format_version ( s , version ) : <nl> + return s . format ( version = version , dash_version = version . replace ( " . " , " - " ) , underscore_version = version . replace ( " . " , " _ " ) ) <nl> + <nl> + # Interpolate { version } in the above dependency specs . This code should be capable of running in both Python <nl> + # and Starlark . <nl> + def _dependency_repositories ( ) : <nl> + locations = { } <nl> + for key , location in DEPENDENCY_REPOSITORIES_SPEC . items ( ) : <nl> + mutable_location = dict ( location ) <nl> + locations [ key ] = mutable_location <nl> + <nl> + # Fixup with version information . <nl> + if " version " in location : <nl> + if " strip_prefix " in location : <nl> + mutable_location [ " strip_prefix " ] = _format_version ( location [ " strip_prefix " ] , location [ " version " ] ) <nl> + mutable_location [ " urls " ] = [ _format_version ( url , location [ " version " ] ) for url in location [ " urls " ] ] <nl> + return locations <nl> + <nl> + REPOSITORY_LOCATIONS = _dependency_repositories ( ) <nl> mmm a / api / envoy / config / metrics / v2 / stats . proto <nl> ppp b / api / envoy / config / metrics / v2 / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? $ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . 
) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / api / envoy / config / metrics / v3 / stats . proto <nl> ppp b / api / envoy / config / metrics / v3 / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? $ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . ) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / api / envoy / config / metrics / v4alpha / stats . proto <nl> ppp b / api / envoy / config / metrics / v4alpha / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? $ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . ) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / bazel / repositories_extra . bzl <nl> ppp b / bazel / repositories_extra . bzl <nl> def _python_deps ( ) : <nl> pip_repositories ( ) <nl> <nl> pip3_import ( <nl> + name = " config_validation_pip3 " , <nl> + requirements = " @ envoy / / tools / config_validation : requirements . txt " , <nl> + extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> # project_name = " PyYAML " , <nl> # project_url = " https : / / github . com / yaml / pyyaml " , <nl> # version = " 5 . 3 . 1 " , <nl> + # last_update = " 2020 - 03 - 18 " <nl> # use_category = [ " other " ] , <nl> # cpe = " cpe : 2 . 3 : a : pyyaml : pyyaml : * " , <nl> - name = " config_validation_pip3 " , <nl> - requirements = " @ envoy / / tools / config_validation : requirements . txt " , <nl> - extra_pip_args = [ " - - require - hashes " ] , <nl> ) <nl> pip3_import ( <nl> + name = " configs_pip3 " , <nl> + requirements = " @ envoy / / configs : requirements . txt " , <nl> + extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> # project_name = " Jinja " , <nl> # project_url = " http : / / palletsprojects . com / p / jinja " , <nl> # version = " 2 . 11 . 2 " , <nl> + # last_update = " 2020 - 04 - 13 " <nl> # use_category = [ " test " ] , <nl> # cpe = " cpe : 2 . 3 : a : palletsprojects : jinja : * " , <nl> - name = " configs_pip3 " , <nl> - requirements = " @ envoy / / configs : requirements . 
txt " , <nl> - extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> + # project_name = " MarkupSafe " , <nl> + # project_url = " https : / / markupsafe . palletsprojects . com / en / 1 . 1 . x / " , <nl> + # version = " 1 . 1 . 1 " , <nl> + # last_update = " 2019 - 02 - 23 " <nl> + # use_category = [ " test " ] , <nl> ) <nl> pip3_import ( <nl> + name = " kafka_pip3 " , <nl> + requirements = " @ envoy / / source / extensions / filters / network / kafka : requirements . txt " , <nl> + extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> # project_name = " Jinja " , <nl> # project_url = " http : / / palletsprojects . com / p / jinja " , <nl> # version = " 2 . 11 . 2 " , <nl> + # last_update = " 2020 - 04 - 13 " <nl> # use_category = [ " test " ] , <nl> # cpe = " cpe : 2 . 3 : a : palletsprojects : jinja : * " , <nl> - name = " kafka_pip3 " , <nl> - requirements = " @ envoy / / source / extensions / filters / network / kafka : requirements . txt " , <nl> - extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> + # project_name = " MarkupSafe " , <nl> + # project_url = " https : / / markupsafe . palletsprojects . com / en / 1 . 1 . x / " , <nl> + # version = " 1 . 1 . 1 " , <nl> + # last_update = " 2019 - 02 - 23 " <nl> + # use_category = [ " test " ] , <nl> ) <nl> pip3_import ( <nl> name = " headersplit_pip3 " , <nl> requirements = " @ envoy / / tools / envoy_headersplit : requirements . txt " , <nl> extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> + # project_name = " Clang " , <nl> + # project_url = " https : / / clang . llvm . org / " , <nl> + # version = " 10 . 0 . 1 " , <nl> + # last_update = " 2020 - 07 - 21 " <nl> + # use_category = [ " other " ] , <nl> + # cpe = " cpe : 2 . 3 : a : llvm : clang : * " , <nl> ) <nl> pip3_import ( <nl> + name = " protodoc_pip3 " , <nl> + requirements = " @ envoy / / tools / protodoc : requirements . txt " , <nl> + extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> # project_name = " PyYAML " , <nl> # project_url = " https : / / github . com / yaml / pyyaml " , <nl> # version = " 5 . 3 . 1 " , <nl> + # last_update = " 2020 - 03 - 18 " <nl> # use_category = [ " other " ] , <nl> # cpe = " cpe : 2 . 3 : a : pyyaml : pyyaml : * " , <nl> - name = " protodoc_pip3 " , <nl> - requirements = " @ envoy / / tools / protodoc : requirements . txt " , <nl> - extra_pip_args = [ " - - require - hashes " ] , <nl> ) <nl> pip3_import ( <nl> + name = " thrift_pip3 " , <nl> + requirements = " @ envoy / / test / extensions / filters / network / thrift_proxy : requirements . txt " , <nl> + extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> # project_name = " Apache Thrift " , <nl> # project_url = " http : / / thrift . apache . org / " , <nl> # version = " 0 . 11 . 0 " , <nl> + # last_update = " 2017 - 12 - 07 " <nl> # use_category = [ " dataplane " ] , <nl> # cpe = " cpe : 2 . 3 : a : apache : thrift : * " , <nl> - name = " thrift_pip3 " , <nl> - requirements = " @ envoy / / test / extensions / filters / network / thrift_proxy : requirements . txt " , <nl> - extra_pip_args = [ " - - require - hashes " ] , <nl> + <nl> + # project_name = " Six : Python 2 and 3 Compatibility Library " , <nl> + # project_url = " https : / / six . readthedocs . io / " , <nl> + # version = " 1 . 15 . 0 " , <nl> + # last_update = " 2020 - 05 - 21 " <nl> + # use_category = [ " dataplane " ] , <nl> ) <nl> <nl> # Envoy deps that rely on a first stage of dependency loading in envoy_dependencies ( ) . <nl> mmm a / ci / flaky_test / requirements . 
txt <nl> ppp b / ci / flaky_test / requirements . txt <nl> <nl> - multidict <nl> - yarl <nl> - wheel = = 0 . 35 . 1 <nl> - slackclient = = 2 . 8 . 0 <nl> \ No newline at end of file <nl> + aiohttp = = 3 . 6 . 2 \ <nl> + - - hash = sha256 : 1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \ <nl> + - - hash = sha256 : 259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \ <nl> + - - hash = sha256 : 2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \ <nl> + - - hash = sha256 : 32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \ <nl> + - - hash = sha256 : 344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \ <nl> + - - hash = sha256 : 460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \ <nl> + - - hash = sha256 : 4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \ <nl> + - - hash = sha256 : 50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \ <nl> + - - hash = sha256 : 6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \ <nl> + - - hash = sha256 : 65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \ <nl> + - - hash = sha256 : ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \ <nl> + - - hash = sha256 : b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965 <nl> + async - timeout = = 3 . 0 . 1 \ <nl> + - - hash = sha256 : 0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ <nl> + - - hash = sha256 : 4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 <nl> + attrs = = 20 . 2 . 0 \ <nl> + - - hash = sha256 : 26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594 \ <nl> + - - hash = sha256 : fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc <nl> + chardet = = 3 . 0 . 4 \ <nl> + - - hash = sha256 : 84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ <nl> + - - hash = sha256 : fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 <nl> + idna = = 2 . 10 \ <nl> + - - hash = sha256 : b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ <nl> + - - hash = sha256 : b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 <nl> + idna_ssl = = 1 . 1 . 0 \ <nl> + - - hash = sha256 : a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c <nl> + multidict = = 4 . 7 . 
6 \ <nl> + - - hash = sha256 : 1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a \ <nl> + - - hash = sha256 : 275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000 \ <nl> + - - hash = sha256 : 3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2 \ <nl> + - - hash = sha256 : 4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507 \ <nl> + - - hash = sha256 : 5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5 \ <nl> + - - hash = sha256 : 51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7 \ <nl> + - - hash = sha256 : 5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d \ <nl> + - - hash = sha256 : 6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463 \ <nl> + - - hash = sha256 : 7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19 \ <nl> + - - hash = sha256 : 9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3 \ <nl> + - - hash = sha256 : c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b \ <nl> + - - hash = sha256 : d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c \ <nl> + - - hash = sha256 : e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87 \ <nl> + - - hash = sha256 : f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7 \ <nl> + - - hash = sha256 : fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430 \ <nl> + - - hash = sha256 : fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255 \ <nl> + - - hash = sha256 : feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d <nl> + slackclient = = 2 . 9 . 1 \ <nl> + - - hash = sha256 : 214edd4a494cc74353c8084ec184ff97a116d4b12cde287f805a9af948ef39ae \ <nl> + - - hash = sha256 : 3a3e84fd4f13d9715740c13ce6c3c25b970147aeeeec22ef137d796124dfcf08 <nl> + typing - extensions = = 3 . 7 . 4 . 3 \ <nl> + - - hash = sha256 : 7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ <nl> + - - hash = sha256 : 99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ <nl> + - - hash = sha256 : dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f <nl> + wheel = = 0 . 35 . 1 \ <nl> + - - hash = sha256 : 497add53525d16c173c2c1c733b8f655510e909ea78cc0e29d374243544b77a2 \ <nl> + - - hash = sha256 : 99a22d87add3f634ff917310a3d87e499f19e663413a52eb9232c447aa646c9f <nl> + yarl = = 1 . 6 . 
0 \ <nl> + - - hash = sha256 : 04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e \ <nl> + - - hash = sha256 : 3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5 \ <nl> + - - hash = sha256 : 5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580 \ <nl> + - - hash = sha256 : 5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc \ <nl> + - - hash = sha256 : 61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b \ <nl> + - - hash = sha256 : 67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2 \ <nl> + - - hash = sha256 : 6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a \ <nl> + - - hash = sha256 : 7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921 \ <nl> + - - hash = sha256 : b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e \ <nl> + - - hash = sha256 : c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1 \ <nl> + - - hash = sha256 : c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d \ <nl> + - - hash = sha256 : c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131 \ <nl> + - - hash = sha256 : d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a \ <nl> + - - hash = sha256 : db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1 \ <nl> + - - hash = sha256 : e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188 \ <nl> + - - hash = sha256 : e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020 \ <nl> + - - hash = sha256 : fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a <nl> mmm a / docs / build . sh <nl> ppp b / docs / build . sh <nl> rm - rf " $ { GENERATED_RST_DIR } " <nl> mkdir - p " $ { GENERATED_RST_DIR } " <nl> <nl> source_venv " $ BUILD_DIR " <nl> - pip3 install - r " $ { SCRIPT_DIR } " / requirements . txt <nl> + pip3 install - - require - hashes - r " $ { SCRIPT_DIR } " / requirements . txt <nl> <nl> # Clean up any stale files in the API tree output . Bazel remembers valid cached <nl> # files still . <nl> mmm a / docs / requirements . txt <nl> ppp b / docs / requirements . txt <nl> <nl> - alabaster = = 0 . 7 . 12 <nl> - Babel = = 2 . 8 . 0 <nl> - docutils = = 0 . 16 <nl> - gitdb = = 4 . 0 . 5 <nl> - GitPython = = 3 . 1 . 7 <nl> - imagesize = = 1 . 2 . 0 <nl> - Jinja2 = = 2 . 11 . 2 <nl> - MarkupSafe = = 1 . 1 . 1 <nl> - Pygments = = 2 . 6 . 1 <nl> - pytz = = 2020 . 1 <nl> - requests > = 2 . 24 . 0 <nl> - six = = 1 . 15 . 0 <nl> - smmap = = 3 . 0 . 4 <nl> - snowballstemmer = = 2 . 0 . 0 <nl> - sphinx_rtd_theme = = 0 . 5 . 0 <nl> - Sphinx = = 3 . 2 . 1 <nl> - sphinxcontrib - httpdomain = = 1 . 7 . 0 <nl> - sphinx - tabs = = 1 . 3 . 0 <nl> - sphinx - copybutton = = 0 . 3 . 0 <nl> + alabaster = = 0 . 7 . 12 \ <nl> + - - hash = sha256 : 446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ <nl> + - - hash = sha256 : a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 <nl> + Babel = = 2 . 8 . 0 \ <nl> + - - hash = sha256 : 1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38 \ <nl> + - - hash = sha256 : d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4 <nl> + certifi = = 2020 . 6 . 20 \ <nl> + - - hash = sha256 : 5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3 \ <nl> + - - hash = sha256 : 8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41 <nl> + chardet = = 3 . 0 . 
4 \ <nl> + - - hash = sha256 : 84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ <nl> + - - hash = sha256 : fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 <nl> + docutils = = 0 . 16 \ <nl> + - - hash = sha256 : 0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ <nl> + - - hash = sha256 : c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc <nl> + gitdb = = 4 . 0 . 5 \ <nl> + - - hash = sha256 : 91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac \ <nl> + - - hash = sha256 : c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9 <nl> + GitPython = = 3 . 1 . 8 \ <nl> + - - hash = sha256 : 080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912 \ <nl> + - - hash = sha256 : 1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910 <nl> + idna = = 2 . 10 \ <nl> + - - hash = sha256 : b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ <nl> + - - hash = sha256 : b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 <nl> + imagesize = = 1 . 2 . 0 \ <nl> + - - hash = sha256 : 6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \ <nl> + - - hash = sha256 : b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1 <nl> + Jinja2 = = 2 . 11 . 2 \ <nl> + - - hash = sha256 : 89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \ <nl> + - - hash = sha256 : f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 <nl> + MarkupSafe = = 1 . 1 . 1 \ <nl> + - - hash = sha256 : 00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \ <nl> + - - hash = sha256 : 09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \ <nl> + - - hash = sha256 : 09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \ <nl> + - - hash = sha256 : 1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \ <nl> + - - hash = sha256 : 13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \ <nl> + - - hash = sha256 : 24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \ <nl> + - - hash = sha256 : 29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \ <nl> + - - hash = sha256 : 43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \ <nl> + - - hash = sha256 : 46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \ <nl> + - - hash = sha256 : 500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \ <nl> + - - hash = sha256 : 535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \ <nl> + - - hash = sha256 : 596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \ <nl> + - - hash = sha256 : 62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \ <nl> + - - hash = sha256 : 6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \ <nl> + - - hash = sha256 : 6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \ <nl> + - - hash = sha256 : 717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \ <nl> + - - hash = sha256 : 79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \ <nl> + - - hash = sha256 : 7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \ <nl> + - - hash = sha256 : 88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \ <nl> + - - hash = sha256 : 8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \ <nl> + - - hash = sha256 : 98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \ <nl> + - - hash = sha256 : 
9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \ <nl> + - - hash = sha256 : 9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \ <nl> + - - hash = sha256 : ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \ <nl> + - - hash = sha256 : b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \ <nl> + - - hash = sha256 : b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \ <nl> + - - hash = sha256 : b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \ <nl> + - - hash = sha256 : ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \ <nl> + - - hash = sha256 : c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \ <nl> + - - hash = sha256 : cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \ <nl> + - - hash = sha256 : cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \ <nl> + - - hash = sha256 : e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \ <nl> + - - hash = sha256 : e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be <nl> + packaging = = 20 . 4 \ <nl> + - - hash = sha256 : 4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8 \ <nl> + - - hash = sha256 : 998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181 <nl> + Pygments = = 2 . 7 . 1 \ <nl> + - - hash = sha256 : 307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \ <nl> + - - hash = sha256 : 926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 <nl> + pyparsing = = 2 . 4 . 7 \ <nl> + - - hash = sha256 : c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ <nl> + - - hash = sha256 : ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b <nl> + pytz = = 2020 . 1 \ <nl> + - - hash = sha256 : a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed \ <nl> + - - hash = sha256 : c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048 <nl> + requests = = 2 . 24 . 0 \ <nl> + - - hash = sha256 : b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b \ <nl> + - - hash = sha256 : fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898 <nl> + six = = 1 . 15 . 0 \ <nl> + - - hash = sha256 : 30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ <nl> + - - hash = sha256 : 8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced <nl> + smmap = = 3 . 0 . 4 \ <nl> + - - hash = sha256 : 54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4 \ <nl> + - - hash = sha256 : 9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24 <nl> + snowballstemmer = = 2 . 0 . 0 \ <nl> + - - hash = sha256 : 209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0 \ <nl> + - - hash = sha256 : df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52 <nl> + Sphinx = = 3 . 2 . 1 \ <nl> + - - hash = sha256 : 321d6d9b16fa381a5306e5a0b76cd48ffbc588e6340059a729c6fdd66087e0e8 \ <nl> + - - hash = sha256 : ce6fd7ff5b215af39e2fcd44d4a321f6694b4530b6f2b2109b64d120773faea0 <nl> + sphinx - copybutton = = 0 . 3 . 0 \ <nl> + - - hash = sha256 : 4becad3a1e7c50211f1477e34fd4b6d027680e1612f497cb5b88cf85bccddaaa \ <nl> + - - hash = sha256 : 4cd06afd0588aa43eba968bfc6105e1ec6546c50a51f880af1d89afaebc6fb58 <nl> + sphinx - rtd - theme = = 0 . 5 . 0 \ <nl> + - - hash = sha256 : 22c795ba2832a169ca301cd0a083f7a434e09c538c70beb42782c073651b707d \ <nl> + - - hash = sha256 : 373413d0f82425aaa28fb288009bf0d0964711d347763af2f1b65cafcb028c82 <nl> + sphinx - tabs = = 1 . 3 . 
0 \ <nl> + - - hash = sha256 : 537857f91f1b371f7b45eb8ac83001618b3e3178c78df073d2cc4558a8e66ef5 \ <nl> + - - hash = sha256 : 54132c8a57aa19bba6e17fe26eb94ea9df531708ff3f509b119313b32d0d5aff <nl> + sphinxcontrib - applehelp = = 1 . 0 . 2 \ <nl> + - - hash = sha256 : 806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \ <nl> + - - hash = sha256 : a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 <nl> + sphinxcontrib - devhelp = = 1 . 0 . 2 \ <nl> + - - hash = sha256 : 8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \ <nl> + - - hash = sha256 : ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 <nl> + sphinxcontrib - htmlhelp = = 1 . 0 . 3 \ <nl> + - - hash = sha256 : 3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f \ <nl> + - - hash = sha256 : e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b <nl> + sphinxcontrib - httpdomain = = 1 . 7 . 0 \ <nl> + - - hash = sha256 : 1fb5375007d70bf180cdd1c79e741082be7aa2d37ba99efe561e1c2e3f38191e \ <nl> + - - hash = sha256 : ac40b4fba58c76b073b03931c7b8ead611066a6aebccafb34dc19694f4eb6335 <nl> + sphinxcontrib - jsmath = = 1 . 0 . 1 \ <nl> + - - hash = sha256 : 2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ <nl> + - - hash = sha256 : a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 <nl> + sphinxcontrib - qthelp = = 1 . 0 . 3 \ <nl> + - - hash = sha256 : 4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \ <nl> + - - hash = sha256 : bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 <nl> + sphinxcontrib - serializinghtml = = 1 . 1 . 4 \ <nl> + - - hash = sha256 : eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc \ <nl> + - - hash = sha256 : f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a <nl> + urllib3 = = 1 . 25 . 10 \ <nl> + - - hash = sha256 : 91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a \ <nl> + - - hash = sha256 : e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461 <nl> mmm a / generated_api_shadow / bazel / repository_locations . bzl <nl> ppp b / generated_api_shadow / bazel / repository_locations . bzl <nl> <nl> - BAZEL_SKYLIB_RELEASE = " 1 . 0 . 3 " <nl> - BAZEL_SKYLIB_SHA256 = " 1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c " <nl> - <nl> - OPENCENSUS_PROTO_RELEASE = " 0 . 3 . 0 " <nl> - OPENCENSUS_PROTO_SHA256 = " b7e13f0b4259e80c3070b583c2f39e53153085a6918718b1c710caf7037572b0 " <nl> - <nl> - PGV_GIT_SHA = " 278964a8052f96a2f514add0298098f63fb7f47f " # June 9 , 2020 <nl> - PGV_SHA256 = " e368733c9fb7f8489591ffaf269170d7658cc0cd1ee322b601512b769446d3c8 " <nl> - <nl> - GOOGLEAPIS_GIT_SHA = " 82944da21578a53b74e547774cf62ed31a05b841 " # Dec 2 , 2019 <nl> - GOOGLEAPIS_SHA = " a45019af4d3290f02eaeb1ce10990166978c807cb33a9692141a076ba46d1405 " <nl> - <nl> - PROMETHEUS_GIT_SHA = " 60555c9708c786597e6b07bf846d0dc5c2a46f54 " # Jun 23 , 2020 <nl> - PROMETHEUS_SHA = " 6748b42f6879ad4d045c71019d2512c94be3dd86f60965e9e31e44a3f464323e " <nl> - <nl> - UDPA_RELEASE = " 0 . 0 . 1 " <nl> - UDPA_SHA256 = " 83a7dcc316d741031f34c0409021432b74a39c4811845a177133f02f948fe2d8 " <nl> - <nl> - ZIPKINAPI_RELEASE = " 0 . 2 . 
2 " <nl> - ZIPKINAPI_SHA256 = " 688c4fe170821dd589f36ec45aaadc03a618a40283bc1f97da8fa11686fc816b " <nl> - <nl> - RULES_PROTO_GIT_SHA = " 40298556293ae502c66579620a7ce867d5f57311 " # Aug 17 , 2020 <nl> - RULES_PROTO_SHA256 = " aa1ee19226f707d44bee44c720915199c20c84a23318bb0597ed4e5c873ccbd5 " <nl> - <nl> - REPOSITORY_LOCATIONS = dict ( <nl> + DEPENDENCY_REPOSITORIES_SPEC = dict ( <nl> bazel_skylib = dict ( <nl> - sha256 = BAZEL_SKYLIB_SHA256 , <nl> - urls = [ " https : / / github . com / bazelbuild / bazel - skylib / releases / download / " + BAZEL_SKYLIB_RELEASE + " / bazel - skylib - " + BAZEL_SKYLIB_RELEASE + " . tar . gz " ] , <nl> + project_name = " bazel - skylib " , <nl> + project_desc = " Common useful functions and rules for Bazel " , <nl> + project_url = " https : / / github . com / bazelbuild / bazel - skylib " , <nl> + version = " 1 . 0 . 3 " , <nl> + sha256 = " 1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c " , <nl> + urls = [ " https : / / github . com / bazelbuild / bazel - skylib / releases / download / { version } / bazel - skylib - { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 08 - 27 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> com_envoyproxy_protoc_gen_validate = dict ( <nl> - sha256 = PGV_SHA256 , <nl> - strip_prefix = " protoc - gen - validate - " + PGV_GIT_SHA , <nl> - urls = [ " https : / / github . com / envoyproxy / protoc - gen - validate / archive / " + PGV_GIT_SHA + " . tar . gz " ] , <nl> + project_name = " protoc - gen - validate ( PGV ) " , <nl> + project_desc = " protoc plugin to generate polyglot message validators " , <nl> + project_url = " https : / / github . com / envoyproxy / protoc - gen - validate " , <nl> + version = " 278964a8052f96a2f514add0298098f63fb7f47f " , <nl> + sha256 = " e368733c9fb7f8489591ffaf269170d7658cc0cd1ee322b601512b769446d3c8 " , <nl> + strip_prefix = " protoc - gen - validate - { version } " , <nl> + urls = [ " https : / / github . com / envoyproxy / protoc - gen - validate / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 09 " , <nl> + use_category = [ " api " ] , <nl> + ) , <nl> + com_github_cncf_udpa = dict ( <nl> + project_name = " Universal Data Plane API " , <nl> + project_desc = " Universal Data Plane API Working Group ( UDPA - WG ) " , <nl> + project_url = " https : / / github . com / cncf / udpa " , <nl> + version = " 0 . 0 . 1 " , <nl> + sha256 = " 83a7dcc316d741031f34c0409021432b74a39c4811845a177133f02f948fe2d8 " , <nl> + strip_prefix = " udpa - { version } " , <nl> + urls = [ " https : / / github . com / cncf / udpa / archive / v { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 09 - 23 " , <nl> + use_category = [ " api " ] , <nl> + ) , <nl> + com_github_openzipkin_zipkinapi = dict ( <nl> + project_name = " Zipkin API " , <nl> + project_desc = " Zipkin ' s language independent model and HTTP Api Definitions " , <nl> + project_url = " https : / / github . com / openzipkin / zipkin - api " , <nl> + version = " 0 . 2 . 2 " , <nl> + sha256 = " 688c4fe170821dd589f36ec45aaadc03a618a40283bc1f97da8fa11686fc816b " , <nl> + strip_prefix = " zipkin - api - { version } " , <nl> + urls = [ " https : / / github . com / openzipkin / zipkin - api / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 09 - 23 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> com_google_googleapis = dict ( <nl> # TODO ( dio ) : Consider writing a Starlark macro for importing Google API proto . 
<nl> - sha256 = GOOGLEAPIS_SHA , <nl> - strip_prefix = " googleapis - " + GOOGLEAPIS_GIT_SHA , <nl> - urls = [ " https : / / github . com / googleapis / googleapis / archive / " + GOOGLEAPIS_GIT_SHA + " . tar . gz " ] , <nl> + project_name = " Google APIs " , <nl> + project_desc = " Public interface definitions of Google APIs " , <nl> + project_url = " https : / / github . com / googleapis / googleapis " , <nl> + version = " 82944da21578a53b74e547774cf62ed31a05b841 " , <nl> + sha256 = " a45019af4d3290f02eaeb1ce10990166978c807cb33a9692141a076ba46d1405 " , <nl> + strip_prefix = " googleapis - { version } " , <nl> + urls = [ " https : / / github . com / googleapis / googleapis / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2019 - 12 - 02 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> - com_github_cncf_udpa = dict ( <nl> - sha256 = UDPA_SHA256 , <nl> - strip_prefix = " udpa - " + UDPA_RELEASE , <nl> - urls = [ " https : / / github . com / cncf / udpa / archive / v " + UDPA_RELEASE + " . tar . gz " ] , <nl> + opencensus_proto = dict ( <nl> + project_name = " OpenCensus Proto " , <nl> + project_desc = " Language Independent Interface Types For OpenCensus " , <nl> + project_url = " https : / / github . com / census - instrumentation / opencensus - proto " , <nl> + version = " 0 . 3 . 0 " , <nl> + sha256 = " b7e13f0b4259e80c3070b583c2f39e53153085a6918718b1c710caf7037572b0 " , <nl> + strip_prefix = " opencensus - proto - { version } / src " , <nl> + urls = [ " https : / / github . com / census - instrumentation / opencensus - proto / archive / v { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 20 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> prometheus_metrics_model = dict ( <nl> - sha256 = PROMETHEUS_SHA , <nl> - strip_prefix = " client_model - " + PROMETHEUS_GIT_SHA , <nl> - urls = [ " https : / / github . com / prometheus / client_model / archive / " + PROMETHEUS_GIT_SHA + " . tar . gz " ] , <nl> - ) , <nl> - opencensus_proto = dict ( <nl> - sha256 = OPENCENSUS_PROTO_SHA256 , <nl> - strip_prefix = " opencensus - proto - " + OPENCENSUS_PROTO_RELEASE + " / src " , <nl> - urls = [ " https : / / github . com / census - instrumentation / opencensus - proto / archive / v " + OPENCENSUS_PROTO_RELEASE + " . tar . gz " ] , <nl> + project_name = " Prometheus client model " , <nl> + project_desc = " Data model artifacts for Prometheus " , <nl> + project_url = " https : / / github . com / prometheus / client_model " , <nl> + version = " 60555c9708c786597e6b07bf846d0dc5c2a46f54 " , <nl> + sha256 = " 6748b42f6879ad4d045c71019d2512c94be3dd86f60965e9e31e44a3f464323e " , <nl> + strip_prefix = " client_model - { version } " , <nl> + urls = [ " https : / / github . com / prometheus / client_model / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 06 - 23 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> rules_proto = dict ( <nl> - sha256 = RULES_PROTO_SHA256 , <nl> - strip_prefix = " rules_proto - " + RULES_PROTO_GIT_SHA + " " , <nl> - urls = [ " https : / / github . com / bazelbuild / rules_proto / archive / " + RULES_PROTO_GIT_SHA + " . tar . gz " ] , <nl> - ) , <nl> - com_github_openzipkin_zipkinapi = dict ( <nl> - sha256 = ZIPKINAPI_SHA256 , <nl> - strip_prefix = " zipkin - api - " + ZIPKINAPI_RELEASE , <nl> - urls = [ " https : / / github . com / openzipkin / zipkin - api / archive / " + ZIPKINAPI_RELEASE + " . tar . 
gz " ] , <nl> + project_name = " Protobuf Rules for Bazel " , <nl> + project_desc = " Protocol buffer rules for Bazel " , <nl> + project_url = " https : / / github . com / bazelbuild / rules_proto " , <nl> + version = " 40298556293ae502c66579620a7ce867d5f57311 " , <nl> + sha256 = " aa1ee19226f707d44bee44c720915199c20c84a23318bb0597ed4e5c873ccbd5 " , <nl> + strip_prefix = " rules_proto - { version } " , <nl> + urls = [ " https : / / github . com / bazelbuild / rules_proto / archive / { version } . tar . gz " ] , <nl> + last_updated = " 2020 - 08 - 17 " , <nl> + use_category = [ " api " ] , <nl> ) , <nl> ) <nl> + <nl> + def _format_version ( s , version ) : <nl> + return s . format ( version = version , dash_version = version . replace ( " . " , " - " ) , underscore_version = version . replace ( " . " , " _ " ) ) <nl> + <nl> + # Interpolate { version } in the above dependency specs . This code should be capable of running in both Python <nl> + # and Starlark . <nl> + def _dependency_repositories ( ) : <nl> + locations = { } <nl> + for key , location in DEPENDENCY_REPOSITORIES_SPEC . items ( ) : <nl> + mutable_location = dict ( location ) <nl> + locations [ key ] = mutable_location <nl> + <nl> + # Fixup with version information . <nl> + if " version " in location : <nl> + if " strip_prefix " in location : <nl> + mutable_location [ " strip_prefix " ] = _format_version ( location [ " strip_prefix " ] , location [ " version " ] ) <nl> + mutable_location [ " urls " ] = [ _format_version ( url , location [ " version " ] ) for url in location [ " urls " ] ] <nl> + return locations <nl> + <nl> + REPOSITORY_LOCATIONS = _dependency_repositories ( ) <nl> mmm a / generated_api_shadow / envoy / config / metrics / v2 / stats . proto <nl> ppp b / generated_api_shadow / envoy / config / metrics / v2 / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? $ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . ) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / generated_api_shadow / envoy / config / metrics / v3 / stats . proto <nl> ppp b / generated_api_shadow / envoy / config / metrics / v3 / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? 
$ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . ) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / generated_api_shadow / envoy / config / metrics / v4alpha / stats . proto <nl> ppp b / generated_api_shadow / envoy / config / metrics / v4alpha / stats . proto <nl> message TagSpecifier { <nl> / / <nl> / / { <nl> / / " tag_name " : " envoy . cluster_name " , <nl> - / / " regex " : " ^ cluster \ . ( ( . + ? ) \ . ) " <nl> + / / " regex " : " ^ cluster \ \ . ( ( . + ? ) \ \ . ) " <nl> / / } <nl> / / <nl> / / Note that the regex will remove ` ` foo_cluster . ` ` making the tag extracted <nl> message TagSpecifier { <nl> / / [ <nl> / / { <nl> / / " tag_name " : " envoy . http_user_agent " , <nl> - / / " regex " : " ^ http ( ? = \ . ) . * ? \ . user_agent \ . ( ( . + ? ) \ . ) \ w + ? $ " <nl> + / / " regex " : " ^ http ( ? = \ \ . ) . * ? \ \ . user_agent \ \ . ( ( . + ? ) \ \ . ) \ \ w + ? $ " <nl> / / } , <nl> / / { <nl> / / " tag_name " : " envoy . http_conn_manager_prefix " , <nl> - / / " regex " : " ^ http \ . ( ( . * ? ) \ . ) " <nl> + / / " regex " : " ^ http \ \ . ( ( . * ? ) \ \ . ) " <nl> / / } <nl> / / ] <nl> / / <nl> mmm a / tools / code_format / requirements . txt <nl> ppp b / tools / code_format / requirements . txt <nl> <nl> - flake8 = = 3 . 8 . 3 <nl> - yapf = = 0 . 30 . 0 <nl> + flake8 = = 3 . 8 . 3 \ <nl> + - - hash = sha256 : 15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c \ <nl> + - - hash = sha256 : f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208 <nl> + importlib - metadata = = 2 . 0 . 0 \ <nl> + - - hash = sha256 : 77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da \ <nl> + - - hash = sha256 : cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3 <nl> + mccabe = = 0 . 6 . 1 \ <nl> + - - hash = sha256 : ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ <nl> + - - hash = sha256 : dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f <nl> + pycodestyle = = 2 . 6 . 0 \ <nl> + - - hash = sha256 : 2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367 \ <nl> + - - hash = sha256 : c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e <nl> + pyflakes = = 2 . 2 . 0 \ <nl> + - - hash = sha256 : 0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \ <nl> + - - hash = sha256 : 35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 <nl> + yapf = = 0 . 30 . 0 \ <nl> + - - hash = sha256 : 3000abee4c28daebad55da6c85f3cd07b8062ce48e2e9943c8da1b9667d48427 \ <nl> + - - hash = sha256 : 3abf61ba67cf603069710d30acbc88cfe565d907e16ad81429ae90ce9651e0c9 <nl> + zipp = = 3 . 2 . 0 \ <nl> + - - hash = sha256 : 43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6 \ <nl> + - - hash = sha256 : b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f <nl> mmm a / tools / deprecate_features / requirements . txt <nl> ppp b / tools / deprecate_features / requirements . txt <nl> @ @ - 1 + 1 , 3 @ @ <nl> - six = = 1 . 15 . 0 <nl> + six = = 1 . 15 . 0 \ <nl> + - - hash = sha256 : 30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ <nl> + - - hash = sha256 : 8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced <nl> mmm a / tools / deprecate_version / requirements . txt <nl> ppp b / tools / deprecate_version / requirements . txt <nl> <nl> - GitPython = = 3 . 1 . 7 <nl> - PyGithub = = 1 . 
43 . 8 <nl> + GitPython = = 3 . 1 . 8 \ <nl> + - - hash = sha256 : 080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912 \ <nl> + - - hash = sha256 : 1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910 <nl> + PyGithub = = 1 . 53 \ <nl> + - - hash = sha256 : 776befaddab9d8fddd525d52a6ca1ac228cf62b5b1e271836d766f4925e1452e \ <nl> + - - hash = sha256 : 8ad656bf79958e775ec59f7f5a3dbcbadac12147ae3dc42708b951064096af15 <nl> mmm a / tools / github / requirements . txt <nl> ppp b / tools / github / requirements . txt <nl> @ @ - 1 + 1 , 3 @ @ <nl> - PyGithub = = 1 . 43 . 8 <nl> + PyGithub = = 1 . 53 \ <nl> + - - hash = sha256 : 776befaddab9d8fddd525d52a6ca1ac228cf62b5b1e271836d766f4925e1452e \ <nl> + - - hash = sha256 : 8ad656bf79958e775ec59f7f5a3dbcbadac12147ae3dc42708b951064096af15 <nl> | Dependencies : refactor API dependencies , fix proto JSON block , finish pip install hashes ( ) | envoyproxy/envoy | 4b4dc5cae283f5bd2478b345e0caea8b31cef682 | 2020-10-02T20:19:40Z |
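A side note on the stats.proto comment fix in the envoy commit above: those doc examples are JSON strings, so a literal dot in the regex has to be written as `\\.` — one backslash is consumed by the string encoding, and the regex engine still receives `\.`. The same double escaping applies in C++ string literals. A minimal sketch using std::regex (not Envoy code; `foo_cluster` comes from the comment, the full stat name is made up for illustration):

```cpp
// Sketch only: std::regex (ECMAScript grammar) standing in for the JSON config
// examples; the pattern is the one from the doc comment above.
#include <iostream>
#include <regex>
#include <string>

int main() {
  // Written as "\\." so the regex engine receives "\.", i.e. a literal dot.
  std::regex cluster_tag("^cluster\\.((.+?)\\.)");

  std::string stat = "cluster.foo_cluster.upstream_rq_total";
  std::smatch m;
  if (std::regex_search(stat, m, cluster_tag))
    std::cout << "extracted tag value: " << m[2] << "\n";  // foo_cluster
  return 0;
}
```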
new file mode 100644 <nl> index 0000000000000 . . 61665a591c181 <nl> mmm / dev / null <nl> ppp b / tensorflow / java / src / main / java / org / tensorflow / NativeLibrary . java <nl> <nl> + / * Copyright 2017 The TensorFlow Authors . All Rights Reserved . <nl> + <nl> + Licensed under the Apache License , Version 2 . 0 ( the " License " ) ; <nl> + you may not use this file except in compliance with the License . <nl> + You may obtain a copy of the License at <nl> + <nl> + http : / / www . apache . org / licenses / LICENSE - 2 . 0 <nl> + <nl> + Unless required by applicable law or agreed to in writing , software <nl> + distributed under the License is distributed on an " AS IS " BASIS , <nl> + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . <nl> + See the License for the specific language governing permissions and <nl> + limitations under the License . <nl> + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> + <nl> + package org . tensorflow ; <nl> + <nl> + import java . io . File ; <nl> + import java . io . FileOutputStream ; <nl> + import java . io . IOException ; <nl> + import java . io . InputStream ; <nl> + <nl> + / * * <nl> + * Helper class for loading the TensorFlow Java native library . <nl> + * <nl> + * < p > The Java TensorFlow bindings require a native ( JNI ) library . This library <nl> + * ( libtensorflow_jni . so on Linux , libtensorflow_jni . dylib on OS X , tensorflow_jni . dll on Windows ) <nl> + * can be made available to the JVM using the java . library . path System property ( e . g . , using <nl> + * - Djava . library . path command - line argument ) . However , doing so requires an additional step of <nl> + * configuration . <nl> + * <nl> + * < p > Alternatively , the native libraries can be packaed in a . jar , making them easily usable from <nl> + * build systems like Maven . However , in such cases , the native library has to be extracted from the <nl> + * . jar archive . <nl> + * <nl> + * < p > NativeLibrary . load ( ) takes care of this . First looking for the library in java . library . path <nl> + * and failing that , it tries to find the OS and architecture specific version of the library in the <nl> + * set of ClassLoader resources ( under org / tensorflow / native / OS - ARCH ) . The resources paths used for <nl> + * lookup must be consistent with any packaging ( such as on Maven Central ) of the TensorFlow Java <nl> + * native libraries . <nl> + * / <nl> + final class NativeLibrary { <nl> + private static final boolean DEBUG = <nl> + System . getProperty ( " org . tensorflow . NativeLibrary . DEBUG " ) ! = null ; <nl> + private static final String LIBNAME = " tensorflow_jni " ; <nl> + <nl> + public static void load ( ) { <nl> + if ( isLoaded ( ) | | tryLoadLibrary ( ) ) { <nl> + / / Either : <nl> + / / ( 1 ) The native library has already been statically loaded , OR <nl> + / / ( 2 ) The required native code has been statically linked ( through a custom launcher ) , OR <nl> + / / ( 3 ) The native code is part of another library ( such as an an application - level libraryh ) <nl> + / / that has already been loaded . For example , tensorflow / examples / android and <nl> + / / tensorflow / contrib / android include the required native code in differently named libraries . <nl> + / / <nl> + / / Doesn ' t matter how , but it seems the native code is loaded , so nothing else to do . 
<nl> + return ; <nl> + } <nl> + / / Native code is not present , perhaps it has been packaged into the . jar file containing this . <nl> + final String resourceName = makeResourceName ( ) ; <nl> + log ( " resourceName : " + resourceName ) ; <nl> + final InputStream resource = <nl> + NativeLibrary . class . getClassLoader ( ) . getResourceAsStream ( resourceName ) ; <nl> + if ( resource = = null ) { <nl> + throw new UnsatisfiedLinkError ( <nl> + String . format ( <nl> + " Cannot find TensorFlow native library for OS : % s , architecture : % s . " <nl> + + " See https : / / github . com / tensorflow / tensorflow / tree / master / java / README . md " <nl> + + " for possible solutions ( such as building the library from source ) . " , <nl> + os ( ) , architecture ( ) ) ) ; <nl> + } <nl> + try { <nl> + System . load ( extractResource ( resource ) ) ; <nl> + } catch ( IOException e ) { <nl> + throw new UnsatisfiedLinkError ( <nl> + String . format ( <nl> + " Unable to extract native library into a temporary file ( % s ) " , e . toString ( ) ) ) ; <nl> + } <nl> + } <nl> + <nl> + private static boolean tryLoadLibrary ( ) { <nl> + try { <nl> + System . loadLibrary ( LIBNAME ) ; <nl> + return true ; <nl> + } catch ( UnsatisfiedLinkError e ) { <nl> + log ( " tryLoadLibraryFailed : " + e . getMessage ( ) ) ; <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> + private static boolean isLoaded ( ) { <nl> + try { <nl> + TensorFlow . version ( ) ; <nl> + log ( " isLoaded : true " ) ; <nl> + return true ; <nl> + } catch ( UnsatisfiedLinkError e ) { <nl> + return false ; <nl> + } <nl> + } <nl> + <nl> + private static String extractResource ( InputStream resource ) throws IOException { <nl> + final String sampleFilename = System . mapLibraryName ( LIBNAME ) ; <nl> + final int dot = sampleFilename . indexOf ( " . " ) ; <nl> + final String prefix = ( dot < 0 ) ? sampleFilename : sampleFilename . substring ( 0 , dot ) ; <nl> + final String suffix = ( dot < 0 ) ? null : sampleFilename . substring ( dot ) ; <nl> + <nl> + final File dst = File . createTempFile ( prefix , suffix ) ; <nl> + final String dstPath = dst . getAbsolutePath ( ) ; <nl> + dst . deleteOnExit ( ) ; <nl> + log ( " extracting native library to : " + dstPath ) ; <nl> + final long nbytes = copy ( resource , dst ) ; <nl> + log ( String . format ( " copied % d bytes to % s " , nbytes , dstPath ) ) ; <nl> + return dstPath ; <nl> + } <nl> + <nl> + private static String os ( ) { <nl> + final String p = System . getProperty ( " os . name " ) . toLowerCase ( ) ; <nl> + if ( p . contains ( " linux " ) ) { <nl> + return " linux " ; <nl> + } else if ( p . contains ( " os x " ) | | p . contains ( " darwin " ) ) { <nl> + return " darwin " ; <nl> + } else if ( p . contains ( " windows " ) ) { <nl> + return " windows " ; <nl> + } else { <nl> + return p . replaceAll ( " \ \ s " , " " ) ; <nl> + } <nl> + } <nl> + <nl> + private static String architecture ( ) { <nl> + final String arch = System . getProperty ( " os . arch " ) . toLowerCase ( ) ; <nl> + return ( arch . equals ( " amd64 " ) ) ? " x86_64 " : arch ; <nl> + } <nl> + <nl> + private static void log ( String msg ) { <nl> + if ( DEBUG ) { <nl> + System . err . println ( " org . tensorflow . NativeLibrary : " + msg ) ; <nl> + } <nl> + } <nl> + <nl> + private static String makeResourceName ( ) { <nl> + / / Using string addition instead of more modern functions like <nl> + / / java . nio . file . Paths . get ( ) to make this method usable on Android , <nl> + / / where as of March 2017 , the java . nio . 
files package was not available . <nl> + final String separator = System . getProperty ( " file . separator " ) ; <nl> + return " org " <nl> + + separator <nl> + + " tensorflow " <nl> + + separator <nl> + + " native " <nl> + + separator <nl> + + String . format ( " % s - % s " , os ( ) , architecture ( ) ) <nl> + + separator <nl> + + System . mapLibraryName ( LIBNAME ) ; <nl> + } <nl> + <nl> + private static long copy ( InputStream src , File dstFile ) throws IOException { <nl> + FileOutputStream dst = new FileOutputStream ( dstFile ) ; <nl> + try { <nl> + byte [ ] buffer = new byte [ 1 < < 20 ] ; / / 1MB <nl> + long ret = 0 ; <nl> + int n = 0 ; <nl> + while ( ( n = src . read ( buffer ) ) > = 0 ) { <nl> + dst . write ( buffer , 0 , n ) ; <nl> + ret + = n ; <nl> + } <nl> + return ret ; <nl> + } finally { <nl> + dst . close ( ) ; <nl> + src . close ( ) ; <nl> + } <nl> + } <nl> + } <nl> mmm a / tensorflow / java / src / main / java / org / tensorflow / TensorFlow . java <nl> ppp b / tensorflow / java / src / main / java / org / tensorflow / TensorFlow . java <nl> private TensorFlow ( ) { } <nl> <nl> / * * Load the TensorFlow runtime C library . * / <nl> static void init ( ) { <nl> - try { <nl> - System . loadLibrary ( " tensorflow_jni " ) ; <nl> - } catch ( UnsatisfiedLinkError e ) { <nl> - / / The native code might have been statically linked ( through a custom launcher ) or be part of <nl> - / / an application - level library . For example , tensorflow / examples / android and <nl> - / / tensorflow / contrib / android include the required native code in differently named libraries . <nl> - / / To allow for such cases , the UnsatisfiedLinkError does not bubble up here . <nl> - try { <nl> - version ( ) ; <nl> - } catch ( UnsatisfiedLinkError e2 ) { <nl> - System . err . println ( <nl> - " TensorFlow Java API methods will throw an UnsatisfiedLinkError unless native code shared libraries are loaded " ) ; <nl> - } <nl> - } <nl> + NativeLibrary . load ( ) ; <nl> } <nl> <nl> static { <nl> init ( ) ; <nl> } <nl> + <nl> } <nl> | Java : Load native library from class loader resources if possible . | tensorflow/tensorflow | 7303b631e0c2715f52d2d44127b7dc78662d207e | 2017-03-21T00:30:38Z |
new file mode 100644 <nl> index 000000000000 . . 5d9f2009d974 <nl> mmm / dev / null <nl> ppp b / buildscripts / distmirror . py <nl> <nl> + # ! / usr / bin / python <nl> + <nl> + # Download mongodb stuff ( at present builds , sources , docs , but not <nl> + # drivers ) . <nl> + <nl> + # Usage : < progname > [ directory ] # directory defaults to cwd . <nl> + <nl> + # FIXME : this script is fairly sloppy . <nl> + import sys <nl> + import os <nl> + import urllib2 <nl> + import time <nl> + import hashlib <nl> + import warnings <nl> + <nl> + def report ( url , filename ) : <nl> + print " downloading % s to % s " % ( url , filename ) <nl> + <nl> + def checkmd5 ( md5str , filename ) : <nl> + m = hashlib . md5 ( ) <nl> + m . update ( open ( filename , ' rb ' ) . read ( ) ) <nl> + d = m . hexdigest ( ) <nl> + if d ! = md5str : <nl> + warnings . warn ( " md5sum mismatch for file % s : wanted % s ; got % s " % ( filename , md5str , d ) ) <nl> + <nl> + osarches = ( ( " osx " , ( " i386 " , " i386 - tiger " , " x86_64 " ) , ( " tgz " , ) ) , <nl> + ( " linux " , ( " i686 " , " x86_64 " ) , ( " tgz " , ) ) , <nl> + ( " win32 " , ( " i386 " , " x86_64 " ) , ( " zip " , ) ) , <nl> + ( " sunos5 " , ( " i86pc " , " x86_64 " ) , ( " tgz " , ) ) , <nl> + ( " src " , ( " src " , ) , ( " tar . gz " , " zip " ) ) , ) <nl> + <nl> + # KLUDGE : this will need constant editing . <nl> + versions = ( " 1 . 4 . 2 " , " 1 . 5 . 1 " , " latest " ) <nl> + <nl> + url_format = " http : / / downloads . mongodb . org / % s / mongodb - % s - % s . % s " <nl> + filename_format = " mongodb - % s - % s . % s " <nl> + <nl> + def do_it ( ) : <nl> + for version in versions : <nl> + for ( os , architectures , archives ) in osarches : <nl> + for architecture in architectures : <nl> + for archive in archives : <nl> + osarch = os + ' - ' + architecture if architecture ! = ' src ' else ' src ' <nl> + # ugh . <nl> + if architecture = = ' src ' and version = = ' latest ' : <nl> + if archive = = ' tar . gz ' : <nl> + archive2 = ' tarball ' <nl> + elif archive = = ' zip ' : <nl> + archive2 = = ' zipball ' <nl> + url = " http : / / github . com / mongodb / mongo / " + archive2 + " / master " <nl> + version2 = " master " <nl> + else : <nl> + version2 = version if architecture ! = ' src ' else ' r ' + version <nl> + url = url_format % ( os , osarch , version2 , archive ) <nl> + # ugh ugh <nl> + md5url = url + ' . md5 ' if architecture ! = ' src ' else None <nl> + filename = filename_format % ( osarch , version2 , archive ) <nl> + report ( url , filename ) <nl> + open ( filename , ' w ' ) . write ( urllib2 . urlopen ( url ) . read ( ) ) <nl> + if md5url : <nl> + print " fetching md5 url " + md5url <nl> + md5str = urllib2 . urlopen ( md5url ) . read ( ) <nl> + checkmd5 ( md5str , filename ) <nl> + <nl> + # FIXME : in principle , the doc PDFs could be out of date . <nl> + docs_url = time . strftime ( " http : / / downloads . mongodb . org / docs / mongodb - docs - % Y - % m - % d . pdf " ) <nl> + docs_filename = time . strftime ( " mongodb - docs - % Y - % m - % d . pdf " ) <nl> + report ( docs_url , docs_filename ) <nl> + open ( docs_filename , ' w ' ) . write ( urllib2 . urlopen ( docs_url ) . read ( ) ) <nl> + <nl> + # Drivers . . . FIXME : drivers . <nl> + # langs = ( " c " , " java " , " python " , " php " , " perl " ) <nl> + <nl> + if len ( sys . argv ) > 1 : <nl> + dir = sys . argv [ 1 ] <nl> + os . makedirs ( dir ) <nl> + os . chdir ( dir ) <nl> + <nl> + print " " " NOTE : the md5sums for all the - latest tarballs are out of <nl> + date . 
You will probably see warnings as this script runs . ( If you <nl> + don ' t , feel free to delete this note . ) " " " <nl> + do_it ( ) <nl> | distmirror . py : new script for making a mirror of mongodb distributions . | mongodb/mongo | 9691e2c743e135ae05af8a5e152c1d8736cbd74b | 2010-05-19T16:48:40Z |
mmm a / cmake / OpenCVDetectCUDA . cmake <nl> ppp b / cmake / OpenCVDetectCUDA . cmake <nl> if ( WIN32 AND NOT MSVC ) <nl> return ( ) <nl> endif ( ) <nl> <nl> - if ( NOT APPLE AND CV_CLANG ) <nl> + if ( NOT UNIX AND CV_CLANG ) <nl> message ( STATUS " CUDA compilation is disabled ( due to Clang unsupported on your platform ) . " ) <nl> return ( ) <nl> endif ( ) <nl> if ( CUDA_FOUND ) <nl> foreach ( var CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_DEBUG ) <nl> set ( $ { var } _backup_in_cuda_compile_ " $ { $ { var } } " ) <nl> <nl> + if ( CV_CLANG ) <nl> + # we remove - Winconsistent - missing - override and - Qunused - arguments <nl> + # just in case we are compiling CUDA with gcc but OpenCV with clang <nl> + string ( REPLACE " - Winconsistent - missing - override " " " $ { var } " $ { $ { var } } " ) <nl> + string ( REPLACE " - Qunused - arguments " " " $ { var } " $ { $ { var } } " ) <nl> + endif ( ) <nl> + <nl> # we remove / EHa as it generates warnings under windows <nl> string ( REPLACE " / EHa " " " $ { var } " $ { $ { var } } " ) <nl> <nl> | Merge pull request from xsacha : master | opencv/opencv | 183cfd3a2e4a0e41c9c6314e9a90f7e389f18f24 | 2018-08-04T13:14:22Z |
mmm a / aten / src / ATen / native / SummaryOps . cpp <nl> ppp b / aten / src / ATen / native / SummaryOps . cpp <nl> Tensor _bincount_cpu_template ( <nl> } <nl> <nl> Tensor output ; <nl> + int64_t self_size = self . size ( 0 ) ; <nl> int64_t nbins = static_cast < int64_t > ( * self . max ( ) . data_ptr < input_t > ( ) ) + 1L ; <nl> nbins = std : : max ( nbins , minlength ) ; / / at least minlength # of bins <nl> <nl> Tensor _bincount_cpu_template ( <nl> output = native : : zeros ( { nbins } , weights . options ( ) ) ; <nl> weights_t * output_p = output . data_ptr < weights_t > ( ) ; <nl> const weights_t * weights_p = weights . data_ptr < weights_t > ( ) ; <nl> - for ( int64_t i = 0 ; i < self . size ( 0 ) ; i + + ) { <nl> + for ( int64_t i = 0 ; i < self_size ; i + + ) { <nl> output_p [ self_p [ i ] ] + = weights_p [ i ] ; <nl> } <nl> } else { <nl> output = native : : zeros ( { nbins } , kLong ) ; <nl> int64_t * output_p = output . data_ptr < int64_t > ( ) ; <nl> - for ( int64_t i = 0 ; i < self . size ( 0 ) ; i + + ) { <nl> + for ( int64_t i = 0 ; i < self_size ; i + + ) { <nl> output_p [ self_p [ i ] ] + = 1L ; <nl> } <nl> } <nl> | Optimized bincount for the CPU by removing extra size ( ) calls ( ) | pytorch/pytorch | ae71c5c7e6922c54189399dc1e5554cef7e0011d | 2020-04-08T18:09:14Z |
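The bincount change above is a loop-invariant hoist: `self.size(0)` is read once into `self_size` instead of being re-queried on every loop iteration. A standalone sketch of the same pattern on a plain vector (illustrative C++, not the ATen API):

```cpp
// Hoisted-size bincount sketch; names are illustrative, not PyTorch's.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

std::vector<int64_t> bincount(const std::vector<int64_t>& self, int64_t minlength = 0) {
  // Read the length once instead of querying it on every iteration.
  const int64_t self_size = static_cast<int64_t>(self.size());
  int64_t nbins = self_size ? *std::max_element(self.begin(), self.end()) + 1 : 0;
  nbins = std::max(nbins, minlength);  // at least minlength bins

  std::vector<int64_t> output(static_cast<size_t>(nbins), 0);
  for (int64_t i = 0; i < self_size; i++) {
    output[static_cast<size_t>(self[i])] += 1;
  }
  return output;
}

int main() {
  for (int64_t c : bincount({0, 1, 1, 3, 2, 1}, /*minlength=*/6))
    std::cout << c << ' ';  // prints: 1 3 1 1 0 0
  std::cout << '\n';
}
```

The result is unchanged; the per-element work simply no longer includes a repeated size query.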
mmm a / lib / IRGen / IRGenDebugInfo . cpp <nl> ppp b / lib / IRGen / IRGenDebugInfo . cpp <nl> llvm : : DIFile * IRGenDebugInfo : : getFile ( llvm : : DIScope * Scope ) { <nl> return cast < llvm : : DIFile > ( Scope ) ; <nl> } <nl> <nl> - / / / Return the storage size of an explosion value . <nl> - static uint64_t getSizeFromExplosionValue ( const clang : : TargetInfo & TI , <nl> - llvm : : Value * V ) { <nl> - llvm : : Type * Ty = V - > getType ( ) ; <nl> - if ( unsigned PrimitiveSize = Ty - > getPrimitiveSizeInBits ( ) ) <nl> - return PrimitiveSize ; <nl> - else if ( Ty - > isPointerTy ( ) ) <nl> - return TI . getPointerWidth ( 0 ) ; <nl> - else <nl> - llvm_unreachable ( " unhandled type of explosion value " ) ; <nl> - } <nl> - <nl> - / / / A generator that recursively returns the size of each element of a <nl> - / / / composite type . <nl> - class ElementSizes { <nl> - const TrackingDIRefMap & DIRefMap ; <nl> - llvm : : SmallPtrSetImpl < const llvm : : DIType * > & IndirectEnums ; <nl> - llvm : : SmallVector < const llvm : : DIType * , 12 > Stack ; <nl> - public : <nl> - ElementSizes ( const llvm : : DIType * DITy , const TrackingDIRefMap & DIRefMap , <nl> - llvm : : SmallPtrSetImpl < const llvm : : DIType * > & IndirectEnums ) <nl> - : DIRefMap ( DIRefMap ) , IndirectEnums ( IndirectEnums ) , Stack ( 1 , DITy ) { } <nl> - <nl> - struct SizeAlign { <nl> - uint64_t SizeInBits , AlignInBits ; <nl> - } ; <nl> - <nl> - struct SizeAlign getNext ( ) { <nl> - if ( Stack . empty ( ) ) <nl> - return { 0 , 0 } ; <nl> - <nl> - auto * Cur = Stack . pop_back_val ( ) ; <nl> - if ( isa < llvm : : DICompositeType > ( Cur ) & & <nl> - Cur - > getTag ( ) ! = llvm : : dwarf : : DW_TAG_subroutine_type ) { <nl> - auto * CTy = cast < llvm : : DICompositeType > ( Cur ) ; <nl> - auto Elts = CTy - > getElements ( ) ; <nl> - unsigned N = Cur - > getTag ( ) = = llvm : : dwarf : : DW_TAG_union_type <nl> - ? std : : min ( 1U , Elts . size ( ) ) / / For unions , pick any one . <nl> - : Elts . size ( ) ; <nl> - <nl> - if ( N ) { <nl> - / / Push all elements in reverse order . <nl> - / / FIXME : With a little more state we don ' t need to actually <nl> - / / store them on the Stack . <nl> - for ( unsigned I = N ; I > 0 ; - - I ) <nl> - Stack . push_back ( cast < llvm : : DIType > ( Elts [ I - 1 ] ) ) ; <nl> - return getNext ( ) ; <nl> - } <nl> - } <nl> - switch ( Cur - > getTag ( ) ) { <nl> - case llvm : : dwarf : : DW_TAG_member : <nl> - / / FIXME : Correctly handle the explosion value for enum types <nl> - / / with indirect members . <nl> - if ( IndirectEnums . count ( Cur ) ) <nl> - return { 0 , 0 } ; <nl> - [ [ clang : : fallthrough ] ] ; <nl> - case llvm : : dwarf : : DW_TAG_typedef : { <nl> - / / Replace top of stack . <nl> - auto * DTy = cast < llvm : : DIDerivedType > ( Cur ) ; <nl> - Stack . push_back ( DTy - > getBaseType ( ) . resolve ( DIRefMap ) ) ; <nl> - return getNext ( ) ; <nl> - } <nl> - default : <nl> - return { Cur - > getSizeInBits ( ) , Cur - > getAlignInBits ( ) } ; <nl> - } <nl> - } <nl> - } ; <nl> - <nl> static Size <nl> getStorageSize ( const llvm : : DataLayout & DL , ArrayRef < llvm : : Value * > Storage ) { <nl> unsigned size = 0 ; <nl> void IRGenDebugInfo : : emitVariableDeclaration ( <nl> Optimized , Flags ) ; <nl> <nl> / / Insert a debug intrinsic into the current block . <nl> - unsigned OffsetInBits = 0 ; <nl> auto * BB = Builder . GetInsertBlock ( ) ; <nl> bool IsPiece = Storage . size ( ) > 1 ; <nl> uint64_t SizeOfByte = CI . getTargetInfo ( ) . 
getCharWidth ( ) ; <nl> unsigned VarSizeInBits = getSizeInBits ( Var , DIRefMap ) ; <nl> - ElementSizes EltSizes ( DITy , DIRefMap , IndirectEnumCases ) ; <nl> - auto Dim = EltSizes . getNext ( ) ; <nl> + <nl> + / / Running variables for the current / previous piece . <nl> + unsigned SizeInBits = 0 ; <nl> + unsigned AlignInBits = SizeOfByte ; <nl> + unsigned OffsetInBits = 0 ; <nl> + <nl> for ( llvm : : Value * Piece : Storage ) { <nl> SmallVector < uint64_t , 3 > Operands ; <nl> if ( Indirection ) <nl> void IRGenDebugInfo : : emitVariableDeclaration ( <nl> Piece = llvm : : ConstantInt : : get ( llvm : : Type : : getInt64Ty ( M . getContext ( ) ) , 0 ) ; <nl> <nl> if ( IsPiece ) { <nl> - / / Try to get the size from the type if possible . <nl> - auto StorageSize = getSizeFromExplosionValue ( CI . getTargetInfo ( ) , Piece ) ; <nl> - / / FIXME : The TypeInfo for bound generic enum types reports a <nl> - / / type < { } > ( with size 0 ) but a concrete instance may still <nl> - / / have storage allocated for it . rdar : / / problem / 21470869 <nl> - if ( ! Dim . SizeInBits | | ( StorageSize & & Dim . SizeInBits > StorageSize ) ) <nl> - Dim . SizeInBits = StorageSize ; <nl> - <nl> - / / FIXME : Occasionally we miss out that the Storage is actually a <nl> - / / refcount wrapper . Silently skip these for now . <nl> - if ( OffsetInBits + Dim . SizeInBits > VarSizeInBits ) <nl> - break ; <nl> - if ( OffsetInBits = = 0 & & Dim . SizeInBits = = VarSizeInBits ) <nl> - break ; <nl> - if ( Dim . SizeInBits = = 0 ) <nl> - break ; <nl> - <nl> - assert ( Dim . SizeInBits < VarSizeInBits <nl> - & & " piece covers entire var " ) ; <nl> - assert ( OffsetInBits + Dim . SizeInBits < = VarSizeInBits & & " pars > totum " ) ; <nl> + / / Advance the offset and align it for the next piece . <nl> + OffsetInBits + = llvm : : alignTo ( SizeInBits , AlignInBits ) ; <nl> + SizeInBits = IGM . DataLayout . getTypeSizeInBits ( Piece - > getType ( ) ) ; <nl> + AlignInBits = IGM . DataLayout . getABITypeAlignment ( Piece - > getType ( ) ) ; <nl> + if ( ! AlignInBits ) <nl> + AlignInBits = SizeOfByte ; <nl> + <nl> + / / Sanity checks . <nl> + assert ( SizeInBits & & " zero - sized piece " ) ; <nl> + assert ( SizeInBits < VarSizeInBits & & " piece covers entire var " ) ; <nl> + assert ( OffsetInBits + SizeInBits < = VarSizeInBits & & " pars > totum " ) ; <nl> + <nl> + / / Add the piece DWARF expression . <nl> Operands . push_back ( llvm : : dwarf : : DW_OP_bit_piece ) ; <nl> Operands . push_back ( OffsetInBits ) ; <nl> - Operands . push_back ( Dim . SizeInBits ) ; <nl> - <nl> - auto Size = Dim . SizeInBits ; <nl> - Dim = EltSizes . getNext ( ) ; <nl> - OffsetInBits + = <nl> - llvm : : alignTo ( Size , Dim . AlignInBits ? Dim . AlignInBits <nl> - : SizeOfByte ) ; <nl> + Operands . push_back ( SizeInBits ) ; <nl> } <nl> emitDbgIntrinsic ( BB , Piece , Var , DBuilder . createExpression ( Operands ) , Line , <nl> Loc . Column , Scope , DS ) ; <nl> llvm : : DIType * IRGenDebugInfo : : createPointerSizedStruct ( <nl> unsigned PtrAlign = CI . getTargetInfo ( ) . getPointerAlign ( 0 ) ; <nl> auto PtrTy = DBuilder . createPointerType ( PointeeTy , PtrSize , PtrAlign ) ; <nl> llvm : : Metadata * Elements [ ] = { <nl> - DBuilder . createMemberType ( Scope , " pointer " , File , 0 , <nl> + DBuilder . createMemberType ( Scope , " ptr " , File , 0 , <nl> PtrSize , PtrAlign , 0 , Flags , PtrTy ) <nl> } ; <nl> return DBuilder . 
createStructType ( <nl> Scope , Name , File , Line , PtrSize , PtrAlign , Flags , <nl> - nullptr , / / DerivedFrom <nl> - DBuilder . getOrCreateArray ( Elements ) , llvm : : dwarf : : DW_LANG_Swift , <nl> - nullptr , MangledName ) ; <nl> + / * DerivedFrom * / nullptr , DBuilder . getOrCreateArray ( Elements ) , <nl> + llvm : : dwarf : : DW_LANG_Swift , nullptr , MangledName ) ; <nl> + } <nl> + <nl> + / / / Create a 2 * pointer - sized struct with a mangled name and a single <nl> + / / / member of PointeeTy . <nl> + llvm : : DIType * IRGenDebugInfo : : createDoublePointerSizedStruct ( <nl> + llvm : : DIScope * Scope , StringRef Name , llvm : : DIType * PointeeTy , <nl> + llvm : : DIFile * File , unsigned Line , unsigned Flags , StringRef MangledName ) { <nl> + unsigned PtrSize = CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ; <nl> + unsigned PtrAlign = CI . getTargetInfo ( ) . getPointerAlign ( 0 ) ; <nl> + llvm : : Metadata * Elements [ ] = { <nl> + DBuilder . createMemberType ( <nl> + Scope , " ptr " , File , 0 , PtrSize , PtrAlign , 0 , Flags , <nl> + DBuilder . createPointerType ( PointeeTy , PtrSize , PtrAlign ) ) , <nl> + DBuilder . createMemberType ( <nl> + Scope , " _ " , File , 0 , PtrSize , PtrAlign , 0 , Flags , <nl> + DBuilder . createPointerType ( nullptr , PtrSize , PtrAlign ) ) } ; <nl> + return DBuilder . createStructType ( <nl> + Scope , Name , File , Line , 2 * PtrSize , PtrAlign , Flags , <nl> + / * DerivedFrom * / nullptr , DBuilder . getOrCreateArray ( Elements ) , <nl> + llvm : : dwarf : : DW_LANG_Swift , nullptr , MangledName ) ; <nl> + } <nl> + <nl> + / / / Create an opaque struct with a mangled name . <nl> + llvm : : DIType * <nl> + IRGenDebugInfo : : createOpaqueStruct ( llvm : : DIScope * Scope , StringRef Name , <nl> + llvm : : DIFile * File , unsigned Line , <nl> + unsigned SizeInBits , unsigned AlignInBits , <nl> + unsigned Flags , StringRef MangledName ) { <nl> + return DBuilder . createStructType ( <nl> + Scope , Name , File , Line , SizeInBits , AlignInBits , Flags , <nl> + / * DerivedFrom * / nullptr , <nl> + DBuilder . getOrCreateArray ( ArrayRef < llvm : : Metadata * > ( ) ) , <nl> + llvm : : dwarf : : DW_LANG_Swift , nullptr , MangledName ) ; <nl> } <nl> <nl> / / / Construct a DIType from a DebugTypeInfo object . <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> } <nl> Scope = getOrCreateModule ( ModuleName , TheCU , ModuleName , ModulePath ) ; <nl> } <nl> + assert ( SizeInBits = = CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ) ; <nl> return createPointerSizedStruct ( Scope , Decl - > getNameStr ( ) , <nl> getOrCreateFile ( L . Filename ) , L . Line , Flags , <nl> MangledName ) ; <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> / / FIXME : ( LLVM branch ) This should probably be a DW_TAG_interface_type . <nl> auto L = getDebugLoc ( SM , Decl ) ; <nl> auto File = getOrCreateFile ( L . Filename ) ; <nl> - return createPointerSizedStruct ( Scope , <nl> - Decl ? Decl - > getNameStr ( ) : MangledName , <nl> - File , L . Line , Flags , MangledName ) ; <nl> + return createOpaqueStruct ( Scope , Decl ? Decl - > getNameStr ( ) : MangledName , <nl> + File , L . 
Line , SizeInBits , AlignInBits , Flags , <nl> + MangledName ) ; <nl> } <nl> <nl> case TypeKind : : ProtocolComposition : { <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> <nl> / / FIXME : emit types <nl> / / auto ProtocolCompositionTy = BaseTy - > castTo < ProtocolCompositionType > ( ) ; <nl> - return createPointerSizedStruct ( Scope , <nl> - Decl ? Decl - > getNameStr ( ) : MangledName , <nl> - File , L . Line , Flags , MangledName ) ; <nl> + return createOpaqueStruct ( Scope , Decl ? Decl - > getNameStr ( ) : MangledName , <nl> + File , L . Line , SizeInBits , AlignInBits , Flags , <nl> + MangledName ) ; <nl> } <nl> <nl> case TypeKind : : UnboundGeneric : { <nl> auto * UnboundTy = BaseTy - > castTo < UnboundGenericType > ( ) ; <nl> auto * Decl = UnboundTy - > getDecl ( ) ; <nl> auto L = getDebugLoc ( SM , Decl ) ; <nl> + assert ( SizeInBits = = CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ) ; <nl> return createPointerSizedStruct ( Scope , <nl> Decl ? Decl - > getNameStr ( ) : MangledName , <nl> File , L . Line , Flags , MangledName ) ; <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> auto * StructTy = BaseTy - > castTo < BoundGenericStructType > ( ) ; <nl> auto * Decl = StructTy - > getDecl ( ) ; <nl> auto L = getDebugLoc ( SM , Decl ) ; <nl> - return createPointerSizedStruct ( Scope , <nl> - Decl ? Decl - > getNameStr ( ) : MangledName , <nl> - File , L . Line , Flags , MangledName ) ; <nl> + return createOpaqueStruct ( Scope , Decl ? Decl - > getNameStr ( ) : MangledName , <nl> + File , L . Line , SizeInBits , AlignInBits , Flags , <nl> + MangledName ) ; <nl> } <nl> <nl> case TypeKind : : BoundGenericClass : { <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> auto L = getDebugLoc ( SM , Decl ) ; <nl> / / TODO : We may want to peek at Decl - > isObjC ( ) and set this <nl> / / attribute accordingly . <nl> + assert ( SizeInBits = = CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ) ; <nl> return createPointerSizedStruct ( Scope , <nl> Decl ? Decl - > getNameStr ( ) : MangledName , <nl> File , L . Line , Flags , MangledName ) ; <nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> case TypeKind : : Function : <nl> case TypeKind : : PolymorphicFunction : <nl> case TypeKind : : GenericFunction : { <nl> - auto FwdDecl = llvm : : TempDINode ( <nl> - DBuilder . createReplaceableCompositeType ( <nl> + auto FwdDecl = llvm : : TempDINode ( DBuilder . createReplaceableCompositeType ( <nl> llvm : : dwarf : : DW_TAG_subroutine_type , MangledName , Scope , File , 0 , <nl> - llvm : : dwarf : : DW_LANG_Swift , SizeInBits , AlignInBits , Flags , <nl> - MangledName ) ) ; <nl> - <nl> + llvm : : dwarf : : DW_LANG_Swift , SizeInBits , AlignInBits , Flags , <nl> + MangledName ) ) ; <nl> + <nl> auto TH = llvm : : TrackingMDNodeRef ( FwdDecl . get ( ) ) ; <nl> DITypeCache [ DbgTy . getType ( ) ] = TH ; <nl> <nl> - CanSILFunctionType FunctionTy ; <nl> + CanSILFunctionType FunTy ; <nl> if ( auto * SILFnTy = dyn_cast < SILFunctionType > ( BaseTy ) ) <nl> - FunctionTy = CanSILFunctionType ( SILFnTy ) ; <nl> + FunTy = CanSILFunctionType ( SILFnTy ) ; <nl> / / FIXME : Handling of generic parameters in SIL type lowering is in flux . <nl> / / DebugInfo doesn ' t appear to care about the generic context , so just <nl> / / throw it away before lowering . 
<nl> llvm : : DIType * IRGenDebugInfo : : createType ( DebugTypeInfo DbgTy , <nl> isa < PolymorphicFunctionType > ( BaseTy ) ) { <nl> auto * fTy = cast < AnyFunctionType > ( BaseTy ) ; <nl> auto * nongenericTy = FunctionType : : get ( fTy - > getInput ( ) , fTy - > getResult ( ) , <nl> - fTy - > getExtInfo ( ) ) ; <nl> + fTy - > getExtInfo ( ) ) ; <nl> <nl> - FunctionTy = IGM . SILMod - > Types . getLoweredType ( nongenericTy ) <nl> - . castTo < SILFunctionType > ( ) ; <nl> + FunTy = IGM . SILMod - > Types . getLoweredType ( nongenericTy ) <nl> + . castTo < SILFunctionType > ( ) ; <nl> } else <nl> - FunctionTy = <nl> + FunTy = <nl> IGM . SILMod - > Types . getLoweredType ( BaseTy ) . castTo < SILFunctionType > ( ) ; <nl> - auto Params = createParameterTypes ( FunctionTy , DbgTy . getDeclContext ( ) ) ; <nl> + auto Params = createParameterTypes ( FunTy , DbgTy . getDeclContext ( ) ) ; <nl> <nl> - / / Functions are actually stored as a Pointer or a FunctionPairTy : <nl> - / / { i8 * , % swift . refcounted * } <nl> auto FnTy = DBuilder . createSubroutineType ( Params , Flags ) ; <nl> - auto DITy = createPointerSizedStruct ( Scope , MangledName , FnTy , <nl> - MainFile , 0 , Flags , MangledName ) ; <nl> + llvm : : DIType * DITy ; <nl> + if ( FunTy - > getRepresentation ( ) = = SILFunctionType : : Representation : : Thick ) { <nl> + if ( SizeInBits = = 2 * CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ) <nl> + / / This is a FunctionPairTy : { i8 * , % swift . refcounted * } . <nl> + DITy = createDoublePointerSizedStruct ( Scope , MangledName , FnTy , <nl> + MainFile , 0 , Flags , MangledName ) ; <nl> + else <nl> + / / This is a generic function as noted above . <nl> + DITy = createOpaqueStruct ( Scope , MangledName , MainFile , 0 , SizeInBits , <nl> + AlignInBits , Flags , MangledName ) ; <nl> + } else { <nl> + assert ( SizeInBits = = CI . getTargetInfo ( ) . getPointerWidth ( 0 ) ) ; <nl> + DITy = createPointerSizedStruct ( Scope , MangledName , FnTy , MainFile , 0 , <nl> + Flags , MangledName ) ; <nl> + } <nl> DBuilder . replaceTemporary ( std : : move ( FwdDecl ) , DITy ) ; <nl> return DITy ; <nl> } <nl> mmm a / lib / IRGen / IRGenDebugInfo . h <nl> ppp b / lib / IRGen / IRGenDebugInfo . h <nl> class IRGenDebugInfo { <nl> llvm : : DIType * PointeeTy , <nl> llvm : : DIFile * File , unsigned Line , <nl> unsigned Flags , StringRef MangledName ) ; <nl> + llvm : : DIType * createDoublePointerSizedStruct ( <nl> + llvm : : DIScope * Scope , StringRef Name , llvm : : DIType * PointeeTy , <nl> + llvm : : DIFile * File , unsigned Line , unsigned Flags , StringRef MangledName ) ; <nl> + llvm : : DIType * createOpaqueStruct ( llvm : : DIScope * Scope , StringRef Name , <nl> + llvm : : DIFile * File , unsigned Line , <nl> + unsigned SizeInBits , unsigned AlignInBits , <nl> + unsigned Flags , StringRef MangledName ) ; <nl> uint64_t getSizeOfBasicType ( DebugTypeInfo DbgTy ) ; <nl> TypeAliasDecl * getMetadataType ( ) ; <nl> } ; <nl> mmm a / test / DebugInfo / fnptr . swift <nl> ppp b / test / DebugInfo / fnptr . swift <nl> func main ( ) - > Int64 { <nl> / / CHECK - DAG : ! DILocalVariable ( name : " bar_function_pointer " , { { . * } } line : [ [ @ LINE + 1 ] ] , { { . * } } type : ! " [ [ BARPT : [ ^ , ] + ] ] " <nl> var bar_function_pointer = bar <nl> / / CHECK - DAG : ! DICompositeType ( tag : DW_TAG_structure_type , name : " [ [ BARPT ] ] " , { { . * } } elements : ! [ [ BARMEMBERS : [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : ! [ [ BARMEMBERS ] ] = ! { ! [ [ BARMEMBER : . 
* ] ] } <nl> + / / CHECK - DAG : ! [ [ BARMEMBERS ] ] = ! { ! [ [ BARMEMBER : . * ] ] , { { . * } } } <nl> / / CHECK - DAG : ! [ [ BARMEMBER ] ] = ! DIDerivedType ( tag : DW_TAG_member , { { . * } } baseType : ! [ [ BARPTR : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BARPTR ] ] = ! DIDerivedType ( tag : DW_TAG_pointer_type , { { . * } } baseType : ! [ [ BART : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BART ] ] = ! DISubroutineType ( types : ! [ [ BARARGS : [ 0 - 9 ] + ] ] ) <nl> func main ( ) - > Int64 { <nl> <nl> / / CHECK - DAG : ! DILocalVariable ( name : " baz_function_pointer " , { { . * } } type : ! " [ [ BAZPT : [ ^ , ] + ] ] " <nl> / / CHECK - DAG : ! DICompositeType ( tag : DW_TAG_structure_type , name : " [ [ BAZPT ] ] " , { { . * } } elements : ! [ [ BAZMEMBERS : [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : ! [ [ BAZMEMBERS ] ] = ! { ! [ [ BAZMEMBER : . * ] ] } <nl> + / / CHECK - DAG : ! [ [ BAZMEMBERS ] ] = ! { ! [ [ BAZMEMBER : . * ] ] , { { . * } } } <nl> / / CHECK - DAG : ! [ [ BAZMEMBER ] ] = ! DIDerivedType ( tag : DW_TAG_member , { { . * } } baseType : ! [ [ BAZPTR : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BAZPTR ] ] = ! DIDerivedType ( tag : DW_TAG_pointer_type , { { . * } } baseType : ! [ [ BAZT : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BAZT ] ] = ! DISubroutineType ( types : ! [ [ BAZARGS : . * ] ] ) <nl> func main ( ) - > Int64 { <nl> <nl> / / CHECK - DAG : ! DILocalVariable ( name : " barz_function_pointer " , { { . * } } type : ! " [ [ BARZPT : [ ^ , ] + ] ] " <nl> / / CHECK - DAG : ! DICompositeType ( tag : DW_TAG_structure_type , name : " [ [ BARZPT ] ] " , { { . * } } elements : ! [ [ BARZMEMBERS : [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : ! [ [ BARZMEMBERS ] ] = ! { ! [ [ BARZMEMBER : . * ] ] } <nl> + / / CHECK - DAG : ! [ [ BARZMEMBERS ] ] = ! { ! [ [ BARZMEMBER : . * ] ] , { { . * } } } <nl> / / CHECK - DAG : ! [ [ BARZMEMBER ] ] = ! DIDerivedType ( tag : DW_TAG_member , { { . * } } baseType : ! [ [ BARZPTR : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BARZPTR ] ] = ! DIDerivedType ( tag : DW_TAG_pointer_type , { { . * } } baseType : ! [ [ BARZT : [ 0 - 9 ] + ] ] <nl> / / CHECK - DAG : ! [ [ BARZT ] ] = ! DISubroutineType ( types : ! [ [ BARZARGS : . * ] ] ) <nl> | Debug Info : Retrieve the layout information of exploded values from the | apple/swift | 1f48b05f1fcab6dc33625c35de94df491326388b | 2016-03-10T18:16:02Z |
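In the swift commit above, the piece layout for an exploded variable is derived from the storage's IR types (size and ABI alignment from the data layout) rather than from walking DWARF type metadata, with each piece emitted as a DW_OP_bit_piece(offset, size). A condensed sketch of that running offset/size/alignment loop, with a stand-in `Piece` struct and a local `alignTo` instead of the LLVM helpers:

```cpp
// Stand-ins for per-value sizes/alignments; alignTo mirrors llvm::alignTo.
#include <cstdint>
#include <iostream>
#include <vector>

struct Piece { unsigned size_in_bits; unsigned align_in_bits; };

static uint64_t alignTo(uint64_t value, uint64_t align) {
  return (value + align - 1) / align * align;
}

int main() {
  // e.g. a value exploded into { i64, i8, i32 } storage pieces.
  const std::vector<Piece> storage = {{64, 64}, {8, 8}, {32, 32}};
  const unsigned size_of_byte = 8;

  unsigned size_in_bits = 0, align_in_bits = size_of_byte, offset_in_bits = 0;
  for (const Piece& p : storage) {
    // Round the previous piece up to its alignment and advance past it.
    offset_in_bits += static_cast<unsigned>(alignTo(size_in_bits, align_in_bits));
    size_in_bits = p.size_in_bits;
    align_in_bits = p.align_in_bits ? p.align_in_bits : size_of_byte;
    std::cout << "DW_OP_bit_piece offset=" << offset_in_bits
              << " size=" << size_in_bits << "\n";
  }
  // Prints pieces at bit offsets 0, 64 and 72.
  return 0;
}
```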
mmm a / html / admin / js / views / queryView . js <nl> ppp b / html / admin / js / views / queryView . js <nl> var queryView = Backbone . View . extend ( { <nl> } , <nl> listenKey : function ( e ) { <nl> if ( e . keyCode = = = 13 ) { <nl> - this . saveAQL ( ) ; <nl> + this . saveAQL ( e ) ; <nl> } <nl> } , <nl> <nl> var queryView = Backbone . View . extend ( { <nl> var queryName = $ ( ' # new - query - name ' ) . val ( ) ; <nl> var content = editor . getValue ( ) ; <nl> <nl> - if ( e . target . id = = = ' save - edit - query ' ) { <nl> - content = $ ( ' # edit - aql - textarea ' ) . val ( ) ; <nl> - queryName = $ ( ' # queryModalSelect ' ) . val ( ) ; <nl> + if ( e ) { <nl> + if ( e . target . id = = = ' save - edit - query ' ) { <nl> + content = $ ( ' # edit - aql - textarea ' ) . val ( ) ; <nl> + queryName = $ ( ' # queryModalSelect ' ) . val ( ) ; <nl> + } <nl> } <nl> <nl> / / check for already existing entry <nl> | event bugfix | arangodb/arangodb | 58b7a2ddf3751596f6afd1a441cb66c53d6bb5f5 | 2013-07-18T12:02:19Z |
mmm a / xbmc / addons / AddonDll . h <nl> ppp b / xbmc / addons / AddonDll . h <nl> namespace ADDON <nl> virtual void SaveSettings ( ) ; <nl> virtual CStdString GetSetting ( const CStdString & key ) ; <nl> <nl> - bool Create ( ) ; <nl> + ADDON_STATUS Create ( ) ; <nl> virtual void Stop ( ) ; <nl> void Destroy ( ) ; <nl> <nl> bool CAddonDll < TheDll , TheStruct , TheProps > : : LoadDll ( ) <nl> } <nl> <nl> template < class TheDll , typename TheStruct , typename TheProps > <nl> - bool CAddonDll < TheDll , TheStruct , TheProps > : : Create ( ) <nl> + ADDON_STATUS CAddonDll < TheDll , TheStruct , TheProps > : : Create ( ) <nl> { <nl> + ADDON_STATUS status ( ADDON_STATUS_UNKNOWN ) ; <nl> CLog : : Log ( LOGDEBUG , " ADDON : Dll Initializing - % s " , Name ( ) . c_str ( ) ) ; <nl> m_initialized = false ; <nl> <nl> if ( ! LoadDll ( ) ) <nl> - return false ; <nl> + return ADDON_STATUS_PERMANENT_FAILURE ; <nl> <nl> / * Allocate the helper function class to allow crosstalk over <nl> helper libraries * / <nl> bool CAddonDll < TheDll , TheStruct , TheProps > : : Create ( ) <nl> needed to become the AddOn running * / <nl> try <nl> { <nl> - ADDON_STATUS status = m_pDll - > Create ( m_pHelpers - > GetCallbacks ( ) , m_pInfo ) ; <nl> + status = m_pDll - > Create ( m_pHelpers - > GetCallbacks ( ) , m_pInfo ) ; <nl> if ( status = = ADDON_STATUS_OK ) <nl> m_initialized = true ; <nl> else if ( ( status = = ADDON_STATUS_NEED_SETTINGS ) | | ( status = = ADDON_STATUS_NEED_SAVEDSETTINGS ) ) <nl> { <nl> m_needsavedsettings = ( status = = ADDON_STATUS_NEED_SAVEDSETTINGS ) ; <nl> - if ( TransferSettings ( ) = = ADDON_STATUS_OK ) <nl> + if ( ( status = TransferSettings ( ) ) = = ADDON_STATUS_OK ) <nl> m_initialized = true ; <nl> else <nl> new CAddonStatusHandler ( ID ( ) , status , " " , false ) ; <nl> bool CAddonDll < TheDll , TheStruct , TheProps > : : Create ( ) <nl> HandleException ( e , " m_pDll - > Create " ) ; <nl> } <nl> <nl> - if ( ! m_initialized ) <nl> + if ( ! m_initialized ) <nl> SAFE_DELETE ( m_pHelpers ) ; <nl> <nl> - return m_initialized ; <nl> + return status ; <nl> } <nl> <nl> template < class TheDll , typename TheStruct , typename TheProps > <nl> mmm a / xbmc / addons / ScreenSaver . cpp <nl> ppp b / xbmc / addons / ScreenSaver . cpp <nl> bool CScreenSaver : : CreateScreenSaver ( ) <nl> m_pInfo - > presets = strdup ( CSpecialProtocol : : TranslatePath ( Path ( ) ) . c_str ( ) ) ; <nl> m_pInfo - > profile = strdup ( CSpecialProtocol : : TranslatePath ( Profile ( ) ) . c_str ( ) ) ; <nl> <nl> - if ( CAddonDll < DllScreenSaver , ScreenSaver , SCR_PROPS > : : Create ( ) ) <nl> + if ( CAddonDll < DllScreenSaver , ScreenSaver , SCR_PROPS > : : Create ( ) = = ADDON_STATUS_OK ) <nl> return true ; <nl> <nl> return false ; <nl> mmm a / xbmc / addons / Visualisation . cpp <nl> ppp b / xbmc / addons / Visualisation . cpp <nl> bool CVisualisation : : Create ( int x , int y , int w , int h ) <nl> m_pInfo - > profile = strdup ( CSpecialProtocol : : TranslatePath ( Profile ( ) ) . c_str ( ) ) ; <nl> m_pInfo - > submodule = NULL ; <nl> <nl> - if ( CAddonDll < DllVisualisation , Visualisation , VIS_PROPS > : : Create ( ) ) <nl> + if ( CAddonDll < DllVisualisation , Visualisation , VIS_PROPS > : : Create ( ) = = ADDON_STATUS_OK ) <nl> { <nl> / / Start the visualisation <nl> CStdString strFile = URIUtils : : GetFileName ( g_application . CurrentFile ( ) ) ; <nl> mmm a / xbmc / pvr / addons / PVRClient . cpp <nl> ppp b / xbmc / pvr / addons / PVRClient . 
cpp <nl> void CPVRClient : : ResetProperties ( int iClientId / * = PVR_INVALID_CLIENT_ID * / ) <nl> m_bCanSeekStream = false ; <nl> } <nl> <nl> - bool CPVRClient : : Create ( int iClientId ) <nl> + ADDON_STATUS CPVRClient : : Create ( int iClientId ) <nl> { <nl> + ADDON_STATUS status ( ADDON_STATUS_UNKNOWN ) ; <nl> if ( iClientId < = PVR_INVALID_CLIENT_ID | | iClientId = = PVR_VIRTUAL_CLIENT_ID ) <nl> - return false ; <nl> + return status ; <nl> <nl> / * ensure that a previous instance is destroyed * / <nl> Destroy ( ) ; <nl> bool CPVRClient : : Create ( int iClientId ) <nl> CLog : : Log ( LOGDEBUG , " PVR - % s - creating PVR add - on instance ' % s ' " , __FUNCTION__ , Name ( ) . c_str ( ) ) ; <nl> try <nl> { <nl> - bReadyToUse = CAddonDll < DllPVRClient , PVRClient , PVR_PROPERTIES > : : Create ( ) & & <nl> - GetAddonProperties ( ) ; <nl> + if ( ( status = CAddonDll < DllPVRClient , PVRClient , PVR_PROPERTIES > : : Create ( ) ) = = ADDON_STATUS_OK ) <nl> + bReadyToUse = GetAddonProperties ( ) ; <nl> } <nl> catch ( exception & e ) { LogException ( e , __FUNCTION__ ) ; } <nl> <nl> bool CPVRClient : : Create ( int iClientId ) <nl> if ( ! bReadyToUse ) <nl> ResetProperties ( iClientId ) ; <nl> <nl> - return bReadyToUse ; <nl> + return status ; <nl> } <nl> <nl> bool CPVRClient : : DllLoaded ( void ) const <nl> mmm a / xbmc / pvr / addons / PVRClient . h <nl> ppp b / xbmc / pvr / addons / PVRClient . h <nl> namespace PVR <nl> * @ brief Initialise the instance of this add - on . <nl> * @ param iClientId The ID of this add - on . <nl> * / <nl> - bool Create ( int iClientId ) ; <nl> + ADDON_STATUS Create ( int iClientId ) ; <nl> <nl> / * ! <nl> * @ return True when the dll for this add - on was loaded , false otherwise ( e . g . unresolved symbols ) <nl> mmm a / xbmc / pvr / addons / PVRClients . cpp <nl> ppp b / xbmc / pvr / addons / PVRClients . cpp <nl> bool CPVRClients : : UpdateAndInitialiseClients ( bool bInitialiseAllClients / * = fal <nl> } <nl> else <nl> { <nl> + ADDON_STATUS status ( ADDON_STATUS_UNKNOWN ) ; <nl> CSingleLock lock ( m_critSection ) ; <nl> <nl> PVR_CLIENT addon ; <nl> bool CPVRClients : : UpdateAndInitialiseClients ( bool bInitialiseAllClients / * = fal <nl> bDisabled = true ; <nl> } <nl> / / re - check the enabled status . newly installed clients get disabled when they ' re added to the db <nl> - else if ( addon - > Enabled ( ) & & ! addon - > Create ( iClientId ) ) <nl> + else if ( addon - > Enabled ( ) & & ( status = addon - > Create ( iClientId ) ) ! = ADDON_STATUS_OK ) <nl> { <nl> CLog : : Log ( LOGWARNING , " % s - failed to create add - on % s " , __FUNCTION__ , clientAddon - > Name ( ) . c_str ( ) ) ; <nl> - if ( ! addon . get ( ) | | ! addon - > DllLoaded ( ) ) <nl> + if ( ! addon . get ( ) | | ! addon - > DllLoaded ( ) | | status = = ADDON_STATUS_PERMANENT_FAILURE ) <nl> { <nl> / / failed to load the dll of this add - on , disable it <nl> CLog : : Log ( LOGWARNING , " % s - failed to load the dll for add - on % s , disabling it " , __FUNCTION__ , clientAddon - > Name ( ) . c_str ( ) ) ; <nl> | [ pvr / addons ] fixed - return the exact ADDON_STATUS in CAddonDll : : Create ( ) instead of a bool , and disable PVR add - ons that failed with ADDON_STATUS_PERMANENT_FAILURE | xbmc/xbmc | fb203573baff146834948673ccfbd9d27a0cec68 | 2012-10-09T23:39:39Z |
mmm a / lib / Sema / TypeCheckDecl . cpp <nl> ppp b / lib / Sema / TypeCheckDecl . cpp <nl> static void inferDynamic ( ASTContext & ctx , ValueDecl * D ) { <nl> if ( D - > isFinal ( ) & & ! isNSManaged ) <nl> return ; <nl> <nl> - / / Variables declared with ' let ' cannot be ' dynamic ' . <nl> - if ( auto VD = dyn_cast < VarDecl > ( D ) ) { <nl> - auto staticSpelling = VD - > getParentPatternBinding ( ) - > getStaticSpelling ( ) ; <nl> - <nl> - / / The presence of ' static ' blocks the inference of ' dynamic ' . <nl> - if ( staticSpelling = = StaticSpellingKind : : KeywordStatic ) <nl> - return ; <nl> - <nl> - if ( VD - > isLet ( ) & & ! isNSManaged ) <nl> - return ; <nl> - } <nl> - <nl> / / Accessors should not infer ' dynamic ' on their own ; they can get it from <nl> / / their storage decls . <nl> - if ( auto FD = dyn_cast < FuncDecl > ( D ) ) { <nl> - if ( isa < AccessorDecl > ( FD ) ) <nl> - return ; <nl> - <nl> - auto staticSpelling = FD - > getStaticSpelling ( ) ; <nl> - <nl> - / / The presence of ' static ' bocks the inference of ' dynamic ' . <nl> - if ( staticSpelling = = StaticSpellingKind : : KeywordStatic ) <nl> - return ; <nl> - } <nl> + if ( isa < AccessorDecl > ( D ) ) <nl> + return ; <nl> <nl> - / / The presence of ' final ' on a class prevents ' dynamic ' . <nl> + / / Only classes can use ' dynamic ' . <nl> auto classDecl = D - > getDeclContext ( ) - > getAsClassOrClassExtensionContext ( ) ; <nl> - if ( ! classDecl ) return ; <nl> - if ( ! isNSManaged & & classDecl - > isFinal ( ) & & <nl> - ! classDecl - > requiresStoredPropertyInits ( ) ) <nl> + if ( ! classDecl ) <nl> return ; <nl> <nl> / / Add the ' dynamic ' attribute . <nl> void TypeChecker : : validateDecl ( ValueDecl * D ) { <nl> } <nl> } <nl> <nl> - / / Infer ' dynamic ' before touching accessors . <nl> - inferDynamic ( Context , VD ) ; <nl> - <nl> / / If this variable is a class member , mark it final if the <nl> / / class is final , or if it was declared with ' let ' . <nl> auto staticSpelling = <nl> VD - > getParentPatternBinding ( ) - > getStaticSpelling ( ) ; <nl> inferFinalAndDiagnoseIfNeeded ( * this , VD , staticSpelling ) ; <nl> <nl> - if ( VD - > isLet ( ) & & <nl> - VD - > getDeclContext ( ) - > getAsClassOrClassExtensionContext ( ) ) { <nl> + if ( VD - > isLet ( ) & & isa < ClassDecl > ( nominalDecl ) ) { <nl> makeFinal ( Context , VD ) ; <nl> <nl> if ( VD - > getFormalAccess ( ) = = AccessLevel : : Open ) { <nl> void TypeChecker : : validateDecl ( ValueDecl * D ) { <nl> fixItAccess ( inFlightDiag , D , AccessLevel : : Public ) ; <nl> } <nl> } <nl> + <nl> + / / Infer ' dynamic ' after ' final ' but before touching accessors . <nl> + inferDynamic ( Context , VD ) ; <nl> } <nl> <nl> / / Perform accessor - related validation . <nl> void TypeChecker : : validateDecl ( ValueDecl * D ) { <nl> errorConvention ) ) ) <nl> isObjC = None ; <nl> markAsObjC ( * this , FD , isObjC , errorConvention ) ; <nl> + <nl> + inferFinalAndDiagnoseIfNeeded ( * this , FD , FD - > getStaticSpelling ( ) ) ; <nl> + inferDynamic ( Context , FD ) ; <nl> } <nl> <nl> / / If the function is exported to C , it must be representable in ( Obj - ) C . <nl> void TypeChecker : : validateDecl ( ValueDecl * D ) { <nl> } <nl> } <nl> <nl> - inferDynamic ( Context , FD ) ; <nl> - <nl> - / / If this is a class member , mark it final if the class is final . 
<nl> - inferFinalAndDiagnoseIfNeeded ( * this , FD , FD - > getStaticSpelling ( ) ) ; <nl> - <nl> checkDeclAttributes ( FD ) ; <nl> <nl> break ; <nl> | Simplify inferDynamic by assuming ' final ' has already been inferred | apple/swift | d6d26758573914950aa6a5ae6c24bfe1ada687f2 | 2018-04-19T20:16:36Z |
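The swift commit above leans on ordering: `final` inference runs before `dynamic` inference, so `inferDynamic` no longer needs its own checks for `let` properties or final classes — the `final` bit already blocks it. A loose, condensed illustration of that dependency (plain C++, not the Swift compiler's data structures; the NSManaged and access-control details from the diff are omitted):

```cpp
#include <iostream>

struct Decl {
  bool is_let = false;
  bool is_accessor = false;
  bool in_class = false;
  bool in_final_class = false;
  bool is_final = false;
  bool is_dynamic = false;
};

// 'let' properties of classes and members of final classes are made final.
void inferFinal(Decl& d) {
  if (d.in_class && (d.is_let || d.in_final_class))
    d.is_final = true;
}

// With 'final' already settled, dynamic inference needs only three early-outs.
void inferDynamic(Decl& d) {
  if (d.is_final) return;     // 'final' blocks 'dynamic'
  if (d.is_accessor) return;  // accessors get it from their storage decl
  if (!d.in_class) return;    // only class members can be dynamic
  d.is_dynamic = true;
}

int main() {
  Decl let_property;
  let_property.is_let = true;
  let_property.in_class = true;

  inferFinal(let_property);   // must run first, as in the reordered validateDecl
  inferDynamic(let_property);
  std::cout << std::boolalpha << let_property.is_dynamic << "\n";  // false
}
```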
mmm a / AirLib / include / common / VectorMath . hpp <nl> ppp b / AirLib / include / common / VectorMath . hpp <nl> class VectorMathT { <nl> typedef common_utils : : Utils Utils ; <nl> / / use different seeds for each component <nl> / / TODO : below we are using double instead of RealT becaise of VC + + 2017 bug in random implementation <nl> - typedef common_utils : : RandomGenerator < RealT , std : : normal_distribution < double > , 1 > RandomGeneratorGausianXT ; <nl> + typedef common_utils : : RandomGenerator < RealT , std : : normal_distribution < double > , 1 > RandomGeneratorGausianXT ; <nl> typedef common_utils : : RandomGenerator < RealT , std : : normal_distribution < double > , 2 > RandomGeneratorGausianYT ; <nl> typedef common_utils : : RandomGenerator < RealT , std : : normal_distribution < double > , 3 > RandomGeneratorGausianZT ; <nl> typedef common_utils : : RandomGenerator < RealT , std : : uniform_real_distribution < RealT > , 1 > RandomGeneratorXT ; <nl> mmm a / AirLib / include / controllers / simple_flight / AirSimSimpleFlightBoard . hpp <nl> ppp b / AirLib / include / controllers / simple_flight / AirSimSimpleFlightBoard . hpp <nl> class AirSimSimpleFlightBoard : public simple_flight : : Board { <nl> <nl> virtual void readAccel ( float accel [ 3 ] ) const override <nl> { <nl> - const auto & linear_accel = kinematics_ - > accelerations . linear ; <nl> + const auto & linear_accel = VectorMath : : transformToBodyFrame ( kinematics_ - > accelerations . linear , kinematics_ - > pose . orientation ) ; <nl> accel [ 0 ] = linear_accel . x ( ) ; <nl> accel [ 1 ] = linear_accel . y ( ) ; <nl> accel [ 2 ] = linear_accel . z ( ) ; <nl> class AirSimSimpleFlightBoard : public simple_flight : : Board { <nl> <nl> virtual void readGyro ( float gyro [ 3 ] ) const override <nl> { <nl> - const auto & angula_vel = kinematics_ - > twist . angular ; <nl> - gyro [ 0 ] = angula_vel . x ( ) ; <nl> - gyro [ 1 ] = angula_vel . y ( ) ; <nl> - gyro [ 2 ] = angula_vel . z ( ) ; <nl> + const auto angular_vel = kinematics_ - > twist . angular ; / / angular velocity is already in body frame <nl> + gyro [ 0 ] = angular_vel . x ( ) ; <nl> + gyro [ 1 ] = angular_vel . y ( ) ; <nl> + gyro [ 2 ] = angular_vel . z ( ) ; <nl> } <nl> <nl> virtual void reset ( ) override <nl> mmm a / AirLib / include / controllers / simple_flight / AirSimSimpleFlightEstimator . hpp <nl> ppp b / AirLib / include / controllers / simple_flight / AirSimSimpleFlightEstimator . hpp <nl> class AirSimSimpleFlightEstimator : public simple_flight : : IAngleEstimator { <nl> virtual simple_flight : : Angles getAngles ( ) const override <nl> { <nl> simple_flight : : Angles angles ; <nl> - VectorMath : : toEulerianAngle ( kinematics_ - > pose . orientation , <nl> + VectorMath : : toEulerianAngle ( kinematics_ - > pose . orientation . conjugate ( ) , <nl> angles . pitch , angles . roll , angles . yaw ) ; <nl> <nl> return angles ; <nl> mmm a / AirLib / include / controllers / simple_flight / firmware / AngleStabilizer . hpp <nl> ppp b / AirLib / include / controllers / simple_flight / firmware / AngleStabilizer . hpp <nl> class AngleStabilizer { <nl> output_ . yaw = pid_angle_yaw_ . getOutput ( ) ; <nl> <nl> / / common_utils : : Utils : : log ( <nl> - / / common_utils : : Utils : : stringf ( " ( % f , % f , % f ) - ( % f , % f , % f ) - ( % f , % f , % f ) " , <nl> + / / common_utils : : Utils : : stringf ( " ANG ( % f , % f , % f ) - ( % f , % f , % f ) - ( % f , % f , % f ) " , <nl> / / pid_angle_pitch_ . getGoal ( ) , pid_angle_roll_ . 
getGoal ( ) , pid_angle_yaw_ . getGoal ( ) , <nl> / / pid_angle_pitch_ . getMeasured ( ) , pid_angle_roll_ . getMeasured ( ) , pid_angle_yaw_ . getMeasured ( ) , <nl> / / pid_angle_pitch_ . getOutput ( ) , pid_angle_roll_ . getOutput ( ) , pid_angle_yaw_ . getOutput ( ) <nl> mmm a / AirLib / include / controllers / simple_flight / firmware / Mixer . hpp <nl> ppp b / AirLib / include / controllers / simple_flight / firmware / Mixer . hpp <nl> class Mixer { <nl> <nl> / / only thing that this matrix does is change the sign <nl> const motorMixer_t mixerQuadX [ 4 ] = { / / QuadX config <nl> - { 1 . 0f , - 1 . 0f , 1 . 0f , 1 . 0f } , / / FRONT_R <nl> - { 1 . 0f , 1 . 0f , - 1 . 0f , 1 . 0f } , / / REAR_L <nl> + { 1 . 0f , - 1 . 0f , 1 . 0f , 1 . 0f } , / / FRONT_R <nl> + { 1 . 0f , 1 . 0f , - 1 . 0f , 1 . 0f } , / / REAR_L <nl> { 1 . 0f , 1 . 0f , 1 . 0f , - 1 . 0f } , / / FRONT_L <nl> { 1 . 0f , - 1 . 0f , - 1 . 0f , - 1 . 0f } , / / REAR_R <nl> } ; <nl> mmm a / AirLib / include / controllers / simple_flight / firmware / RateStabilizer . hpp <nl> ppp b / AirLib / include / controllers / simple_flight / firmware / RateStabilizer . hpp <nl> class RateStabilizer { <nl> output_ . yaw = pid_rate_yaw_ . getOutput ( ) ; <nl> <nl> / / common_utils : : Utils : : log ( <nl> - / / common_utils : : Utils : : stringf ( " ( % f , % f , % f ) - ( % f , % f , % f ) - ( % f , % f , % f ) " , <nl> + / / common_utils : : Utils : : stringf ( " RAT ( % f , % f , % f ) - ( % f , % f , % f ) - ( % f , % f , % f ) " , <nl> / / pid_rate_pitch_ . getGoal ( ) , pid_rate_roll_ . getGoal ( ) , pid_rate_yaw_ . getGoal ( ) , <nl> / / pid_rate_pitch_ . getMeasured ( ) , pid_rate_roll_ . getMeasured ( ) , pid_rate_yaw_ . getMeasured ( ) , <nl> / / pid_rate_pitch_ . getOutput ( ) , pid_rate_roll_ . getOutput ( ) , pid_rate_yaw_ . getOutput ( ) <nl> mmm a / Unreal / Plugins / AirSim / Source / MultiRotorConnector . cpp <nl> ppp b / Unreal / Plugins / AirSim / Source / MultiRotorConnector . cpp <nl> const msr : : airlib : : RCData & MultiRotorConnector : : getRCData ( ) <nl> rc_data_ . throttle = joyStickToRC ( joystick_state_ . left_y ) ; <nl> <nl> / / convert 0 to 1 - > - 1 to 1 <nl> - rc_data_ . yaw = - ( joyStickToRC ( joystick_state_ . left_x ) * 2 - 1 ) ; <nl> + rc_data_ . yaw = joyStickToRC ( joystick_state_ . left_x ) * 2 - 1 ; <nl> rc_data_ . roll = joyStickToRC ( joystick_state_ . right_x ) * 2 - 1 ; <nl> rc_data_ . pitch = joyStickToRC ( joystick_state_ . right_y ) * 2 - 1 ; <nl> <nl> mmm a / docs / linux_build . md <nl> ppp b / docs / linux_build . md <nl> It ' s super simple 1 - 2 - 3 ! <nl> 1 . Make sure you are [ registered with Epic Games ] ( https : / / docs . unrealengine . com / latest / INT / Platforms / Linux / BeginnerLinuxDeveloper / SettingUpAnUnrealWorkflow / 1 / index . html ) . This is required so you can get Unreal engine ' s source code . <nl> 2 . Clone Unreal in your favorite folder and run setup . sh ( this may take a while ! ) . Note : We only support Unreal 4 . 16 and newer . <nl> ` ` ` <nl> - mkdir - p GitHubSrc & & cd GitHubSrc <nl> + # go to folder where you clone GitHub projects <nl> git clone - b 4 . 16 https : / / github . com / EpicGames / UnrealEngine . git <nl> cd UnrealEngine <nl> . / Setup . sh <nl> It ' s super simple 1 - 2 - 3 ! <nl> ` ` ` <nl> 3 . Clone AirSim and run setup . sh : <nl> ` ` ` <nl> + # go to folder where you clone GitHub projects <nl> git clone https : / / github . com / Microsoft / AirSim . git <nl> cd AirSim <nl> . / setup . 
sh <nl> cd Unreal / Engine / Binaries / Linux <nl> UE4Editor <nl> ` ` ` <nl> <nl> - On first start you might not see any projects in UE4 editor . Click on Projects tab , Browse button and then select ` AirSim / Unreal / Environments / Blocks / Blocks . uproject ` . You will be then prompted by message " The following modules are missing or built with a different engine versions . . . " . Click Yes . Now it might take a while so go get some coffee : ) . <nl> + On first start you might not see any projects in UE4 editor . Click on Projects tab , Browse button and then navigate to ` AirSim / Unreal / Environments / Blocks / Blocks . uproject ` . You will be then prompted by message " The following modules are missing or built with a different engine versions . . . " . Click Yes . Now it might take a while so go get some coffee : ) . <nl> <nl> # # Changing Code and Rebuilding <nl> 1 . After making code changes in AirSim , run ` . / build . sh ` to rebuild . This step also copies the binary output to Blocks sample project . To clean and completely rebuild , first use ` . / clean . sh ` . <nl> Yes ! The ` * . Build . cs ` files are , however , no longer compatible ( you will get com <nl> # # # # Can I compile AirSim in BashOnWindows ? <nl> Yes , however you can ' t run Unreal from BashOnWindows . So this is kind of useful to check Linux compile , not for end - to - end run . See [ BashOnWindows install guide ] ( https : / / msdn . microsoft . com / en - us / commandline / wsl / install_guide ) . Make sure to have latest version ( Windows 10 Creators Edition ) as previous versions had various issues . Also don ' t invoke ` bash ` from ` Visual Studio Command Prompt ` otherwise cmake might find VC + + and try and use that ! <nl> <nl> + # # # # I made change in Visual Studio but there is no effect <nl> + Sometime Unreal + VS build system don ' t do recompile if you change only header file . So try making some cpp file dirty . <nl> + <nl> # # # # Where can I find more info on running Unreal on Linux ? <nl> * [ Start here - Unreal on Linux ] ( https : / / docs . unrealengine . com / latest / INT / Platforms / Linux / index . html ) <nl> * [ Building Unreal on Linux ] ( https : / / wiki . unrealengine . com / Building_On_Linux # Clang ) <nl> mmm a / setup . sh <nl> ppp b / setup . sh <nl> sudo usermod - a - G dialout $ USER <nl> # tar - xf " clang + llvm - 4 . 0 . 1 - x86_64 - linux - gnu - debian8 . tar . xz " - C llvm - build / output <nl> <nl> sudo apt - get install - y build - essential <nl> + sudo apt - get install cmake <nl> wget - O - http : / / apt . llvm . org / llvm - snapshot . gpg . key | sudo apt - key add - <nl> sudo apt - get update <nl> sudo apt - get install - y clang - 3 . 9 clang + + - 3 . 9 <nl> | rate controller working for simple_flight | microsoft/AirSim | a8661385ce540f042491220e4f19aab941bef9e4 | 2017-07-28T07:25:07Z |
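The AirSim change above feeds the simulated IMU accelerometer a body-frame vector by transforming the world-frame linear acceleration with the vehicle's orientation, and the angle estimator uses the conjugate quaternion. A self-contained sketch of that world-to-body rotation — the quaternion math is generic, not AirSim's `VectorMath`, and the exact sign/frame conventions (NED vs. ENU) depend on the simulator's setup:

```cpp
#include <cmath>
#include <iostream>

struct Quat { double w, x, y, z; };
struct Vec3 { double x, y, z; };

Quat conjugate(const Quat& q) { return {q.w, -q.x, -q.y, -q.z}; }

// Rotate v by the unit quaternion q using t = 2*(u x v), v' = v + w*t + u x t.
Vec3 rotate(const Quat& q, const Vec3& v) {
  const Vec3 u{q.x, q.y, q.z};
  const Vec3 t{2 * (u.y * v.z - u.z * v.y),
               2 * (u.z * v.x - u.x * v.z),
               2 * (u.x * v.y - u.y * v.x)};
  const Vec3 c{u.y * t.z - u.z * t.y,
               u.z * t.x - u.x * t.z,
               u.x * t.y - u.y * t.x};
  return {v.x + q.w * t.x + c.x, v.y + q.w * t.y + c.y, v.z + q.w * t.z + c.z};
}

// If q takes body-frame vectors to the world frame, its conjugate goes back.
Vec3 transformToBodyFrame(const Vec3& world, const Quat& body_to_world) {
  return rotate(conjugate(body_to_world), world);
}

int main() {
  const double kPi = 3.14159265358979323846;
  // Vehicle yawed +90 degrees about Z; a world +X acceleration reads as body -Y
  // on the onboard accelerometer (for this right-handed convention).
  const double half = kPi / 4;  // half of 90 degrees, in radians
  Quat q{std::cos(half), 0, 0, std::sin(half)};
  Vec3 body = transformToBodyFrame({1, 0, 0}, q);
  std::cout << body.x << " " << body.y << " " << body.z << "\n";  // ~0 -1 0
}
```

Angular velocity needs no such transform because, as the comment in the diff notes, it is already reported in the body frame.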
mmm a / include / swift / AST / DiagnosticsSema . def <nl> ppp b / include / swift / AST / DiagnosticsSema . def <nl> ERROR ( attr_methods_only , none , <nl> " only methods can be declared % 0 " , ( DeclAttribute ) ) <nl> ERROR ( access_control_in_protocol , none , <nl> " % 0 modifier cannot be used in protocols " , ( DeclAttribute ) ) <nl> + NOTE ( access_control_in_protocol_detail , none , <nl> + " protocol requirements implicitly have the same access as the " <nl> + " protocol itself " , ( ) ) <nl> ERROR ( access_control_setter , none , <nl> " ' % select { private | fileprivate | internal | public | open } 0 ( set ) ' modifier can only " <nl> " be applied to variables and subscripts " , <nl> mmm a / include / swift / AST / Stmt . h <nl> ppp b / include / swift / AST / Stmt . h <nl> class alignas ( 8 ) Stmt { <nl> <nl> SWIFT_INLINE_BITFIELD_FULL ( BraceStmt , Stmt , 32 , <nl> : NumPadBits , <nl> - NumElements : 32 ; <nl> + NumElements : 32 <nl> ) ; <nl> <nl> SWIFT_INLINE_BITFIELD_FULL ( CaseStmt , Stmt , 32 , <nl> mmm a / lib / Sema / TypeCheckAttr . cpp <nl> ppp b / lib / Sema / TypeCheckAttr . cpp <nl> bool AttributeEarlyChecker : : visitAbstractAccessControlAttr ( <nl> / / Or within protocols . <nl> if ( isa < ProtocolDecl > ( D - > getDeclContext ( ) ) ) { <nl> diagnoseAndRemoveAttr ( attr , diag : : access_control_in_protocol , attr ) ; <nl> + TC . diagnose ( attr - > getLocation ( ) , diag : : access_control_in_protocol_detail ) ; <nl> return true ; <nl> } <nl> <nl> mmm a / test / attr / accessibility . swift <nl> ppp b / test / attr / accessibility . swift <nl> private enum TestEnum { <nl> private protocol TestProtocol { <nl> private associatedtype Foo / / expected - error { { ' private ' modifier cannot be applied to this declaration } } { { 3 - 11 = } } <nl> internal var Bar : Int { get } / / expected - error { { ' internal ' modifier cannot be used in protocols } } { { 3 - 12 = } } <nl> + / / expected - note @ - 1 { { protocol requirements implicitly have the same access as the protocol itself } } <nl> public func baz ( ) / / expected - error { { ' public ' modifier cannot be used in protocols } } { { 3 - 10 = } } <nl> + / / expected - note @ - 1 { { protocol requirements implicitly have the same access as the protocol itself } } <nl> } <nl> <nl> public ( set ) func publicSetFunc ( ) { } / / expected - error { { ' public ' modifier cannot be applied to this declaration } } { { 1 - 13 = } } <nl> mmm a / test / attr / attr_versioned . swift <nl> ppp b / test / attr / attr_versioned . swift <nl> protocol VersionedProtocol { <nl> <nl> public func publicRequirement ( ) - > T <nl> / / expected - error @ - 1 { { ' public ' modifier cannot be used in protocols } } <nl> + / / expected - note @ - 2 { { protocol requirements implicitly have the same access as the protocol itself } } <nl> <nl> @ _versioned func versionedRequirement ( ) - > T <nl> / / expected - error @ - 1 { { ' @ _versioned ' attribute cannot be used in protocols } } <nl> | Merge remote - tracking branch ' origin / master ' into master - next | apple/swift | d5ec7e963267bf44d801c6351a7eb29da50f81e5 | 2017-12-16T07:31:02Z |
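Editor's illustration for the Swift row above: the change pairs the existing "modifier cannot be used in protocols" error with a follow-up NOTE at the same location explaining why. The snippet below is a simplified, self-contained C++ sketch of that error-plus-note pattern only; it is not the Swift compiler's DiagnosticEngine API, and all class and function names here are hypothetical.

```cpp
// Sketch of the "primary error + explanatory note" diagnostic pattern from the
// Swift change above. Not the real DiagnosticEngine -- a toy illustration.
#include <cstdio>
#include <string>
#include <vector>

enum class DiagKind { Error, Note };

struct Diagnostic {
    DiagKind kind;
    unsigned line;
    std::string message;
};

class DiagEngine {
    std::vector<Diagnostic> diags_;
public:
    void diagnose(DiagKind kind, unsigned line, std::string msg) {
        diags_.push_back({kind, line, std::move(msg)});
    }
    void print() const {
        for (const auto &d : diags_)
            std::printf("%u: %s: %s\n", d.line,
                        d.kind == DiagKind::Error ? "error" : "note",
                        d.message.c_str());
    }
};

int main() {
    DiagEngine diags;
    unsigned attrLine = 3;  // hypothetical location of the offending modifier
    // Mirrors the diff: the error is immediately followed by a note that
    // explains *why* access modifiers are rejected inside protocols.
    diags.diagnose(DiagKind::Error, attrLine,
                   "'public' modifier cannot be used in protocols");
    diags.diagnose(DiagKind::Note, attrLine,
                   "protocol requirements implicitly have the same access "
                   "as the protocol itself");
    diags.print();
    return 0;
}
```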
mmm a / cocos / audio / win32 / AudioEngine - win32 . cpp <nl> ppp b / cocos / audio / win32 / AudioEngine - win32 . cpp <nl> bool AudioEngineImpl : : stop ( int audioID ) <nl> _alSourceUsed [ player . _alSource ] = false ; <nl> if ( player . _streamingSource ) <nl> { <nl> + player . _ready = false ; <nl> player . notifyExitThread ( ) ; <nl> } <nl> else <nl> void AudioEngineImpl : : stopAll ( ) <nl> auto & player = it - > second ; <nl> if ( player . _streamingSource ) <nl> { <nl> + player . _ready = false ; <nl> player . notifyExitThread ( ) ; <nl> + + it ; <nl> } <nl> void AudioEngineImpl : : update ( float dt ) <nl> auto & player = it - > second ; <nl> alGetSourcei ( player . _alSource , AL_SOURCE_STATE , & sourceState ) ; <nl> <nl> - if ( player . _readForRemove ) <nl> + if ( player . _readForRemove & & ! player . _ready ) <nl> { <nl> it = _audioPlayers . erase ( it ) ; <nl> } <nl> void AudioEngineImpl : : update ( float dt ) <nl> <nl> if ( player . _streamingSource ) <nl> { <nl> + player . _ready = false ; <nl> player . notifyExitThread ( ) ; <nl> + + it ; <nl> } <nl> | AudioEngine [ WIN32 ] : Fixed ` FinishCallback ` may not been invoked when the sound play completes . | cocos2d/cocos2d-x | 42fa33dc6ebe729b6b19c4c372fce06e4772ffd4 | 2015-11-17T15:44:52Z |
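Editor's illustration for the cocos2d-x row above: the fix clears `player._ready` before notifying the streaming thread to exit, and `update()` only erases a player once `_readForRemove && !_ready`, so the worker can still run its finish callback on a live object. The sketch below models that two-flag deferred-removal idea in self-contained C++; it is a simplified illustration of the pattern, not the actual cocos2d-x AudioEngine implementation, and the member names are paraphrased.

```cpp
// Simplified two-flag teardown, mirroring "_ready = false" before the exit
// notification and the "readForRemove && !ready" erase guard in the diff above.
// Illustration only -- not cocos2d-x code.
#include <atomic>
#include <chrono>
#include <cstdio>
#include <map>
#include <thread>

struct AudioPlayer {
    std::atomic<bool> ready{true};            // player is active / usable
    std::atomic<bool> readyForRemove{false};  // worker finished, safe to erase
    std::thread worker;

    void start() {
        worker = std::thread([this] {
            // ... streaming work would happen here ...
            while (ready.load())
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            // fire the finish callback, release buffers, then allow removal
            std::puts("finish callback invoked");
            readyForRemove.store(true);
        });
    }
    void stop() {
        ready.store(false);  // mirrors setting _ready = false before notifyExitThread()
    }
    ~AudioPlayer() { if (worker.joinable()) worker.join(); }
};

int main() {
    std::map<int, AudioPlayer> players;
    players[1].start();
    players[1].stop();

    // update(): erase only when the worker says it is done AND the player is
    // no longer marked ready, so the callback above always runs first.
    while (!players.empty()) {
        for (auto it = players.begin(); it != players.end();) {
            if (it->second.readyForRemove.load() && !it->second.ready.load())
                it = players.erase(it);
            else
                ++it;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
    }
    std::puts("all players removed");
    return 0;
}
```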
mmm a / R - package / R / lgb . Booster . R <nl> ppp b / R - package / R / lgb . Booster . R <nl> Booster < - R6 : : R6Class ( <nl> # ' number of columns corresponding to the number of trees . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> Booster < - R6 : : R6Class ( <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> # ' preds < - predict ( model , test $ data ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname predict . lgb . Booster <nl> # ' @ export <nl> predict . lgb . Booster < - function ( object , <nl> # ' @ return lgb . Booster <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> predict . lgb . Booster < - function ( object , <nl> # ' load_booster < - lgb . load ( filename = " model . txt " ) <nl> # ' model_string < - model $ save_model_to_string ( NULL ) # saves best iteration <nl> # ' load_booster_from_str < - lgb . load ( model_str = model_string ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . load <nl> # ' @ export <nl> lgb . load < - function ( filename = NULL , model_str = NULL ) { <nl> # ' @ return lgb . Booster <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . load < - function ( filename = NULL , model_str = NULL ) { <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> # ' lgb . save ( model , " model . txt " ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . save <nl> # ' @ export <nl> lgb . save < - function ( booster , filename , num_iteration = NULL ) { <nl> # ' @ return json format of model <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . save < - function ( booster , filename , num_iteration = NULL ) { <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> # ' json_model < - lgb . dump ( model ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . dump <nl> # ' @ export <nl> lgb . dump < - function ( booster , num_iteration = NULL ) { <nl> # ' @ return vector of evaluation result <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . dump < - function ( booster , num_iteration = NULL ) { <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> # ' lgb . get . eval . result ( model , " test " , " l2 " ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . get . eval . result <nl> # ' @ export <nl> mmm a / R - package / R / lgb . Dataset . R <nl> ppp b / R - package / R / lgb . Dataset . R <nl> Dataset < - R6 : : R6Class ( <nl> } else if ( is . matrix ( private $ raw_data ) | | methods : : is ( private $ raw_data , " dgCMatrix " ) ) { <nl> <nl> # Check if dgCMatrix ( sparse matrix column compressed ) <nl> + # NOTE : requires Matrix package <nl> dim ( private $ raw_data ) <nl> <nl> } else { <nl> Dataset < - R6 : : R6Class ( <nl> <nl> # Check for info name and handle <nl> if ( is . null ( private $ info [ [ name ] ] ) ) { <nl> + <nl> if ( lgb . is . null . handle ( private $ handle ) ) { <nl> - stop ( " Cannot perform getinfo before construct Dataset . 
" ) <nl> + stop ( " Cannot perform getinfo before constructing Dataset . " ) <nl> } <nl> + <nl> # Get field size of info <nl> info_len < - 0L <nl> info_len < - lgb . call ( " LGBM_DatasetGetFieldSize_R " , <nl> Dataset < - R6 : : R6Class ( <nl> # ' @ return constructed dataset <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> Dataset < - R6 : : R6Class ( <nl> # ' lgb . Dataset . save ( dtrain , " lgb . Dataset . data " ) <nl> # ' dtrain < - lgb . Dataset ( " lgb . Dataset . data " ) <nl> # ' lgb . Dataset . construct ( dtrain ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> lgb . Dataset < - function ( data , <nl> lgb . Dataset < - function ( data , <nl> # ' @ return constructed dataset <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . Dataset < - function ( data , <nl> # ' data ( agaricus . test , package = " lightgbm " ) <nl> # ' test < - agaricus . test <nl> # ' dtest < - lgb . Dataset . create . valid ( dtrain , test $ data , label = test $ label ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> lgb . Dataset . create . valid < - function ( dataset , data , info = list ( ) , . . . ) { <nl> lgb . Dataset . create . valid < - function ( dataset , data , info = list ( ) , . . . ) { <nl> # ' @ param dataset Object of class \ code { lgb . Dataset } <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> # ' dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> # ' lgb . Dataset . construct ( dtrain ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> lgb . Dataset . construct < - function ( dataset ) { <nl> lgb . Dataset . construct < - function ( dataset ) { <nl> # ' be directly used with an \ code { lgb . Dataset } object . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . Dataset . construct < - function ( dataset ) { <nl> # ' stopifnot ( nrow ( dtrain ) = = nrow ( train $ data ) ) <nl> # ' stopifnot ( ncol ( dtrain ) = = ncol ( train $ data ) ) <nl> # ' stopifnot ( all ( dim ( dtrain ) = = dim ( train $ data ) ) ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname dim <nl> # ' @ export <nl> dim . lgb . Dataset < - function ( x , . . . ) { <nl> # ' Since row names are irrelevant , it is recommended to use \ code { colnames } directly . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> dim . lgb . Dataset < - function ( x , . . . ) { <nl> # ' colnames ( dtrain ) <nl> # ' colnames ( dtrain ) < - make . names ( 1 : ncol ( train $ data ) ) <nl> # ' print ( dtrain , verbose = TRUE ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname dimnames . lgb . Dataset <nl> # ' @ export <nl> dimnames . lgb . Dataset < - function ( x ) { <nl> # ' @ return constructed sub dataset <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> # ' dtrain < - lgb . 
Dataset ( train $ data , label = train $ label ) <nl> # ' <nl> # ' dsub < - lightgbm : : slice ( dtrain , 1 : 42 ) <nl> + # ' lgb . Dataset . construct ( dsub ) <nl> # ' labels < - lightgbm : : getinfo ( dsub , " label " ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> slice < - function ( dataset , . . . ) { <nl> slice . lgb . Dataset < - function ( dataset , idxset , . . . ) { <nl> # ' } <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> slice . lgb . Dataset < - function ( dataset , idxset , . . . ) { <nl> # ' <nl> # ' labels2 < - lightgbm : : getinfo ( dtrain , " label " ) <nl> # ' stopifnot ( all ( labels2 = = 1 - labels ) ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> getinfo < - function ( dataset , . . . ) { <nl> getinfo . lgb . Dataset < - function ( dataset , name , . . . ) { <nl> # ' } <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> getinfo . lgb . Dataset < - function ( dataset , name , . . . ) { <nl> # ' <nl> # ' labels2 < - lightgbm : : getinfo ( dtrain , " label " ) <nl> # ' stopifnot ( all . equal ( labels2 , 1 - labels ) ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> setinfo < - function ( dataset , . . . ) { <nl> setinfo . lgb . Dataset < - function ( dataset , name , info , . . . ) { <nl> # ' @ return passed dataset <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> setinfo . lgb . Dataset < - function ( dataset , name , info , . . . ) { <nl> # ' lgb . Dataset . save ( dtrain , " lgb . Dataset . data " ) <nl> # ' dtrain < - lgb . Dataset ( " lgb . Dataset . data " ) <nl> # ' lgb . Dataset . set . categorical ( dtrain , 1 : 2 ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . Dataset . set . categorical <nl> # ' @ export <nl> lgb . Dataset . set . categorical < - function ( dataset , categorical_feature ) { <nl> # ' @ return passed dataset <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> lgb . Dataset . set . categorical < - function ( dataset , categorical_feature ) { <nl> # ' test < - agaricus . test <nl> # ' dtest < - lgb . Dataset ( test $ data , test = train $ label ) <nl> # ' lgb . Dataset . set . reference ( dtest , dtrain ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . Dataset . set . reference <nl> # ' @ export <nl> lgb . Dataset . set . reference < - function ( dataset , reference ) { <nl> # ' <nl> # ' @ examples <nl> # ' <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> # ' dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> # ' lgb . Dataset . save ( dtrain , " data . bin " ) <nl> - # ' } <nl> # ' <nl> # ' @ rdname lgb . Dataset . save <nl> # ' @ export <nl> mmm a / R - package / R / lgb . cv . R <nl> ppp b / R - package / R / lgb . cv . R <nl> CVBooster < - R6 : : R6Class ( <nl> # ' @ return a trained model \ code { lgb . CVBooster } . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . 
train <nl> CVBooster < - R6 : : R6Class ( <nl> # ' min_data = 1 , <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> - # ' } <nl> # ' @ export <nl> lgb . cv < - function ( params = list ( ) , <nl> data , <nl> mmm a / R - package / R / lgb . importance . R <nl> ppp b / R - package / R / lgb . importance . R <nl> <nl> # ' } <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> # ' dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> # ' <nl> - # ' params = list ( objective = " binary " , <nl> + # ' params < - list ( objective = " binary " , <nl> # ' learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> # ' min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> # ' model < - lgb . train ( params , dtrain , 20 ) <nl> <nl> # ' <nl> # ' tree_imp1 < - lgb . importance ( model , percentage = TRUE ) <nl> # ' tree_imp2 < - lgb . importance ( model , percentage = FALSE ) <nl> - # ' } <nl> # ' <nl> # ' @ importFrom magrittr % > % % T > % <nl> # ' @ importFrom data . table : = <nl> mmm a / R - package / R / lgb . interprete . R <nl> ppp b / R - package / R / lgb . interprete . R <nl> <nl> # ' For multiclass classification , a \ code { list } of \ code { data . table } with the Feature column and Contribution columns to each class . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> - # ' library ( lightgbm ) <nl> # ' Sigmoid < - function ( x ) 1 / ( 1 + exp ( - x ) ) <nl> # ' Logit < - function ( x ) log ( x / ( 1 - x ) ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> <nl> # ' setinfo ( dtrain , " init_score " , rep ( Logit ( mean ( train $ label ) ) , length ( train $ label ) ) ) <nl> # ' data ( agaricus . test , package = " lightgbm " ) <nl> # ' test < - agaricus . test <nl> - # ' <nl> - # ' params = list ( objective = " binary " , <nl> - # ' learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> - # ' min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> - # ' model < - lgb . train ( params , dtrain , 20 ) <nl> + # ' <nl> + # ' params < - list ( <nl> + # ' objective = " binary " <nl> + # ' , learning_rate = 0 . 01 <nl> + # ' , num_leaves = 63 <nl> + # ' , max_depth = - 1 <nl> + # ' , min_data_in_leaf = 1 <nl> + # ' , min_sum_hessian_in_leaf = 1 <nl> + # ' ) <nl> # ' model < - lgb . train ( params , dtrain , 20 ) <nl> - # ' <nl> + # ' <nl> # ' tree_interpretation < - lgb . interprete ( model , test $ data , 1 : 5 ) <nl> - # ' } <nl> # ' <nl> # ' @ importFrom magrittr % > % % T > % <nl> # ' @ export <nl> mmm a / R - package / R / lgb . model . dt . tree . R <nl> ppp b / R - package / R / lgb . model . dt . tree . R <nl> <nl> # ' } <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> - # ' library ( lightgbm ) <nl> # ' <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> # ' dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> # ' <nl> - # ' params = list ( objective = " binary " , <nl> + # ' params < - list ( objective = " binary " , <nl> # ' learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> # ' min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> # ' model < - lgb . train ( params , dtrain , 20 ) <nl> # ' model < - lgb . train ( params , dtrain , 20 ) <nl> # ' <nl> # ' tree_dt < - lgb . model . dt . 
tree ( model ) <nl> - # ' } <nl> # ' <nl> # ' @ importFrom magrittr % > % <nl> # ' @ importFrom data . table : = data . table rbindlist <nl> mmm a / R - package / R / lgb . plot . importance . R <nl> ppp b / R - package / R / lgb . plot . importance . R <nl> <nl> # ' and silently returns a processed data . table with \ code { top_n } features sorted by defined importance . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> - # ' data ( agaricus . train , package = " lightgbm " ) <nl> - # ' train < - agaricus . train <nl> - # ' dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> - # ' <nl> - # ' params = list ( objective = " binary " , <nl> - # ' learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> - # ' min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> - # ' model < - lgb . train ( params , dtrain , 20 ) <nl> - # ' model < - lgb . train ( params , dtrain , 20 ) <nl> - # ' <nl> - # ' tree_imp < - lgb . importance ( model , percentage = TRUE ) <nl> - # ' lgb . plot . importance ( tree_imp , top_n = 10 , measure = " Gain " ) <nl> - # ' } <nl> + # data ( agaricus . train , package = " lightgbm " ) <nl> + # train < - agaricus . train <nl> + # dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> + # <nl> + # params < - list ( <nl> + # objective = " binary " <nl> + # , learning_rate = 0 . 01 <nl> + # , num_leaves = 63 <nl> + # , max_depth = - 1 <nl> + # , min_data_in_leaf = 1 <nl> + # , min_sum_hessian_in_leaf = 1 <nl> + # ) <nl> + # <nl> + # model < - lgb . train ( params , dtrain , 20 ) <nl> + # <nl> + # tree_imp < - lgb . importance ( model , percentage = TRUE ) <nl> + # lgb . plot . importance ( tree_imp , top_n = 10 , measure = " Gain " ) <nl> # ' @ importFrom graphics barplot par <nl> # ' @ export <nl> lgb . plot . importance < - function ( tree_imp , <nl> mmm a / R - package / R / lgb . plot . interpretation . R <nl> ppp b / R - package / R / lgb . plot . interpretation . R <nl> <nl> # ' The \ code { lgb . plot . interpretation } function creates a \ code { barplot } . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' Sigmoid < - function ( x ) { 1 / ( 1 + exp ( - x ) ) } <nl> # ' Logit < - function ( x ) { log ( x / ( 1 - x ) ) } <nl> <nl> # ' data ( agaricus . test , package = " lightgbm " ) <nl> # ' test < - agaricus . test <nl> # ' <nl> - # ' params = list ( objective = " binary " , <nl> + # ' params < - list ( objective = " binary " , <nl> # ' learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> # ' min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> # ' model < - lgb . train ( params , dtrain , 20 ) <nl> <nl> # ' <nl> # ' tree_interpretation < - lgb . interprete ( model , test $ data , 1 : 5 ) <nl> # ' lgb . plot . interpretation ( tree_interpretation [ [ 1 ] ] , top_n = 10 ) <nl> - # ' } <nl> # ' @ importFrom graphics barplot par <nl> # ' @ export <nl> lgb . plot . interpretation < - function ( tree_interpretation_dt , <nl> mmm a / R - package / R / lgb . prepare . R <nl> ppp b / R - package / R / lgb . prepare . R <nl> <nl> - # ' Data preparator for LightGBM datasets ( numeric ) <nl> - # ' <nl> - # ' Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric without integers . Please use \ code { lgb . prepare_rules } if you want to apply this transformation to other datasets . <nl> - # ' <nl> - # ' @ param data A data . frame or data . table to prepare . 
<nl> - # ' <nl> - # ' @ return The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> - # ' <nl> - # ' @ examples <nl> - # ' \ dontrun { <nl> - # ' library ( lightgbm ) <nl> - # ' data ( iris ) <nl> - # ' <nl> - # ' str ( iris ) <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : Factor w / 3 levels " setosa " , " versicolor " , . . : 1 1 1 1 . . . <nl> - # ' <nl> - # ' str ( lgb . prepare ( data = iris ) ) # Convert all factors / chars to numeric <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : num 1 1 1 1 1 1 1 1 1 1 . . . <nl> - # ' <nl> - # ' # When lightgbm package is installed , and you do not want to load it <nl> - # ' # You can still use the function ! <nl> - # ' lgb . unloader ( ) <nl> - # ' str ( lightgbm : : lgb . prepare ( data = iris ) ) <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : num 1 1 1 1 1 1 1 1 1 1 . . . <nl> - # ' } <nl> - # ' <nl> - # ' @ export <nl> - lgb . prepare < - function ( data ) { <nl> - <nl> - # data . table not behaving like data . frame <nl> - if ( " data . table " % in % class ( data ) ) { <nl> - <nl> - # Get data classes <nl> - list_classes < - sapply ( data , class ) <nl> - <nl> - # Convert characters to factors only ( we can change them to numeric after ) <nl> - is_char < - which ( list_classes = = " character " ) <nl> - if ( length ( is_char ) > 0 ) { <nl> - data [ , ( is_char ) : = lapply ( . SD , function ( x ) { as . numeric ( as . factor ( x ) ) } ) , . SDcols = is_char ] <nl> - } <nl> - <nl> - # Convert factors to numeric ( integer is more efficient actually ) <nl> - is_fact < - c ( which ( list_classes = = " factor " ) , is_char ) <nl> - if ( length ( is_fact ) > 0 ) { <nl> - data [ , ( is_fact ) : = lapply ( . SD , function ( x ) { as . numeric ( x ) } ) , . SDcols = is_fact ] <nl> - } <nl> - <nl> - } else { <nl> - <nl> - # Default routine ( data . frame ) <nl> - if ( " data . 
frame " % in % class ( data ) ) { <nl> - <nl> - # Get data classes <nl> - list_classes < - sapply ( data , class ) <nl> - <nl> - # Convert characters to factors to numeric ( integer is more efficient actually ) <nl> - is_char < - which ( list_classes = = " character " ) <nl> - if ( length ( is_char ) > 0 ) { <nl> - data [ is_char ] < - lapply ( data [ is_char ] , function ( x ) { as . numeric ( as . factor ( x ) ) } ) <nl> - } <nl> - <nl> - # Convert factors to numeric ( integer is more efficient actually ) <nl> - is_fact < - which ( list_classes = = " factor " ) <nl> - if ( length ( is_fact ) > 0 ) { <nl> - data [ is_fact ] < - lapply ( data [ is_fact ] , function ( x ) { as . numeric ( x ) } ) <nl> - } <nl> - <nl> - } else { <nl> - <nl> - # What do you think you are doing here ? Throw error . <nl> - stop ( " lgb . prepare2 : you provided " , paste ( class ( data ) , collapse = " & " ) , " but data should have class data . frame " ) <nl> - <nl> - } <nl> - <nl> - } <nl> - <nl> - return ( data ) <nl> - <nl> - } <nl> + # ' Data preparator for LightGBM datasets ( numeric ) <nl> + # ' <nl> + # ' Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric without integers . Please use \ code { lgb . prepare_rules } if you want to apply this transformation to other datasets . <nl> + # ' <nl> + # ' @ param data A data . frame or data . table to prepare . <nl> + # ' <nl> + # ' @ return The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> + # ' <nl> + # ' @ examples <nl> + # ' library ( lightgbm ) <nl> + # ' data ( iris ) <nl> + # ' <nl> + # ' str ( iris ) <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> + # ' # $ Species : Factor w / 3 levels " setosa " , " versicolor " , . . : 1 1 1 1 . . . <nl> + # ' <nl> + # ' str ( lgb . prepare ( data = iris ) ) # Convert all factors / chars to numeric <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> + # ' # $ Species : num 1 1 1 1 1 1 1 1 1 1 . . . <nl> + # ' <nl> + # ' # When lightgbm package is installed , and you do not want to load it <nl> + # ' # You can still use the function ! <nl> + # ' lgb . unloader ( ) <nl> + # ' str ( lightgbm : : lgb . prepare ( data = iris ) ) <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . 
<nl> + # ' # $ Species : num 1 1 1 1 1 1 1 1 1 1 . . . <nl> + # ' <nl> + # ' @ export <nl> + lgb . prepare < - function ( data ) { <nl> + <nl> + # data . table not behaving like data . frame <nl> + if ( " data . table " % in % class ( data ) ) { <nl> + <nl> + # Get data classes <nl> + list_classes < - sapply ( data , class ) <nl> + <nl> + # Convert characters to factors only ( we can change them to numeric after ) <nl> + is_char < - which ( list_classes = = " character " ) <nl> + if ( length ( is_char ) > 0 ) { <nl> + data [ , ( is_char ) : = lapply ( . SD , function ( x ) { as . numeric ( as . factor ( x ) ) } ) , . SDcols = is_char ] <nl> + } <nl> + <nl> + # Convert factors to numeric ( integer is more efficient actually ) <nl> + is_fact < - c ( which ( list_classes = = " factor " ) , is_char ) <nl> + if ( length ( is_fact ) > 0 ) { <nl> + data [ , ( is_fact ) : = lapply ( . SD , function ( x ) { as . numeric ( x ) } ) , . SDcols = is_fact ] <nl> + } <nl> + <nl> + } else { <nl> + <nl> + # Default routine ( data . frame ) <nl> + if ( " data . frame " % in % class ( data ) ) { <nl> + <nl> + # Get data classes <nl> + list_classes < - sapply ( data , class ) <nl> + <nl> + # Convert characters to factors to numeric ( integer is more efficient actually ) <nl> + is_char < - which ( list_classes = = " character " ) <nl> + if ( length ( is_char ) > 0 ) { <nl> + data [ is_char ] < - lapply ( data [ is_char ] , function ( x ) { as . numeric ( as . factor ( x ) ) } ) <nl> + } <nl> + <nl> + # Convert factors to numeric ( integer is more efficient actually ) <nl> + is_fact < - which ( list_classes = = " factor " ) <nl> + if ( length ( is_fact ) > 0 ) { <nl> + data [ is_fact ] < - lapply ( data [ is_fact ] , function ( x ) { as . numeric ( x ) } ) <nl> + } <nl> + <nl> + } else { <nl> + <nl> + # What do you think you are doing here ? Throw error . <nl> + stop ( " lgb . prepare2 : you provided " , paste ( class ( data ) , collapse = " & " ) , " but data should have class data . frame " ) <nl> + <nl> + } <nl> + <nl> + } <nl> + <nl> + return ( data ) <nl> + <nl> + } <nl> mmm a / R - package / R / lgb . prepare2 . R <nl> ppp b / R - package / R / lgb . prepare2 . R <nl> <nl> - # ' Data preparator for LightGBM datasets ( integer ) <nl> - # ' <nl> - # ' Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric ( specifically : integer ) . Please use \ code { lgb . prepare_rules2 } if you want to apply this transformation to other datasets . This is useful if you have a specific need for integer dataset instead of numeric dataset . Note that there are programs which do not support integer - only input . Consider this as a half memory technique which is dangerous , especially for LightGBM . <nl> - # ' <nl> - # ' @ param data A data . frame or data . table to prepare . <nl> - # ' <nl> - # ' @ return The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> - # ' <nl> - # ' @ examples <nl> - # ' \ dontrun { <nl> - # ' library ( lightgbm ) <nl> - # ' data ( iris ) <nl> - # ' <nl> - # ' str ( iris ) <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 
2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : Factor w / 3 levels " setosa " , " versicolor " , . . : 1 1 1 1 . . . <nl> - # ' <nl> - # ' str ( lgb . prepare2 ( data = iris ) ) # Convert all factors / chars to integer <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : int 1 1 1 1 1 1 1 1 1 1 . . . <nl> - # ' <nl> - # ' # When lightgbm package is installed , and you do not want to load it <nl> - # ' # You can still use the function ! <nl> - # ' lgb . unloader ( ) <nl> - # ' str ( lightgbm : : lgb . prepare2 ( data = iris ) ) <nl> - # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> - # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> - # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> - # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> - # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> - # ' # $ Species : int 1 1 1 1 1 1 1 1 1 1 . . . <nl> - # ' <nl> - # ' } <nl> - # ' <nl> - # ' @ export <nl> - lgb . prepare2 < - function ( data ) { <nl> - <nl> - # data . table not behaving like data . frame <nl> - if ( inherits ( data , " data . table " ) ) { <nl> - <nl> - # Get data classes <nl> - list_classes < - vapply ( data , class , character ( 1 ) ) <nl> - <nl> - # Convert characters to factors only ( we can change them to numeric after ) <nl> - is_char < - which ( list_classes = = " character " ) <nl> - if ( length ( is_char ) > 0 ) { <nl> - data [ , ( is_char ) : = lapply ( . SD , function ( x ) { as . integer ( as . factor ( x ) ) } ) , . SDcols = is_char ] <nl> - } <nl> - <nl> - # Convert factors to numeric ( integer is more efficient actually ) <nl> - is_fact < - c ( which ( list_classes = = " factor " ) , is_char ) <nl> - if ( length ( is_fact ) > 0 ) { <nl> - data [ , ( is_fact ) : = lapply ( . SD , function ( x ) { as . integer ( x ) } ) , . SDcols = is_fact ] <nl> - } <nl> - <nl> - } else { <nl> - <nl> - # Default routine ( data . frame ) <nl> - if ( inherits ( data , " data . frame " ) ) { <nl> - <nl> - # Get data classes <nl> - list_classes < - vapply ( data , class , character ( 1 ) ) <nl> - <nl> - # Convert characters to factors to numeric ( integer is more efficient actually ) <nl> - is_char < - which ( list_classes = = " character " ) <nl> - if ( length ( is_char ) > 0 ) { <nl> - data [ is_char ] < - lapply ( data [ is_char ] , function ( x ) { as . integer ( as . factor ( x ) ) } ) <nl> - } <nl> - <nl> - # Convert factors to numeric ( integer is more efficient actually ) <nl> - is_fact < - which ( list_classes = = " factor " ) <nl> - if ( length ( is_fact ) > 0 ) { <nl> - data [ is_fact ] < - lapply ( data [ is_fact ] , function ( x ) { as . integer ( x ) } ) <nl> - } <nl> - <nl> - } else { <nl> - <nl> - # What do you think you are doing here ? Throw error . <nl> - stop ( " lgb . prepare : you provided " , paste ( class ( data ) , collapse = " & " ) , " but data should have class data . 
frame " ) <nl> - <nl> - } <nl> - <nl> - } <nl> - <nl> - return ( data ) <nl> - <nl> - } <nl> + # ' Data preparator for LightGBM datasets ( integer ) <nl> + # ' <nl> + # ' Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric ( specifically : integer ) . Please use \ code { lgb . prepare_rules2 } if you want to apply this transformation to other datasets . This is useful if you have a specific need for integer dataset instead of numeric dataset . Note that there are programs which do not support integer - only input . Consider this as a half memory technique which is dangerous , especially for LightGBM . <nl> + # ' <nl> + # ' @ param data A data . frame or data . table to prepare . <nl> + # ' <nl> + # ' @ return The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> + # ' <nl> + # ' @ examples <nl> + # ' library ( lightgbm ) <nl> + # ' data ( iris ) <nl> + # ' <nl> + # ' str ( iris ) <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> + # ' # $ Species : Factor w / 3 levels " setosa " , " versicolor " , . . : 1 1 1 1 . . . <nl> + # ' <nl> + # ' # Convert all factors / chars to integer <nl> + # ' str ( lgb . prepare2 ( data = iris ) ) <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> + # ' # $ Species : int 1 1 1 1 1 1 1 1 1 1 . . . <nl> + # ' <nl> + # ' # When lightgbm package is installed , and you do not want to load it <nl> + # ' # You can still use the function ! <nl> + # ' lgb . unloader ( ) <nl> + # ' str ( lightgbm : : lgb . prepare2 ( data = iris ) ) <nl> + # ' # ' data . frame ' : 150 obs . of 5 variables : <nl> + # ' # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> + # ' # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> + # ' # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> + # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> + # ' # $ Species : int 1 1 1 1 1 1 1 1 1 1 . . . <nl> + # ' <nl> + # ' @ export <nl> + lgb . prepare2 < - function ( data ) { <nl> + <nl> + # data . table not behaving like data . frame <nl> + if ( inherits ( data , " data . table " ) ) { <nl> + <nl> + # Get data classes <nl> + list_classes < - vapply ( data , class , character ( 1 ) ) <nl> + <nl> + # Convert characters to factors only ( we can change them to numeric after ) <nl> + is_char < - which ( list_classes = = " character " ) <nl> + if ( length ( is_char ) > 0 ) { <nl> + data [ , ( is_char ) : = lapply ( . SD , function ( x ) { as . integer ( as . factor ( x ) ) } ) , . 
SDcols = is_char ] <nl> + } <nl> + <nl> + # Convert factors to numeric ( integer is more efficient actually ) <nl> + is_fact < - c ( which ( list_classes = = " factor " ) , is_char ) <nl> + if ( length ( is_fact ) > 0 ) { <nl> + data [ , ( is_fact ) : = lapply ( . SD , function ( x ) { as . integer ( x ) } ) , . SDcols = is_fact ] <nl> + } <nl> + <nl> + } else { <nl> + <nl> + # Default routine ( data . frame ) <nl> + if ( inherits ( data , " data . frame " ) ) { <nl> + <nl> + # Get data classes <nl> + list_classes < - vapply ( data , class , character ( 1 ) ) <nl> + <nl> + # Convert characters to factors to numeric ( integer is more efficient actually ) <nl> + is_char < - which ( list_classes = = " character " ) <nl> + if ( length ( is_char ) > 0 ) { <nl> + data [ is_char ] < - lapply ( data [ is_char ] , function ( x ) { as . integer ( as . factor ( x ) ) } ) <nl> + } <nl> + <nl> + # Convert factors to numeric ( integer is more efficient actually ) <nl> + is_fact < - which ( list_classes = = " factor " ) <nl> + if ( length ( is_fact ) > 0 ) { <nl> + data [ is_fact ] < - lapply ( data [ is_fact ] , function ( x ) { as . integer ( x ) } ) <nl> + } <nl> + <nl> + } else { <nl> + <nl> + # What do you think you are doing here ? Throw error . <nl> + stop ( " lgb . prepare : you provided " , paste ( class ( data ) , collapse = " & " ) , " but data should have class data . frame " ) <nl> + <nl> + } <nl> + <nl> + } <nl> + <nl> + return ( data ) <nl> + <nl> + } <nl> mmm a / R - package / R / lgb . prepare_rules . R <nl> ppp b / R - package / R / lgb . prepare_rules . R <nl> <nl> # ' @ return A list with the cleaned dataset ( \ code { data } ) and the rules ( \ code { rules } ) . The data must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( iris ) <nl> # ' <nl> <nl> # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> # ' # $ Species : num 3 3 3 3 3 3 3 3 3 3 . . . <nl> # ' <nl> - # ' } <nl> - # ' <nl> # ' @ importFrom data . table set <nl> # ' @ export <nl> lgb . prepare_rules < - function ( data , rules = NULL ) { <nl> mmm a / R - package / R / lgb . prepare_rules2 . R <nl> ppp b / R - package / R / lgb . prepare_rules2 . R <nl> <nl> # ' @ return A list with the cleaned dataset ( \ code { data } ) and the rules ( \ code { rules } ) . The data must be converted to a matrix format ( \ code { as . matrix } ) for input in lgb . Dataset . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( iris ) <nl> # ' <nl> <nl> # ' # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> # ' # $ Species : int 3 3 3 3 3 3 3 3 3 3 . . . <nl> # ' <nl> - # ' } <nl> - # ' <nl> # ' @ importFrom data . table set <nl> # ' @ export <nl> lgb . prepare_rules2 < - function ( data , rules = NULL ) { <nl> mmm a / R - package / R / lgb . train . R <nl> ppp b / R - package / R / lgb . train . R <nl> <nl> # ' @ return a trained booster model \ code { lgb . Booster } . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> <nl> # ' min_data = 1 , <nl> # ' learning_rate = 1 , <nl> # ' early_stopping_rounds = 10 ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> lgb . train < - function ( params = list ( ) , <nl> mmm a / R - package / R / lgb . 
unloader . R <nl> ppp b / R - package / R / lgb . unloader . R <nl> <nl> # ' @ return NULL invisibly . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> <nl> # ' <nl> # ' library ( lightgbm ) <nl> # ' # Do whatever you want again with LightGBM without object clashing <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> lgb . unloader < - function ( restore = TRUE , wipe = FALSE , envir = . GlobalEnv ) { <nl> mmm a / R - package / R / readRDS . lgb . Booster . R <nl> ppp b / R - package / R / readRDS . lgb . Booster . R <nl> <nl> # ' @ return lgb . Booster . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> <nl> # ' early_stopping_rounds = 10 ) <nl> # ' saveRDS . lgb . Booster ( model , " model . rds " ) <nl> # ' new_model < - readRDS . lgb . Booster ( " model . rds " ) <nl> - # ' } <nl> # ' <nl> # ' @ export <nl> readRDS . lgb . Booster < - function ( file = " " , refhook = NULL ) { <nl> mmm a / R - package / R / saveRDS . lgb . Booster . R <nl> ppp b / R - package / R / saveRDS . lgb . Booster . R <nl> <nl> # ' @ return NULL invisibly . <nl> # ' <nl> # ' @ examples <nl> - # ' \ dontrun { <nl> # ' library ( lightgbm ) <nl> # ' data ( agaricus . train , package = " lightgbm " ) <nl> # ' train < - agaricus . train <nl> <nl> # ' dtest < - lgb . Dataset . create . valid ( dtrain , test $ data , label = test $ label ) <nl> # ' params < - list ( objective = " regression " , metric = " l2 " ) <nl> # ' valids < - list ( test = dtest ) <nl> - # ' model < - lgb . train ( params , <nl> - # ' dtrain , <nl> - # ' 100 , <nl> - # ' valids , <nl> - # ' min_data = 1 , <nl> - # ' learning_rate = 1 , <nl> - # ' early_stopping_rounds = 10 ) <nl> + # ' model < - lgb . train ( <nl> + # ' params <nl> + # ' , dtrain <nl> + # ' , 100 <nl> + # ' , valids <nl> + # ' , min_data = 1 <nl> + # ' , learning_rate = 1 <nl> + # ' , early_stopping_rounds = 10 <nl> + # ' ) <nl> # ' saveRDS . lgb . Booster ( model , " model . rds " ) <nl> - # ' } <nl> - # ' <nl> # ' @ export <nl> saveRDS . lgb . Booster < - function ( object , <nl> file = " " , <nl> mmm a / R - package / man / dim . Rd <nl> ppp b / R - package / man / dim . Rd <nl> Note : since \ code { nrow } and \ code { ncol } internally use \ code { dim } , they can also <nl> be directly used with an \ code { lgb . Dataset } object . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> stopifnot ( nrow ( dtrain ) = = nrow ( train $ data ) ) <nl> stopifnot ( ncol ( dtrain ) = = ncol ( train $ data ) ) <nl> stopifnot ( all ( dim ( dtrain ) = = dim ( train $ data ) ) ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / dimnames . lgb . Dataset . Rd <nl> ppp b / R - package / man / dimnames . lgb . Dataset . Rd <nl> Generic \ code { dimnames } methods are used by \ code { colnames } . <nl> Since row names are irrelevant , it is recommended to use \ code { colnames } directly . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dimnames ( dtrain ) <nl> colnames ( dtrain ) <nl> colnames ( dtrain ) < - make . 
names ( 1 : ncol ( train $ data ) ) <nl> print ( dtrain , verbose = TRUE ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / getinfo . Rd <nl> ppp b / R - package / man / getinfo . Rd <nl> The \ code { name } field can be one of the following : <nl> } <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> lightgbm : : setinfo ( dtrain , " label " , 1 - labels ) <nl> <nl> labels2 < - lightgbm : : getinfo ( dtrain , " label " ) <nl> stopifnot ( all ( labels2 = = 1 - labels ) ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . Rd <nl> ppp b / R - package / man / lgb . Dataset . Rd <nl> Construct lgb . Dataset object from dense matrix , sparse matrix <nl> or local file ( that was created previously by saving an \ code { lgb . Dataset } ) . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> lgb . Dataset . save ( dtrain , " lgb . Dataset . data " ) <nl> dtrain < - lgb . Dataset ( " lgb . Dataset . data " ) <nl> lgb . Dataset . construct ( dtrain ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . construct . Rd <nl> ppp b / R - package / man / lgb . Dataset . construct . Rd <nl> lgb . Dataset . construct ( dataset ) <nl> Construct Dataset explicitly <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> lgb . Dataset . construct ( dtrain ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . create . valid . Rd <nl> ppp b / R - package / man / lgb . Dataset . create . valid . Rd <nl> constructed dataset <nl> Construct validation data according to training data <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> data ( agaricus . test , package = " lightgbm " ) <nl> test < - agaricus . test <nl> dtest < - lgb . Dataset . create . valid ( dtrain , test $ data , label = test $ label ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . save . Rd <nl> ppp b / R - package / man / lgb . Dataset . save . Rd <nl> Save \ code { lgb . Dataset } to a binary file <nl> } <nl> \ examples { <nl> <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> lgb . Dataset . save ( dtrain , " data . bin " ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . set . categorical . Rd <nl> ppp b / R - package / man / lgb . Dataset . set . categorical . Rd <nl> passed dataset <nl> Set categorical feature of \ code { lgb . Dataset } <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> lgb . Dataset . save ( dtrain , " lgb . Dataset . data " ) <nl> dtrain < - lgb . Dataset ( " lgb . Dataset . data " ) <nl> lgb . Dataset . set . 
categorical ( dtrain , 1 : 2 ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . Dataset . set . reference . Rd <nl> ppp b / R - package / man / lgb . Dataset . set . reference . Rd <nl> passed dataset <nl> If you want to use validation data , you should set reference to training data <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> data ( agaricus . test , package = " lightgbm " ) <nl> test < - agaricus . test <nl> dtest < - lgb . Dataset ( test $ data , test = train $ label ) <nl> lgb . Dataset . set . reference ( dtest , dtrain ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . cv . Rd <nl> ppp b / R - package / man / lgb . cv . Rd <nl> a trained model \ code { lgb . CVBooster } . <nl> Cross validation logic used by LightGBM <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . cv ( params , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> } <nl> - } <nl> mmm a / R - package / man / lgb . dump . Rd <nl> ppp b / R - package / man / lgb . dump . Rd <nl> json format of model <nl> Dump LightGBM model to json <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> json_model < - lgb . dump ( model ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . get . eval . result . Rd <nl> ppp b / R - package / man / lgb . get . eval . result . Rd <nl> vector of evaluation result <nl> Get record evaluation result from booster <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> lgb . get . eval . result ( model , " test " , " l2 " ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . importance . Rd <nl> ppp b / R - package / man / lgb . importance . Rd <nl> For a tree model , a \ code { data . table } with the following columns : <nl> Creates a \ code { data . table } of feature importances in a model . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> <nl> - params = list ( objective = " binary " , <nl> + params < - list ( objective = " binary " , <nl> learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> <nl> tree_imp1 < - lgb . importance ( model , percentage = TRUE ) <nl> tree_imp2 < - lgb . importance ( model , percentage = FALSE ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . interprete . Rd <nl> ppp b / R - package / man / lgb . interprete . Rd <nl> For multiclass classification , a \ code { list } of \ code { data . table } with the Featu <nl> Computes feature contribution components of rawscore prediction . 
<nl> } <nl> \ examples { <nl> - \ dontrun { <nl> - library ( lightgbm ) <nl> Sigmoid < - function ( x ) 1 / ( 1 + exp ( - x ) ) <nl> Logit < - function ( x ) log ( x / ( 1 - x ) ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> setinfo ( dtrain , " init_score " , rep ( Logit ( mean ( train $ label ) ) , length ( train $ label ) ) <nl> data ( agaricus . test , package = " lightgbm " ) <nl> test < - agaricus . test <nl> <nl> - params = list ( objective = " binary " , <nl> - learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> - min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> - model < - lgb . train ( params , dtrain , 20 ) <nl> + params < - list ( <nl> + objective = " binary " <nl> + , learning_rate = 0 . 01 <nl> + , num_leaves = 63 <nl> + , max_depth = - 1 <nl> + , min_data_in_leaf = 1 <nl> + , min_sum_hessian_in_leaf = 1 <nl> + ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> <nl> tree_interpretation < - lgb . interprete ( model , test $ data , 1 : 5 ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . load . Rd <nl> ppp b / R - package / man / lgb . load . Rd <nl> Load LightGBM takes in either a file path or model string <nl> If both are provided , Load will default to loading from file <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> lgb . save ( model , " model . txt " ) <nl> load_booster < - lgb . load ( filename = " model . txt " ) <nl> model_string < - model $ save_model_to_string ( NULL ) # saves best iteration <nl> load_booster_from_str < - lgb . load ( model_str = model_string ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . model . dt . tree . Rd <nl> ppp b / R - package / man / lgb . model . dt . tree . Rd <nl> The columns of the \ code { data . table } are : <nl> Parse a LightGBM model json dump into a \ code { data . table } structure . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> - library ( lightgbm ) <nl> <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> <nl> - params = list ( objective = " binary " , <nl> + params < - list ( objective = " binary " , <nl> learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> <nl> tree_dt < - lgb . model . dt . tree ( model ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . plot . importance . Rd <nl> ppp b / R - package / man / lgb . plot . importance . Rd <nl> Plot previously calculated feature importance : Gain , Cover and Frequency , as a b <nl> The graph represents each feature as a horizontal bar of length proportional to the defined importance of a feature . <nl> Features are shown ranked in a decreasing importance order . <nl> } <nl> - \ examples { <nl> - \ dontrun { <nl> - data ( agaricus . train , package = " lightgbm " ) <nl> - train < - agaricus . train <nl> - dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> - <nl> - params = list ( objective = " binary " , <nl> - learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> - min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> - model < - lgb . train ( params , dtrain , 20 ) <nl> - model < - lgb . train ( params , dtrain , 20 ) <nl> - <nl> - tree_imp < - lgb . 
importance ( model , percentage = TRUE ) <nl> - lgb . plot . importance ( tree_imp , top_n = 10 , measure = " Gain " ) <nl> - } <nl> - } <nl> mmm a / R - package / man / lgb . plot . interpretation . Rd <nl> ppp b / R - package / man / lgb . plot . interpretation . Rd <nl> The graph represents each feature as a horizontal bar of length proportional to <nl> Features are shown ranked in a decreasing contribution order . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> Sigmoid < - function ( x ) { 1 / ( 1 + exp ( - x ) ) } <nl> Logit < - function ( x ) { log ( x / ( 1 - x ) ) } <nl> setinfo ( dtrain , " init_score " , rep ( Logit ( mean ( train $ label ) ) , length ( train $ label ) ) <nl> data ( agaricus . test , package = " lightgbm " ) <nl> test < - agaricus . test <nl> <nl> - params = list ( objective = " binary " , <nl> + params < - list ( objective = " binary " , <nl> learning_rate = 0 . 01 , num_leaves = 63 , max_depth = - 1 , <nl> min_data_in_leaf = 1 , min_sum_hessian_in_leaf = 1 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> model < - lgb . train ( params , dtrain , 20 ) <nl> tree_interpretation < - lgb . interprete ( model , test $ data , 1 : 5 ) <nl> lgb . plot . interpretation ( tree_interpretation [ [ 1 ] ] , top_n = 10 ) <nl> } <nl> - } <nl> mmm a / R - package / man / lgb . prepare . Rd <nl> ppp b / R - package / man / lgb . prepare . Rd <nl> The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) <nl> Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric without integers . Please use \ code { lgb . prepare_rules } if you want to apply this transformation to other datasets . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( iris ) <nl> <nl> str ( lightgbm : : lgb . prepare ( data = iris ) ) <nl> # $ Petal . Length : num 1 . 4 1 . 4 1 . 3 1 . 5 1 . 4 1 . 7 1 . 4 1 . 5 1 . 4 1 . 5 . . . <nl> # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> # $ Species : num 1 1 1 1 1 1 1 1 1 1 . . . <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . prepare2 . Rd <nl> ppp b / R - package / man / lgb . prepare2 . Rd <nl> The cleaned dataset . It must be converted to a matrix format ( \ code { as . matrix } ) <nl> Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric ( specifically : integer ) . Please use \ code { lgb . prepare_rules2 } if you want to apply this transformation to other datasets . This is useful if you have a specific need for integer dataset instead of numeric dataset . Note that there are programs which do not support integer - only input . Consider this as a half memory technique which is dangerous , especially for LightGBM . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( iris ) <nl> <nl> str ( iris ) <nl> # $ Petal . Width : num 0 . 2 0 . 2 0 . 2 0 . 2 0 . 2 0 . 4 0 . 3 0 . 2 0 . 2 0 . 1 . . . <nl> # $ Species : Factor w / 3 levels " setosa " , " versicolor " , . . : 1 1 1 1 . . . <nl> <nl> - str ( lgb . prepare2 ( data = iris ) ) # Convert all factors / chars to integer <nl> + # Convert all factors / chars to integer <nl> + str ( lgb . prepare2 ( data = iris ) ) <nl> # ' data . frame ' : 150 obs . of 5 variables : <nl> # $ Sepal . Length : num 5 . 1 4 . 9 4 . 7 4 . 6 5 5 . 4 4 . 6 5 4 . 4 4 . 9 . . . <nl> # $ Sepal . Width : num 3 . 5 3 3 . 2 3 . 
1 3 . 6 3 . 9 3 . 4 3 . 4 2 . 9 3 . 1 . . . <nl> str ( lightgbm : : lgb . prepare2 ( data = iris ) ) <nl> # $ Species : int 1 1 1 1 1 1 1 1 1 1 . . . <nl> <nl> } <nl> - <nl> - } <nl> mmm a / R - package / man / lgb . prepare_rules . Rd <nl> ppp b / R - package / man / lgb . prepare_rules . Rd <nl> A list with the cleaned dataset ( \ code { data } ) and the rules ( \ code { rules } ) . The <nl> Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric . In addition , keeps rules created so you can convert other datasets using this converter . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( iris ) <nl> <nl> str ( newest_iris $ data ) # SUCCESS ! <nl> # $ Species : num 3 3 3 3 3 3 3 3 3 3 . . . <nl> <nl> } <nl> - <nl> - } <nl> mmm a / R - package / man / lgb . prepare_rules2 . Rd <nl> ppp b / R - package / man / lgb . prepare_rules2 . Rd <nl> A list with the cleaned dataset ( \ code { data } ) and the rules ( \ code { rules } ) . The <nl> Attempts to prepare a clean dataset to prepare to put in a lgb . Dataset . Factors and characters are converted to numeric ( specifically : integer ) . In addition , keeps rules created so you can convert other datasets using this converter . This is useful if you have a specific need for integer dataset instead of numeric dataset . Note that there are programs which do not support integer - only input . Consider this as a half memory technique which is dangerous , especially for LightGBM . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( iris ) <nl> <nl> str ( newest_iris $ data ) # SUCCESS ! <nl> # $ Species : int 3 3 3 3 3 3 3 3 3 3 . . . <nl> <nl> } <nl> - <nl> - } <nl> mmm a / R - package / man / lgb . save . Rd <nl> ppp b / R - package / man / lgb . save . Rd <nl> lgb . Booster <nl> Save LightGBM model <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> lgb . save ( model , " model . txt " ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . train . Rd <nl> ppp b / R - package / man / lgb . train . Rd <nl> a trained booster model \ code { lgb . Booster } . <nl> Logic to train with LightGBM <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> min_data = 1 , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / lgb . unloader . Rd <nl> ppp b / R - package / man / lgb . unloader . Rd <nl> NULL invisibly . <nl> Attempts to unload LightGBM packages so you can remove objects cleanly without having to restart R . This is useful for instance if an object becomes stuck for no apparent reason and you do not want to restart R to fix the lost object . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> gc ( ) # Not needed if wipe = TRUE <nl> <nl> library ( lightgbm ) <nl> # Do whatever you want again with LightGBM without object clashing <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / predict . lgb . Booster . Rd <nl> ppp b / R - package / man / predict . lgb . Booster . 
Rd <nl> number of columns corresponding to the number of trees . <nl> Predicted values based on class \ code { lgb . Booster } <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> learning_rate = 1 , <nl> early_stopping_rounds = 10 ) <nl> preds < - predict ( model , test $ data ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / readRDS . lgb . Booster . Rd <nl> ppp b / R - package / man / readRDS . lgb . Booster . Rd <nl> lgb . Booster . <nl> Attempts to load a model using RDS . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> model < - lgb . train ( params , <nl> early_stopping_rounds = 10 ) <nl> saveRDS . lgb . Booster ( model , " model . rds " ) <nl> new_model < - readRDS . lgb . Booster ( " model . rds " ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / saveRDS . lgb . Booster . Rd <nl> ppp b / R - package / man / saveRDS . lgb . Booster . Rd <nl> NULL invisibly . <nl> Attempts to save a model using RDS . Has an additional parameter ( \ code { raw } ) which decides whether to save the raw model or not . <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> test < - agaricus . test <nl> dtest < - lgb . Dataset . create . valid ( dtrain , test $ data , label = test $ label ) <nl> params < - list ( objective = " regression " , metric = " l2 " ) <nl> valids < - list ( test = dtest ) <nl> - model < - lgb . train ( params , <nl> - dtrain , <nl> - 100 , <nl> - valids , <nl> - min_data = 1 , <nl> - learning_rate = 1 , <nl> - early_stopping_rounds = 10 ) <nl> + model < - lgb . train ( <nl> + params <nl> + , dtrain <nl> + , 100 <nl> + , valids <nl> + , min_data = 1 <nl> + , learning_rate = 1 <nl> + , early_stopping_rounds = 10 <nl> + ) <nl> saveRDS . lgb . Booster ( model , " model . rds " ) <nl> } <nl> - <nl> - } <nl> mmm a / R - package / man / setinfo . Rd <nl> ppp b / R - package / man / setinfo . Rd <nl> The \ code { name } field can be one of the following : <nl> } <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> lightgbm : : setinfo ( dtrain , " label " , 1 - labels ) <nl> <nl> labels2 < - lightgbm : : getinfo ( dtrain , " label " ) <nl> stopifnot ( all . equal ( labels2 , 1 - labels ) ) <nl> - } <nl> <nl> } <nl> mmm a / R - package / man / slice . Rd <nl> ppp b / R - package / man / slice . Rd <nl> Get a new \ code { lgb . Dataset } containing the specified rows of <nl> original lgb . Dataset object <nl> } <nl> \ examples { <nl> - \ dontrun { <nl> library ( lightgbm ) <nl> data ( agaricus . train , package = " lightgbm " ) <nl> train < - agaricus . train <nl> dtrain < - lgb . Dataset ( train $ data , label = train $ label ) <nl> <nl> dsub < - lightgbm : : slice ( dtrain , 1 : 42 ) <nl> + lgb . Dataset . construct ( dsub ) <nl> labels < - lightgbm : : getinfo ( dsub , " label " ) <nl> - } <nl> <nl> } <nl> | [ R - package ] updated examples and removed dontrun guards on them in roxygen ( ) | microsoft/LightGBM | 029bcc42ced7eb9f9873306b4144b9df5b89d93a | 2018-08-31T08:09:53Z |
mmm a / xbmc / TextureDatabase . cpp <nl> ppp b / xbmc / TextureDatabase . cpp <nl> typedef struct <nl> <nl> static const translateField fields [ ] = { <nl> { " none " , TF_None , CDatabaseQueryRule : : TEXT_FIELD } , <nl> - { " textureid " , TF_Id , CDatabaseQueryRule : : NUMERIC_FIELD } , <nl> + { " textureid " , TF_Id , CDatabaseQueryRule : : REAL_FIELD } , <nl> { " url " , TF_Url , CDatabaseQueryRule : : TEXT_FIELD } , <nl> { " cachedurl " , TF_CachedUrl , CDatabaseQueryRule : : TEXT_FIELD } , <nl> { " lasthashcheck " , TF_LastHashCheck , CDatabaseQueryRule : : TEXT_FIELD } , <nl> { " imagehash " , TF_ImageHash , CDatabaseQueryRule : : TEXT_FIELD } , <nl> - { " width " , TF_Width , CDatabaseQueryRule : : NUMERIC_FIELD } , <nl> - { " height " , TF_Height , CDatabaseQueryRule : : NUMERIC_FIELD } , <nl> - { " usecount " , TF_UseCount , CDatabaseQueryRule : : NUMERIC_FIELD } , <nl> + { " width " , TF_Width , CDatabaseQueryRule : : REAL_FIELD } , <nl> + { " height " , TF_Height , CDatabaseQueryRule : : REAL_FIELD } , <nl> + { " usecount " , TF_UseCount , CDatabaseQueryRule : : REAL_FIELD } , <nl> { " lastused " , TF_LastUsed , CDatabaseQueryRule : : TEXT_FIELD } <nl> } ; <nl> <nl> mmm a / xbmc / dbwrappers / DatabaseQuery . cpp <nl> ppp b / xbmc / dbwrappers / DatabaseQuery . cpp <nl> void CDatabaseQueryRule : : SetParameter ( const std : : vector < std : : string > & values ) <nl> <nl> std : : string CDatabaseQueryRule : : ValidateParameter ( const std : : string & parameter ) const <nl> { <nl> - if ( ( GetFieldType ( m_field ) = = NUMERIC_FIELD | | <nl> + if ( ( GetFieldType ( m_field ) = = REAL_FIELD | | GetFieldType ( m_field ) = = NUMERIC_FIELD | | <nl> GetFieldType ( m_field ) = = SECONDS_FIELD ) & & parameter . empty ( ) ) <nl> return " 0 " ; / / interpret empty fields as 0 <nl> return parameter ; <nl> std : : string CDatabaseQueryRule : : GetOperatorString ( SEARCH_OPERATOR op ) const <nl> case OPERATOR_DOES_NOT_CONTAIN : <nl> operatorString = " LIKE ' % % % s % % ' " ; break ; <nl> case OPERATOR_EQUALS : <nl> - if ( GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> + if ( GetFieldType ( m_field ) = = REAL_FIELD | | GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> operatorString = " = % s " ; <nl> else <nl> operatorString = " LIKE ' % s ' " ; <nl> break ; <nl> case OPERATOR_DOES_NOT_EQUAL : <nl> - if ( GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> + if ( GetFieldType ( m_field ) = = REAL_FIELD | | GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> operatorString = " ! 
= % s " ; <nl> else <nl> operatorString = " LIKE ' % s ' " ; <nl> std : : string CDatabaseQueryRule : : GetOperatorString ( SEARCH_OPERATOR op ) const <nl> case OPERATOR_GREATER_THAN : <nl> case OPERATOR_IN_THE_LAST : <nl> operatorString = " > " ; <nl> - if ( GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> + if ( GetFieldType ( m_field ) = = REAL_FIELD | | GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> operatorString + = " % s " ; <nl> else <nl> operatorString + = " ' % s ' " ; <nl> std : : string CDatabaseQueryRule : : GetOperatorString ( SEARCH_OPERATOR op ) const <nl> case OPERATOR_LESS_THAN : <nl> case OPERATOR_NOT_IN_THE_LAST : <nl> operatorString = " < " ; <nl> - if ( GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> + if ( GetFieldType ( m_field ) = = REAL_FIELD | | GetFieldType ( m_field ) = = NUMERIC_FIELD | | GetFieldType ( m_field ) = = SECONDS_FIELD ) <nl> operatorString + = " % s " ; <nl> else <nl> operatorString + = " ' % s ' " ; <nl> std : : string CDatabaseQueryRule : : GetWhereClause ( const CDatabase & db , const std : : s <nl> std : : string operatorString = GetOperatorString ( op ) ; <nl> std : : string negate ; <nl> if ( op = = OPERATOR_DOES_NOT_CONTAIN | | op = = OPERATOR_FALSE | | <nl> - ( op = = OPERATOR_DOES_NOT_EQUAL & & GetFieldType ( m_field ) ! = NUMERIC_FIELD & & GetFieldType ( m_field ) ! = SECONDS_FIELD ) ) <nl> + ( op = = OPERATOR_DOES_NOT_EQUAL & & GetFieldType ( m_field ) ! = REAL_FIELD & & GetFieldType ( m_field ) ! = NUMERIC_FIELD & & <nl> + GetFieldType ( m_field ) ! = SECONDS_FIELD ) ) <nl> negate = " NOT " ; <nl> <nl> / / boolean operators don ' t have any values in m_parameter , they work on the operator <nl> std : : string CDatabaseQueryRule : : GetWhereClause ( const CDatabase & db , const std : : s <nl> return " " ; <nl> <nl> FIELD_TYPE fieldType = GetFieldType ( m_field ) ; <nl> - if ( fieldType = = NUMERIC_FIELD ) <nl> + if ( fieldType = = REAL_FIELD ) <nl> + return db . PrepareSQL ( " % s BETWEEN % s AND % s " , GetField ( m_field , strType ) . c_str ( ) , m_parameter [ 0 ] . c_str ( ) , m_parameter [ 1 ] . c_str ( ) ) ; <nl> + else if ( fieldType = = NUMERIC_FIELD ) <nl> return db . PrepareSQL ( " CAST ( % s as DECIMAL ( 5 , 1 ) ) BETWEEN % s AND % s " , GetField ( m_field , strType ) . c_str ( ) , m_parameter [ 0 ] . c_str ( ) , m_parameter [ 1 ] . c_str ( ) ) ; <nl> else if ( fieldType = = SECONDS_FIELD ) <nl> return db . PrepareSQL ( " CAST ( % s as INTEGER ) BETWEEN % s AND % s " , GetField ( m_field , strType ) . c_str ( ) , m_parameter [ 0 ] . c_str ( ) , m_parameter [ 1 ] . c_str ( ) ) ; <nl> mmm a / xbmc / dbwrappers / DatabaseQuery . h <nl> ppp b / xbmc / dbwrappers / DatabaseQuery . h <nl> class CDatabaseQueryRule <nl> } ; <nl> <nl> enum FIELD_TYPE { TEXT_FIELD = 0 , <nl> + REAL_FIELD , <nl> NUMERIC_FIELD , <nl> DATE_FIELD , <nl> PLAYLIST_FIELD , <nl> mmm a / xbmc / dialogs / GUIDialogSmartPlaylistRule . cpp <nl> ppp b / xbmc / dialogs / GUIDialogSmartPlaylistRule . cpp <nl> void CGUIDialogSmartPlaylistRule : : UpdateButtons ( ) <nl> labels . 
push_back ( OperatorLabel ( CDatabaseQueryRule : : OPERATOR_ENDS_WITH ) ) ; <nl> break ; <nl> <nl> + case CDatabaseQueryRule : : REAL_FIELD : <nl> case CDatabaseQueryRule : : NUMERIC_FIELD : <nl> case CDatabaseQueryRule : : SECONDS_FIELD : <nl> / / numerical fields - less than greater than <nl> void CGUIDialogSmartPlaylistRule : : UpdateButtons ( ) <nl> case CDatabaseQueryRule : : TEXT_FIELD : <nl> case CDatabaseQueryRule : : PLAYLIST_FIELD : <nl> case CDatabaseQueryRule : : TEXTIN_FIELD : <nl> + case CDatabaseQueryRule : : REAL_FIELD : <nl> case CDatabaseQueryRule : : NUMERIC_FIELD : <nl> type = CGUIEditControl : : INPUT_TYPE_TEXT ; <nl> break ; <nl> mmm a / xbmc / playlists / SmartPlayList . cpp <nl> ppp b / xbmc / playlists / SmartPlayList . cpp <nl> static const translateField fields [ ] = { <nl> { " playcount " , FieldPlaycount , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 567 } , <nl> { " lastplayed " , FieldLastPlayed , CDatabaseQueryRule : : DATE_FIELD , NULL , false , 568 } , <nl> { " inprogress " , FieldInProgress , CDatabaseQueryRule : : BOOLEAN_FIELD , NULL , false , 575 } , <nl> - { " rating " , FieldRating , CDatabaseQueryRule : : NUMERIC_FIELD , CSmartPlaylistRule : : ValidateRating , false , 563 } , <nl> - { " userrating " , FieldUserRating , CDatabaseQueryRule : : NUMERIC_FIELD , CSmartPlaylistRule : : ValidateMyRating , false , 38018 } , <nl> - { " votes " , FieldVotes , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 205 } , <nl> + { " rating " , FieldRating , CDatabaseQueryRule : : REAL_FIELD , CSmartPlaylistRule : : ValidateRating , false , 563 } , <nl> + { " userrating " , FieldUserRating , CDatabaseQueryRule : : REAL_FIELD , CSmartPlaylistRule : : ValidateMyRating , false , 38018 } , <nl> + { " votes " , FieldVotes , CDatabaseQueryRule : : REAL_FIELD , StringValidation : : IsPositiveInteger , false , 205 } , <nl> { " top250 " , FieldTop250 , CDatabaseQueryRule : : NUMERIC_FIELD , NULL , false , 13409 } , <nl> { " mpaarating " , FieldMPAA , CDatabaseQueryRule : : TEXT_FIELD , NULL , false , 20074 } , <nl> { " dateadded " , FieldDateAdded , CDatabaseQueryRule : : DATE_FIELD , NULL , false , 570 } , <nl> static const translateField fields [ ] = { <nl> { " status " , FieldTvShowStatus , CDatabaseQueryRule : : TEXT_FIELD , NULL , false , 126 } , <nl> { " season " , FieldSeason , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 20373 } , <nl> { " episode " , FieldEpisodeNumber , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 20359 } , <nl> - { " numepisodes " , FieldNumberOfEpisodes , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 20360 } , <nl> - { " numwatched " , FieldNumberOfWatchedEpisodes , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 21457 } , <nl> - { " videoresolution " , FieldVideoResolution , CDatabaseQueryRule : : NUMERIC_FIELD , NULL , false , 21443 } , <nl> + { " numepisodes " , FieldNumberOfEpisodes , CDatabaseQueryRule : : REAL_FIELD , StringValidation : : IsPositiveInteger , false , 20360 } , <nl> + { " numwatched " , FieldNumberOfWatchedEpisodes , CDatabaseQueryRule : : REAL_FIELD , StringValidation : : IsPositiveInteger , false , 21457 } , <nl> + { " videoresolution " , FieldVideoResolution , CDatabaseQueryRule : : REAL_FIELD , NULL , false , 21443 } , <nl> { " videocodec " , FieldVideoCodec , 
CDatabaseQueryRule : : TEXTIN_FIELD , NULL , false , 21445 } , <nl> - { " videoaspect " , FieldVideoAspectRatio , CDatabaseQueryRule : : NUMERIC_FIELD , NULL , false , 21374 } , <nl> - { " audiochannels " , FieldAudioChannels , CDatabaseQueryRule : : NUMERIC_FIELD , NULL , false , 21444 } , <nl> + { " videoaspect " , FieldVideoAspectRatio , CDatabaseQueryRule : : REAL_FIELD , NULL , false , 21374 } , <nl> + { " audiochannels " , FieldAudioChannels , CDatabaseQueryRule : : REAL_FIELD , NULL , false , 21444 } , <nl> { " audiocodec " , FieldAudioCodec , CDatabaseQueryRule : : TEXTIN_FIELD , NULL , false , 21446 } , <nl> { " audiolanguage " , FieldAudioLanguage , CDatabaseQueryRule : : TEXTIN_FIELD , NULL , false , 21447 } , <nl> - { " audiocount " , FieldAudioCount , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 21481 } , <nl> - { " subtitlecount " , FieldSubtitleCount , CDatabaseQueryRule : : NUMERIC_FIELD , StringValidation : : IsPositiveInteger , false , 21482 } , <nl> + { " audiocount " , FieldAudioCount , CDatabaseQueryRule : : REAL_FIELD , StringValidation : : IsPositiveInteger , false , 21481 } , <nl> + { " subtitlecount " , FieldSubtitleCount , CDatabaseQueryRule : : REAL_FIELD , StringValidation : : IsPositiveInteger , false , 21482 } , <nl> { " subtitlelanguage " , FieldSubtitleLanguage , CDatabaseQueryRule : : TEXTIN_FIELD , NULL , false , 21448 } , <nl> { " random " , FieldRandom , CDatabaseQueryRule : : TEXT_FIELD , NULL , false , 590 } , <nl> { " playlist " , FieldPlaylist , CDatabaseQueryRule : : PLAYLIST_FIELD , NULL , true , 559 } , <nl> | Add a numeric field type that doesn ' t need casting | xbmc/xbmc | c56883740cd6db62ef1da61e6b68d3ede94018ea | 2016-09-08T15:58:25Z |
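The commit above distinguishes columns that hold genuinely numeric values (REAL_FIELD, compared directly in SQL) from numeric values stored as text (NUMERIC_FIELD, which still needs a CAST before comparison). Below is a minimal standalone sketch of that branching when emitting a BETWEEN clause; the enum and function names are illustrative placeholders, not Kodi's actual identifiers.

```cpp
// Standalone sketch (not the Kodi code itself): how a query builder might
// branch on field type when emitting a BETWEEN clause.
#include <cstdio>
#include <string>

enum class FieldType { Text, Real, Numeric };  // Real: stored numerically, no cast needed

std::string BuildBetween(const std::string& column, FieldType type,
                         const std::string& lo, const std::string& hi) {
  char buf[256];
  if (type == FieldType::Real) {
    // Column already has numeric affinity, compare directly.
    std::snprintf(buf, sizeof(buf), "%s BETWEEN %s AND %s",
                  column.c_str(), lo.c_str(), hi.c_str());
  } else if (type == FieldType::Numeric) {
    // Numeric value stored as text, cast before comparing.
    std::snprintf(buf, sizeof(buf), "CAST(%s as DECIMAL(5,1)) BETWEEN %s AND %s",
                  column.c_str(), lo.c_str(), hi.c_str());
  } else {
    // Text fields compare as quoted strings.
    std::snprintf(buf, sizeof(buf), "%s BETWEEN '%s' AND '%s'",
                  column.c_str(), lo.c_str(), hi.c_str());
  }
  return buf;
}

int main() {
  std::printf("%s\n", BuildBetween("rating", FieldType::Real, "6", "8").c_str());
  std::printf("%s\n", BuildBetween("top250", FieldType::Numeric, "1", "50").c_str());
}
```

Skipping the CAST for fields that are stored numerically keeps the generated SQL simpler and lets the database use the column's native comparison.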
mmm a / Marlin / src / module / stepper . cpp <nl> ppp b / Marlin / src / module / stepper . cpp <nl> uint32_t Stepper : : block_phase_isr ( ) { <nl> # if ENABLED ( ADAPTIVE_STEP_SMOOTHING ) <nl> uint8_t oversampling = 0 ; / / Assume no axis smoothing ( via oversampling ) <nl> / / Decide if axis smoothing is possible <nl> - uint32_t max_rate = current_block - > nominal_rate ; / / Get the maximum rate ( maximum event speed ) <nl> + uint32_t max_rate = current_block - > nominal_rate ; / / Get the step event rate <nl> while ( max_rate < MIN_STEP_ISR_FREQUENCY ) { / / As long as more ISRs are possible . . . <nl> max_rate < < = 1 ; / / Try to double the rate <nl> - if ( max_rate > = MAX_STEP_ISR_FREQUENCY_1X ) break ; / / Don ' t exceed the estimated ISR limit <nl> - + + oversampling ; / / Increase the oversampling ( used for left - shift ) <nl> + if ( max_rate < MIN_STEP_ISR_FREQUENCY ) / / Don ' t exceed the estimated ISR limit <nl> + + + oversampling ; / / Increase the oversampling ( used for left - shift ) <nl> } <nl> oversampling_factor = oversampling ; / / For all timer interval calculations <nl> # else <nl> mmm a / Marlin / src / module / stepper . h <nl> ppp b / Marlin / src / module / stepper . h <nl> <nl> # define MAX_STEP_ISR_FREQUENCY_2X ( ( F_CPU ) / ISR_EXECUTION_CYCLES ( 2 ) ) <nl> # define MAX_STEP_ISR_FREQUENCY_1X ( ( F_CPU ) / ISR_EXECUTION_CYCLES ( 1 ) ) <nl> <nl> - / / The minimum allowable frequency for step smoothing will be 1 / 10 of the maximum nominal frequency ( in Hz ) <nl> - # define MIN_STEP_ISR_FREQUENCY MAX_STEP_ISR_FREQUENCY_1X <nl> + / / The minimum step ISR rate used by ADAPTIVE_STEP_SMOOTHING to target 50 % CPU usage <nl> + / / This does not account for the possibility of multi - stepping . <nl> + / / Perhaps DISABLE_MULTI_STEPPING should be required with ADAPTIVE_STEP_SMOOTHING . <nl> + # define MIN_STEP_ISR_FREQUENCY ( MAX_STEP_ISR_FREQUENCY_1X / 2 ) <nl> <nl> / / <nl> / / Stepper class definition <nl> | Reduce Step Smoothing ceiling to 50 % CPU usage ( ) | MarlinFirmware/Marlin | a847f37d43e4571ddc848172caa71e49c087eebe | 2020-07-20T02:53:29Z |
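The adaptive step smoothing change above derives an oversampling factor by repeatedly doubling the block's step event rate while it stays under a minimum ISR frequency, which the header now defines as half the single-stepping ISR limit. A standalone sketch of that loop follows; the threshold constant is an arbitrary example value, not Marlin's real MIN_STEP_ISR_FREQUENCY.

```cpp
// Standalone sketch of the oversampling calculation shown in the diff above:
// keep doubling the step event rate until it reaches a minimum ISR frequency,
// counting how many doublings (left-shifts) were applied.
#include <cstdint>
#include <cstdio>

constexpr uint32_t kMinStepIsrFrequency = 8000;  // example threshold in Hz, not Marlin's value

uint8_t ComputeOversampling(uint32_t step_rate) {
  uint8_t oversampling = 0;
  uint32_t rate = step_rate;
  while (rate < kMinStepIsrFrequency) {  // as long as more ISRs are possible...
    rate <<= 1;                          // try to double the rate
    if (rate < kMinStepIsrFrequency)     // only count doublings that stay below the limit
      ++oversampling;
  }
  return oversampling;  // later applied as a left-shift in timer interval calculations
}

int main() {
  std::printf("rate=500  -> oversampling=%u\n", ComputeOversampling(500));
  std::printf("rate=9000 -> oversampling=%u\n", ComputeOversampling(9000));
}
```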
mmm a / xbmc / FileSystem / FileCurl . cpp <nl> ppp b / xbmc / FileSystem / FileCurl . cpp <nl> int CFileCurl : : Stat ( const CURL & url , struct __stat64 * buffer ) <nl> <nl> CURLcode result = g_curlInterface . easy_perform ( m_state - > m_easyHandle ) ; <nl> <nl> + <nl> + if ( result = = CURLE_HTTP_RETURNED_ERROR ) <nl> + { <nl> + long code ; <nl> + if ( curl_easy_getinfo ( m_state - > m_easyHandle , CURLINFO_RESPONSE_CODE , & code ) = = CURLE_OK & & code = = 404 ) <nl> + return - 1 ; <nl> + } <nl> + <nl> if ( result = = CURLE_GOT_NOTHING <nl> | | result = = CURLE_HTTP_RETURNED_ERROR <nl> | | result = = CURLE_RECV_ERROR / * some silly shoutcast servers * / ) <nl> | fixed : don ' t try to fetch via http if we ' ve already received a 404 in the header . Thanks elupus . | xbmc/xbmc | d73f0a0bd1e68721f8e2da6e9c92930be41b2440 | 2010-10-09T22:24:36Z |
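The fix above asks libcurl for the HTTP status via curl_easy_getinfo(..., CURLINFO_RESPONSE_CODE, ...) and bails out on a 404 instead of falling through to the retry path. Below is a minimal, self-contained libcurl sketch of the same pattern; the URL is a placeholder and this is not the XBMC code.

```cpp
// Minimal standalone sketch: after curl_easy_perform(), read the HTTP status
// and treat a 404 as a hard failure rather than retrying over HTTP.
#include <curl/curl.h>
#include <cstdio>

int main() {
  curl_global_init(CURL_GLOBAL_DEFAULT);
  CURL* handle = curl_easy_init();
  if (!handle) return 1;

  curl_easy_setopt(handle, CURLOPT_URL, "http://example.com/missing");
  curl_easy_setopt(handle, CURLOPT_FAILONERROR, 1L);  // HTTP >= 400 returns CURLE_HTTP_RETURNED_ERROR

  CURLcode result = curl_easy_perform(handle);

  if (result == CURLE_HTTP_RETURNED_ERROR) {
    long code = 0;
    if (curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &code) == CURLE_OK && code == 404) {
      std::fprintf(stderr, "404 Not Found - giving up, no point retrying\n");
      curl_easy_cleanup(handle);
      curl_global_cleanup();
      return 2;
    }
    // Other HTTP errors could still fall through to a retry/fallback path here.
  }

  curl_easy_cleanup(handle);
  curl_global_cleanup();
  return result == CURLE_OK ? 0 : 1;
}
```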
mmm a / DEPS <nl> ppp b / DEPS <nl> vars = { <nl> <nl> deps = { <nl> ' v8 / build ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / build . git ' + ' @ ' + ' f0fc706da16ad6e36a5ce8a1c821f447558db15e ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / build . git ' + ' @ ' + ' 65e3fac8944a981c72460822bf834815e0335a9d ' , <nl> ' v8 / third_party / depot_tools ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / tools / depot_tools . git ' + ' @ ' + ' a58287b8ee082bbbb0cf13286649f135a9137bdc ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / tools / depot_tools . git ' + ' @ ' + ' 98b332f2dbbd25add6b0904cc9d01608ceeaa52d ' , <nl> ' v8 / third_party / icu ' : <nl> Var ( ' chromium_url ' ) + ' / chromium / deps / icu . git ' + ' @ ' + ' 79326efe26e5440f530963704c3c0ff965b3a4ac ' , <nl> ' v8 / third_party / instrumented_libraries ' : <nl> Var ( ' chromium_url ' ) + ' / chromium / src / third_party / instrumented_libraries . git ' + ' @ ' + ' bb3f1802c237dd19105dd0f7919f99e536a39d10 ' , <nl> ' v8 / buildtools ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / buildtools . git ' + ' @ ' + ' 9e121212d42be62a7cce38072f925f8398d11e49 ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / buildtools . git ' + ' @ ' + ' 42e94619588fd2f0ceb6cc29785592640703e6a1 ' , <nl> ' v8 / buildtools / clang_format / script ' : <nl> Var ( ' chromium_url ' ) + ' / chromium / llvm - project / cfe / tools / clang - format . git ' + ' @ ' + ' 96636aa0e9f047f17447f2d45a094d0b59ed7917 ' , <nl> ' v8 / buildtools / linux64 ' : { <nl> deps = { <nl> ' dep_type ' : ' cipd ' , <nl> } , <nl> ' v8 / third_party / catapult ' : { <nl> - ' url ' : Var ( ' chromium_url ' ) + ' / catapult . git ' + ' @ ' + ' d292e89bd75acc18293f66a7a2c672f8456cb032 ' , <nl> + ' url ' : Var ( ' chromium_url ' ) + ' / catapult . git ' + ' @ ' + ' 2cd291aae824412377a3eb4cc76cb2ec7ee169c4 ' , <nl> ' condition ' : ' checkout_android ' , <nl> } , <nl> ' v8 / third_party / colorama / src ' : { <nl> deps = { <nl> ' packages ' : [ <nl> { <nl> ' package ' : ' fuchsia / third_party / aemu / linux - amd64 ' , <nl> - ' version ' : ' nz3cLclK4lWm6gzvGCOHPQAKJUO8EsMBr7EIUXwS9SEC ' <nl> + ' version ' : ' TfK3Whl6AfZifLOotcOS_jvckKztERlPvmVyZo16fN0C ' <nl> } , <nl> ] , <nl> ' condition ' : ' host_os = = " linux " and checkout_fuchsia ' , <nl> deps = { <nl> ' dep_type ' : ' cipd ' , <nl> } , <nl> ' v8 / tools / clang ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / tools / clang . git ' + ' @ ' + ' 2eaa59d3fb233c92e0f91103500315fcd738ebda ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / tools / clang . git ' + ' @ ' + ' fd3758a4eb22a0611bb3b16ade0f00b508cd47fa ' , <nl> ' v8 / tools / luci - go ' : { <nl> ' packages ' : [ <nl> { <nl> deps = { <nl> ' v8 / third_party / protobuf ' : <nl> Var ( ' chromium_url ' ) + ' / external / github . com / google / protobuf ' + ' @ ' + ' b68a347f56137b4b1a746e8c7438495a6ac1bd91 ' , <nl> ' v8 / third_party / zlib ' : <nl> - Var ( ' chromium_url ' ) + ' / chromium / src / third_party / zlib . git ' + ' @ ' + ' 103247f50c3784defcff27b316bf36d2c785bb77 ' , <nl> + Var ( ' chromium_url ' ) + ' / chromium / src / third_party / zlib . git ' + ' @ ' + ' a21a4e8f27567b7c36f8274bf16ebca78b9a68ab ' , <nl> ' v8 / third_party / jsoncpp / source ' : <nl> Var ( ' chromium_url ' ) + ' / external / github . com / open - source - parsers / jsoncpp . git ' + ' @ ' + ' 645250b6690785be60ab6780ce4b58698d884d11 ' , <nl> ' v8 / third_party / ittapi ' : { <nl> | Update V8 DEPS . 
| v8/v8 | 5277adef178c57d9dacfe9b39088e3045633396b | 2020-07-31T22:28:10Z |
mmm a / dbms / include / DB / Functions / IFunction . h <nl> ppp b / dbms / include / DB / Functions / IFunction . h <nl> class IFunction <nl> / / / Returns the copy of a given block in which each column specified in <nl> / / / the " arguments " parameter is replaced with its respective nested <nl> / / / column if it is nullable . <nl> - static Block extractNonNullableBlock ( const Block & block , const ColumnNumbers & arguments ) ; <nl> + static Block extractNonNullableBlock ( const Block & block , const ColumnNumbers args ) ; <nl> <nl> private : <nl> / / / Internal method used for implementing both the execute ( ) methods . <nl> mmm a / dbms / src / Functions / FunctionsConditional . cpp <nl> ppp b / dbms / src / Functions / FunctionsConditional . cpp <nl> void FunctionMultiIf : : executeImpl ( Block & block , const ColumnNumbers & args , siz <nl> { <nl> / / / Keep track of which columns are nullable . <nl> std : : vector < UInt8 > nullable_cols_map ; <nl> - nullable_cols_map . reserve ( args . size ( ) ) ; <nl> + nullable_cols_map . resize ( args . size ( ) ) ; <nl> for ( const auto & arg : args ) <nl> { <nl> const auto & col = block . unsafeGetByPosition ( arg ) . column ; <nl> bool may_have_null = col - > isNullable ( ) ; <nl> - nullable_cols_map . push_back ( static_cast < UInt8 > ( may_have_null ) ) ; <nl> + nullable_cols_map [ arg ] = may_have_null ? 1 : 0 ; <nl> } <nl> <nl> / / / Keep track of which columns are null . <nl> std : : vector < UInt8 > null_cols_map ; <nl> - null_cols_map . reserve ( args . size ( ) ) ; <nl> + null_cols_map . resize ( args . size ( ) ) ; <nl> for ( const auto & arg : args ) <nl> { <nl> const auto & col = block . unsafeGetByPosition ( arg ) . column ; <nl> bool has_null = col - > isNull ( ) ; <nl> - null_cols_map . push_back ( static_cast < UInt8 > ( has_null ) ) ; <nl> + null_cols_map [ arg ] = has_null ? 1 : 0 ; <nl> } <nl> <nl> auto null_map = std : : make_shared < ColumnUInt8 > ( row_count ) ; <nl> mmm a / dbms / src / Functions / IFunction . cpp <nl> ppp b / dbms / src / Functions / IFunction . cpp <nl> namespace DB <nl> namespace <nl> { <nl> <nl> - void createNullValuesByteMap ( Block & block , size_t result ) <nl> + void createNullValuesByteMap ( Block & block , const ColumnNumbers & args , size_t result ) <nl> { <nl> ColumnNullable & res_col = static_cast < ColumnNullable & > ( * block . unsafeGetByPosition ( result ) . column ) ; <nl> <nl> - for ( size_t i = 0 ; i < block . columns ( ) ; + + i ) <nl> + for ( const auto & arg : args ) <nl> { <nl> - if ( i = = result ) <nl> + if ( arg = = result ) <nl> continue ; <nl> <nl> - const ColumnWithTypeAndName & elem = block . unsafeGetByPosition ( i ) ; <nl> + const ColumnWithTypeAndName & elem = block . unsafeGetByPosition ( arg ) ; <nl> if ( elem . column & & elem . column . get ( ) - > isNullable ( ) ) <nl> { <nl> - const ColumnNullable & concrete_col = static_cast < const ColumnNullable & > ( * elem . column ) ; <nl> - res_col . updateNullValuesByteMap ( concrete_col ) ; <nl> + const ColumnNullable & nullable_col = static_cast < const ColumnNullable & > ( * elem . column ) ; <nl> + res_col . 
updateNullValuesByteMap ( nullable_col ) ; <nl> } <nl> } <nl> } <nl> void IFunction : : getLambdaArgumentTypes ( DataTypes & arguments ) const <nl> getLambdaArgumentTypesImpl ( arguments ) ; <nl> } <nl> <nl> - Block IFunction : : extractNonNullableBlock ( const Block & block , const ColumnNumbers & arguments ) <nl> + / / / Return a copy of a given block in which the specified columns are replaced by <nl> + / / / their respective nested columns if they are nullable . <nl> + Block IFunction : : extractNonNullableBlock ( const Block & block , const ColumnNumbers args ) <nl> { <nl> - Block non_nullable_block ; <nl> + std : : sort ( args . begin ( ) , args . end ( ) ) ; <nl> <nl> - ColumnNumbers args2 = arguments ; <nl> - std : : sort ( args2 . begin ( ) , args2 . end ( ) ) ; <nl> + Block non_nullable_block ; <nl> <nl> - size_t pos = 0 ; <nl> for ( size_t i = 0 ; i < block . columns ( ) ; + + i ) <nl> { <nl> const auto & col = block . unsafeGetByPosition ( i ) ; <nl> <nl> - bool found = std : : binary_search ( args2 . begin ( ) , args2 . end ( ) , pos ) & & col . column & & col . type ; <nl> + bool found = std : : binary_search ( args . begin ( ) , args . end ( ) , i ) & & col . column & & col . type ; <nl> <nl> if ( found & & col . column . get ( ) - > isNullable ( ) ) <nl> { <nl> Block IFunction : : extractNonNullableBlock ( const Block & block , const ColumnNumber <nl> auto nullable_type = static_cast < const DataTypeNullable * > ( col . type . get ( ) ) ; <nl> DataTypePtr nested_type = nullable_type - > getNestedType ( ) ; <nl> <nl> - non_nullable_block . insert ( pos , { nested_col , nested_type , col . name } ) ; <nl> + non_nullable_block . insert ( i , { nested_col , nested_type , col . name } ) ; <nl> } <nl> else <nl> - non_nullable_block . insert ( pos , col ) ; <nl> - <nl> - + + pos ; <nl> + non_nullable_block . insert ( i , col ) ; <nl> } <nl> <nl> return non_nullable_block ; <nl> void IFunction : : perform ( Block & block , const ColumnNumbers & arguments , size_t r <nl> ColumnWithTypeAndName & dest_col = block . getByPosition ( result ) ; <nl> dest_col . column = std : : make_shared < ColumnNullable > ( source_col . column ) ; <nl> ColumnNullable & nullable_col = static_cast < ColumnNullable & > ( * dest_col . column ) ; <nl> - nullable_col . getNullValuesByteMap ( ) = std : : make_shared < ColumnUInt8 > ( dest_col . column - > size ( ) ) ; <nl> - createNullValuesByteMap ( block , result ) ; <nl> + nullable_col . getNullValuesByteMap ( ) = std : : make_shared < ColumnUInt8 > ( dest_col . column - > size ( ) , 0 ) ; <nl> + createNullValuesByteMap ( block , arguments , result ) ; <nl> } <nl> else <nl> performer ( block , arguments , result ) ; <nl> | dbms : better [ # METR - 19266 ] | ClickHouse/ClickHouse | 87f330627e33c2a8f235879b9a75e2d88df53d6e | 2016-08-11T00:17:30Z |
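The ClickHouse change above builds the result column's null map by starting from an all-zero ColumnUInt8 and folding in the null map of every nullable argument column. A small standalone sketch of that folding follows, using plain vectors in place of ClickHouse's column types; this is not the ClickHouse API.

```cpp
// Standalone sketch of the null-map propagation idea: the result's null mask
// is the element-wise OR of the null masks of the nullable argument columns.
#include <cstdint>
#include <cstdio>
#include <vector>

using NullMap = std::vector<uint8_t>;  // 1 = NULL at this row, 0 = not NULL

void UpdateNullMap(NullMap& result, const NullMap& arg) {
  // Any row that is NULL in an argument becomes NULL in the result.
  for (size_t i = 0; i < result.size(); ++i)
    result[i] = result[i] | arg[i];
}

int main() {
  NullMap result(4, 0);            // start with "no NULLs", like ColumnUInt8(n, 0)
  NullMap arg_a = {0, 1, 0, 0};
  NullMap arg_b = {0, 0, 0, 1};

  UpdateNullMap(result, arg_a);
  UpdateNullMap(result, arg_b);

  for (uint8_t v : result) std::printf("%u ", v);  // prints: 0 1 0 1
  std::printf("\n");
}
```

Initializing the result map to zeros matters here: a map left uninitialized could report spurious NULLs, which is exactly what the ColumnUInt8(size, 0) change in the diff guards against.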
mmm a / src / builtins / builtins - definitions . h <nl> ppp b / src / builtins / builtins - definitions . h <nl> namespace internal { <nl> CPP ( AtomicsNotify ) \ <nl> CPP ( AtomicsIsLockFree ) \ <nl> CPP ( AtomicsWait ) \ <nl> + CPP ( AtomicsWaitAsync ) \ <nl> CPP ( AtomicsWake ) \ <nl> \ <nl> / * String * / \ <nl> mmm a / src / builtins / builtins - sharedarraybuffer . cc <nl> ppp b / src / builtins / builtins - sharedarraybuffer . cc <nl> BUILTIN ( AtomicsNotify ) { <nl> RETURN_RESULT_OR_FAILURE ( isolate , AtomicsWake ( isolate , array , index , count ) ) ; <nl> } <nl> <nl> - / / ES # sec - atomics . wait <nl> - / / Atomics . wait ( typedArray , index , value , timeout ) <nl> - BUILTIN ( AtomicsWait ) { <nl> - HandleScope scope ( isolate ) ; <nl> - Handle < Object > array = args . atOrUndefined ( isolate , 1 ) ; <nl> - Handle < Object > index = args . atOrUndefined ( isolate , 2 ) ; <nl> - Handle < Object > value = args . atOrUndefined ( isolate , 3 ) ; <nl> - Handle < Object > timeout = args . atOrUndefined ( isolate , 4 ) ; <nl> - <nl> + Object DoWait ( Isolate * isolate , FutexEmulation : : WaitMode mode , <nl> + Handle < Object > array , Handle < Object > index , Handle < Object > value , <nl> + Handle < Object > timeout ) { <nl> + / / 1 . Let buffer be ? ValidateSharedIntegerTypedArray ( typedArray , true ) . <nl> Handle < JSTypedArray > sta ; <nl> ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( <nl> isolate , sta , ValidateSharedIntegerTypedArray ( isolate , array , true ) ) ; <nl> <nl> + / / 2 . Let i be ? ValidateAtomicAccess ( typedArray , index ) . <nl> Maybe < size_t > maybe_index = ValidateAtomicAccess ( isolate , sta , index ) ; <nl> if ( maybe_index . IsNothing ( ) ) return ReadOnlyRoots ( isolate ) . exception ( ) ; <nl> size_t i = maybe_index . FromJust ( ) ; <nl> <nl> - / / According to the spec , we have to check value ' s type before <nl> - / / looking at the timeout . <nl> + / / 3 . Let arrayTypeName be typedArray . [ [ TypedArrayName ] ] . <nl> + / / 4 . If arrayTypeName is " BigInt64Array " , let v be ? ToBigInt64 ( value ) . <nl> + / / 5 . Otherwise , let v be ? ToInt32 ( value ) . <nl> if ( sta - > type ( ) = = kExternalBigInt64Array ) { <nl> ASSIGN_RETURN_FAILURE_ON_EXCEPTION ( isolate , value , <nl> BigInt : : FromObject ( isolate , value ) ) ; <nl> BUILTIN ( AtomicsWait ) { <nl> Object : : ToInt32 ( isolate , value ) ) ; <nl> } <nl> <nl> + / / 6 . Let q be ? ToNumber ( timeout ) . <nl> + / / 7 . If q is NaN , let t be + ∞ , else let t be max ( q , 0 ) . <nl> double timeout_number ; <nl> if ( timeout - > IsUndefined ( isolate ) ) { <nl> timeout_number = ReadOnlyRoots ( isolate ) . infinity_value ( ) . Number ( ) ; <nl> BUILTIN ( AtomicsWait ) { <nl> timeout_number = 0 ; <nl> } <nl> <nl> - if ( ! isolate - > allow_atomics_wait ( ) ) { <nl> + / / 8 . If mode is sync , then <nl> + / / a . Let B be AgentCanSuspend ( ) . <nl> + / / b . If B is false , throw a TypeError exception . <nl> + if ( mode = = FutexEmulation : : WaitMode : : kSync & & <nl> + ! 
isolate - > allow_atomics_wait ( ) ) { <nl> THROW_NEW_ERROR_RETURN_FAILURE ( <nl> isolate , NewTypeError ( MessageTemplate : : kAtomicsWaitNotAllowed ) ) ; <nl> } <nl> BUILTIN ( AtomicsWait ) { <nl> <nl> if ( sta - > type ( ) = = kExternalBigInt64Array ) { <nl> return FutexEmulation : : WaitJs64 ( <nl> - isolate , array_buffer , GetAddress64 ( i , sta - > byte_offset ( ) ) , <nl> + isolate , mode , array_buffer , GetAddress64 ( i , sta - > byte_offset ( ) ) , <nl> Handle < BigInt > : : cast ( value ) - > AsInt64 ( ) , timeout_number ) ; <nl> } else { <nl> DCHECK ( sta - > type ( ) = = kExternalInt32Array ) ; <nl> - return FutexEmulation : : WaitJs32 ( isolate , array_buffer , <nl> + return FutexEmulation : : WaitJs32 ( isolate , mode , array_buffer , <nl> GetAddress32 ( i , sta - > byte_offset ( ) ) , <nl> NumberToInt32 ( * value ) , timeout_number ) ; <nl> } <nl> } <nl> <nl> + / / ES # sec - atomics . wait <nl> + / / Atomics . wait ( typedArray , index , value , timeout ) <nl> + BUILTIN ( AtomicsWait ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > array = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < Object > index = args . atOrUndefined ( isolate , 2 ) ; <nl> + Handle < Object > value = args . atOrUndefined ( isolate , 3 ) ; <nl> + Handle < Object > timeout = args . atOrUndefined ( isolate , 4 ) ; <nl> + <nl> + return DoWait ( isolate , FutexEmulation : : WaitMode : : kSync , array , index , value , <nl> + timeout ) ; <nl> + } <nl> + <nl> + BUILTIN ( AtomicsWaitAsync ) { <nl> + HandleScope scope ( isolate ) ; <nl> + Handle < Object > array = args . atOrUndefined ( isolate , 1 ) ; <nl> + Handle < Object > index = args . atOrUndefined ( isolate , 2 ) ; <nl> + Handle < Object > value = args . atOrUndefined ( isolate , 3 ) ; <nl> + Handle < Object > timeout = args . atOrUndefined ( isolate , 4 ) ; <nl> + <nl> + return DoWait ( isolate , FutexEmulation : : WaitMode : : kAsync , array , index , value , <nl> + timeout ) ; <nl> + } <nl> + <nl> } / / namespace internal <nl> } / / namespace v8 <nl> mmm a / src / execution / futex - emulation . cc <nl> ppp b / src / execution / futex - emulation . cc <nl> <nl> <nl> # include < limits > <nl> <nl> + # include " src / api / api - inl . h " <nl> + # include " src / base / logging . h " <nl> # include " src / base / macros . h " <nl> - # include " src / base / platform / time . h " <nl> # include " src / execution / isolate . h " <nl> # include " src / execution / vm - state - inl . h " <nl> # include " src / handles / handles - inl . h " <nl> # include " src / numbers / conversions . h " <nl> # include " src / objects / bigint . h " <nl> # include " src / objects / js - array - buffer - inl . h " <nl> + # include " src / objects / js - promise - inl . h " <nl> # include " src / objects / objects - inl . h " <nl> + # include " src / tasks / cancelable - task . h " <nl> <nl> namespace v8 { <nl> namespace internal { <nl> base : : LazyMutex FutexEmulation : : mutex_ = LAZY_MUTEX_INITIALIZER ; <nl> base : : LazyInstance < FutexWaitList > : : type FutexEmulation : : wait_list_ = <nl> LAZY_INSTANCE_INITIALIZER ; <nl> <nl> + FutexWaitListNode : : ~ FutexWaitListNode ( ) { <nl> + / / Assert that the timeout task was cancelled . <nl> + DCHECK_EQ ( CancelableTaskManager : : kInvalidTaskId , timeout_task_id_ ) ; <nl> + } <nl> + <nl> + bool FutexWaitListNode : : CancelTimeoutTask ( ) { <nl> + if ( timeout_task_id_ ! 
= CancelableTaskManager : : kInvalidTaskId ) { <nl> + auto return_value = cancelable_task_manager_ - > TryAbort ( timeout_task_id_ ) ; <nl> + timeout_task_id_ = CancelableTaskManager : : kInvalidTaskId ; <nl> + return return_value ! = TryAbortResult : : kTaskRunning ; <nl> + } <nl> + return true ; <nl> + } <nl> + <nl> void FutexWaitListNode : : NotifyWake ( ) { <nl> + DCHECK ( ! IsAsync ( ) ) ; <nl> / / Lock the FutexEmulation mutex before notifying . We know that the mutex <nl> / / will have been unlocked if we are currently waiting on the condition <nl> / / variable . The mutex will not be locked if FutexEmulation : : Wait hasn ' t <nl> void FutexWaitListNode : : NotifyWake ( ) { <nl> interrupted_ = true ; <nl> } <nl> <nl> - FutexWaitList : : FutexWaitList ( ) : head_ ( nullptr ) , tail_ ( nullptr ) { } <nl> + class ResolveAsyncWaiterPromisesTask : public CancelableTask { <nl> + public : <nl> + ResolveAsyncWaiterPromisesTask ( CancelableTaskManager * cancelable_task_manager , <nl> + Isolate * isolate ) <nl> + : CancelableTask ( cancelable_task_manager ) , isolate_ ( isolate ) { } <nl> + <nl> + void RunInternal ( ) override { <nl> + FutexEmulation : : ResolveAsyncWaiterPromises ( isolate_ ) ; <nl> + } <nl> + <nl> + private : <nl> + Isolate * isolate_ ; <nl> + } ; <nl> + <nl> + class AsyncWaiterTimeoutTask : public CancelableTask { <nl> + public : <nl> + AsyncWaiterTimeoutTask ( CancelableTaskManager * cancelable_task_manager , <nl> + FutexWaitListNode * node ) <nl> + : CancelableTask ( cancelable_task_manager ) , node_ ( node ) { } <nl> + <nl> + void RunInternal ( ) override { <nl> + FutexEmulation : : HandleAsyncWaiterTimeout ( node_ ) ; <nl> + } <nl> + <nl> + private : <nl> + FutexWaitListNode * node_ ; <nl> + } ; <nl> + <nl> + void FutexEmulation : : NotifyAsyncWaiter ( FutexWaitListNode * node ) { <nl> + / / This function can run in any thread . <nl> + <nl> + FutexEmulation : : mutex_ . Pointer ( ) - > AssertHeld ( ) ; <nl> + <nl> + / / Nullify the timeout time ; this distinguishes timed out waiters from <nl> + / / woken up ones . <nl> + node - > async_timeout_time_ = base : : TimeTicks ( ) ; <nl> + / / Try to cancel the timeout task . If cancelling fails , the task is already <nl> + / / running . In that case , it cannot proceed beyond waiting for the mutex , <nl> + / / since we ' re holding it . When it gets the mutex , it will see that waiting_ <nl> + / / is false , and ignore the FutexWaitListNode . <nl> + <nl> + / / Using the CancelableTaskManager here is OK since the Isolate is guaranteed <nl> + / / to be alive - FutexEmulation : : IsolateDeinit removes all FutexWaitListNodes <nl> + / / owned by an Isolate which is going to die . <nl> + node - > CancelTimeoutTask ( ) ; <nl> + <nl> + wait_list_ . Pointer ( ) - > RemoveNode ( node ) ; <nl> + <nl> + / / Schedule a task for resolving the Promise . <nl> + auto & isolate_map = wait_list_ . Pointer ( ) - > isolate_promises_to_resolve_ ; <nl> + auto it = isolate_map . find ( node - > isolate_for_async_waiters_ ) ; <nl> + if ( it = = isolate_map . end ( ) ) { <nl> + / / This Isolate doesn ' t have other Promises to resolve at the moment . <nl> + isolate_map . 
insert ( std : : make_pair ( node - > isolate_for_async_waiters_ , <nl> + FutexWaitList : : HeadAndTail { node , node } ) ) ; <nl> + auto task = std : : make_unique < ResolveAsyncWaiterPromisesTask > ( <nl> + node - > cancelable_task_manager_ , node - > isolate_for_async_waiters_ ) ; <nl> + node - > task_runner_ - > PostNonNestableTask ( std : : move ( task ) ) ; <nl> + } else { <nl> + / / Add this Node into the existing list . <nl> + node - > prev_ = it - > second . tail ; <nl> + it - > second . tail - > next_ = node ; <nl> + it - > second . tail = node ; <nl> + } <nl> + } <nl> <nl> void FutexWaitList : : AddNode ( FutexWaitListNode * node ) { <nl> - DCHECK ( node - > prev_ = = nullptr & & node - > next_ = = nullptr ) ; <nl> + DCHECK_NULL ( node - > prev_ ) ; <nl> + DCHECK_NULL ( node - > next_ ) ; <nl> if ( tail_ ) { <nl> tail_ - > next_ = node ; <nl> } else { <nl> void FutexWaitList : : AddNode ( FutexWaitListNode * node ) { <nl> } <nl> <nl> node - > prev_ = tail_ ; <nl> - node - > next_ = nullptr ; <nl> tail_ = node ; <nl> + <nl> + Verify ( ) ; <nl> } <nl> <nl> void FutexWaitList : : RemoveNode ( FutexWaitListNode * node ) { <nl> + DCHECK ( NodeIsOnList ( node , head_ ) ) ; <nl> + <nl> if ( node - > prev_ ) { <nl> node - > prev_ - > next_ = node - > next_ ; <nl> } else { <nl> + DCHECK_EQ ( node , head_ ) ; <nl> head_ = node - > next_ ; <nl> } <nl> <nl> if ( node - > next_ ) { <nl> node - > next_ - > prev_ = node - > prev_ ; <nl> } else { <nl> + DCHECK_EQ ( node , tail_ ) ; <nl> tail_ = node - > prev_ ; <nl> } <nl> <nl> node - > prev_ = node - > next_ = nullptr ; <nl> + <nl> + Verify ( ) ; <nl> } <nl> <nl> void AtomicsWaitWakeHandle : : Wake ( ) { <nl> Object WaitJsTranslateReturn ( Isolate * isolate , Object res ) { <nl> <nl> } / / namespace <nl> <nl> - Object FutexEmulation : : WaitJs32 ( Isolate * isolate , <nl> + Object FutexEmulation : : WaitJs32 ( Isolate * isolate , WaitMode mode , <nl> Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> int32_t value , double rel_timeout_ms ) { <nl> Object res = <nl> - Wait < int32_t > ( isolate , array_buffer , addr , value , rel_timeout_ms ) ; <nl> + Wait < int32_t > ( isolate , mode , array_buffer , addr , value , rel_timeout_ms ) ; <nl> return WaitJsTranslateReturn ( isolate , res ) ; <nl> } <nl> <nl> - Object FutexEmulation : : WaitJs64 ( Isolate * isolate , <nl> + Object FutexEmulation : : WaitJs64 ( Isolate * isolate , WaitMode mode , <nl> Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> int64_t value , double rel_timeout_ms ) { <nl> Object res = <nl> - Wait < int64_t > ( isolate , array_buffer , addr , value , rel_timeout_ms ) ; <nl> + Wait < int64_t > ( isolate , mode , array_buffer , addr , value , rel_timeout_ms ) ; <nl> return WaitJsTranslateReturn ( isolate , res ) ; <nl> } <nl> <nl> Object FutexEmulation : : WaitWasm32 ( Isolate * isolate , <nl> Handle < JSArrayBuffer > array_buffer , <nl> size_t addr , int32_t value , <nl> int64_t rel_timeout_ns ) { <nl> - return Wait < int32_t > ( isolate , array_buffer , addr , value , rel_timeout_ns > = 0 , <nl> - rel_timeout_ns ) ; <nl> + return Wait < int32_t > ( isolate , WaitMode : : kSync , array_buffer , addr , value , <nl> + rel_timeout_ns > = 0 , rel_timeout_ns ) ; <nl> } <nl> <nl> Object FutexEmulation : : WaitWasm64 ( Isolate * isolate , <nl> Handle < JSArrayBuffer > array_buffer , <nl> size_t addr , int64_t value , <nl> int64_t rel_timeout_ns ) { <nl> - return Wait < int64_t > ( isolate , array_buffer , addr , value , rel_timeout_ns > = 0 , <nl> - rel_timeout_ns ) ; 
<nl> + return Wait < int64_t > ( isolate , WaitMode : : kSync , array_buffer , addr , value , <nl> + rel_timeout_ns > = 0 , rel_timeout_ns ) ; <nl> } <nl> <nl> template < typename T > <nl> - Object FutexEmulation : : Wait ( Isolate * isolate , <nl> + Object FutexEmulation : : Wait ( Isolate * isolate , WaitMode mode , <nl> Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> T value , double rel_timeout_ms ) { <nl> DCHECK_LT ( addr , array_buffer - > byte_length ( ) ) ; <nl> Object FutexEmulation : : Wait ( Isolate * isolate , <nl> rel_timeout_ns = static_cast < int64_t > ( timeout_ns ) ; <nl> } <nl> } <nl> - return Wait ( isolate , array_buffer , addr , value , use_timeout , rel_timeout_ns ) ; <nl> + return Wait ( isolate , mode , array_buffer , addr , value , use_timeout , <nl> + rel_timeout_ns ) ; <nl> } <nl> <nl> namespace { <nl> double WaitTimeoutInMs ( double timeout_ns ) { <nl> } / / namespace <nl> <nl> template < typename T > <nl> - Object FutexEmulation : : Wait ( Isolate * isolate , <nl> + Object FutexEmulation : : Wait ( Isolate * isolate , WaitMode mode , <nl> Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> T value , bool use_timeout , int64_t rel_timeout_ns ) { <nl> + if ( mode = = WaitMode : : kSync ) { <nl> + return WaitSync ( isolate , array_buffer , addr , value , use_timeout , <nl> + rel_timeout_ns ) ; <nl> + } <nl> + DCHECK_EQ ( mode , WaitMode : : kAsync ) ; <nl> + return WaitAsync ( isolate , array_buffer , addr , value , use_timeout , <nl> + rel_timeout_ns ) ; <nl> + } <nl> + <nl> + template < typename T > <nl> + Object FutexEmulation : : WaitSync ( Isolate * isolate , <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> + T value , bool use_timeout , <nl> + int64_t rel_timeout_ns ) { <nl> VMState < ATOMICS_WAIT > state ( isolate ) ; <nl> base : : TimeDelta rel_timeout = <nl> base : : TimeDelta : : FromNanoseconds ( rel_timeout_ns ) ; <nl> Object FutexEmulation : : Wait ( Isolate * isolate , <nl> return * result ; <nl> } <nl> <nl> + FutexWaitListNode : : FutexWaitListNode ( <nl> + const std : : shared_ptr < BackingStore > & backing_store , size_t wait_addr , <nl> + Handle < JSObject > promise , Isolate * isolate ) <nl> + : isolate_for_async_waiters_ ( isolate ) , <nl> + backing_store_ ( backing_store ) , <nl> + wait_addr_ ( wait_addr ) , <nl> + waiting_ ( true ) { <nl> + auto v8_isolate = reinterpret_cast < v8 : : Isolate * > ( isolate ) ; <nl> + task_runner_ = V8 : : GetCurrentPlatform ( ) - > GetForegroundTaskRunner ( v8_isolate ) ; <nl> + cancelable_task_manager_ = isolate - > cancelable_task_manager ( ) ; <nl> + <nl> + v8 : : Local < v8 : : Promise > local_promise = Utils : : PromiseToLocal ( promise ) ; <nl> + promise_ . Reset ( v8_isolate , local_promise ) ; <nl> + promise_ . SetWeak ( ) ; <nl> + Handle < NativeContext > native_context ( isolate - > native_context ( ) ) ; <nl> + v8 : : Local < v8 : : Context > local_native_context = <nl> + Utils : : ToLocal ( Handle < Context > : : cast ( native_context ) ) ; <nl> + native_context_ . Reset ( v8_isolate , local_native_context ) ; <nl> + native_context_ . SetWeak ( ) ; <nl> + <nl> + / / Add the Promise into the NativeContext ' s atomics_waitasync_promises set , so <nl> + / / that the list keeps it alive . <nl> + Handle < OrderedHashSet > promises ( native_context - > atomics_waitasync_promises ( ) , <nl> + isolate ) ; <nl> + promises = OrderedHashSet : : Add ( isolate , promises , promise ) . 
ToHandleChecked ( ) ; <nl> + native_context - > set_atomics_waitasync_promises ( * promises ) ; <nl> + } <nl> + <nl> + template < typename T > <nl> + Object FutexEmulation : : WaitAsync ( Isolate * isolate , <nl> + Handle < JSArrayBuffer > array_buffer , <nl> + size_t addr , T value , bool use_timeout , <nl> + int64_t rel_timeout_ns ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + base : : TimeDelta rel_timeout = <nl> + base : : TimeDelta : : FromNanoseconds ( rel_timeout_ns ) ; <nl> + <nl> + Factory * factory = isolate - > factory ( ) ; <nl> + Handle < JSObject > result = factory - > NewJSObject ( isolate - > object_function ( ) ) ; <nl> + <nl> + std : : shared_ptr < BackingStore > backing_store = array_buffer - > GetBackingStore ( ) ; <nl> + <nl> + / / 17 . Let w be ! AtomicLoad ( typedArray , i ) . <nl> + std : : atomic < T > * p = reinterpret_cast < std : : atomic < T > * > ( <nl> + static_cast < int8_t * > ( backing_store - > buffer_start ( ) ) + addr ) ; <nl> + if ( p - > load ( ) ! = value ) { <nl> + / / 18 . If v is not equal to w , then <nl> + / / a . Perform LeaveCriticalSection ( WL ) . <nl> + / / . . . <nl> + / / c . Perform ! CreateDataPropertyOrThrow ( resultObject , " async " , false ) . <nl> + / / d . Perform ! CreateDataPropertyOrThrow ( resultObject , " value " , <nl> + / / " not - equal " ) . <nl> + / / e . Return resultObject . <nl> + CHECK ( <nl> + JSReceiver : : CreateDataProperty ( isolate , result , factory - > async_string ( ) , <nl> + factory - > false_value ( ) , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + CHECK ( JSReceiver : : CreateDataProperty ( <nl> + isolate , result , factory - > value_string ( ) , <nl> + factory - > not_equal_string ( ) , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + return * result ; <nl> + } <nl> + <nl> + if ( use_timeout & & rel_timeout_ns = = 0 ) { <nl> + / / 19 . If t is 0 and mode is async , then <nl> + / / . . . <nl> + / / b . Perform LeaveCriticalSection ( WL ) . <nl> + / / c . Perform ! CreateDataPropertyOrThrow ( resultObject , " async " , false ) . <nl> + / / d . Perform ! CreateDataPropertyOrThrow ( resultObject , " value " , <nl> + / / " timed - out " ) . <nl> + / / e . Return resultObject . <nl> + CHECK ( <nl> + JSReceiver : : CreateDataProperty ( isolate , result , factory - > async_string ( ) , <nl> + factory - > false_value ( ) , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + CHECK ( JSReceiver : : CreateDataProperty ( <nl> + isolate , result , factory - > value_string ( ) , <nl> + factory - > timed_out_string ( ) , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + return * result ; <nl> + } <nl> + <nl> + Handle < JSObject > promise_capability = factory - > NewJSPromise ( ) ; <nl> + FutexWaitListNode * node = <nl> + new FutexWaitListNode ( backing_store , addr , promise_capability , isolate ) ; <nl> + <nl> + { <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + wait_list_ . Pointer ( ) - > AddNode ( node ) ; <nl> + } <nl> + if ( use_timeout ) { <nl> + node - > async_timeout_time_ = base : : TimeTicks : : Now ( ) + rel_timeout ; <nl> + auto task = std : : make_unique < AsyncWaiterTimeoutTask > ( <nl> + node - > cancelable_task_manager_ , node ) ; <nl> + node - > timeout_task_id_ = task - > id ( ) ; <nl> + node - > task_runner_ - > PostNonNestableDelayedTask ( std : : move ( task ) , <nl> + rel_timeout . InSecondsF ( ) ) ; <nl> + } <nl> + <nl> + / / 26 . Perform ! CreateDataPropertyOrThrow ( resultObject , " async " , true ) . <nl> + / / 27 . Perform ! 
CreateDataPropertyOrThrow ( resultObject , " value " , <nl> + / / promiseCapability . [ [ Promise ] ] ) . <nl> + / / 28 . Return resultObject . <nl> + CHECK ( JSReceiver : : CreateDataProperty ( isolate , result , factory - > async_string ( ) , <nl> + factory - > true_value ( ) , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + CHECK ( JSReceiver : : CreateDataProperty ( isolate , result , factory - > value_string ( ) , <nl> + promise_capability , Just ( kDontThrow ) ) <nl> + . FromJust ( ) ) ; <nl> + return * result ; <nl> + } <nl> + <nl> Object FutexEmulation : : Wake ( Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> uint32_t num_waiters_to_wake ) { <nl> DCHECK_LT ( addr , array_buffer - > byte_length ( ) ) ; <nl> Object FutexEmulation : : Wake ( Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> FutexWaitListNode * node = wait_list_ . Pointer ( ) - > head_ ; <nl> while ( node & & num_waiters_to_wake > 0 ) { <nl> + bool delete_this_node = false ; <nl> std : : shared_ptr < BackingStore > node_backing_store = <nl> node - > backing_store_ . lock ( ) ; <nl> - DCHECK ( node_backing_store ) ; <nl> + <nl> + if ( ! node - > waiting_ ) { <nl> + node = node - > next_ ; <nl> + continue ; <nl> + } <nl> if ( backing_store . get ( ) = = node_backing_store . get ( ) & & <nl> - addr = = node - > wait_addr_ & & node - > waiting_ ) { <nl> + addr = = node - > wait_addr_ ) { <nl> node - > waiting_ = false ; <nl> - node - > cond_ . NotifyOne ( ) ; <nl> + <nl> + / / Retrieve the next node to iterate before calling NotifyAsyncWaiter , <nl> + / / since NotifyAsyncWaiter will take the node out of the linked list . <nl> + auto old_node = node ; <nl> + node = node - > next_ ; <nl> + if ( old_node - > IsAsync ( ) ) { <nl> + NotifyAsyncWaiter ( old_node ) ; <nl> + } else { <nl> + old_node - > cond_ . NotifyOne ( ) ; <nl> + } <nl> if ( num_waiters_to_wake ! = kWakeAll ) { <nl> - - num_waiters_to_wake ; <nl> } <nl> waiters_woken + + ; <nl> + continue ; <nl> + } <nl> + if ( node_backing_store . get ( ) = = nullptr & & <nl> + node - > async_timeout_time_ = = base : : TimeTicks ( ) ) { <nl> + / / Backing store has been deleted and the node is still waiting , and <nl> + / / there ' s no timeout . It ' s never going to be woken up , so we can clean <nl> + / / it up now . We don ' t need to cancel the timeout task , because there is <nl> + / / none . <nl> + DCHECK ( node - > IsAsync ( ) ) ; <nl> + DCHECK_EQ ( CancelableTaskManager : : kInvalidTaskId , node - > timeout_task_id_ ) ; <nl> + delete_this_node = true ; <nl> + } else if ( node - > IsAsync ( ) & & node - > native_context_ . IsEmpty ( ) ) { <nl> + / / The NativeContext related to the async waiter has been deleted . <nl> + / / Ditto , clean up now . <nl> + <nl> + / / Using the CancelableTaskManager here is OK since the Isolate is <nl> + / / guaranteed to be alive - FutexEmulation : : IsolateDeinit removes all <nl> + / / FutexWaitListNodes owned by an Isolate which is going to die . <nl> + if ( node - > CancelTimeoutTask ( ) ) { <nl> + delete_this_node = true ; <nl> + } <nl> + / / If cancelling the timeout task failed , the timeout task is already <nl> + / / running and will clean up the node . <nl> } <nl> <nl> - node = node - > next_ ; <nl> + if ( delete_this_node ) { <nl> + auto old_node = node ; <nl> + node = node - > next_ ; <nl> + wait_list_ . 
Pointer ( ) - > RemoveNode ( old_node ) ; <nl> + delete old_node ; <nl> + } else { <nl> + node = node - > next_ ; <nl> + } <nl> } <nl> <nl> return Smi : : FromInt ( waiters_woken ) ; <nl> } <nl> <nl> + void FutexEmulation : : CleanupAsyncWaiterPromise ( FutexWaitListNode * node ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + DCHECK ( node - > IsAsync ( ) ) ; <nl> + <nl> + Isolate * isolate = node - > isolate_for_async_waiters_ ; <nl> + auto v8_isolate = reinterpret_cast < v8 : : Isolate * > ( isolate ) ; <nl> + <nl> + / / This function must run in the main thread of node ' s Isolate . <nl> + DCHECK_EQ ( isolate - > thread_id ( ) , ThreadId : : Current ( ) ) ; <nl> + <nl> + if ( ! node - > promise_ . IsEmpty ( ) ) { <nl> + Handle < JSPromise > promise = Handle < JSPromise > : : cast ( <nl> + Utils : : OpenHandle ( * node - > promise_ . Get ( v8_isolate ) ) ) ; <nl> + / / Promise keeps the NativeContext alive . <nl> + DCHECK ( ! node - > native_context_ . IsEmpty ( ) ) ; <nl> + Handle < NativeContext > native_context = Handle < NativeContext > : : cast ( <nl> + Utils : : OpenHandle ( * node - > native_context_ . Get ( v8_isolate ) ) ) ; <nl> + <nl> + / / Remove the Promise from the NativeContext ' s set . <nl> + Handle < OrderedHashSet > promises ( <nl> + native_context - > atomics_waitasync_promises ( ) , isolate ) ; <nl> + bool was_deleted = OrderedHashSet : : Delete ( isolate , * promises , * promise ) ; <nl> + DCHECK ( was_deleted ) ; <nl> + USE ( was_deleted ) ; <nl> + promises = OrderedHashSet : : Shrink ( isolate , promises ) ; <nl> + native_context - > set_atomics_waitasync_promises ( * promises ) ; <nl> + } else { <nl> + / / NativeContext keeps the Promise alive ; if the Promise is dead then <nl> + / / surely NativeContext is too . <nl> + DCHECK ( node - > native_context_ . IsEmpty ( ) ) ; <nl> + } <nl> + } <nl> + <nl> + FutexWaitListNode * FutexEmulation : : DeleteAsyncWaiterNode ( <nl> + FutexWaitListNode * node ) { <nl> + auto next = node - > next_ ; <nl> + delete node ; <nl> + return next ; <nl> + } <nl> + <nl> + void FutexEmulation : : ResolveAsyncWaiterPromise ( FutexWaitListNode * node ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + <nl> + / / This function must run in the main thread of node ' s Isolate . <nl> + DCHECK_EQ ( node - > isolate_for_async_waiters_ - > thread_id ( ) , ThreadId : : Current ( ) ) ; <nl> + <nl> + auto v8_isolate = <nl> + reinterpret_cast < v8 : : Isolate * > ( node - > isolate_for_async_waiters_ ) ; <nl> + <nl> + if ( ! node - > promise_ . IsEmpty ( ) ) { <nl> + Handle < JSPromise > promise = Handle < JSPromise > : : cast ( <nl> + Utils : : OpenHandle ( * node - > promise_ . Get ( v8_isolate ) ) ) ; <nl> + Handle < String > result_string ; <nl> + / / When waiters are notified , their async_timeout_time_ is reset . Having a <nl> + / / non - zero async_timeout_time_ here means the waiter timed out . <nl> + if ( node - > async_timeout_time_ ! = base : : TimeTicks ( ) ) { <nl> + DCHECK ( node - > waiting_ ) ; <nl> + result_string = <nl> + node - > isolate_for_async_waiters_ - > factory ( ) - > timed_out_string ( ) ; <nl> + } else { <nl> + DCHECK ( ! node - > waiting_ ) ; <nl> + result_string = node - > isolate_for_async_waiters_ - > factory ( ) - > ok_string ( ) ; <nl> + } <nl> + MaybeHandle < Object > resolve_result = <nl> + JSPromise : : Resolve ( promise , result_string ) ; <nl> + DCHECK ( ! resolve_result . 
is_null ( ) ) ; <nl> + USE ( resolve_result ) ; <nl> + } <nl> + } <nl> + <nl> + void FutexEmulation : : ResolveAsyncWaiterPromises ( Isolate * isolate ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + <nl> + / / This function must run in the main thread of isolate . <nl> + DCHECK_EQ ( isolate - > thread_id ( ) , ThreadId : : Current ( ) ) ; <nl> + <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + FutexWaitListNode * node ; <nl> + { <nl> + auto & isolate_map = wait_list_ . Pointer ( ) - > isolate_promises_to_resolve_ ; <nl> + auto it = isolate_map . find ( isolate ) ; <nl> + DCHECK_NE ( isolate_map . end ( ) , it ) ; <nl> + <nl> + node = it - > second . head ; <nl> + isolate_map . erase ( it ) ; <nl> + } <nl> + <nl> + HandleScope handle_scope ( isolate ) ; <nl> + while ( node ) { <nl> + DCHECK_EQ ( isolate , node - > isolate_for_async_waiters_ ) ; <nl> + DCHECK ( ! node - > waiting_ ) ; <nl> + ResolveAsyncWaiterPromise ( node ) ; <nl> + CleanupAsyncWaiterPromise ( node ) ; <nl> + / / We ' ve already tried to cancel the timeout task for the node ; since we ' re <nl> + / / now in the same thread the timeout task is supposed to run , we know the <nl> + / / timeout task will never happen , and it ' s safe to delete the node here . <nl> + DCHECK_EQ ( CancelableTaskManager : : kInvalidTaskId , node - > timeout_task_id_ ) ; <nl> + node = DeleteAsyncWaiterNode ( node ) ; <nl> + } <nl> + } <nl> + <nl> + void FutexEmulation : : HandleAsyncWaiterTimeout ( FutexWaitListNode * node ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + DCHECK ( node - > IsAsync ( ) ) ; <nl> + <nl> + / / This function must run in the main thread of node ' s Isolate . <nl> + DCHECK_EQ ( node - > isolate_for_async_waiters_ - > thread_id ( ) , ThreadId : : Current ( ) ) ; <nl> + <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + <nl> + if ( ! node - > waiting_ ) { <nl> + / / If the Node is not waiting , it ' s already scheduled to have its Promise <nl> + / / resolved . Ignore the timeout . <nl> + return ; <nl> + } <nl> + node - > timeout_task_id_ = CancelableTaskManager : : kInvalidTaskId ; <nl> + wait_list_ . Pointer ( ) - > RemoveNode ( node ) ; <nl> + HandleScope handle_scope ( node - > isolate_for_async_waiters_ ) ; <nl> + ResolveAsyncWaiterPromise ( node ) ; <nl> + CleanupAsyncWaiterPromise ( node ) ; <nl> + delete node ; <nl> + } <nl> + <nl> + void FutexEmulation : : IsolateDeinit ( Isolate * isolate ) { <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + <nl> + FutexWaitListNode * node = wait_list_ . Pointer ( ) - > head_ ; <nl> + while ( node ) { <nl> + if ( node - > isolate_for_async_waiters_ = = isolate ) { <nl> + / / The Isolate is going away ; don ' t bother cleaning up the Promises in the <nl> + / / NativeContext . Also we don ' t need to cancel the timeout task , since it <nl> + / / will be cancelled by Isolate : : Deinit . <nl> + node - > timeout_task_id_ = CancelableTaskManager : : kInvalidTaskId ; <nl> + wait_list_ . Pointer ( ) - > RemoveNode ( node ) ; <nl> + node = DeleteAsyncWaiterNode ( node ) ; <nl> + } else { <nl> + node = node - > next_ ; <nl> + } <nl> + } <nl> + <nl> + auto & isolate_map = wait_list_ . Pointer ( ) - > isolate_promises_to_resolve_ ; <nl> + auto it = isolate_map . find ( isolate ) ; <nl> + if ( it ! = isolate_map . end ( ) ) { <nl> + node = it - > second . 
head ; <nl> + while ( node ) { <nl> + DCHECK_EQ ( isolate , node - > isolate_for_async_waiters_ ) ; <nl> + node = DeleteAsyncWaiterNode ( node ) ; <nl> + } <nl> + isolate_map . erase ( it ) ; <nl> + } <nl> + <nl> + wait_list_ . Pointer ( ) - > Verify ( ) ; <nl> + } <nl> + <nl> Object FutexEmulation : : NumWaitersForTesting ( Handle < JSArrayBuffer > array_buffer , <nl> size_t addr ) { <nl> DCHECK_LT ( addr , array_buffer - > byte_length ( ) ) ; <nl> Object FutexEmulation : : NumWaitersForTesting ( Handle < JSArrayBuffer > array_buffer , <nl> while ( node ) { <nl> std : : shared_ptr < BackingStore > node_backing_store = <nl> node - > backing_store_ . lock ( ) ; <nl> - DCHECK ( node_backing_store ) ; <nl> if ( backing_store . get ( ) = = node_backing_store . get ( ) & & <nl> addr = = node - > wait_addr_ & & node - > waiting_ ) { <nl> waiters + + ; <nl> Object FutexEmulation : : NumWaitersForTesting ( Handle < JSArrayBuffer > array_buffer , <nl> return Smi : : FromInt ( waiters ) ; <nl> } <nl> <nl> + Object FutexEmulation : : NumAsyncWaitersForTesting ( Isolate * isolate ) { <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + <nl> + int waiters = 0 ; <nl> + FutexWaitListNode * node = wait_list_ . Pointer ( ) - > head_ ; <nl> + while ( node ) { <nl> + if ( node - > isolate_for_async_waiters_ = = isolate & & node - > waiting_ ) { <nl> + waiters + + ; <nl> + } <nl> + node = node - > next_ ; <nl> + } <nl> + <nl> + return Smi : : FromInt ( waiters ) ; <nl> + } <nl> + <nl> + Object FutexEmulation : : NumUnresolvedAsyncPromisesForTesting ( <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr ) { <nl> + DCHECK_LT ( addr , array_buffer - > byte_length ( ) ) ; <nl> + std : : shared_ptr < BackingStore > backing_store = array_buffer - > GetBackingStore ( ) ; <nl> + <nl> + base : : MutexGuard lock_guard ( mutex_ . Pointer ( ) ) ; <nl> + <nl> + int waiters = 0 ; <nl> + <nl> + auto & isolate_map = wait_list_ . Pointer ( ) - > isolate_promises_to_resolve_ ; <nl> + for ( auto it : isolate_map ) { <nl> + FutexWaitListNode * node = it . second . head ; <nl> + while ( node ) { <nl> + std : : shared_ptr < BackingStore > node_backing_store = <nl> + node - > backing_store_ . lock ( ) ; <nl> + if ( backing_store . get ( ) = = node_backing_store . get ( ) & & <nl> + addr = = node - > wait_addr_ & & ! node - > waiting_ ) { <nl> + waiters + + ; <nl> + } <nl> + <nl> + node = node - > next_ ; <nl> + } <nl> + } <nl> + <nl> + return Smi : : FromInt ( waiters ) ; <nl> + } <nl> + <nl> + void FutexWaitList : : VerifyNode ( FutexWaitListNode * node , FutexWaitListNode * head , <nl> + FutexWaitListNode * tail ) { <nl> + # ifdef DEBUG <nl> + if ( node - > next_ ) { <nl> + DCHECK_NE ( node , tail ) ; <nl> + DCHECK_EQ ( node , node - > next_ - > prev_ ) ; <nl> + } else { <nl> + DCHECK_EQ ( node , tail ) ; <nl> + } <nl> + if ( node - > prev_ ) { <nl> + DCHECK_NE ( node , head ) ; <nl> + DCHECK_EQ ( node , node - > prev_ - > next_ ) ; <nl> + } else { <nl> + DCHECK_EQ ( node , head ) ; <nl> + } <nl> + <nl> + if ( node - > async_timeout_time_ ! 
= base : : TimeTicks ( ) ) { <nl> + DCHECK ( FLAG_harmony_atomics_waitasync ) ; <nl> + DCHECK ( node - > IsAsync ( ) ) ; <nl> + } <nl> + <nl> + DCHECK ( NodeIsOnList ( node , head ) ) ; <nl> + # endif / / DEBUG <nl> + } <nl> + <nl> + void FutexWaitList : : Verify ( ) { <nl> + # ifdef DEBUG <nl> + FutexWaitListNode * node = head_ ; <nl> + while ( node ) { <nl> + VerifyNode ( node , head_ , tail_ ) ; <nl> + node = node - > next_ ; <nl> + } <nl> + <nl> + for ( auto it : isolate_promises_to_resolve_ ) { <nl> + auto node = it . second . head ; <nl> + while ( node ) { <nl> + VerifyNode ( node , it . second . head , it . second . tail ) ; <nl> + DCHECK_EQ ( it . first , node - > isolate_for_async_waiters_ ) ; <nl> + node = node - > next_ ; <nl> + } <nl> + } <nl> + # endif / / DEBUG <nl> + } <nl> + <nl> + bool FutexWaitList : : NodeIsOnList ( FutexWaitListNode * node , <nl> + FutexWaitListNode * head ) { <nl> + auto n = head ; <nl> + while ( n ! = nullptr ) { <nl> + if ( n = = node ) { <nl> + return true ; <nl> + } <nl> + n = n - > next_ ; <nl> + } <nl> + return false ; <nl> + } <nl> + <nl> } / / namespace internal <nl> } / / namespace v8 <nl> mmm a / src / execution / futex - emulation . h <nl> ppp b / src / execution / futex - emulation . h <nl> <nl> <nl> # include < stdint . h > <nl> <nl> + # include < map > <nl> + <nl> + # include " include / v8 . h " <nl> # include " src / base / atomicops . h " <nl> # include " src / base / lazy - instance . h " <nl> # include " src / base / macros . h " <nl> # include " src / base / platform / condition - variable . h " <nl> # include " src / base / platform / mutex . h " <nl> + # include " src / base / platform / time . h " <nl> + # include " src / tasks / cancelable - task . h " <nl> # include " src / utils / allocation . h " <nl> <nl> / / Support for emulating futexes , a low - level synchronization primitive . They <nl> class AtomicsWaitWakeHandle { <nl> <nl> class FutexWaitListNode { <nl> public : <nl> - FutexWaitListNode ( ) <nl> - : prev_ ( nullptr ) , <nl> - next_ ( nullptr ) , <nl> - wait_addr_ ( 0 ) , <nl> - waiting_ ( false ) , <nl> - interrupted_ ( false ) { } <nl> + / / Create a sync FutexWaitListNode . <nl> + FutexWaitListNode ( ) = default ; <nl> + <nl> + / / Create an async FutexWaitListNode . <nl> + FutexWaitListNode ( const std : : shared_ptr < BackingStore > & backing_store , <nl> + size_t wait_addr , Handle < JSObject > promise_capability , <nl> + Isolate * isolate ) ; <nl> + ~ FutexWaitListNode ( ) ; <nl> <nl> void NotifyWake ( ) ; <nl> <nl> + bool IsAsync ( ) const { return isolate_for_async_waiters_ ! = nullptr ; } <nl> + <nl> + / / Returns false if the cancelling failed , true otherwise . <nl> + bool CancelTimeoutTask ( ) ; <nl> + <nl> private : <nl> friend class FutexEmulation ; <nl> friend class FutexWaitList ; <nl> friend class ResetWaitingOnScopeExit ; <nl> <nl> + / / Set only for async FutexWaitListNodes . <nl> + Isolate * isolate_for_async_waiters_ = nullptr ; <nl> + std : : shared_ptr < TaskRunner > task_runner_ ; <nl> + CancelableTaskManager * cancelable_task_manager_ = nullptr ; <nl> + <nl> base : : ConditionVariable cond_ ; <nl> / / prev_ and next_ are protected by FutexEmulation : : mutex_ . 
<nl> - FutexWaitListNode * prev_ ; <nl> - FutexWaitListNode * next_ ; <nl> + FutexWaitListNode * prev_ = nullptr ; <nl> + FutexWaitListNode * next_ = nullptr ; <nl> + <nl> std : : weak_ptr < BackingStore > backing_store_ ; <nl> - size_t wait_addr_ ; <nl> + size_t wait_addr_ = 0 ; <nl> / / waiting_ and interrupted_ are protected by FutexEmulation : : mutex_ <nl> / / if this node is currently contained in FutexEmulation : : wait_list_ <nl> / / or an AtomicsWaitWakeHandle has access to it . <nl> - bool waiting_ ; <nl> - bool interrupted_ ; <nl> + bool waiting_ = false ; <nl> + bool interrupted_ = false ; <nl> + <nl> + / / Only for async FutexWaitListNodes . Weak Global handle . Must not be <nl> + / / synchronously resolved by a non - owner Isolate . <nl> + v8 : : Global < v8 : : Promise > promise_ ; <nl> + <nl> + / / Only for async FutexWaitListNodes . Weak Global handle . <nl> + v8 : : Global < v8 : : Context > native_context_ ; <nl> + <nl> + / / Only for async FutexWaitListNodes . If async_timeout_time_ is <nl> + / / base : : TimeTicks ( ) , this async waiter doesn ' t have a timeout or has already <nl> + / / been notified . Values other than base : : TimeTicks ( ) are used for async <nl> + / / waiters with an active timeout . <nl> + base : : TimeTicks async_timeout_time_ ; <nl> + <nl> + CancelableTaskManager : : Id timeout_task_id_ = <nl> + CancelableTaskManager : : kInvalidTaskId ; <nl> <nl> DISALLOW_COPY_AND_ASSIGN ( FutexWaitListNode ) ; <nl> } ; <nl> <nl> class FutexWaitList { <nl> public : <nl> - FutexWaitList ( ) ; <nl> + FutexWaitList ( ) = default ; <nl> <nl> void AddNode ( FutexWaitListNode * node ) ; <nl> void RemoveNode ( FutexWaitListNode * node ) ; <nl> <nl> + / / For checking the internal consistency of the FutexWaitList . <nl> + void Verify ( ) ; <nl> + / / Verifies the local consistency of | node | . If it ' s the first node of its <nl> + / / list , it must be | head | , and if it ' s the last node , it must be | tail | . <nl> + void VerifyNode ( FutexWaitListNode * node , FutexWaitListNode * head , <nl> + FutexWaitListNode * tail ) ; <nl> + / / Returns true if | node | is on the linked list starting with | head | . <nl> + static bool NodeIsOnList ( FutexWaitListNode * node , FutexWaitListNode * head ) ; <nl> + <nl> private : <nl> friend class FutexEmulation ; <nl> <nl> - FutexWaitListNode * head_ ; <nl> - FutexWaitListNode * tail_ ; <nl> + FutexWaitListNode * head_ = nullptr ; <nl> + FutexWaitListNode * tail_ = nullptr ; <nl> + <nl> + struct HeadAndTail { <nl> + FutexWaitListNode * head ; <nl> + FutexWaitListNode * tail ; <nl> + } ; <nl> + / / Isolate * - > linked list of Nodes which are waiting for their Promises to <nl> + / / be resolved . <nl> + std : : map < Isolate * , HeadAndTail > isolate_promises_to_resolve_ ; <nl> <nl> DISALLOW_COPY_AND_ASSIGN ( FutexWaitList ) ; <nl> } ; <nl> class ResetWaitingOnScopeExit { <nl> <nl> class FutexEmulation : public AllStatic { <nl> public : <nl> + enum WaitMode { kSync = 0 , kAsync } ; <nl> + <nl> / / Pass to Wake ( ) to wake all waiters . <nl> static const uint32_t kWakeAll = UINT32_MAX ; <nl> <nl> class FutexEmulation : public AllStatic { <nl> / / | rel_timeout_ms | can be Infinity . <nl> / / If woken , return " ok " , otherwise return " timed - out " . The initial check and <nl> / / the decision to wait happen atomically . 
<nl> - static Object WaitJs32 ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> - size_t addr , int32_t value , double rel_timeout_ms ) ; <nl> + static Object WaitJs32 ( Isolate * isolate , WaitMode mode , <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> + int32_t value , double rel_timeout_ms ) ; <nl> <nl> / / An version of WaitJs32 for int64_t values . <nl> - static Object WaitJs64 ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> - size_t addr , int64_t value , double rel_timeout_ms ) ; <nl> + static Object WaitJs64 ( Isolate * isolate , WaitMode mode , <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr , <nl> + int64_t value , double rel_timeout_ms ) ; <nl> <nl> / / Same as WaitJs above except it returns 0 ( ok ) , 1 ( not equal ) and 2 ( timed <nl> / / out ) as expected by Wasm . <nl> class FutexEmulation : public AllStatic { <nl> size_t addr , <nl> uint32_t num_waiters_to_wake ) ; <nl> <nl> - / / Return the number of threads waiting on | addr | . Should only be used for <nl> - / / testing . <nl> + / / Called before | isolate | dies . Removes async waiters owned by | isolate | . <nl> + static void IsolateDeinit ( Isolate * isolate ) ; <nl> + <nl> + / / Return the number of threads or async waiters waiting on | addr | . Should <nl> + / / only be used for testing . <nl> static Object NumWaitersForTesting ( Handle < JSArrayBuffer > array_buffer , <nl> size_t addr ) ; <nl> <nl> + / / Return the number of async waiters ( which belong to | isolate | ) waiting . <nl> + / / Should only be used for testing . <nl> + static Object NumAsyncWaitersForTesting ( Isolate * isolate ) ; <nl> + <nl> + / / Return the number of async waiters which were waiting for | addr | and are <nl> + / / now waiting for the Promises to be resolved . Should only be used for <nl> + / / testing . <nl> + static Object NumUnresolvedAsyncPromisesForTesting ( <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr ) ; <nl> + <nl> private : <nl> friend class FutexWaitListNode ; <nl> friend class AtomicsWaitWakeHandle ; <nl> + friend class ResolveAsyncWaiterPromisesTask ; <nl> + friend class AsyncWaiterTimeoutTask ; <nl> + <nl> + template < typename T > <nl> + static Object Wait ( Isolate * isolate , WaitMode mode , <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr , T value , <nl> + double rel_timeout_ms ) ; <nl> + <nl> + template < typename T > <nl> + static Object Wait ( Isolate * isolate , WaitMode mode , <nl> + Handle < JSArrayBuffer > array_buffer , size_t addr , T value , <nl> + bool use_timeout , int64_t rel_timeout_ns ) ; <nl> <nl> template < typename T > <nl> - static Object Wait ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> - size_t addr , T value , double rel_timeout_ms ) ; <nl> + static Object WaitSync ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> + size_t addr , T value , bool use_timeout , <nl> + int64_t rel_timeout_ns ) ; <nl> <nl> template < typename T > <nl> - static Object Wait ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> - size_t addr , T value , bool use_timeout , <nl> - int64_t rel_timeout_ns ) ; <nl> + static Object WaitAsync ( Isolate * isolate , Handle < JSArrayBuffer > array_buffer , <nl> + size_t addr , T value , bool use_timeout , <nl> + int64_t rel_timeout_ns ) ; <nl> + <nl> + / / Resolve the Promises of the async waiters which belong to | isolate | . 
<nl> + static void ResolveAsyncWaiterPromises ( Isolate * isolate ) ; <nl> + <nl> + static void ResolveAsyncWaiterPromise ( FutexWaitListNode * node ) ; <nl> + <nl> + static void HandleAsyncWaiterTimeout ( FutexWaitListNode * node ) ; <nl> + <nl> + static void NotifyAsyncWaiter ( FutexWaitListNode * node ) ; <nl> + <nl> + / / Remove the node ' s Promise from the NativeContext ' s Promise set . <nl> + static void CleanupAsyncWaiterPromise ( FutexWaitListNode * node ) ; <nl> + <nl> + / / Deletes | node | and returns the next node of its list . <nl> + static FutexWaitListNode * DeleteAsyncWaiterNode ( FutexWaitListNode * node ) ; <nl> <nl> / / ` mutex_ ` protects the composition of ` wait_list_ ` ( i . e . no elements may be <nl> / / added or removed without holding this mutex ) , as well as the ` waiting_ ` <nl> mmm a / src / execution / isolate . cc <nl> ppp b / src / execution / isolate . cc <nl> void Isolate : : Deinit ( ) { <nl> } <nl> # endif / / V8_OS_WIN64 <nl> <nl> + FutexEmulation : : IsolateDeinit ( this ) ; <nl> + <nl> debug ( ) - > Unload ( ) ; <nl> <nl> wasm_engine ( ) - > DeleteCompileJobsOnIsolate ( this ) ; <nl> mmm a / src / flags / flag - definitions . h <nl> ppp b / src / flags / flag - definitions . h <nl> DEFINE_IMPLICATION ( harmony_weak_refs_with_cleanup_some , harmony_weak_refs ) <nl> V ( harmony_regexp_sequence , " RegExp Unicode sequence properties " ) \ <nl> V ( harmony_weak_refs_with_cleanup_some , \ <nl> " harmony weak references with FinalizationRegistry . prototype . cleanupSome " ) \ <nl> - V ( harmony_regexp_match_indices , " harmony regexp match indices " ) <nl> + V ( harmony_regexp_match_indices , " harmony regexp match indices " ) \ <nl> + V ( harmony_atomics_waitasync , " harmony Atomics . waitAsync " ) <nl> <nl> # ifdef V8_INTL_SUPPORT <nl> # define HARMONY_INPROGRESS ( V ) \ <nl> mmm a / src / init / bootstrapper . cc <nl> ppp b / src / init / bootstrapper . cc <nl> <nl> # include " src / objects / js - segmenter . h " <nl> # endif / / V8_INTL_SUPPORT <nl> # include " src / objects / js - weak - refs . h " <nl> + # include " src / objects / ordered - hash - table . h " <nl> # include " src / objects / property - cell . h " <nl> # include " src / objects / slots - inl . h " <nl> # include " src / objects / templates . h " <nl> EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE ( <nl> <nl> # undef EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE <nl> <nl> + void Genesis : : InitializeGlobal_harmony_atomics_waitasync ( ) { <nl> + if ( ! FLAG_harmony_atomics_waitasync ) return ; <nl> + SimpleInstallFunction ( isolate ( ) , isolate ( ) - > atomics_object ( ) , " waitAsync " , <nl> + Builtins : : kAtomicsWaitAsync , 4 , true ) ; <nl> + } <nl> + <nl> void Genesis : : InitializeGlobal_harmony_sharedarraybuffer ( ) { <nl> if ( ! FLAG_harmony_sharedarraybuffer ) return ; <nl> <nl> bool Genesis : : InstallABunchOfRandomThings ( ) { <nl> map - > AppendDescriptor ( isolate ( ) , & d ) ; <nl> } <nl> } <nl> + { <nl> + Handle < OrderedHashSet > promises = <nl> + OrderedHashSet : : Allocate ( isolate ( ) , 0 ) . ToHandleChecked ( ) ; <nl> + native_context ( ) - > set_atomics_waitasync_promises ( * promises ) ; <nl> + } <nl> <nl> return true ; <nl> } <nl> mmm a / src / objects / contexts - inl . h <nl> ppp b / src / objects / contexts - inl . h <nl> <nl> # ifndef V8_OBJECTS_CONTEXTS_INL_H_ <nl> # define V8_OBJECTS_CONTEXTS_INL_H_ <nl> <nl> - # include " src / objects / contexts . h " <nl> - <nl> # include " src / heap / heap - write - barrier . h " <nl> + # include " src / objects / contexts . 
h " <nl> # include " src / objects / dictionary - inl . h " <nl> # include " src / objects / fixed - array - inl . h " <nl> # include " src / objects / js - objects - inl . h " <nl> # include " src / objects / map - inl . h " <nl> # include " src / objects / objects - inl . h " <nl> + # include " src / objects / ordered - hash - table - inl . h " <nl> # include " src / objects / osr - optimized - code - cache - inl . h " <nl> # include " src / objects / regexp - match - info . h " <nl> # include " src / objects / scope - info . h " <nl> mmm a / src / objects / contexts . h <nl> ppp b / src / objects / contexts . h <nl> <nl> <nl> # include " src / objects / fixed - array . h " <nl> # include " src / objects / function - kind . h " <nl> + # include " src / objects / ordered - hash - table . h " <nl> # include " src / objects / osr - optimized - code - cache . h " <nl> # include " torque - generated / field - offsets - tq . h " <nl> / / Has to be the last include ( doesn ' t have include guards ) : <nl> enum ContextLookupFlags { <nl> slow_object_with_object_prototype_map ) \ <nl> V ( SLOW_TEMPLATE_INSTANTIATIONS_CACHE_INDEX , SimpleNumberDictionary , \ <nl> slow_template_instantiations_cache ) \ <nl> + V ( ATOMICS_WAITASYNC_PROMISES , OrderedHashSet , atomics_waitasync_promises ) \ <nl> / * Fast Path Protectors * / \ <nl> V ( REGEXP_SPECIES_PROTECTOR_INDEX , PropertyCell , regexp_species_protector ) \ <nl> / * All * _FUNCTION_MAP_INDEX definitions used by Context : : FunctionMapIndex * / \ <nl> mmm a / src / runtime / runtime - futex . cc <nl> ppp b / src / runtime / runtime - futex . cc <nl> RUNTIME_FUNCTION ( Runtime_AtomicsNumWaitersForTesting ) { <nl> return FutexEmulation : : NumWaitersForTesting ( array_buffer , addr ) ; <nl> } <nl> <nl> + RUNTIME_FUNCTION ( Runtime_AtomicsNumAsyncWaitersForTesting ) { <nl> + DCHECK_EQ ( 0 , args . length ( ) ) ; <nl> + return FutexEmulation : : NumAsyncWaitersForTesting ( isolate ) ; <nl> + } <nl> + <nl> + RUNTIME_FUNCTION ( Runtime_AtomicsNumUnresolvedAsyncPromisesForTesting ) { <nl> + HandleScope scope ( isolate ) ; <nl> + DCHECK_EQ ( 2 , args . length ( ) ) ; <nl> + CONVERT_ARG_HANDLE_CHECKED ( JSTypedArray , sta , 0 ) ; <nl> + CONVERT_SIZE_ARG_CHECKED ( index , 1 ) ; <nl> + CHECK ( ! sta - > WasDetached ( ) ) ; <nl> + CHECK ( sta - > GetBuffer ( ) - > is_shared ( ) ) ; <nl> + CHECK_LT ( index , sta - > length ( ) ) ; <nl> + CHECK_EQ ( sta - > type ( ) , kExternalInt32Array ) ; <nl> + <nl> + Handle < JSArrayBuffer > array_buffer = sta - > GetBuffer ( ) ; <nl> + size_t addr = ( index < < 2 ) + sta - > byte_offset ( ) ; <nl> + <nl> + return FutexEmulation : : NumUnresolvedAsyncPromisesForTesting ( array_buffer , <nl> + addr ) ; <nl> + } <nl> + <nl> RUNTIME_FUNCTION ( Runtime_SetAllowAtomicsWait ) { <nl> HandleScope scope ( isolate ) ; <nl> DCHECK_EQ ( 1 , args . length ( ) ) ; <nl> mmm a / src / runtime / runtime . h <nl> ppp b / src / runtime / runtime . 
h <nl> namespace internal { <nl> F ( TransitionElementsKind , 2 , 1 ) \ <nl> F ( TransitionElementsKindWithKind , 2 , 1 ) <nl> <nl> - # define FOR_EACH_INTRINSIC_ATOMICS ( F , I ) \ <nl> - F ( AtomicsLoad64 , 2 , 1 ) \ <nl> - F ( AtomicsStore64 , 3 , 1 ) \ <nl> - F ( AtomicsAdd , 3 , 1 ) \ <nl> - F ( AtomicsAnd , 3 , 1 ) \ <nl> - F ( AtomicsCompareExchange , 4 , 1 ) \ <nl> - F ( AtomicsExchange , 3 , 1 ) \ <nl> - F ( AtomicsNumWaitersForTesting , 2 , 1 ) \ <nl> - F ( AtomicsOr , 3 , 1 ) \ <nl> - F ( AtomicsSub , 3 , 1 ) \ <nl> - F ( AtomicsXor , 3 , 1 ) \ <nl> + # define FOR_EACH_INTRINSIC_ATOMICS ( F , I ) \ <nl> + F ( AtomicsLoad64 , 2 , 1 ) \ <nl> + F ( AtomicsStore64 , 3 , 1 ) \ <nl> + F ( AtomicsAdd , 3 , 1 ) \ <nl> + F ( AtomicsAnd , 3 , 1 ) \ <nl> + F ( AtomicsCompareExchange , 4 , 1 ) \ <nl> + F ( AtomicsExchange , 3 , 1 ) \ <nl> + F ( AtomicsNumWaitersForTesting , 2 , 1 ) \ <nl> + F ( AtomicsNumAsyncWaitersForTesting , 0 , 1 ) \ <nl> + F ( AtomicsNumUnresolvedAsyncPromisesForTesting , 2 , 1 ) \ <nl> + F ( AtomicsOr , 3 , 1 ) \ <nl> + F ( AtomicsSub , 3 , 1 ) \ <nl> + F ( AtomicsXor , 3 , 1 ) \ <nl> F ( SetAllowAtomicsWait , 1 , 1 ) <nl> <nl> # define FOR_EACH_INTRINSIC_BIGINT ( F , I ) \ <nl> mmm a / src / tasks / cancelable - task . h <nl> ppp b / src / tasks / cancelable - task . h <nl> enum class TryAbortResult { kTaskRemoved , kTaskRunning , kTaskAborted } ; <nl> class V8_EXPORT_PRIVATE CancelableTaskManager { <nl> public : <nl> using Id = uint64_t ; <nl> + static constexpr Id kInvalidTaskId = 0 ; <nl> <nl> CancelableTaskManager ( ) ; <nl> <nl> class V8_EXPORT_PRIVATE CancelableTaskManager { <nl> bool canceled ( ) const { return canceled_ ; } <nl> <nl> private : <nl> - static constexpr Id kInvalidTaskId = 0 ; <nl> - <nl> / / Only called by { Cancelable } destructor . The task is done with executing , <nl> / / but needs to be removed . <nl> void RemoveFinishedTask ( Id id ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 8df56a5771f <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - 2timeout . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + load ( " test / mjsunit / harmony / atomics - waitasync - helpers . js " ) ; <nl> + <nl> + const script = ` <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + onmessage = function ( ) { <nl> + / / Create a waiter with a long timeout . <nl> + const result_slow = Atomics . waitAsync ( i32a , 0 , 0 , 200000 ) ; <nl> + / / Create a waiter with a short timeout . <nl> + const result_fast = Atomics . waitAsync ( i32a , 0 , 0 , 1 ) ; <nl> + <nl> + result_slow . value . then ( <nl> + ( value ) = > { postMessage ( " slow " + value ) ; } , <nl> + ( ) = > { postMessage ( " unexpected " ) ; } ) ; <nl> + <nl> + result_fast . value . then ( <nl> + ( value ) = > { <nl> + postMessage ( " fast " + value ) ; <nl> + / / Wake up the waiter with the long time out . <nl> + const notify_return_value = Atomics . 
notify ( i32a , 0 , 1 ) ; <nl> + postMessage ( " notify return value " + notify_return_value ) ; <nl> + } , <nl> + ( ) = > { postMessage ( " unexpected " ) ; } ) ; <nl> + } ` ; <nl> + <nl> + const expected_messages = [ <nl> + " fast timed - out " , <nl> + " notify return value 1 " , <nl> + " slow ok " <nl> + ] ; <nl> + <nl> + runTestWithWorker ( script , expected_messages ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 78a339acf04 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - buffer - out - of - scope - timeout . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - harmony - sharedarraybuffer - - harmony - atomics - waitasync - - expose - gc - - no - stress - opt <nl> + <nl> + load ( " test / mjsunit / harmony / atomics - waitasync - helpers . js " ) ; <nl> + <nl> + const script = ` <nl> + onmessage = function ( ) { <nl> + ( function ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + / / Create a waiter with a timeout . <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 , 1 ) ; <nl> + result . value . then ( <nl> + ( value ) = > { postMessage ( " result " + value ) ; } , <nl> + ( ) = > { postMessage ( " unexpected " ) ; } ) ; <nl> + } ) ( ) ; <nl> + / / Make sure sab , ia32 and result get gc ( ) d . <nl> + gc ( ) ; <nl> + <nl> + / / Even if the buffer went out of scope , we keep the waitAsync alive so that it can still time out . <nl> + let resolved = false ; <nl> + const sab2 = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a2 = new Int32Array ( sab2 ) ; <nl> + const result2 = Atomics . waitAsync ( i32a2 , 0 , 0 ) ; <nl> + result2 . value . then ( <nl> + ( value ) = > { postMessage ( " result2 " + value ) ; } , <nl> + ( ) = > { postMessage ( " unexpected " ) ; } ) ; <nl> + <nl> + const notify_return_value = Atomics . notify ( i32a2 , 0 ) ; <nl> + postMessage ( " notify return value " + notify_return_value ) ; <nl> + } ` ; <nl> + <nl> + const expected_messages = [ <nl> + " notify return value 1 " , <nl> + " result2 ok " , <nl> + " result timed - out " <nl> + ] ; <nl> + <nl> + runTestWithWorker ( script , expected_messages ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 09decbfcbb6 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - promise - out - of - scope . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync - - expose - gc <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + let resolved = false ; <nl> + ( function ( ) { <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + result . value . then ( <nl> + ( value ) = > { assertEquals ( " ok " , value ) ; resolved = true ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + } ) ( ) ; <nl> + / / Make sure result gets gc ( ) d . <nl> + gc ( ) ; <nl> + <nl> + const notify_return_value = Atomics . 
notify ( i32a , 0 , 1 ) ; <nl> + assertEquals ( 1 , notify_return_value ) ; <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 1 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + setTimeout ( ( ) = > { <nl> + assertTrue ( resolved ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + } , 0 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 98af45f73f6 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - timeout . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + load ( " test / mjsunit / harmony / atomics - waitasync - helpers . js " ) ; <nl> + <nl> + const script = ` <nl> + onmessage = function ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + / / Create a waiter with a timeout . <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 , 1 ) ; <nl> + <nl> + result . value . then ( <nl> + ( value ) = > { postMessage ( " result " + value ) ; } , <nl> + ( ) = > { postMessage ( " unexpected " ) ; } ) ; <nl> + } ` ; <nl> + <nl> + const expected_messages = [ <nl> + " result timed - out " <nl> + ] ; <nl> + <nl> + runTestWithWorker ( script , expected_messages ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 8c34e193434 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - timeouts - and - no - timeouts . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + const N = 10 ; <nl> + let log = [ ] ; <nl> + <nl> + / / Create N async waiters ; the even ones without timeout and the odd ones <nl> + / / with timeout . <nl> + for ( let i = 0 ; i < N ; + + i ) { <nl> + let result ; <nl> + if ( i % 2 = = 0 ) { <nl> + result = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + } else { <nl> + result = Atomics . waitAsync ( i32a , 0 , 0 , i ) ; <nl> + } <nl> + assertEquals ( true , result . async ) ; <nl> + result . value . then ( <nl> + ( value ) = > { log . push ( value + " " + i ) ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + } <nl> + assertEquals ( N , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + / / Wait until the timed out waiters time out . <nl> + let rounds = 10000 ; <nl> + let previous_length = 0 ; <nl> + function wait ( ) { <nl> + - - rounds ; <nl> + assertTrue ( rounds > 0 ) ; <nl> + if ( log . length > previous_length ) { <nl> + / / Made progress . Give the test more time . <nl> + previous_length = log . length ; <nl> + rounds = 10000 ; <nl> + } <nl> + if ( log . 
length < N / 2 ) { <nl> + setTimeout ( wait , 0 ) ; <nl> + } else { <nl> + continuation1 ( ) ; <nl> + } <nl> + } <nl> + setTimeout ( wait , 0 ) ; <nl> + <nl> + function continuation1 ( ) { <nl> + / / Verify that all timed out waiters timed out in FIFO order . <nl> + assertEquals ( N / 2 , log . length ) ; <nl> + let waiter_no = 1 ; <nl> + for ( let i = 0 ; i < N / 2 ; + + i ) { <nl> + assertEquals ( " timed - out " + waiter_no , log [ i ] ) ; <nl> + waiter_no + = 2 ; <nl> + } <nl> + / / Wake up all waiters <nl> + let notify_return_value = Atomics . notify ( i32a , 0 ) ; <nl> + assertEquals ( N / 2 , notify_return_value ) ; <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( N / 2 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + setTimeout ( continuation2 , 0 ) ; <nl> + } <nl> + <nl> + function continuation2 ( ) { <nl> + / / Verify that the waiters woke up in FIFO order . <nl> + assertEquals ( N , log . length ) ; <nl> + let waiter_no = 0 ; <nl> + for ( let i = N / 2 ; i < N ; + + i ) { <nl> + assertEquals ( " ok " + waiter_no , log [ i ] ) ; <nl> + waiter_no + = 2 ; <nl> + } <nl> + } <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . d0bc6452255 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - wake - up - all . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + const N = 10 ; <nl> + let log = [ ] ; <nl> + <nl> + / / Create N async waiters . <nl> + for ( let i = 0 ; i < N ; + + i ) { <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + assertEquals ( true , result . async ) ; <nl> + result . value . then ( <nl> + ( value ) = > { assertEquals ( " ok " , value ) ; log . push ( i ) ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + } <nl> + assertEquals ( N , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + / / Wake up all waiters . <nl> + let notify_return_value = Atomics . notify ( i32a , 0 ) ; <nl> + assertEquals ( N , notify_return_value ) ; <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( N , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + function continuation ( ) { <nl> + assertEquals ( N , log . length ) ; <nl> + for ( let i = 0 ; i < N ; + + i ) { <nl> + assertEquals ( i , log [ i ] ) ; <nl> + } <nl> + } <nl> + <nl> + setTimeout ( continuation , 0 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . e29c4c3eb23 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - wake - up - fifo . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . 
<nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + / / Create 2 async waiters . <nl> + const result1 = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + const result2 = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + <nl> + assertEquals ( true , result1 . async ) ; <nl> + assertEquals ( true , result2 . async ) ; <nl> + assertEquals ( 2 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + let log = [ ] ; <nl> + result1 . value . then ( <nl> + ( value ) = > { assertEquals ( " ok " , value ) ; log . push ( 1 ) ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + result2 . value . then ( <nl> + ( value ) = > { assertEquals ( " ok " , value ) ; log . push ( 2 ) ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + <nl> + / / Wake up one waiter . <nl> + const notify_return_value = Atomics . notify ( i32a , 0 , 1 ) ; <nl> + assertEquals ( 1 , notify_return_value ) ; <nl> + assertEquals ( 1 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 1 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + function continuation1 ( ) { <nl> + assertEquals ( 1 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( [ 1 ] , log ) ; <nl> + <nl> + / / Wake up one waiter . <nl> + const notify_return_value = Atomics . notify ( i32a , 0 , 1 ) ; <nl> + assertEquals ( 1 , notify_return_value ) ; <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 1 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + setTimeout ( continuation2 , 0 ) ; <nl> + } <nl> + <nl> + function continuation2 ( ) { <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( [ 1 , 2 ] , log ) ; <nl> + } <nl> + <nl> + setTimeout ( continuation1 , 0 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . b443f865174 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - 1thread - wake - up - simple . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + assertEquals ( true , result . async ) ; <nl> + assertTrue ( result . value instanceof Promise ) ; <nl> + assertEquals ( 1 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + let resolved = false ; <nl> + result . value . then ( <nl> + ( value ) = > { assertEquals ( " ok " , value ) ; resolved = true ; } , <nl> + ( ) = > { assertUnreachable ( ) ; } ) ; <nl> + const notify_return_value = Atomics . 
notify ( i32a , 0 , 1 ) ; <nl> + assertEquals ( 1 , notify_return_value ) ; <nl> + assertEquals ( 0 , % AtomicsNumWaitersForTesting ( i32a , 0 ) ) ; <nl> + assertEquals ( 1 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + <nl> + setTimeout ( ( ) = > { <nl> + assertTrue ( resolved ) ; <nl> + assertEquals ( 0 , % AtomicsNumUnresolvedAsyncPromisesForTesting ( i32a , 0 ) ) ; <nl> + } , 0 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 4fd35b4cddb <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - helpers . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + function runTestWithWorker ( script , expected_messages ) { <nl> + const w = new Worker ( script , { type : ' string ' } ) ; <nl> + w . postMessage ( ' start ' ) ; <nl> + let i = 0 ; <nl> + while ( i < expected_messages . length ) { <nl> + const m = w . getMessage ( ) ; <nl> + assertEquals ( expected_messages [ i ] , m ) ; <nl> + + + i ; <nl> + } <nl> + w . terminate ( ) ; <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . e698a6e3211 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - worker - shutdown - before - wait - finished - no - timeout . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync - - expose - gc <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + ( function createWorker ( ) { <nl> + const script = ` onmessage = function ( msg ) { <nl> + if ( msg . sab ) { <nl> + const i32a = new Int32Array ( msg . sab ) ; <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 ) ; <nl> + postMessage ( ' worker waiting ' ) ; <nl> + } <nl> + } ` ; <nl> + const w = new Worker ( script , { type : ' string ' } ) ; <nl> + w . postMessage ( { sab : sab } ) ; <nl> + const m = w . getMessage ( ) ; <nl> + assertEquals ( ' worker waiting ' , m ) ; <nl> + w . terminate ( ) ; <nl> + } ) ( ) ; <nl> + <nl> + gc ( ) ; <nl> + <nl> + Atomics . notify ( i32a , 0 , 1 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . 6db3ec77ab6 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync - worker - shutdown - before - wait - finished - timeout . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync - - expose - gc <nl> + <nl> + ( function test ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + ( function createWorker ( ) { <nl> + const script = ` onmessage = function ( msg ) { <nl> + if ( msg . sab ) { <nl> + const i32a = new Int32Array ( msg . sab ) ; <nl> + const result = Atomics . 
waitAsync ( i32a , 0 , 0 , 100000 ) ; <nl> + postMessage ( ' worker waiting ' ) ; <nl> + } <nl> + } ` ; <nl> + const w = new Worker ( script , { type : ' string ' } ) ; <nl> + w . postMessage ( { sab : sab } ) ; <nl> + const m = w . getMessage ( ) ; <nl> + assertEquals ( ' worker waiting ' , m ) ; <nl> + w . terminate ( ) ; <nl> + } ) ( ) ; <nl> + <nl> + gc ( ) ; <nl> + <nl> + Atomics . notify ( i32a , 0 , 1 ) ; <nl> + } ) ( ) ; <nl> new file mode 100644 <nl> index 00000000000 . . d5165b99c23 <nl> mmm / dev / null <nl> ppp b / test / mjsunit / harmony / atomics - waitasync . js <nl> <nl> + / / Copyright 2020 the V8 project authors . All rights reserved . <nl> + / / Use of this source code is governed by a BSD - style license that can be <nl> + / / found in the LICENSE file . <nl> + <nl> + / / Flags : - - allow - natives - syntax - - harmony - sharedarraybuffer - - harmony - atomics - waitasync <nl> + <nl> + ( function testOutOfBounds ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + assertThrows ( ( ) = > { <nl> + Atomics . waitAsync ( i32a , 20 , 0 , 1000 ) ; <nl> + } , RangeError ) ; <nl> + } ) ( ) ; <nl> + <nl> + ( function testValueNotEquals ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + const result = Atomics . waitAsync ( i32a , 0 , 1 , 1000 ) ; <nl> + assertEquals ( false , result . async ) ; <nl> + assertEquals ( " not - equal " , result . value ) ; <nl> + } ) ( ) ; <nl> + <nl> + ( function testZeroTimeout ( ) { <nl> + const sab = new SharedArrayBuffer ( 16 ) ; <nl> + const i32a = new Int32Array ( sab ) ; <nl> + <nl> + const result = Atomics . waitAsync ( i32a , 0 , 0 , 0 ) ; <nl> + assertEquals ( false , result . async ) ; <nl> + assertEquals ( " timed - out " , result . value ) ; <nl> + } ) ( ) ; <nl> mmm a / test / mjsunit / mjsunit . status <nl> ppp b / test / mjsunit / mjsunit . status <nl> <nl> # Tier down / up Wasm NativeModule in debugging is non - deterministic with <nl> # multiple isolates ( https : / / crbug . com / v8 / 10099 ) . <nl> ' wasm / tier - down - to - liftoff ' : [ SKIP ] , <nl> + <nl> + # waitAsync tests modify the global state ( across Isolates ) <nl> + ' harmony / atomics - waitasync ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - 2timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - promise - out - of - scope ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - wake - up - fifo ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - wake - up - simple ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - worker - shutdown - before - wait - finished - timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - worker - shutdown - before - wait - finished - no - timeout ' : [ SKIP ] , <nl> } ] , # ' isolates ' <nl> <nl> # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> <nl> ' regress / regress - trap - allocation - memento ' : [ SKIP ] , <nl> ' regress / regress - v8 - 9267 - * ' : [ SKIP ] , <nl> ' shared - function - tier - up - turbo ' : [ SKIP ] , <nl> + <nl> + # These tests are incompatible with stress_delay_tasks since they <nl> + # rely on setTimeout vs tasks working predictably . 
<nl> + ' harmony / atomics - waitasync - 1thread - 2timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - buffer - out - of - scope - timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - promise - out - of - scope ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - timeouts - and - no - timeouts ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - wake - up - all ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - wake - up - fifo ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - 1thread - wake - up - simple ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - worker - shutdown - before - wait - finished - no - timeout ' : [ SKIP ] , <nl> + ' harmony / atomics - waitasync - worker - shutdown - before - wait - finished - timeout ' : [ SKIP ] , <nl> } ] , # ' gc_fuzzer ' <nl> <nl> # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # <nl> <nl> ' compiler / serializer - feedback - propagation - 2 ' : [ SKIP ] , <nl> ' compiler / serializer - transition - propagation ' : [ SKIP ] , <nl> } ] , # variant = = nci <nl> - <nl> ] <nl> mmm a / test / test262 / test262 . status <nl> ppp b / test / test262 / test262 . status <nl> <nl> # https : / / bugs . chromium . org / p / v8 / issues / detail ? id = 10383 <nl> ' built - ins / RegExp / prototype / Symbol . replace / fn - invoke - args - empty - result ' : [ FAIL ] , <nl> <nl> - # http : / / crbug / v8 / 10239 <nl> - ' built - ins / Atomics / waitAsync / bad - range ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / bad - range ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / false - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / false - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / good - views ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / nan - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / negative - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / negative - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / negative - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - no - operation ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - add ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - and ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - compareExchange ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - exchange ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - or ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - store ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - sub ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / no - spurious - wakeup - on - xor ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / non - bigint64 - typedarray - throws ' : [ FAIL ] , 
<nl> - ' built - ins / Atomics / waitAsync / bigint / non - shared - bufferdata - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / not - a - typedarray - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / not - an - object - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / null - bufferdata - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / null - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / null - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / object - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / object - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / out - of - range - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / poisoned - object - for - timeout - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / poisoned - object - for - timeout - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - index - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - timeout - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - timeout - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - value - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / symbol - for - value - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / true - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / true - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / undefined - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / undefined - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / undefined - index - defaults - to - zero - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / value - not - equal ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / value - not - equal - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / waiterlist - block - indexedposition - wake ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / bigint / was - woken - before - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / descriptor ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / false - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / is - function ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / length ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / name ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / nan - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / negative - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / negative - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / non - int32 - typedarray - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / non - shared - bufferdata - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / not - a - typedarray - throws ' : [ FAIL ] , <nl> - ' built - ins / 
Atomics / waitAsync / not - an - object - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / null - bufferdata - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / null - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / object - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / out - of - range - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / poisoned - object - for - timeout - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / poisoned - object - for - timeout - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / returns - result - object - value - is - promise - resolves - to - ok ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / returns - result - object - value - is - promise - resolves - to - timed - out ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / returns - result - object - value - is - string - not - equal ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / returns - result - object - value - is - string - timed - out ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - index - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - index - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - timeout - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - timeout - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - value - throws ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / symbol - for - value - throws - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / true - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / undefined - for - timeout - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / undefined - for - timeout ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / undefined - index - defaults - to - zero - agent ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / validate - arraytype - before - index - coercion ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / validate - arraytype - before - timeout - coercion ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / validate - arraytype - before - value - coercion ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / value - not - equal ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / waiterlist - block - indexedposition - wake ' : [ FAIL ] , <nl> - ' built - ins / Atomics / waitAsync / was - woken - before - timeout ' : [ FAIL ] , <nl> - <nl> - # SKIP the following TIMEOUT tests instead of FAIL <nl> - ' built - ins / Atomics / waitAsync / false - for - timeout - agent ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / good - views ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / negative - timeout - agent ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - no - operation ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - add ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - and ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - compareExchange ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - exchange ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup 
- on - or ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - store ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - sub ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / no - spurious - wakeup - on - xor ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / null - for - timeout - agent ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / object - for - timeout - agent ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / true - for - timeout - agent ' : [ SKIP ] , <nl> - ' built - ins / Atomics / waitAsync / value - not - equal - agent ' : [ SKIP ] , <nl> - <nl> # https : / / crbug . com / v8 / 10687 <nl> ' built - ins / Atomics / add / bigint / non - shared - bufferdata ' : [ FAIL ] , <nl> ' built - ins / Atomics / add / non - shared - bufferdata ' : [ FAIL ] , <nl> mmm a / test / test262 / testcfg . py <nl> ppp b / test / test262 / testcfg . py <nl> <nl> ' AggregateError ' : ' - - harmony - promise - any ' , <nl> ' logical - assignment - operators ' : ' - - harmony - logical - assignment ' , <nl> ' Promise . any ' : ' - - harmony - promise - any ' , <nl> + ' Atomics . waitAsync ' : ' - - harmony - atomics - waitasync ' , <nl> } <nl> <nl> SKIPPED_FEATURES = set ( [ ] ) <nl> | Reland2 : [ Atomics . waitAsync ] Implement Atomics . waitAsync | v8/v8 | 96c5916dd46aa9a81b40cbc8d5bc46765b98b303 | 2020-07-21T06:43:48Z |
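For context on the feature exercised by the test262 status entries above: `Atomics.waitAsync` is the asynchronous counterpart of `Atomics.wait`, and in this V8 revision it is gated behind the `--harmony-atomics-waitasync` flag wired up in `testcfg.py`. A minimal usage sketch in plain JavaScript (the buffer size, index, timeout and logging below are illustrative, not taken from the tests):

```js
// Requires SharedArrayBuffer support; in this V8 revision also --harmony-atomics-waitasync.
const sab = new SharedArrayBuffer(4);
const i32 = new Int32Array(sab);

// Start an asynchronous wait on index 0 while its value is still 0 (1000 ms timeout).
const result = Atomics.waitAsync(i32, 0, 0, 1000);

if (result.async) {
  // result.value is a Promise resolving to "ok" (woken) or "timed-out".
  result.value.then(outcome => console.log("waitAsync finished:", outcome));
} else {
  // Synchronous outcome: "not-equal" (value already differs) or "timed-out" (zero timeout).
  console.log("waitAsync returned synchronously:", result.value);
}

// Another agent (or a later task on this one) updates the slot and wakes one waiter.
Atomics.store(i32, 0, 1);
Atomics.notify(i32, 0, 1);
```

The failing and skipped test names above (`returns-result-object-value-is-promise-resolves-to-ok`, `value-not-equal`, the `no-spurious-wakeup-on-*` group, and so on) correspond to these result shapes.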
new file mode 100644 <nl> index 000000000000 . . eba2af05746b <nl> mmm / dev / null <nl> ppp b / cocos / scripting / js - bindings / manual / experimental / jsb_cocos2dx_experimental_manual . cpp <nl> <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + Copyright ( c ) 2008 - 2010 Ricardo Quesada <nl> + Copyright ( c ) 2010 - 2012 cocos2d - x . org <nl> + Copyright ( c ) 2011 Zynga Inc . <nl> + Copyright ( c ) 2013 - 2014 Chukong Technologies Inc . <nl> + <nl> + http : / / www . cocos2d - x . org <nl> + <nl> + Permission is hereby granted , free of charge , to any person obtaining a copy <nl> + of this software and associated documentation files ( the " Software " ) , to deal <nl> + in the Software without restriction , including without limitation the rights <nl> + to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> + copies of the Software , and to permit persons to whom the Software is <nl> + furnished to do so , subject to the following conditions : <nl> + <nl> + The above copyright notice and this permission notice shall be included in <nl> + all copies or substantial portions of the Software . <nl> + <nl> + THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> + IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> + LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> + THE SOFTWARE . <nl> + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + <nl> + # include " jsb_cocos2dx_experimental_manual . h " <nl> + # include " cocos2d_specifics . hpp " <nl> + # include " 2d / SpritePolygon . h " <nl> + <nl> + bool jsval_to_vector_vec2 ( JSContext * cx , JS : : HandleValue v , std : : vector < cocos2d : : Vec2 > * ret ) <nl> + { <nl> + JS : : RootedObject jsArr ( cx ) ; <nl> + bool ok = v . isObject ( ) & & JS_ValueToObject ( cx , v , & jsArr ) ; <nl> + JSB_PRECONDITION3 ( ok , cx , false , " Error converting value to object " ) ; <nl> + JSB_PRECONDITION3 ( jsArr & & JS_IsArrayObject ( cx , jsArr ) , cx , false , " Object must be an array " ) ; <nl> + <nl> + uint32_t len = 0 ; <nl> + JS_GetArrayLength ( cx , jsArr , & len ) ; <nl> + ret - > reserve ( len ) ; <nl> + <nl> + for ( uint32_t i = 0 ; i < len ; i + + ) <nl> + { <nl> + JS : : RootedValue value ( cx ) ; <nl> + if ( JS_GetElement ( cx , jsArr , i , & value ) ) <nl> + { <nl> + cocos2d : : Vec2 vec2 ; <nl> + ok & = jsval_to_vector2 ( cx , value , & vec2 ) ; <nl> + ret - > push_back ( vec2 ) ; <nl> + } <nl> + } <nl> + return ok ; <nl> + } <nl> + <nl> + bool jsval_to_vector_unsigned_short ( JSContext * cx , JS : : HandleValue v , std : : vector < unsigned short > * ret ) <nl> + { <nl> + JS : : RootedObject jsArr ( cx ) ; <nl> + bool ok = v . 
isObject ( ) & & JS_ValueToObject ( cx , v , & jsArr ) ; <nl> + JSB_PRECONDITION3 ( ok , cx , false , " Error converting value to object " ) ; <nl> + JSB_PRECONDITION3 ( jsArr & & JS_IsArrayObject ( cx , jsArr ) , cx , false , " Object must be an array " ) ; <nl> + <nl> + uint32_t len = 0 ; <nl> + JS_GetArrayLength ( cx , jsArr , & len ) ; <nl> + ret - > reserve ( len ) ; <nl> + <nl> + for ( uint32_t i = 0 ; i < len ; i + + ) <nl> + { <nl> + JS : : RootedValue value ( cx ) ; <nl> + if ( JS_GetElement ( cx , jsArr , i , & value ) ) <nl> + { <nl> + unsigned short index ; <nl> + ok & = jsval_to_uint16 ( cx , value , & index ) ; <nl> + ret - > push_back ( index ) ; <nl> + } <nl> + } <nl> + return ok ; <nl> + } <nl> + <nl> + bool jsval_to_cctex2f ( JSContext * cx , JS : : HandleValue vp , cocos2d : : Tex2F * ret ) <nl> + { <nl> + JS : : RootedObject tmp ( cx ) ; <nl> + JS : : RootedValue jsx ( cx ) ; <nl> + JS : : RootedValue jsy ( cx ) ; <nl> + double x , y ; <nl> + bool ok = vp . isObject ( ) & & <nl> + JS_ValueToObject ( cx , vp , & tmp ) & & <nl> + JS_GetProperty ( cx , tmp , " x " , & jsx ) & & <nl> + JS_GetProperty ( cx , tmp , " y " , & jsy ) & & <nl> + JS : : ToNumber ( cx , jsx , & x ) & & <nl> + JS : : ToNumber ( cx , jsy , & y ) & & <nl> + ! isnan ( x ) & & ! isnan ( y ) ; <nl> + <nl> + JSB_PRECONDITION3 ( ok , cx , false , " Error processing arguments " ) ; <nl> + <nl> + ret - > u = ( GLfloat ) x ; <nl> + ret - > v = ( GLfloat ) y ; <nl> + return true ; <nl> + } <nl> + <nl> + bool jsval_to_v3fc4bt2f ( JSContext * cx , JS : : HandleValue v , cocos2d : : V3F_C4B_T2F * ret ) <nl> + { <nl> + JS : : RootedObject object ( cx , v . toObjectOrNull ( ) ) ; <nl> + <nl> + cocos2d : : Vec3 v3 ; <nl> + cocos2d : : Color4B color ; <nl> + cocos2d : : Tex2F t2 ; <nl> + <nl> + JS : : RootedValue jsv3 ( cx ) ; <nl> + JS : : RootedValue jscolor ( cx ) ; <nl> + JS : : RootedValue jst2 ( cx ) ; <nl> + <nl> + bool ok = JS_GetProperty ( cx , object , " v3f " , & jsv3 ) & & <nl> + JS_GetProperty ( cx , object , " c4b " , & jscolor ) & & <nl> + JS_GetProperty ( cx , object , " t2f " , & jst2 ) & & <nl> + jsval_to_vector3 ( cx , jsv3 , & v3 ) & & <nl> + jsval_to_cccolor4b ( cx , jscolor , & color ) & & <nl> + jsval_to_cctex2f ( cx , jst2 , & t2 ) ; <nl> + <nl> + JSB_PRECONDITION3 ( ok , cx , false , " Error processing arguments " ) ; <nl> + <nl> + ret - > vertices = v3 ; <nl> + ret - > colors = color ; <nl> + ret - > texCoords = t2 ; <nl> + return true ; <nl> + } <nl> + <nl> + bool jsval_to_vector_v3fc4bt2f ( JSContext * cx , JS : : HandleValue v , std : : vector < cocos2d : : V3F_C4B_T2F > * ret ) <nl> + { <nl> + JS : : RootedObject jsArr ( cx ) ; <nl> + bool ok = v . 
isObject ( ) & & JS_ValueToObject ( cx , v , & jsArr ) ; <nl> + JSB_PRECONDITION3 ( ok , cx , false , " Error converting value to object " ) ; <nl> + JSB_PRECONDITION3 ( jsArr & & JS_IsArrayObject ( cx , jsArr ) , cx , false , " Object must be an array " ) ; <nl> + <nl> + uint32_t len = 0 ; <nl> + JS_GetArrayLength ( cx , jsArr , & len ) ; <nl> + ret - > reserve ( len ) ; <nl> + <nl> + for ( uint32_t i = 0 ; i < len ; i + + ) <nl> + { <nl> + JS : : RootedValue value ( cx ) ; <nl> + if ( JS_GetElement ( cx , jsArr , i , & value ) ) <nl> + { <nl> + cocos2d : : V3F_C4B_T2F vert ; <nl> + ok & = jsval_to_v3fc4bt2f ( cx , value , & vert ) ; <nl> + ret - > push_back ( vert ) ; <nl> + } <nl> + } <nl> + return ok ; <nl> + } <nl> + <nl> + bool js_cocos2dx_SpritePolygon_create ( JSContext * cx , uint32_t argc , jsval * vp ) <nl> + { <nl> + if ( argc = = 1 ) <nl> + { <nl> + JS : : CallArgs args = JS : : CallArgsFromVp ( argc , vp ) ; <nl> + std : : string file ; <nl> + bool ok = jsval_to_std_string ( cx , args . get ( 0 ) , & file ) ; <nl> + JSB_PRECONDITION2 ( ok , cx , false , " js_cocos2dx_SpritePolygon_create : Error processing arguments " ) ; <nl> + <nl> + auto sprite = cocos2d : : experimental : : SpritePolygon : : create ( file ) ; <nl> + js_proxy_t * jsProxy = js_get_or_create_proxy < cocos2d : : experimental : : SpritePolygon > ( cx , sprite ) ; <nl> + JS : : RootedValue ret ( cx , OBJECT_TO_JSVAL ( jsProxy - > obj ) ) ; <nl> + <nl> + args . rval ( ) . set ( ret ) ; <nl> + return true ; <nl> + } <nl> + <nl> + <nl> + if ( argc = = 2 ) <nl> + { <nl> + JS : : CallArgs args = JS : : CallArgsFromVp ( argc , vp ) ; <nl> + std : : string file ; <nl> + std : : vector < cocos2d : : Vec2 > verts ; <nl> + bool ok = jsval_to_std_string ( cx , args . get ( 0 ) , & file ) ; <nl> + ok & = jsval_to_vector_vec2 ( cx , args . get ( 1 ) , & verts ) ; <nl> + <nl> + JSB_PRECONDITION2 ( ok , cx , false , " js_cocos2dx_SpritePolygon_create : Error processing arguments " ) ; <nl> + <nl> + auto sprite = cocos2d : : experimental : : SpritePolygon : : create ( file , verts ) ; <nl> + js_proxy_t * jsProxy = js_get_or_create_proxy < cocos2d : : experimental : : SpritePolygon > ( cx , sprite ) ; <nl> + JS : : RootedValue ret ( cx , OBJECT_TO_JSVAL ( jsProxy - > obj ) ) ; <nl> + <nl> + args . rval ( ) . set ( ret ) ; <nl> + return true ; <nl> + <nl> + } <nl> + <nl> + if ( argc = = 3 ) <nl> + { <nl> + do <nl> + { <nl> + JS : : CallArgs args = JS : : CallArgsFromVp ( argc , vp ) ; <nl> + std : : string file ; <nl> + std : : vector < cocos2d : : Vec2 > verts ; <nl> + std : : vector < unsigned short > indices ; <nl> + <nl> + bool ok = jsval_to_std_string ( cx , args . get ( 0 ) , & file ) ; <nl> + ok & = jsval_to_vector_vec2 ( cx , args . get ( 1 ) , & verts ) ; <nl> + ok & = jsval_to_vector_unsigned_short ( cx , args . get ( 2 ) , & indices ) ; <nl> + <nl> + if ( ! ok ) <nl> + break ; <nl> + <nl> + auto sprite = cocos2d : : experimental : : SpritePolygon : : create ( file , verts , indices ) ; <nl> + js_proxy_t * jsProxy = js_get_or_create_proxy < cocos2d : : experimental : : SpritePolygon > ( cx , sprite ) ; <nl> + JS : : RootedValue ret ( cx , OBJECT_TO_JSVAL ( jsProxy - > obj ) ) ; <nl> + <nl> + args . rval ( ) . 
set ( ret ) ; <nl> + return true ; <nl> + } while ( 0 ) ; <nl> + <nl> + do <nl> + { <nl> + JS : : CallArgs args = JS : : CallArgsFromVp ( argc , vp ) ; <nl> + std : : string file ; <nl> + std : : vector < cocos2d : : V3F_C4B_T2F > verts ; <nl> + std : : vector < unsigned short > indices ; <nl> + <nl> + bool ok = jsval_to_std_string ( cx , args . get ( 0 ) , & file ) ; <nl> + ok & = jsval_to_vector_v3fc4bt2f ( cx , args . get ( 1 ) , & verts ) ; <nl> + ok & = jsval_to_vector_unsigned_short ( cx , args . get ( 2 ) , & indices ) ; <nl> + <nl> + if ( ! ok ) <nl> + break ; <nl> + <nl> + auto sprite = cocos2d : : experimental : : SpritePolygon : : create ( file , verts , indices ) ; <nl> + js_proxy_t * jsProxy = js_get_or_create_proxy < cocos2d : : experimental : : SpritePolygon > ( cx , sprite ) ; <nl> + JS : : RootedValue ret ( cx , OBJECT_TO_JSVAL ( jsProxy - > obj ) ) ; <nl> + <nl> + args . rval ( ) . set ( ret ) ; <nl> + return true ; <nl> + } while ( 0 ) ; <nl> + <nl> + JS_ReportError ( cx , " js_cocos2dx_SpritePolygon_create : Error processing arguments " ) ; <nl> + } <nl> + <nl> + JS_ReportError ( cx , " js_cocos2dx_SpritePolygon_create : wrong number of arguments : % d " , argc ) ; <nl> + return false ; <nl> + } <nl> + <nl> + void register_all_cocos2dx_experimental_manual ( JSContext * cx , JS : : HandleObject global ) <nl> + { <nl> + <nl> + JS : : RootedObject ccObj ( cx ) ; <nl> + get_or_create_js_obj ( cx , global , " ccexp " , & ccObj ) ; <nl> + <nl> + JS : : RootedValue tmpVal ( cx ) ; <nl> + JS_GetProperty ( cx , ccObj , " SpritePolygon " , & tmpVal ) ; <nl> + JS : : RootedObject tmpObj ( cx , tmpVal . toObjectOrNull ( ) ) ; <nl> + JS_DefineFunction ( cx , tmpObj , " create " , js_cocos2dx_SpritePolygon_create , 1 , JSPROP_READONLY | JSPROP_PERMANENT ) ; <nl> + <nl> + } <nl> \ No newline at end of file <nl> new file mode 100644 <nl> index 000000000000 . . d75484d69708 <nl> mmm / dev / null <nl> ppp b / cocos / scripting / js - bindings / manual / experimental / jsb_cocos2dx_experimental_manual . h <nl> <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + Copyright ( c ) 2008 - 2010 Ricardo Quesada <nl> + Copyright ( c ) 2010 - 2012 cocos2d - x . org <nl> + Copyright ( c ) 2011 Zynga Inc . <nl> + Copyright ( c ) 2013 - 2014 Chukong Technologies Inc . <nl> + <nl> + http : / / www . cocos2d - x . org <nl> + <nl> + Permission is hereby granted , free of charge , to any person obtaining a copy <nl> + of this software and associated documentation files ( the " Software " ) , to deal <nl> + in the Software without restriction , including without limitation the rights <nl> + to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> + copies of the Software , and to permit persons to whom the Software is <nl> + furnished to do so , subject to the following conditions : <nl> + <nl> + The above copyright notice and this permission notice shall be included in <nl> + all copies or substantial portions of the Software . <nl> + <nl> + THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> + IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . 
IN NO EVENT SHALL THE <nl> + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> + LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> + THE SOFTWARE . <nl> + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + # ifndef __jsb_cococs2dx_experimental_h__ <nl> + # define __jsb_cococs2dx_experimental_h__ <nl> + <nl> + <nl> + # include " jsapi . h " <nl> + <nl> + void register_all_cocos2dx_experimental_manual ( JSContext * cx , JS : : HandleObject global ) ; <nl> + <nl> + # endif <nl> \ No newline at end of file <nl> mmm a / cocos / scripting / js - bindings / manual / js_manual_conversions . cpp <nl> ppp b / cocos / scripting / js - bindings / manual / js_manual_conversions . cpp <nl> bool jsval_to_vector2 ( JSContext * cx , JS : : HandleValue vp , cocos2d : : Vec2 * ret ) <nl> JS_GetProperty ( cx , tmp , " x " , & jsx ) & & <nl> JS_GetProperty ( cx , tmp , " y " , & jsy ) & & <nl> JS : : ToNumber ( cx , jsx , & x ) & & <nl> - JS : : ToNumber ( cx , jsy , & y ) ; <nl> + JS : : ToNumber ( cx , jsy , & y ) & & <nl> + ! isnan ( x ) & & ! isnan ( y ) ; <nl> <nl> JSB_PRECONDITION3 ( ok , cx , false , " Error processing arguments " ) ; <nl> <nl> mmm a / cocos / scripting / js - bindings / proj . ios_mac / cocos2d_js_bindings . xcodeproj / project . pbxproj <nl> ppp b / cocos / scripting / js - bindings / proj . ios_mac / cocos2d_js_bindings . xcodeproj / project . pbxproj <nl> <nl> 420BBCF81AA48EE900493976 / * jsb_cocos2dx_3d_manual . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 420BBCF51AA48EE900493976 / * jsb_cocos2dx_3d_manual . cpp * / ; } ; <nl> 420BBCF91AA48EE900493976 / * jsb_cocos2dx_3d_manual . h in Headers * / = { isa = PBXBuildFile ; fileRef = 420BBCF61AA48EE900493976 / * jsb_cocos2dx_3d_manual . h * / ; } ; <nl> 420BBCFA1AA48EE900493976 / * jsb_cocos2dx_3d_manual . h in Headers * / = { isa = PBXBuildFile ; fileRef = 420BBCF61AA48EE900493976 / * jsb_cocos2dx_3d_manual . h * / ; } ; <nl> + 42AD256C1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 42AD256A1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp * / ; } ; <nl> + 42AD256D1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 42AD256A1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp * / ; } ; <nl> + 42AD256E1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp in Headers * / = { isa = PBXBuildFile ; fileRef = 42AD256B1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp * / ; } ; <nl> + 42AD256F1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp in Headers * / = { isa = PBXBuildFile ; fileRef = 42AD256B1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp * / ; } ; <nl> + 42AD25731AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 42AD25711AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp * / ; } ; <nl> + 42AD25741AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 42AD25711AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp * / ; } ; <nl> + 42AD25751AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h in Headers * / = { isa = PBXBuildFile ; fileRef = 42AD25721AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . 
h * / ; } ; <nl> + 42AD25761AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h in Headers * / = { isa = PBXBuildFile ; fileRef = 42AD25721AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h * / ; } ; <nl> 83A5661918DA878400FC31A0 / * jsb_socketio . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 83A5661718DA878400FC31A0 / * jsb_socketio . cpp * / ; } ; <nl> 83A5661A18DA878400FC31A0 / * jsb_socketio . cpp in Sources * / = { isa = PBXBuildFile ; fileRef = 83A5661718DA878400FC31A0 / * jsb_socketio . cpp * / ; } ; <nl> 83A5661B18DA878400FC31A0 / * jsb_socketio . h in Headers * / = { isa = PBXBuildFile ; fileRef = 83A5661818DA878400FC31A0 / * jsb_socketio . h * / ; } ; <nl> <nl> 420BBCEF1AA48EDE00493976 / * jsb_cocos2dx_3d_auto . hpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . h ; path = jsb_cocos2dx_3d_auto . hpp ; sourceTree = " < group > " ; } ; <nl> 420BBCF51AA48EE900493976 / * jsb_cocos2dx_3d_manual . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = jsb_cocos2dx_3d_manual . cpp ; sourceTree = " < group > " ; } ; <nl> 420BBCF61AA48EE900493976 / * jsb_cocos2dx_3d_manual . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = jsb_cocos2dx_3d_manual . h ; sourceTree = " < group > " ; } ; <nl> + 42AD256A1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = jsb_cocos2dx_experimental . cpp ; sourceTree = " < group > " ; } ; <nl> + 42AD256B1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . h ; path = jsb_cocos2dx_experimental . hpp ; sourceTree = " < group > " ; } ; <nl> + 42AD25711AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; name = jsb_cocos2dx_experimental_manual . cpp ; path = experimental / jsb_cocos2dx_experimental_manual . cpp ; sourceTree = " < group > " ; } ; <nl> + 42AD25721AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; name = jsb_cocos2dx_experimental_manual . h ; path = experimental / jsb_cocos2dx_experimental_manual . h ; sourceTree = " < group > " ; } ; <nl> 83A5661718DA878400FC31A0 / * jsb_socketio . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = jsb_socketio . cpp ; sourceTree = " < group > " ; } ; <nl> 83A5661818DA878400FC31A0 / * jsb_socketio . h * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . c . h ; path = jsb_socketio . h ; sourceTree = " < group > " ; } ; <nl> BA4095C01A6F730A005E53F6 / * jsb_cocos2dx_studio_conversions . cpp * / = { isa = PBXFileReference ; fileEncoding = 4 ; lastKnownFileType = sourcecode . cpp . cpp ; path = jsb_cocos2dx_studio_conversions . cpp ; sourceTree = " < group > " ; } ; <nl> <nl> 1A119E2E18BDF19200352BAA / * auto * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> + 42AD256A1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp * / , <nl> + 42AD256B1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp * / , <nl> 420BBCEE1AA48EDE00493976 / * jsb_cocos2dx_3d_auto . cpp * / , <nl> 420BBCEF1AA48EDE00493976 / * jsb_cocos2dx_3d_auto . 
hpp * / , <nl> BAEE4D6F1AC3FFAD003BEB0F / * jsb_cocos2dx_3d_extension_auto . cpp * / , <nl> <nl> 1A119E4118BDF19200352BAA / * manual * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> + 42AD25701AFF9DFC000176E2 / * experimental * / , <nl> 420BBCF41AA48EE900493976 / * 3d * / , <nl> 0541A74C1973876100E45470 / * ios * / , <nl> 1A119E4218BDF19200352BAA / * chipmunk * / , <nl> <nl> path = 3d ; <nl> sourceTree = " < group > " ; <nl> } ; <nl> + 42AD25701AFF9DFC000176E2 / * experimental * / = { <nl> + isa = PBXGroup ; <nl> + children = ( <nl> + 42AD25711AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp * / , <nl> + 42AD25721AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h * / , <nl> + ) ; <nl> + name = experimental ; <nl> + sourceTree = " < group > " ; <nl> + } ; <nl> BA623DFB191A192700761F37 / * pluginx * / = { <nl> isa = PBXGroup ; <nl> children = ( <nl> <nl> 1A119EEF18BDF19200352BAA / * js_bindings_system_functions . h in Headers * / , <nl> 1A119EDD18BDF19200352BAA / * js_manual_conversions . h in Headers * / , <nl> 1A119EFD18BDF19200352BAA / * XMLHTTPRequest . h in Headers * / , <nl> + 42AD25751AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h in Headers * / , <nl> 1AB5E63518D05BF30088DAA4 / * jsb_cocos2dx_ui_manual . h in Headers * / , <nl> 1A119EE318BDF19200352BAA / * jsb_opengl_functions . h in Headers * / , <nl> + 42AD256E1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp in Headers * / , <nl> 0541A74F1973876100E45470 / * JavaScriptObjCBridge . h in Headers * / , <nl> 1AB5E62D18D05BC80088DAA4 / * jsb_cocos2dx_ui_auto . hpp in Headers * / , <nl> 420BBCF91AA48EE900493976 / * jsb_cocos2dx_3d_manual . h in Headers * / , <nl> <nl> 1A119EB618BDF19200352BAA / * js_bindings_chipmunk_manual . h in Headers * / , <nl> 1A119EFA18BDF19200352BAA / * jsb_websocket . h in Headers * / , <nl> 1A119E9218BDF19200352BAA / * jsb_cocos2dx_extension_auto . hpp in Headers * / , <nl> + 42AD25761AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . h in Headers * / , <nl> 1A119E8C18BDF19200352BAA / * jsb_cocos2dx_builder_auto . hpp in Headers * / , <nl> BA623E19191A196F00761F37 / * jsb_cocos2dx_pluginx_auto . hpp in Headers * / , <nl> 1A119EAA18BDF19200352BAA / * js_bindings_chipmunk_auto_classes . h in Headers * / , <nl> <nl> 1A119ECC18BDF19200352BAA / * jsb_cocos2dx_extension_manual . h in Headers * / , <nl> 1A119EF018BDF19200352BAA / * js_bindings_system_functions . h in Headers * / , <nl> 1A119EDE18BDF19200352BAA / * js_manual_conversions . h in Headers * / , <nl> + 42AD256F1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . hpp in Headers * / , <nl> 1A119EFE18BDF19200352BAA / * XMLHTTPRequest . h in Headers * / , <nl> 1AB5E63618D05BF30088DAA4 / * jsb_cocos2dx_ui_manual . h in Headers * / , <nl> 1A119EE418BDF19200352BAA / * jsb_opengl_functions . h in Headers * / , <nl> <nl> 0541A7501973876100E45470 / * JavaScriptObjCBridge . mm in Sources * / , <nl> 1A119EB318BDF19200352BAA / * js_bindings_chipmunk_manual . cpp in Sources * / , <nl> 1A119EE918BDF19200352BAA / * jsb_opengl_registration . cpp in Sources * / , <nl> + 42AD25731AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp in Sources * / , <nl> 1A119EF718BDF19200352BAA / * jsb_websocket . cpp in Sources * / , <nl> 1A119ED318BDF19200352BAA / * js_bindings_core . cpp in Sources * / , <nl> 1A119EC118BDF19200352BAA / * js_bindings_ccbreader . cpp in Sources * / , <nl> 1A119EFB18BDF19200352BAA / * XMLHTTPRequest . cpp in Sources * / , <nl> BAEE4D711AC3FFAD003BEB0F / * jsb_cocos2dx_3d_extension_auto . 
cpp in Sources * / , <nl> + 42AD256C1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp in Sources * / , <nl> 1A119EC518BDF19200352BAA / * jsb_cocos2dx_studio_manual . cpp in Sources * / , <nl> 1A119E8F18BDF19200352BAA / * jsb_cocos2dx_extension_auto . cpp in Sources * / , <nl> 1A119ED718BDF19200352BAA / * js_bindings_opengl . cpp in Sources * / , <nl> <nl> 1AB5E62C18D05BC80088DAA4 / * jsb_cocos2dx_ui_auto . cpp in Sources * / , <nl> 1A119E9C18BDF19200352BAA / * jsb_cocos2dx_spine_auto . cpp in Sources * / , <nl> 1A119EF418BDF19200352BAA / * js_bindings_system_registration . cpp in Sources * / , <nl> + 42AD256D1AFF9D1A000176E2 / * jsb_cocos2dx_experimental . cpp in Sources * / , <nl> 1A119EBC18BDF19200352BAA / * cocos2d_specifics . cpp in Sources * / , <nl> 1A119EE618BDF19200352BAA / * jsb_opengl_manual . cpp in Sources * / , <nl> 1A119F0618BDF19200352BAA / * jsb_cocos2dx_spine_manual . cpp in Sources * / , <nl> BA623E0F191A195F00761F37 / * jsb_pluginx_manual_protocols . cpp in Sources * / , <nl> + 42AD25741AFF9E17000176E2 / * jsb_cocos2dx_experimental_manual . cpp in Sources * / , <nl> BA623E13191A195F00761F37 / * pluginxUTF8 . cpp in Sources * / , <nl> BAEE4D721AC3FFAD003BEB0F / * jsb_cocos2dx_3d_extension_auto . cpp in Sources * / , <nl> 1A119EB818BDF19200352BAA / * js_bindings_chipmunk_registration . cpp in Sources * / , <nl> mmm a / tests / js - tests / project . json <nl> ppp b / tests / js - tests / project . json <nl> <nl> " src / ChipmunkTest / ChipmunkTest . js " , <nl> <nl> " src / Presentation / Presentation . js " , <nl> - " src / ReflectionTest / ReflectionTest . js " <nl> + " src / ReflectionTest / ReflectionTest . js " , <nl> + <nl> + " src / SpritePolygonTest / SpritePolygonTest . js " <nl> ] <nl> } <nl> mmm a / tests / js - tests / project / Classes / AppDelegate . cpp <nl> ppp b / tests / js - tests / project / Classes / AppDelegate . cpp <nl> <nl> # include " jsb_cocos2dx_spine_auto . hpp " <nl> # include " jsb_cocos2dx_3d_auto . hpp " <nl> # include " jsb_cocos2dx_3d_extension_auto . hpp " <nl> + # include " jsb_cocos2dx_experimental . hpp " <nl> + # include " experimental / jsb_cocos2dx_experimental_manual . h " <nl> # include " 3d / jsb_cocos2dx_3d_manual . h " <nl> # include " extension / jsb_cocos2dx_extension_manual . h " <nl> # include " cocostudio / jsb_cocos2dx_studio_manual . h " <nl> bool AppDelegate : : applicationDidFinishLaunching ( ) <nl> <nl> sc - > addRegisterCallback ( register_all_cocos2dx_3d_extension ) ; <nl> <nl> + sc - > addRegisterCallback ( register_all_cocos2dx_experimental ) ; <nl> + sc - > addRegisterCallback ( register_all_cocos2dx_experimental_manual ) ; <nl> + <nl> # if ( CC_TARGET_PLATFORM = = CC_PLATFORM_ANDROID ) <nl> sc - > addRegisterCallback ( JavascriptJavaBridge : : _js_register ) ; <nl> # elif ( CC_TARGET_PLATFORM = = CC_PLATFORM_IOS | | CC_TARGET_PLATFORM = = CC_PLATFORM_MAC ) <nl> new file mode 100644 <nl> index 000000000000 . . 611019eacba4 <nl> mmm / dev / null <nl> ppp b / tests / js - tests / src / SpritePolygonTest / SpritePolygonTest . js <nl> <nl> + / * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * <nl> + Copyright ( c ) 2008 - 2010 Ricardo Quesada <nl> + Copyright ( c ) 2011 - 2012 cocos2d - x . org <nl> + Copyright ( c ) 2013 - 2014 Chukong Technologies Inc . <nl> + <nl> + http : / / www . cocos2d - x . 
org <nl> + <nl> + Permission is hereby granted , free of charge , to any person obtaining a copy <nl> + of this software and associated documentation files ( the " Software " ) , to deal <nl> + in the Software without restriction , including without limitation the rights <nl> + to use , copy , modify , merge , publish , distribute , sublicense , and / or sell <nl> + copies of the Software , and to permit persons to whom the Software is <nl> + furnished to do so , subject to the following conditions : <nl> + <nl> + The above copyright notice and this permission notice shall be included in <nl> + all copies or substantial portions of the Software . <nl> + <nl> + THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR <nl> + IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , <nl> + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE <nl> + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER <nl> + LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , <nl> + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN <nl> + THE SOFTWARE . <nl> + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * / <nl> + var SpritePolygonTestIdx = - 1 ; <nl> + <nl> + var SpritePolygonTestDemo = BaseTestLayer . extend ( { <nl> + _title : " " , <nl> + _subtitle : " " , <nl> + _debugDraw : null , <nl> + <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + } , <nl> + <nl> + initDefaultSprite : function ( filename , inst ) { <nl> + cc . director . setClearColor ( cc . color ( 102 / 255 , 184 / 255 , 204 / 255 , 255 / 255 ) ) ; <nl> + this . addChild ( inst ) ; <nl> + <nl> + var s = cc . director . getWinSize ( ) ; <nl> + inst . setPosition ( s . width / 2 + 0 . 15 * s . width , s . height / 2 ) ; <nl> + <nl> + var sp = new cc . Sprite ( filename ) ; <nl> + this . addChild ( sp ) ; <nl> + sp . setPosition ( s . width / 2 - 0 . 15 * s . width , s . height / 2 ) ; <nl> + <nl> + this . _debugDraw = new cc . DrawNode ( ) ; <nl> + sp . addChild ( this . _debugDraw ) ; <nl> + <nl> + var self = this ; <nl> + cc . eventManager . addListener ( { <nl> + event : cc . EventListener . TOUCH_ONE_BY_ONE , <nl> + onTouchBegan : function ( ) { <nl> + inst . showDebug ( true ) ; <nl> + self . _debugDraw . setVisible ( true ) ; <nl> + return true ; <nl> + } , <nl> + onTouchEnded : function ( ) { <nl> + inst . showDebug ( false ) ; <nl> + self . _debugDraw . setVisible ( false ) ; <nl> + } <nl> + } , this ) ; <nl> + <nl> + var positions = new Array ( 4 ) ; <nl> + var spSize = sp . getContentSize ( ) ; <nl> + positions [ 0 ] = cc . p ( 0 , spSize . height ) ; <nl> + positions [ 1 ] = cc . p ( spSize . width , spSize . height ) ; <nl> + positions [ 2 ] = cc . p ( spSize . width , 0 ) ; <nl> + positions [ 3 ] = cc . p ( 0 , 0 ) ; <nl> + <nl> + this . _debugDraw . drawSegment ( positions [ 0 ] , positions [ 1 ] , 1 , cc . color . GREEN ) ; <nl> + this . _debugDraw . drawSegment ( positions [ 1 ] , positions [ 2 ] , 1 , cc . color . GREEN ) ; <nl> + this . _debugDraw . drawSegment ( positions [ 2 ] , positions [ 3 ] , 1 , cc . color . GREEN ) ; <nl> + this . _debugDraw . drawSegment ( positions [ 3 ] , positions [ 0 ] , 1 , cc . color . GREEN ) ; <nl> + this . _debugDraw . drawSegment ( positions [ 0 ] , positions [ 2 ] , 1 , cc . color . GREEN ) ; <nl> + <nl> + this . _debugDraw . 
setVisible ( false ) ; <nl> + <nl> + var label1 = new cc . LabelTTF ( " Sprite : \ nPixels drawn : " + spSize . width * spSize . height , " fonts / arial . ttf " , 10 ) ; <nl> + sp . addChild ( label1 ) ; <nl> + label1 . setAnchorPoint ( cc . p ( 0 , 1 ) ) ; <nl> + <nl> + var label2 = new cc . LabelTTF ( " SpritePolygon : \ nPixels drawn : " + ( inst . getArea ( ) + inst . getVertCount ( ) ) , " fonts / arial . ttf " , 10 ) ; <nl> + inst . addChild ( label2 ) ; <nl> + label2 . setAnchorPoint ( cc . p ( 0 , 1 ) ) ; <nl> + <nl> + } , <nl> + <nl> + onRestartCallback : function ( sender ) { <nl> + var s = new SpritePolygonTestScene ( ) ; <nl> + s . addChild ( restartSpritePolygonTest ( ) ) ; <nl> + director . runScene ( s ) ; <nl> + } , <nl> + <nl> + onNextCallback : function ( sender ) { <nl> + var s = new SpritePolygonTestScene ( ) ; <nl> + s . addChild ( nextSpritePolygonTest ( ) ) ; <nl> + director . runScene ( s ) ; <nl> + } , <nl> + <nl> + onBackCallback : function ( sender ) { <nl> + var s = new SpritePolygonTestScene ( ) ; <nl> + s . addChild ( previousSpritePolygonTest ( ) ) ; <nl> + director . runScene ( s ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + var SpritePolygonTestScene = cc . Scene . extend ( { <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + <nl> + var label = new cc . LabelTTF ( " Main Menu " , " Arial " , 20 ) ; <nl> + var menuItem = new cc . MenuItemLabel ( label , this . onMainMenuCallback , this ) ; <nl> + <nl> + var menu = new cc . Menu ( menuItem ) ; <nl> + menu . x = 0 ; <nl> + menu . y = 0 ; <nl> + menuItem . x = winSize . width - 50 ; <nl> + menuItem . y = 25 ; <nl> + this . addChild ( menu , 99 ) ; <nl> + } , <nl> + onMainMenuCallback : function ( ) { <nl> + var scene = new cc . Scene ( ) ; <nl> + var layer = new TestController ( ) ; <nl> + scene . addChild ( layer ) ; <nl> + director . runScene ( scene ) ; <nl> + } , <nl> + runThisTest : function ( num ) { <nl> + SpritePolygonTestIdx = ( num | | num = = 0 ) ? ( num - 1 ) : - 1 ; <nl> + var layer = nextSpritePolygonTest ( ) ; <nl> + this . addChild ( layer ) ; <nl> + <nl> + director . runScene ( this ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + var SpritePolygonTest1 = SpritePolygonTestDemo . extend ( { <nl> + _title : " SpritePolygon Creation " , <nl> + _subtitle : " SpritePolygon : : create ( \ " Images / grossini . png \ " ) " , <nl> + <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + <nl> + var s = ccexp . SpritePolygon . create ( s_pathGrossini ) ; <nl> + this . initDefaultSprite ( s_pathGrossini , s ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + var SpritePolygonTest2 = SpritePolygonTestDemo . extend ( { <nl> + _title : " SpritePolygon Creation " , <nl> + _subtitle : " SpritePolygon : : create ( \ " Images / grossini . png \ " , verts ) " , <nl> + <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + <nl> + var verts = [ ] ; <nl> + verts . push ( cc . p ( 36 . 5 , 242 . 0 - 128 . 5 ) ) ; <nl> + verts . push ( cc . p ( 27 . 5 , 242 . 0 - 133 . 5 ) ) ; <nl> + verts . push ( cc . p ( 24 . 5 , 242 . 0 - 145 . 5 ) ) ; <nl> + verts . push ( cc . p ( 26 . 5 , 242 . 0 - 161 . 5 ) ) ; <nl> + verts . push ( cc . p ( 33 . 5 , 242 . 0 - 168 . 5 ) ) ; <nl> + verts . push ( cc . p ( 27 . 5 , 242 . 0 - 168 . 5 ) ) ; <nl> + verts . push ( cc . p ( 16 . 5 , 242 . 0 - 179 . 5 ) ) ; <nl> + verts . push ( cc . p ( 30 . 5 , 242 . 0 - 197 . 5 ) ) ; <nl> + verts . push ( cc . p ( 28 . 5 , 242 . 0 - 237 . 5 ) ) ; <nl> + verts . push ( cc . p ( 56 . 5 , 242 . 0 - 237 . 5 ) ) ; <nl> + verts . push ( cc . p ( 54 . 
5 , 242 . 0 - 197 . 5 ) ) ; <nl> + verts . push ( cc . p ( 68 . 5 , 242 . 0 - 184 . 5 ) ) ; <nl> + verts . push ( cc . p ( 57 . 5 , 242 . 0 - 168 . 5 ) ) ; <nl> + verts . push ( cc . p ( 51 . 5 , 242 . 0 - 168 . 5 ) ) ; <nl> + verts . push ( cc . p ( 60 . 5 , 242 . 0 - 154 . 5 ) ) ; <nl> + verts . push ( cc . p ( 57 . 5 , 242 . 0 - 133 . 5 ) ) ; <nl> + verts . push ( cc . p ( 48 . 5 , 242 . 0 - 127 . 5 ) ) ; <nl> + verts . push ( cc . p ( 36 . 5 , 242 . 0 - 127 . 5 ) ) ; <nl> + <nl> + cc . SpritePolygonCache . getInstance ( ) . removeAllSpritePolygonCache ( ) ; <nl> + var s = ccexp . SpritePolygon . create ( s_pathGrossini , verts ) ; <nl> + this . initDefaultSprite ( s_pathGrossini , s ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + var SpritePolygonTest3 = SpritePolygonTestDemo . extend ( { <nl> + _title : " SpritePolygon Creation " , <nl> + _subtitle : " SpritePolygon : : create ( \ " Images / grossini . png \ " , verts , indices ) " , <nl> + <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + <nl> + var verts = [ ] ; <nl> + verts . push ( cc . p ( 33 . 500000 , 73 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 27 . 500000 , 73 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 16 . 500000 , 62 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 30 . 500000 , 44 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 54 . 500000 , 44 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 51 . 500000 , 73 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 60 . 500000 , 87 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 26 . 500000 , 80 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 24 . 500000 , 96 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 57 . 500000 , 108 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 36 . 500000 , 113 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 48 . 500000 , 114 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 36 . 500000 , 114 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 27 . 500000 , 108 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 68 . 500000 , 57 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 57 . 500000 , 73 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 56 . 500000 , 4 . 500000 ) ) ; <nl> + verts . push ( cc . p ( 28 . 500000 , 4 . 500000 ) ) ; <nl> + <nl> + var indices = [ 0 , 1 , 2 , 3 , 0 , 2 , 4 , 0 , 3 , 5 , 0 , 4 , 5 , 6 , 0 , 0 , 6 , 7 , 8 , 7 , 6 , 6 , 9 , 8 , 9 , 10 , 8 , 9 , 11 , 10 , 11 , 12 , 10 , 8 , 10 , 13 , 14 , 5 , 4 , 15 , 5 , 14 , 4 , 3 , 16 , 3 , 17 , 16 ] ; <nl> + <nl> + cc . SpritePolygonCache . getInstance ( ) . removeAllSpritePolygonCache ( ) ; <nl> + var s = ccexp . SpritePolygon . create ( s_pathGrossini , verts , indices ) ; <nl> + this . initDefaultSprite ( s_pathGrossini , s ) ; <nl> + } <nl> + } ) ; <nl> + <nl> + var SpritePolygonTest4 = SpritePolygonTestDemo . extend ( { <nl> + _title : " SpritePolygon Creation " , <nl> + _subtitle : " SpritePolygon : : create ( \ " Images / grossini . png \ " , \ n \ tvector < V3F_C4B_T2F > v , vector < unsigned short > indices ) " , <nl> + <nl> + ctor : function ( ) { <nl> + this . _super ( ) ; <nl> + <nl> + var vec3 = [ ] ; <nl> + vec3 . push ( cc . math . vec3 ( 33 . 500000 , 73 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 27 . 500000 , 73 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 16 . 500000 , 62 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 30 . 500000 , 44 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 54 . 500000 , 44 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 51 . 500000 , 73 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 60 . 500000 , 87 . 
500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 26 . 500000 , 80 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 24 . 500000 , 96 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 57 . 500000 , 108 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 36 . 500000 , 113 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 48 . 500000 , 114 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 36 . 500000 , 114 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 27 . 500000 , 108 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 68 . 500000 , 57 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 57 . 500000 , 73 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 56 . 500000 , 4 . 500000 , 0 ) ) ; <nl> + vec3 . push ( cc . math . vec3 ( 28 . 500000 , 4 . 50000 , 0 ) ) ; <nl> + <nl> + var t2f = [ ] ; <nl> + t2f . push ( cc . p ( 0 . 394118 , 0 . 392562 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 323529 , 0 . 392562 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 194118 , 0 . 483471 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 358824 , 0 . 632231 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 641176 , 0 . 632231 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 605882 , 0 . 392562 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 711765 , 0 . 276859 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 311765 , 0 . 334711 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 288235 , 0 . 202479 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 676471 , 0 . 103306 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 429412 , 0 . 061983 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 570588 , 0 . 053719 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 429412 , 0 . 053719 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 323529 , 0 . 103306 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 805882 , 0 . 524793 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 676471 , 0 . 392562 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 664706 , 0 . 962810 ) ) ; <nl> + t2f . push ( cc . p ( 0 . 335294 , 0 . 962810 ) ) ; <nl> + <nl> + var verts = [ ] ; <nl> + for ( var i = 0 ; i < 18 ; + + i ) <nl> + { <nl> + var t = { <nl> + v3f : vec3 [ i ] , <nl> + c4b : cc . color . WHITE , <nl> + t2f : t2f [ i ] <nl> + } ; <nl> + verts . push ( t ) ; <nl> + } <nl> + var indices = [ 0 , 1 , 2 , 3 , 0 , 2 , 4 , 0 , 3 , 5 , 0 , 4 , 5 , 6 , 0 , 0 , 6 , 7 , 8 , 7 , 6 , 6 , 9 , 8 , 9 , 10 , 8 , 9 , 11 , 10 , 11 , 12 , 10 , 8 , 10 , 13 , 14 , 5 , 4 , 15 , 5 , 14 , 4 , 3 , 16 , 3 , 17 , 16 ] ; <nl> + <nl> + cc . SpritePolygonCache . getInstance ( ) . removeAllSpritePolygonCache ( ) ; <nl> + var s = ccexp . SpritePolygon . create ( s_pathGrossini , verts , indices ) ; <nl> + this . initDefaultSprite ( s_pathGrossini , s ) ; <nl> + } <nl> + <nl> + } ) <nl> + / / <nl> + / / Flow control <nl> + / / <nl> + var arrayOfSpritePolygonTest = [ <nl> + SpritePolygonTest1 , <nl> + SpritePolygonTest2 , <nl> + SpritePolygonTest3 , <nl> + SpritePolygonTest4 <nl> + ] ; <nl> + <nl> + var nextSpritePolygonTest = function ( ) { <nl> + SpritePolygonTestIdx + + ; <nl> + SpritePolygonTestIdx = SpritePolygonTestIdx % arrayOfSpritePolygonTest . length ; <nl> + <nl> + if ( window . sideIndexBar ) { <nl> + SpritePolygonTestIdx = window . sideIndexBar . changeTest ( SpritePolygonTestIdx , 36 ) ; <nl> + } <nl> + <nl> + return new arrayOfSpritePolygonTest [ SpritePolygonTestIdx ] ( ) ; <nl> + } ; <nl> + var previousSpritePolygonTest = function ( ) { <nl> + SpritePolygonTestIdx - - ; <nl> + if ( SpritePolygonTestIdx < 0 ) <nl> + SpritePolygonTestIdx + = arrayOfSpritePolygonTest . length ; <nl> + <nl> + if ( window . 
sideIndexBar ) { <nl> + SpritePolygonTestIdx = window . sideIndexBar . changeTest ( SpritePolygonTestIdx , 36 ) ; <nl> + } <nl> + <nl> + return new arrayOfSpritePolygonTest [ SpritePolygonTestIdx ] ( ) ; <nl> + } ; <nl> + var restartSpritePolygonTest = function ( ) { <nl> + return new arrayOfSpritePolygonTest [ SpritePolygonTestIdx ] ( ) ; <nl> + } ; <nl> mmm a / tests / js - tests / src / tests - main . js <nl> ppp b / tests / js - tests / src / tests - main . js <nl> var testNames = [ <nl> return new Sprite3DTestScene ( ) ; <nl> } <nl> } , <nl> + { <nl> + title : " SpritePolygon Test " , <nl> + platforms : PLATFORM_JSB , <nl> + linksrc : " src / SpritePolygonTest / SpritePolygonTest . js " , <nl> + testScene : function ( ) { <nl> + return new SpritePolygonTestScene ( ) ; <nl> + } <nl> + } , <nl> { <nl> title : " Sprite Test " , <nl> resource : g_sprites , <nl> var testNames = [ <nl> <nl> / / " UserDefaultTest " , <nl> / / " ZwoptexTest " , <nl> - ] ; <nl> \ No newline at end of file <nl> + ] ; <nl> mmm a / tools / tojs / cocos2dx . ini <nl> ppp b / tools / tojs / cocos2dx . ini <nl> cxxgenerator_headers = <nl> extra_arguments = % ( android_headers ) s % ( clang_headers ) s % ( cxxgenerator_headers ) s % ( cocos_headers ) s % ( android_flags ) s % ( clang_flags ) s % ( cocos_flags ) s % ( extra_flags ) s <nl> <nl> # what headers to parse <nl> - headers = % ( cocosdir ) s / cocos / cocos2d . h % ( cocosdir ) s / cocos / audio / include / SimpleAudioEngine . h % ( cocosdir ) s / cocos / 2d / CCProtectedNode . h % ( cocosdir ) s / cocos / base / CCAsyncTaskPool . h <nl> + headers = % ( cocosdir ) s / cocos / cocos2d . h % ( cocosdir ) s / cocos / audio / include / SimpleAudioEngine . h % ( cocosdir ) s / cocos / 2d / CCProtectedNode . h % ( cocosdir ) s / cocos / base / CCAsyncTaskPool . h % ( cocosdir ) s / cocos / 2d / SpritePolygonCache . h <nl> <nl> # what classes to produce code for . You can use regular expressions here . When testing the regular <nl> # expression , it will be enclosed in " ^ $ " , like this : " ^ Menu * $ " . <nl> <nl> - classes = New . * Sprite SpriteBatchNode SpriteFrame SpriteFrameCache Scene Node . * Director Layer . * Menu . * Touch . * Action . * Move . * Rotate . * Blink . * Tint . * Sequence Repeat . * Fade . * Ease . * Scale . * Transition . * Spawn ReverseTime Animate AnimationFrame Animation AnimationCache Flip . * Delay . * Skew . * Jump . * Place . * Show . * Progress . * PointArray ToggleVisibility . * RemoveSelf Hide Particle . * Label . * Atlas . * TextureCache . * Texture2D Cardinal . * CatmullRom . * ParallaxNode TileMap . * TMX . * CallFunc CallFuncN RenderTexture GridAction Grid3DAction Grid3D TiledGrid3D GridBase $ . + Grid Shaky3D Waves3D FlipX3D FlipY3D Lens3D Ripple3D PageTurn3D ShakyTiles3D ShatteredTiles3D WavesTiles3D JumpTiles3D Speed ActionManager Set SimpleAudioEngine Scheduler Orbit . * Follow . * Bezier . * CardinalSpline . * Camera . * DrawNode Liquid $ Waves $ ShuffleTiles $ TurnOffTiles $ Split . * Twirl $ FileUtils $ GLProgram GLProgramCache Application ClippingNode MotionStreak TextFieldTTF GLViewProtocol GLView Component ComponentContainer __NodeRGBA __LayerRGBA SAXParser Event ( ? ! . * ( Physics ) . * ) . * Device Configuration ProtectedNode GLProgramState Image . * Light $ AsyncTaskPool <nl> + classes = New . * Sprite SpriteBatchNode SpriteFrame SpriteFrameCache Scene Node . * Director Layer . * Menu . * Touch . * Action . * Move . * Rotate . * Blink . * Tint . * Sequence Repeat . * Fade . * Ease . * Scale . * Transition . 
* Spawn ReverseTime Animate AnimationFrame Animation AnimationCache Flip . * Delay . * Skew . * Jump . * Place . * Show . * Progress . * PointArray ToggleVisibility . * RemoveSelf Hide Particle . * Label . * Atlas . * TextureCache . * Texture2D Cardinal . * CatmullRom . * ParallaxNode TileMap . * TMX . * CallFunc CallFuncN RenderTexture GridAction Grid3DAction Grid3D TiledGrid3D GridBase $ . + Grid Shaky3D Waves3D FlipX3D FlipY3D Lens3D Ripple3D PageTurn3D ShakyTiles3D ShatteredTiles3D WavesTiles3D JumpTiles3D Speed ActionManager Set SimpleAudioEngine Scheduler Orbit . * Follow . * Bezier . * CardinalSpline . * Camera . * DrawNode Liquid $ Waves $ ShuffleTiles $ TurnOffTiles $ Split . * Twirl $ FileUtils $ GLProgram GLProgramCache Application ClippingNode MotionStreak TextFieldTTF GLViewProtocol GLView Component ComponentContainer __NodeRGBA __LayerRGBA SAXParser Event ( ? ! . * ( Physics ) . * ) . * Device Configuration ProtectedNode GLProgramState Image . * Light $ AsyncTaskPool SpritePolygonCache <nl> <nl> classes_need_extend = Node __NodeRGBA Layer . * Sprite SpriteBatchNode SpriteFrame Menu MenuItem . * Scene DrawNode Component . * Action . * GridBase Grid3D TiledGrid3D MotionStreak ParticleBatchNode ParticleSystem TextFieldTTF RenderTexture TileMapAtlas TMXLayer TMXTiledMap TMXMapInfo TransitionScene ProgressTimer ParallaxNode Label . * GLProgram <nl> <nl> skip = Node : : [ ^ setPosition $ setGLServerState description getUserObject . * UserDat <nl> Component : : [ serialize ] , <nl> EventListenerCustom : : [ init ] , <nl> EventListener : : [ init ] , <nl> - Scene : : [ getCameras getLights initWithPhysics createWithPhysics getPhysicsWorld ] , <nl> + Scene : : [ getCameras getLights initWithPhysics createWithPhysics getPhysicsWorld getPhysics3DWorld ] , <nl> Animate3D : : [ * ] , <nl> Sprite3D : : [ * ] , <nl> AttachNode : : [ * ] , <nl> skip = Node : : [ ^ setPosition $ setGLServerState description getUserObject . * UserDat <nl> Device : : [ getTextureDataForText ] , <nl> BillBoard : : [ * ] , <nl> Camera : : [ unproject isVisibleInFrustum ] , <nl> - ClippingNode : : [ init ] <nl> + ClippingNode : : [ init ] , <nl> + SpritePolygonCache : : [ addSpritePolygonCache getSpritePolygonCache ] <nl> <nl> rename_functions = SpriteFrameCache : : [ addSpriteFramesWithFile = addSpriteFrames getSpriteFrameByName = getSpriteFrame ] , <nl> MenuItemFont : : [ setFontNameObj = setFontName setFontSizeObj = setFontSize getFontSizeObj = getFontSize getFontNameObj = getFontName ] , <nl> base_classes_to_skip = Ref Clonable <nl> # classes that create no constructor <nl> # Set is special and we will use a hand - written constructor <nl> <nl> - abstract_classes = Action FiniteTimeAction ActionInterval ActionEase EaseRateAction EaseElastic EaseBounce ActionInstant GridAction Grid3DAction TiledGrid3DAction Director SpriteFrameCache TransitionEaseScene Set SimpleAudioEngine FileUtils Application GLViewProtocol GLView ComponentContainer SAXParser Configuration EventListener BaseLight AsyncTaskPool <nl> + abstract_classes = Action FiniteTimeAction ActionInterval ActionEase EaseRateAction EaseElastic EaseBounce ActionInstant GridAction Grid3DAction TiledGrid3DAction Director SpriteFrameCache TransitionEaseScene Set SimpleAudioEngine FileUtils Application GLViewProtocol GLView ComponentContainer SAXParser Configuration EventListener BaseLight AsyncTaskPool SpritePolygonCache <nl> <nl> # Determining whether to use script object ( js object ) to control the lifecycle of native ( cpp ) object or the other way around . 
Supported values are ' yes ' or ' no ' . <nl> script_control_cpp = no <nl> mmm a / tools / tojs / cocos2dx_3d . ini <nl> ppp b / tools / tojs / cocos2dx_3d . ini <nl> skip = Skeleton3D : : [ create ] , <nl> Sprite3DCache : : [ addSprite3DData getSpriteData ] , <nl> Animation3D : : [ getBoneCurves ] , <nl> TextureCube : : [ setTexParameters ] , <nl> - Terrain : : [ getAABB getQuadTree create ] <nl> + Terrain : : [ getAABB getQuadTree create getHeightData ] <nl> <nl> <nl> rename_functions = <nl> new file mode 100644 <nl> index 000000000000 . . 4ae61bbee7ab <nl> mmm / dev / null <nl> ppp b / tools / tojs / cocos2dx_experimental . ini <nl> <nl> + [ cocos2dx_experimental ] <nl> + # the prefix to be added to the generated functions . You might or might not use this in your own <nl> + # templates <nl> + prefix = cocos2dx_experimental <nl> + <nl> + # create a target namespace ( in javascript , this would create some code like the equiv . to ` ns = ns | | { } ` ) <nl> + # all classes will be embedded in that namespace <nl> + target_namespace = ccexp <nl> + <nl> + android_headers = - I % ( androidndkdir ) s / platforms / android - 14 / arch - arm / usr / include - I % ( androidndkdir ) s / sources / cxx - stl / gnu - libstdc + + / 4 . 7 / libs / armeabi - v7a / include - I % ( androidndkdir ) s / sources / cxx - stl / gnu - libstdc + + / 4 . 7 / include - I % ( androidndkdir ) s / sources / cxx - stl / gnu - libstdc + + / 4 . 8 / libs / armeabi - v7a / include - I % ( androidndkdir ) s / sources / cxx - stl / gnu - libstdc + + / 4 . 8 / include <nl> + android_flags = - D_SIZE_T_DEFINED_ <nl> + <nl> + clang_headers = - I % ( clangllvmdir ) s / lib / clang / 3 . 3 / include <nl> + clang_flags = - nostdinc - x c + + - std = c + + 11 - U __SSE__ <nl> + <nl> + cocos_headers = - I % ( cocosdir ) s / cocos - I % ( cocosdir ) s / cocos / editor - support - I % ( cocosdir ) s / cocos / platform / android - I % ( cocosdir ) s / external <nl> + <nl> + cocos_flags = - DANDROID <nl> + <nl> + cxxgenerator_headers = <nl> + <nl> + # extra arguments for clang <nl> + extra_arguments = % ( android_headers ) s % ( clang_headers ) s % ( cxxgenerator_headers ) s % ( cocos_headers ) s % ( android_flags ) s % ( clang_flags ) s % ( cocos_flags ) s % ( extra_flags ) s <nl> + <nl> + # what headers to parse <nl> + headers = % ( cocosdir ) s / cocos / 2d / SpritePolygon . h <nl> + <nl> + # what classes to produce code for . You can use regular expressions here . When testing the regular <nl> + # expression , it will be enclosed in " ^ $ " , like this : " ^ Menu * $ " . <nl> + classes = SpritePolygon <nl> + <nl> + # what should we skip ? in the format ClassName : : [ function function ] <nl> + # ClassName is a regular expression , but will be used like this : " ^ ClassName $ " functions are also <nl> + # regular expressions , they will not be surrounded by " ^ $ " . If you want to skip a whole class , just <nl> + # add a single " * " as functions . See bellow for several examples . A special class name is " * " , which <nl> + # will apply to all class names . This is a convenience wildcard to be able to skip similar named <nl> + # functions from all classes . <nl> + <nl> + skip = SpritePolygon : : [ create initWithVerts initWithRect initWithPoly2tri initWithCache ] <nl> + <nl> + rename_functions = <nl> + <nl> + rename_classes = <nl> + <nl> + # for all class names , should we remove something when registering in the target VM ? 
<nl> + remove_prefix = <nl> + <nl> + # classes for which there will be no " parent " lookup <nl> + classes_have_no_parents = <nl> + <nl> + # base classes which will be skipped when their sub - classes found them . <nl> + base_classes_to_skip = <nl> + <nl> + # classes that create no constructor <nl> + # Set is special and we will use a hand - written constructor <nl> + abstract_classes = <nl> + <nl> + # Determining whether to use script object ( js object ) to control the lifecycle of native ( cpp ) object or the other way around . Supported values are ' yes ' or ' no ' . <nl> + script_control_cpp = no <nl> + <nl> mmm a / tools / tojs / genbindings . py <nl> ppp b / tools / tojs / genbindings . py <nl> def main ( ) : <nl> if platform = = ' win32 ' : <nl> config . set ( ' DEFAULT ' , ' extra_flags ' , ' - D__WCHAR_MAX__ = 0x7fffffff - U__MINGW32__ ' ) <nl> <nl> - conf_ini_file = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , ' userconf . ini ' ) ) <nl> + conf_ini_file = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , ' userconf . ini ' ) ) <nl> <nl> print ' generating userconf . ini . . . ' <nl> with open ( conf_ini_file , ' w ' ) as configfile : <nl> def main ( ) : <nl> ' cocos2dx_studio . ini ' : ( ' cocos2dx_studio ' , ' jsb_cocos2dx_studio_auto ' ) , \ <nl> ' cocos2dx_spine . ini ' : ( ' cocos2dx_spine ' , ' jsb_cocos2dx_spine_auto ' ) , \ <nl> ' cocos2dx_3d . ini ' : ( ' cocos2dx_3d ' , ' jsb_cocos2dx_3d_auto ' ) , \ <nl> - ' cocos2dx_3d_ext . ini ' : ( ' cocos2dx_3d_extension ' , ' jsb_cocos2dx_3d_extension_auto ' ) <nl> + ' cocos2dx_3d_ext . ini ' : ( ' cocos2dx_3d_extension ' , ' jsb_cocos2dx_3d_extension_auto ' ) , \ <nl> + ' cocos2dx_experimental . ini ' : ( ' cocos2dx_experimental ' , ' jsb_cocos2dx_experimental ' ) <nl> } <nl> target = ' spidermonkey ' <nl> generator_py = ' % s / generator . py ' % cxx_generator_root <nl> def main ( ) : <nl> with _pushd ( output_dir ) : <nl> _run_cmd ( ' dos2unix * ' ) <nl> <nl> - <nl> + <nl> custom_cmd_args = { } <nl> if len ( custom_cmd_args ) > 0 : <nl> output_dir = ' % s / frameworks / custom / auto ' % project_root <nl> def main ( ) : <nl> sys . exit ( 1 ) <nl> else : <nl> raise <nl> - <nl> + <nl> <nl> # mmmmmmmmmmmm - - main mmmmmmmmmmmm - - <nl> if __name__ = = ' __main__ ' : <nl> | add SpritePolygon binding & & js tests | cocos2d/cocos2d-x | 234f4db8a072fb48f013adabcd013dd6f1fb5320 | 2015-05-11T09:04:23Z |
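For context on the binding registered in `jsb_cocos2dx_experimental_manual.cpp` above: `js_cocos2dx_SpritePolygon_create` exposes three overloads of `ccexp.SpritePolygon.create` to script, taking a file name, optionally a vertex list, and optionally an index list. A minimal JavaScript usage sketch (the file path, the geometry and the `layer` node below are placeholders, not values from the tests):

```js
// Quad covering an 80x100 region of the texture; two triangles via the index list.
var verts   = [cc.p(0, 0), cc.p(80, 0), cc.p(80, 100), cc.p(0, 100)];
var indices = [0, 1, 2, 0, 2, 3];

// The three overloads handled by js_cocos2dx_SpritePolygon_create (argc == 1, 2, 3):
var autoTraced = ccexp.SpritePolygon.create("Images/grossini.png");
var fromVerts  = ccexp.SpritePolygon.create("Images/grossini.png", verts);
var fromMesh   = ccexp.SpritePolygon.create("Images/grossini.png", verts, indices);

// Placed like any other node; layer is assumed to be an existing cc.Layer.
layer.addChild(fromMesh);
fromMesh.setPosition(cc.p(240, 160));
```

The three-argument overload also accepts an array of `{v3f, c4b, t2f}` objects instead of plain points, as converted by `jsval_to_vector_v3fc4bt2f` and exercised in `SpritePolygonTest4`.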
mmm a / dbms / src / IO / HDFSCommon . cpp <nl> ppp b / dbms / src / IO / HDFSCommon . cpp <nl> HDFSBuilderPtr createHDFSBuilder ( const std : : string & uri_str ) <nl> const Poco : : URI uri ( uri_str ) ; <nl> auto & host = uri . getHost ( ) ; <nl> auto port = uri . getPort ( ) ; <nl> - auto & path = uri . getPath ( ) ; <nl> - if ( host . empty ( ) | | path . empty ( ) ) <nl> + const std : : string path = " / / " ; <nl> + if ( host . empty ( ) ) <nl> throw Exception ( " Illegal HDFS URI : " + uri . toString ( ) , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> <nl> HDFSBuilderPtr builder ( hdfsNewBuilder ( ) ) ; <nl> mmm a / dbms / src / IO / WriteBufferFromHDFS . cpp <nl> ppp b / dbms / src / IO / WriteBufferFromHDFS . cpp <nl> namespace ErrorCodes <nl> extern const int NETWORK_ERROR ; <nl> extern const int CANNOT_OPEN_FILE ; <nl> extern const int CANNOT_FSYNC ; <nl> + extern const int BAD_ARGUMENTS ; <nl> } <nl> <nl> <nl> struct WriteBufferFromHDFS : : WriteBufferFromHDFSImpl <nl> { <nl> const size_t begin_of_path = hdfs_uri . find ( ' / ' , hdfs_uri . find ( " / / " ) + 2 ) ; <nl> const std : : string path = hdfs_uri . substr ( begin_of_path ) ; <nl> - if ( path . find ( " * ? { " ) ! = std : : string : : npos ) <nl> + if ( path . find_first_of ( " * ? { " ) ! = std : : string : : npos ) <nl> throw Exception ( " URI ' " + hdfs_uri + " ' contains globs , so the table is in readonly mode " , ErrorCodes : : CANNOT_OPEN_FILE ) ; <nl> <nl> - fout = hdfsOpenFile ( fs . get ( ) , path . c_str ( ) , O_WRONLY , 0 , 0 , 0 ) ; <nl> + if ( ! hdfsExists ( fs . get ( ) , path . c_str ( ) ) ) <nl> + throw Exception ( " File : " + path + " is already exists " , ErrorCodes : : BAD_ARGUMENTS ) ; <nl> + fout = hdfsOpenFile ( fs . get ( ) , path . c_str ( ) , O_WRONLY , 0 , 0 , 0 ) ; / / / O_WRONLY meaning create or overwrite i . e . , implies O_TRUNCAT here <nl> <nl> if ( fout = = nullptr ) <nl> { <nl> mmm a / dbms / src / Storages / StorageFile . cpp <nl> ppp b / dbms / src / Storages / StorageFile . cpp <nl> class StorageFileBlockOutputStream : public IBlockOutputStream <nl> else <nl> { <nl> if ( storage . paths . size ( ) ! = 1 ) <nl> - throw Exception ( " Table ' " + storage . table_name + " ' is in readonly mode " , ErrorCodes : : DATABASE_ACCESS_DENIED ) ; <nl> + throw Exception ( " Table ' " + storage . table_name + " ' is in readonly mode because of globs in filepath " , ErrorCodes : : DATABASE_ACCESS_DENIED ) ; <nl> write_buf = std : : make_unique < WriteBufferFromFile > ( storage . paths [ 0 ] , DBMS_DEFAULT_BUFFER_SIZE , O_WRONLY | O_APPEND | O_CREAT ) ; <nl> } <nl> <nl> mmm a / dbms / tests / integration / test_storage_hdfs / test . py <nl> ppp b / dbms / tests / integration / test_storage_hdfs / test . py <nl> def started_cluster ( ) : <nl> <nl> def test_read_write_storage ( started_cluster ) : <nl> hdfs_api = HDFSApi ( " root " ) <nl> - hdfs_api . write_data ( " / simple_storage " , " 1 \ tMark \ t72 . 53 \ n " ) <nl> <nl> node1 . query ( " create table SimpleHDFSStorage ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / simple_storage ' , ' TSV ' ) " ) <nl> node1 . query ( " insert into SimpleHDFSStorage values ( 1 , ' Mark ' , 72 . 53 ) " ) <nl> def test_read_write_storage ( started_cluster ) : <nl> def test_read_write_storage_with_globs ( started_cluster ) : <nl> hdfs_api = HDFSApi ( " root " ) <nl> <nl> - for i in [ " 1 " , " 2 " , " 3 " ] : <nl> - hdfs_api . write_data ( " / storage " + i , i + " \ tMark \ t72 . 53 \ n " ) <nl> - assert hdfs_api . 
read_data ( " / storage " + i ) = = i + " \ tMark \ t72 . 53 \ n " <nl> - <nl> node1 . query ( " create table HDFSStorageWithRange ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / storage { 1 . . 5 } ' , ' TSV ' ) " ) <nl> node1 . query ( " create table HDFSStorageWithEnum ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / storage { 1 , 2 , 3 , 4 , 5 } ' , ' TSV ' ) " ) <nl> node1 . query ( " create table HDFSStorageWithQuestionMark ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / storage ? ' , ' TSV ' ) " ) <nl> node1 . query ( " create table HDFSStorageWithAsterisk ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / storage * ' , ' TSV ' ) " ) <nl> <nl> - assert node1 . query ( " select count ( * ) from HDFSStorageWithRange " ) = = ' 3 \ n ' <nl> - assert node1 . query ( " select count ( * ) from HDFSStorageWithEnum " ) = = ' 3 \ n ' <nl> - assert node1 . query ( " select count ( * ) from HDFSStorageWithQuestionMark " ) = = ' 3 \ n ' <nl> - assert node1 . query ( " select count ( * ) from HDFSStorageWithAsterisk " ) = = ' 3 \ n ' <nl> + for i in [ " 1 " , " 2 " , " 3 " ] : <nl> + hdfs_api . write_data ( " / storage " + i , i + " \ tMark \ t72 . 53 \ n " ) <nl> + assert hdfs_api . read_data ( " / storage " + i ) = = i + " \ tMark \ t72 . 53 \ n " <nl> + <nl> + assert node1 . query ( " select count ( * ) from HDFSStorageWithRange " ) = = " 3 \ n " <nl> + assert node1 . query ( " select count ( * ) from HDFSStorageWithEnum " ) = = " 3 \ n " <nl> + assert node1 . query ( " select count ( * ) from HDFSStorageWithQuestionMark " ) = = " 3 \ n " <nl> + assert node1 . query ( " select count ( * ) from HDFSStorageWithAsterisk " ) = = " 3 \ n " <nl> + <nl> + try : <nl> + node1 . query ( " insert into HDFSStorageWithEnum values ( 1 , ' NEW ' , 4 . 2 ) " ) <nl> + assert False , " Exception have to be thrown " <nl> + except Exception as ex : <nl> + print ex <nl> + assert " in readonly mode " in str ( ex ) <nl> + <nl> + try : <nl> + node1 . query ( " insert into HDFSStorageWithQuestionMark values ( 1 , ' NEW ' , 4 . 2 ) " ) <nl> + assert False , " Exception have to be thrown " <nl> + except Exception as ex : <nl> + print ex <nl> + assert " in readonly mode " in str ( ex ) <nl> + <nl> + try : <nl> + node1 . query ( " insert into HDFSStorageWithAsterisk values ( 1 , ' NEW ' , 4 . 2 ) " ) <nl> + assert False , " Exception have to be thrown " <nl> + except Exception as ex : <nl> + print ex <nl> + assert " in readonly mode " in str ( ex ) <nl> <nl> def test_read_write_table ( started_cluster ) : <nl> hdfs_api = HDFSApi ( " root " ) <nl> def test_bad_hdfs_uri ( started_cluster ) : <nl> node1 . query ( " create table BadStorage1 ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hads : hgsdfs100500 : 9000 / other_storage ' , ' TSV ' ) " ) <nl> except Exception as ex : <nl> print ex <nl> - assert ' Illegal HDFS URI ' in str ( ex ) <nl> + assert " Illegal HDFS URI " in str ( ex ) <nl> try : <nl> node1 . query ( " create table BadStorage2 ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs100500 : 9000 / other_storage ' , ' TSV ' ) " ) <nl> except Exception as ex : <nl> print ex <nl> - assert ' Unable to create builder to connect to HDFS ' in str ( ex ) <nl> + assert " Unable to create builder to connect to HDFS " in str ( ex ) <nl> <nl> try : <nl> node1 . 
query ( " create table BadStorage3 ( id UInt32 , name String , weight Float64 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / < > ' , ' TSV ' ) " ) <nl> except Exception as ex : <nl> print ex <nl> - assert ' Unable to open HDFS file ' in str ( ex ) <nl> + assert " Unable to open HDFS file " in str ( ex ) <nl> <nl> def test_globs_in_read_table ( started_cluster ) : <nl> hdfs_api = HDFSApi ( " root " ) <nl> mmm a / docs / en / operations / table_engines / hdfs . md <nl> ppp b / docs / en / operations / table_engines / hdfs . md <nl> The ` format ` parameter specifies one of the available file formats . To perform <nl> ` SELECT ` queries , the format must be supported for input , and to perform <nl> ` INSERT ` queries - - for output . The available formats are listed in the <nl> [ Formats ] ( . . / . . / interfaces / formats . md # formats ) section . <nl> + The path part of ` URI ` may contain globs . In this case the table would be readonly . <nl> <nl> * * Example : * * <nl> <nl> SELECT * FROM hdfs_engine_table LIMIT 2 <nl> - Indexes . <nl> - Replication . <nl> <nl> + * * Globs in path * * <nl> + <nl> + Multiple path components can have globs . For being processed file should exists and matches to the whole path pattern . Listing of files determines during ` SELECT ` ( not at ` CREATE ` moment ) . <nl> + <nl> + - ` * ` — Substitutes any number of any characters including none . <nl> + - ` ? ` — Substitutes any single character . <nl> + - ` { some_string , another_string , yet_another_one } ` — Substitutes any of strings ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Substitutes any number in range from N to M including both borders . <nl> + <nl> + Constructions with ` { } ` are similar to the [ remote table function ] ( . . / . . / query_language / table_functions / remote . md ) ) . <nl> + <nl> + * * Example * * <nl> + <nl> + 1 . Suppose we have several files in TSV format with the following URIs on HDFS : <nl> + <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_3 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_3 ' <nl> + <nl> + 2 . There are several ways to make a table consisting of all six files : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_range ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / some_file_ { 1 . . 3 } ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + Another way : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_question_mark ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / some_file_ ? ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + Table consists of all the files in both directories ( all files should satisfy format and schema described in query ) : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_asterisk ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / * ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + ! ! ! warning <nl> + If the listing of files contains number ranges with leading zeros , use the construction with braces for each digit separately or use ` ? ` . <nl> + <nl> + * * Example * * <nl> + <nl> + Create table with files named ` file000 ` , ` file001 ` , . . . 
, ` file999 ` : <nl> + <nl> + ` ` ` sql <nl> + CREARE TABLE big_table ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / big_dir / file { 0 . . 9 } { 0 . . 9 } { 0 . . 9 } ' , ' CSV ' ) <nl> + ` ` ` <nl> + <nl> [ Original article ] ( https : / / clickhouse . yandex / docs / en / operations / table_engines / hdfs / ) < ! - - hide - - > <nl> mmm a / docs / en / query_language / table_functions / file . md <nl> ppp b / docs / en / query_language / table_functions / file . md <nl> <nl> <nl> # file <nl> <nl> - Creates a table from a file . <nl> + Creates a table from a file . This table function is similar to [ url ] ( url . md ) and [ hdfs ] ( hdfs . md ) ones . <nl> <nl> ` ` ` <nl> file ( path , format , structure ) <nl> SELECT * FROM file ( ' test . csv ' , ' CSV ' , ' column1 UInt32 , column2 UInt32 , column3 U <nl> <nl> * * Globs in path * * <nl> <nl> - - ` * ` — Matches any number of any characters including none . <nl> - - ` ? ` — Matches any single character . <nl> - - ` { some_string , another_string , yet_another_one } ` — Matches any of strings ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> - - ` { N . . M } ` — Matches any number in range from N to M including both borders . <nl> + Multiple path components can have globs . For being processed file should exists and matches to the whole path pattern ( not only suffix or prefix ) . <nl> <nl> + - ` * ` — Substitutes any number of any characters including none . <nl> + - ` ? ` — Substitutes any single character . <nl> + - ` { some_string , another_string , yet_another_one } ` — Substitutes any of strings ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Substitutes any number in range from N to M including both borders . <nl> + <nl> + Constructions with ` { } ` are similar to the [ remote table function ] ( . . / . . / query_language / table_functions / remote . md ) ) . <nl> + <nl> + * * Example * * <nl> + <nl> + 1 . Suppose we have several files with the following relative paths : <nl> + <nl> + - ' some_dir / some_file_1 ' <nl> + - ' some_dir / some_file_2 ' <nl> + - ' some_dir / some_file_3 ' <nl> + - ' another_dir / some_file_1 ' <nl> + - ' another_dir / some_file_2 ' <nl> + - ' another_dir / some_file_3 ' <nl> + <nl> + 2 . Query the amount of rows in these files : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' { some , another } _dir / some_file_ { 1 . . 3 } ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> + <nl> + 3 . Query the amount of rows in all files of these two directories : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' { some , another } _dir / * ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> ! ! ! warning <nl> If your listing of files contains number ranges with leading zeros , use the construction with braces for each digit separately or use ` ? ` . <nl> <nl> - Multiple path components can have globs . For being processed file should exists and matches to the whole path pattern . <nl> + * * Example * * <nl> + <nl> + Query the data from files named ` file000 ` , ` file001 ` , . . . , ` file999 ` : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' big_dir / file { 0 . . 9 } { 0 . . 9 } { 0 . . 9 } ' , ' CSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> <nl> [ Original article ] ( https : / / clickhouse . yandex / docs / en / query_language / table_functions / file / ) < ! 
- - hide - - > <nl> mmm a / docs / en / query_language / table_functions / hdfs . md <nl> ppp b / docs / en / query_language / table_functions / hdfs . md <nl> <nl> <nl> # hdfs <nl> <nl> - Creates a table from a file in HDFS . <nl> + Creates a table from files in HDFS . This table function is similar to [ url ] ( url . md ) and [ file ] ( file . md ) ones . <nl> <nl> ` ` ` <nl> hdfs ( URI , format , structure ) <nl> LIMIT 2 <nl> <nl> * * Globs in path * * <nl> <nl> - - ` * ` — Matches any number of any characters including none . <nl> - - ` ? ` — Matches any single character . <nl> - - ` { some_string , another_string , yet_another_one } ` — Matches any of strings ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> - - ` { N . . M } ` — Matches any number in range from N to M including both borders . <nl> + Multiple path components can have globs . For being processed file should exists and matches to the whole path pattern ( not only suffix or prefix ) . <nl> + <nl> + - ` * ` — Substitutes any number of any characters including none . <nl> + - ` ? ` — Substitutes any single character . <nl> + - ` { some_string , another_string , yet_another_one } ` — Substitutes any of strings ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Substitutes any number in range from N to M including both borders . <nl> + <nl> + <nl> + Constructions with ` { } ` are similar to the [ remote table function ] ( . . / . . / query_language / table_functions / remote . md ) ) . <nl> + <nl> + * * Example * * <nl> + <nl> + 1 . Suppose that we have several files with following URIs on HDFS : <nl> + <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_3 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_3 ' <nl> + <nl> + 2 . Query the amount of rows in these files : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM hdfs ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / some_file_ { 1 . . 3 } ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> + <nl> + 3 . Query the amount of rows in all files of these two directories : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM hdfs ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / * ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> <nl> ! ! ! warning <nl> If your listing of files contains number ranges with leading zeros , use the construction with braces for each digit separately or use ` ? ` . <nl> <nl> - Multiple path components can have globs . For being processed file should exists and matches to the whole path pattern . <nl> + * * Example * * <nl> + <nl> + Query the data from files named ` file000 ` , ` file001 ` , . . . , ` file999 ` : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM hdfs ( ' hdfs : / / hdfs1 : 9000 / big_dir / file { 0 . . 9 } { 0 . . 9 } { 0 . . 9 } ' , ' CSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> <nl> [ Original article ] ( https : / / clickhouse . yandex / docs / en / query_language / table_functions / hdfs / ) < ! - - hide - - > <nl> mmm a / docs / ru / operations / table_engines / hdfs . md <nl> ppp b / docs / ru / operations / table_engines / hdfs . 
md <nl> <nl> # HDFS { # table_engines - hdfs } <nl> <nl> - Управляет данными в HDFS . Данный движок похож на движок [ File ] ( file . md ) и на движок [ URL ] ( url . md ) . <nl> + Управляет данными в HDFS . Данный движок похож на движки [ File ] ( file . md ) и [ URL ] ( url . md ) . <nl> <nl> # # Использование движка <nl> <nl> ENGINE = HDFS ( URI , format ) <nl> <nl> В параметр ` URI ` нужно передавать полный URI файла в HDFS . <nl> Параметр ` format ` должен быть таким , который ClickHouse может использовать и в запросах ` INSERT ` , и в запросах ` SELECT ` . Полный список поддерживаемых форматов смотрите в разделе [ Форматы ] ( . . / . . / interfaces / formats . md # formats ) . <nl> + Часть URI с путем файла может содержать шаблоны . В этом случае таблица может использоваться только для чтения . <nl> <nl> * * Пример : * * <nl> <nl> SELECT * FROM hdfs_engine_table LIMIT 2 <nl> - индексы ; <nl> - репликация . <nl> <nl> + * * Шаблоны в пути * * <nl> + <nl> + Шаблоны могут содержаться в нескольких компонентах пути . Обрабатываются только существующие файлы , название которых целиком удовлетворяет шаблону ( не только суффиксом или префиксом ) . <nl> + <nl> + - ` * ` — Заменяет любое количество любых символов , включая отсутствие символов . <nl> + - ` ? ` — Заменяет ровно один любой символ . <nl> + - ` { some_string , another_string , yet_another_one } ` — Заменяет любую из строк ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Заменяет любое число в интервале от ` N ` до ` M ` включительно . <nl> + <nl> + Конструкция с ` { } ` аналогична табличной функции [ remote ] ( remote . md ) . <nl> + <nl> + * * Пример * * <nl> + <nl> + 1 . Предположим , у нас есть несколько файлов со следующими URI в HDFS : <nl> + <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / some_dir / some_file_3 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_1 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_2 ' <nl> + - ' hdfs : / / hdfs1 : 9000 / another_dir / some_file_3 ' <nl> + <nl> + 2 . Есть несколько возможностей создать таблицу , состояющую из этих шести файлов : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_range ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / some_file_ { 1 . . 3 } ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + Другой способ : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_question_mark ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / some_file_ ? ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + Таблица , состоящая из всех файлов в обеих директориях ( все файлы должны удовлетворять формату и схеме , указанной в запросе ) : <nl> + <nl> + ` ` ` sql <nl> + CREATE TABLE table_with_asterisk ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / { some , another } _dir / * ' , ' TSV ' ) <nl> + ` ` ` <nl> + <nl> + ! ! ! warning <nl> + Если список файлов содержит числовые интервалы с ведущими нулями , используйте конструкцию с фигурными скобочками для каждой цифры или используйте ` ? ` . <nl> + <nl> + * * Example * * <nl> + <nl> + Создадим таблицу с именами ` file000 ` , ` file001 ` , . . . , ` file999 ` : <nl> + <nl> + ` ` ` sql <nl> + CREARE TABLE big_table ( name String , value UInt32 ) ENGINE = HDFS ( ' hdfs : / / hdfs1 : 9000 / big_dir / file { 0 . . 9 } { 0 . . 9 } { 0 . . 
9 } ' , ' CSV ' ) <nl> + ` ` ` <nl> + <nl> [ Оригинальная статья ] ( https : / / clickhouse . yandex / docs / ru / operations / table_engines / hdfs / ) < ! - - hide - - > <nl> mmm a / docs / ru / query_language / table_functions / file . md <nl> ppp b / docs / ru / query_language / table_functions / file . md <nl> <nl> <nl> # file <nl> <nl> - Создаёт таблицу из файла . <nl> + Создаёт таблицу из файла . Данная табличная функция похожа на табличные функции [ file ] ( file . md ) и [ hdfs ] ( hdfs . md ) . <nl> <nl> ` ` ` <nl> file ( path , format , structure ) <nl> LIMIT 2 <nl> └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ <nl> ` ` ` <nl> <nl> - * * Шаблоны в пути файла * * <nl> + Шаблоны могут содержаться в нескольких компонентах пути . Обрабатываются только существующие файлы , название которых целиком удовлетворяет шаблону ( не только суффиксом или префиксом ) . <nl> <nl> - - ` * ` — Матчит любое количество любых символов , включая отсутствие символов . <nl> - - ` ? ` — Матчит ровно один любой символ . <nl> - - ` { some_string , another_string , yet_another_one } ` — Матчит любую из строк ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> - - ` { N . . M } ` — Матчит любое число в интервале от ` N ` до ` M ` включительно . <nl> + - ` * ` — Заменяет любое количество любых символов , включая отсутствие символов . <nl> + - ` ? ` — Заменяет ровно один любой символ . <nl> + - ` { some_string , another_string , yet_another_one } ` — Заменяет любую из строк ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Заменяет любое число в интервале от ` N ` до ` M ` включительно . <nl> + <nl> + Конструкция с ` { } ` аналогична табличной функции [ remote ] ( remote . md ) . <nl> + <nl> + * * Пример * * <nl> + <nl> + 1 . Предположим у нас есть несколько файлов со следующими относительными путями : <nl> + <nl> + - ' some_dir / some_file_1 ' <nl> + - ' some_dir / some_file_2 ' <nl> + - ' some_dir / some_file_3 ' <nl> + - ' another_dir / some_file_1 ' <nl> + - ' another_dir / some_file_2 ' <nl> + - ' another_dir / some_file_3 ' <nl> + <nl> + 2 . Запросим количество строк в этих файлах : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' { some , another } _dir / some_file_ { 1 . . 3 } ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> + <nl> + 3 . Запросим количество строк во всех файлах этих двух директорий : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' { some , another } _dir / * ' , ' TSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> <nl> ! ! ! warning <nl> Если ваш список файлов содержит интервал с ведущими нулями , используйте конструкцию с фигурными скобками для каждой цифры по отдельности или используйте ` ? ` . <nl> <nl> - Шаблоны могут содержаться в разных частях пути . Обрабатываться будут ровно те файлы , которые и удовлетворяют всему шаблону пути , и существуют в файловой системе . <nl> + * * Пример * * <nl> + <nl> + Запрос данных из файлов с именами ` file000 ` , ` file001 ` , . . . , ` file999 ` : <nl> + <nl> + ` ` ` sql <nl> + SELECT count ( * ) <nl> + FROM file ( ' big_dir / file { 0 . . 9 } { 0 . . 9 } { 0 . . 9 } ' , ' CSV ' , ' name String , value UInt32 ' ) <nl> + ` ` ` <nl> <nl> [ Оригинальная статья ] ( https : / / clickhouse . yandex / docs / ru / query_language / table_functions / file / ) < ! - - hide - - > <nl> mmm a / docs / ru / query_language / table_functions / hdfs . md <nl> ppp b / docs / ru / query_language / table_functions / hdfs . 
md <nl> <nl> <nl> # hdfs <nl> <nl> - Создаёт таблицу из файла в HDFS . <nl> + Создаёт таблицу из файла в HDFS . Данная табличная функция похожа на табличные функции [ url ] ( url . md ) и [ file ] ( file . md ) . <nl> <nl> ` ` ` <nl> hdfs ( URI , format , structure ) <nl> LIMIT 2 <nl> └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┴ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ <nl> ` ` ` <nl> <nl> - * * Шаблоны в пути файла * * <nl> + * * Шаблоны в пути * * <nl> <nl> - - ` * ` — Матчит любое количество любых символов , включая отсутствие символов . <nl> - - ` ? ` — Матчит ровно один любой символ . <nl> - - ` { some_string , another_string , yet_another_one } ` — Матчит любую из строк ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> - - ` { N . . M } ` — Матчит любое число в интервале от ` N ` до ` M ` включительно . <nl> + - ` * ` — Заменяет любое количество любых символов , включая отсутствие символов . <nl> + - ` ? ` — Заменяет ровно один любой символ . <nl> + - ` { some_string , another_string , yet_another_one } ` — Заменяет любую из строк ` ' some_string ' , ' another_string ' , ' yet_another_one ' ` . <nl> + - ` { N . . M } ` — Заменяет любое число в интервале от ` N ` до ` M ` включительно . <nl> + <nl> + Конструкция с ` { } ` аналогична табличной функции [ remote ] ( remote . md ) . <nl> <nl> ! ! ! warning <nl> Если ваш список файлов содержит интервал с ведущими нулями , используйте конструкцию с фигурными скобками для каждой цифры по отдельности или используйте ` ? ` . <nl> | cleanup hdfs docs | ClickHouse/ClickHouse | 7309e813bcaff58f3fe5a320cf0e33440565283d | 2019-09-20T11:26:00Z |
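The WriteBufferFromHDFS change in the ClickHouse row above swaps `path.find("*?{")` for `path.find_first_of("*?{")`: `find` searches for the whole three-character substring, while `find_first_of` searches for any single one of those characters, which is what a "does this path contain a glob?" check actually needs. A minimal standalone C++ sketch of the difference (illustration only, not ClickHouse code):

```cpp
#include <cassert>
#include <string>

// find() looks for the contiguous substring "*?{", while find_first_of()
// looks for any one of the characters '*', '?' or '{'. Only the latter
// correctly flags a path that contains a single glob character.
int main()
{
    const std::string path = "/some_dir/file_?";

    // The substring "*?{" never appears, so find() misses the glob.
    assert(path.find("*?{") == std::string::npos);

    // find_first_of() spots the lone '?' and flags the path as a glob.
    assert(path.find_first_of("*?{") != std::string::npos);

    return 0;
}
```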
mmm a / android / sdk / src / test / java / com / taobao / weex / http / WXStreamModuleTest . java <nl> ppp b / android / sdk / src / test / java / com / taobao / weex / http / WXStreamModuleTest . java <nl> public void testFetchRequestHttpbinCallback ( ) throws Exception { <nl> JSCallback finish = mock ( JSCallback . class ) ; <nl> System . out . print ( " request start " + System . currentTimeMillis ( ) ) ; <nl> streamModule . fetch ( " { method : ' POST ' , url : ' http : / / httpbin . org / post ' , type : ' json ' } " , finish , progress ) ; <nl> - verify ( progress , timeout ( 10 * 1000 ) ) . invokeAndKeepAlive ( anyMapOf ( String . class , Object . class ) ) ; <nl> - verify ( finish , timeout ( 10 * 1000 ) ) . invoke ( anyMapOf ( String . class , Object . class ) ) ; <nl> + verify ( progress , timeout ( 10 * 1000 ) . atLeastOnce ( ) ) . invokeAndKeepAlive ( anyMapOf ( String . class , Object . class ) ) ; <nl> + verify ( finish , timeout ( 10 * 1000 ) . times ( 1 ) ) . invoke ( anyMapOf ( String . class , Object . class ) ) ; <nl> System . out . print ( " \ nrequest finish " + System . currentTimeMillis ( ) ) ; <nl> } <nl> <nl> | * [ android ] fix unit test | apache/incubator-weex | 40b10dea33bb3e17380e925e3e92eef609fb52d0 | 2016-05-27T02:40:41Z |
mmm a / plugins / producer_plugin / producer_plugin . cpp <nl> ppp b / plugins / producer_plugin / producer_plugin . cpp <nl> producer_plugin_impl : : start_block_result producer_plugin_impl : : start_block ( bool <nl> <nl> / / step one , if we will never produce , prune unapplied to our persisted trxs <nl> if ( _producers . empty ( ) ) { <nl> - for ( auto itr = unapplied_trxs . begin ( ) ; itr ! = unapplied_trxs . end ( ) ; ) { <nl> - const auto & trx = * itr ; <nl> + for ( size_t idx = 0 ; idx < unapplied_trxs . size ( ) ; ) { <nl> + const auto & trx = unapplied_trxs . at ( idx ) ; <nl> if ( persisted_by_id . find ( trx - > id ) = = persisted_by_id . end ( ) ) { <nl> / / remove it from further consideration as it is dropped <nl> chain . drop_unapplied_transaction ( trx ) ; <nl> - std : : iter_swap ( itr , std : : min_element ( itr , unapplied_trxs . end ( ) ) ) ; <nl> + std : : swap ( unapplied_trxs . at ( idx ) , unapplied_trxs . back ( ) ) ; <nl> unapplied_trxs . pop_back ( ) ; <nl> } else { <nl> - + + itr ; <nl> + + + idx ; <nl> } <nl> } <nl> } else { <nl> / / step two , if we may one day produce at least prune the expired trxs <nl> - for ( auto itr = unapplied_trxs . begin ( ) ; itr ! = unapplied_trxs . end ( ) ; ) { <nl> - const auto & trx = * itr ; <nl> + for ( size_t idx = 0 ; idx < unapplied_trxs . size ( ) ; ) { <nl> + const auto & trx = unapplied_trxs . at ( idx ) ; <nl> if ( trx - > packed_trx . expiration ( ) < pbs - > header . timestamp . to_time_point ( ) ) { <nl> / / expired , drop it <nl> chain . drop_unapplied_transaction ( trx ) ; <nl> - std : : iter_swap ( itr , std : : min_element ( itr , unapplied_trxs . end ( ) ) ) ; <nl> + std : : swap ( unapplied_trxs . at ( idx ) , unapplied_trxs . back ( ) ) ; <nl> unapplied_trxs . pop_back ( ) ; <nl> } else { <nl> - + + itr ; <nl> + + + idx ; <nl> } <nl> } <nl> } <nl> <nl> / / step three if we have persisted transactions apply them regarless of mode <nl> if ( ! persisted_by_expiry . empty ( ) ) { <nl> - for ( auto itr = unapplied_trxs . begin ( ) ; itr ! = unapplied_trxs . end ( ) ; ) { <nl> - const auto & trx = * itr ; <nl> + for ( size_t idx = 0 ; idx < unapplied_trxs . size ( ) ; ) { <nl> + const auto & trx = unapplied_trxs . at ( idx ) ; <nl> if ( persisted_by_id . find ( trx - > id ) ! = persisted_by_id . end ( ) ) { <nl> <nl> / / this is a persisted transaction , push it into the block ( even if we are speculating ) with <nl> producer_plugin_impl : : start_block_result producer_plugin_impl : : start_block ( bool <nl> / / failed and was removed - or - <nl> / / subjectively failed and should be tried _next_ block <nl> <nl> - std : : iter_swap ( itr , std : : min_element ( itr , unapplied_trxs . end ( ) ) ) ; <nl> + std : : swap ( unapplied_trxs . at ( idx ) , unapplied_trxs . back ( ) ) ; <nl> unapplied_trxs . pop_back ( ) ; <nl> } else { <nl> - + + itr ; <nl> + + + idx ; <nl> } <nl> } <nl> } <nl> | clean up error maintaining vector | EOSIO/eos | fa33444855a47db52493aebeda193dd7c5adc987 | 2018-09-11T16:10:15Z |
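The producer_plugin cleanup in the row above replaces the faulty iterator-based erase (an `std::iter_swap` with `std::min_element` followed by `pop_back`) with the standard swap-with-back-then-`pop_back` removal, driven by an index that only advances when nothing was removed. A small self-contained sketch of that swap-and-pop idiom (the container and predicate here are illustrative, not EOSIO code):

```cpp
#include <cassert>
#include <utility>
#include <vector>

// Swap-and-pop removal: drop every element matching a predicate without
// shifting the tail of the vector. Order is not preserved, which is fine
// for a work queue like the unapplied-transaction list in the diff above.
template <typename T, typename Pred>
void unordered_erase_if(std::vector<T>& v, Pred pred)
{
    for (size_t idx = 0; idx < v.size();) {
        if (pred(v.at(idx))) {
            std::swap(v.at(idx), v.back()); // move the victim to the end
            v.pop_back();                   // and drop it in O(1)
            // do not advance idx: the element swapped in must be examined too
        } else {
            ++idx;
        }
    }
}

int main()
{
    std::vector<int> xs{1, 2, 3, 4, 5, 6};
    unordered_erase_if(xs, [](int x) { return x % 2 == 0; });
    assert(xs.size() == 3); // 1, 3, 5 remain, in some order
    return 0;
}
```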
mmm a / hphp / hack / src / hhbc / emit_inout_helpers . ml <nl> ppp b / hphp / hack / src / hhbc / emit_inout_helpers . ml <nl> let extract_inout_or_ref_param_locations ~ is_sync ~ is_closure_or_func params = <nl> let need_wrapper = <nl> Hhbc_options . create_inout_wrapper_functions ! Hhbc_options . compiler_options <nl> & & ( Emit_env . is_hh_syntax_enabled ( ) ) <nl> - & & ( Hhbc_options . reffiness_invariance ! Hhbc_options . compiler_options <nl> + & & ( ( Hhbc_options . reffiness_invariance ! Hhbc_options . compiler_options ) = 2 <nl> | | is_closure_or_func ) <nl> & & not @ @ List . exists params <nl> ~ f : ( fun p - > p . T . param_is_variadic & & p . T . param_is_reference ) in <nl> mmm a / hphp / hack / src / hhbc / hhbc_options . ml <nl> ppp b / hphp / hack / src / hhbc / hhbc_options . ml <nl> type t = { <nl> option_php7_uvs : bool ; <nl> option_php7_ltr_assign : bool ; <nl> option_create_inout_wrapper_functions : bool ; <nl> - option_reffiness_invariance : bool ; <nl> + option_reffiness_invariance : int ; <nl> option_hack_arr_compat_notices : bool ; <nl> option_hack_arr_dv_arrs : bool ; <nl> option_dynamic_invoke_functions : SSet . t ; <nl> let default = { <nl> * HHVM it ' s helpful to renumber in order that the labels match more closely * ) <nl> option_relabel = true ; <nl> option_create_inout_wrapper_functions = true ; <nl> - option_reffiness_invariance = false ; <nl> + option_reffiness_invariance = 0 ; <nl> option_hack_arr_compat_notices = false ; <nl> option_hack_arr_dv_arrs = false ; <nl> option_dynamic_invoke_functions = SSet . empty ; <nl> let to_string o = <nl> ; Printf . sprintf " php7_ltr_assign : % B " @ @ php7_ltr_assign o <nl> ; Printf . sprintf " create_inout_wrapper_functions : % B " <nl> @ @ create_inout_wrapper_functions o <nl> - ; Printf . sprintf " reffiness_invariance : % B " @ @ reffiness_invariance o <nl> + ; Printf . sprintf " reffiness_invariance : % d " @ @ reffiness_invariance o <nl> ; Printf . sprintf " hack_arr_compat_notices : % B " @ @ hack_arr_compat_notices o <nl> ; Printf . sprintf " hack_arr_dv_arrs : % B " @ @ hack_arr_dv_arrs o <nl> ; Printf . sprintf " dynamic_invoke_functions : [ % s ] " dynamic_invokes <nl> let set_option options name value = <nl> | " eval . createinoutwrapperfunctions " - > <nl> { options with option_create_inout_wrapper_functions = as_bool value } <nl> | " eval . reffinessinvariance " - > <nl> - { options with option_reffiness_invariance = as_bool value } <nl> + { options with option_reffiness_invariance = int_of_string value } <nl> | " eval . hackarrcompatnotices " - > <nl> { options with option_hack_arr_compat_notices = as_bool value } <nl> | " eval . hackarrdvarrs " - > <nl> let value_setters = [ <nl> ( set_value " hhvm . create_in_out_wrapper_functions " get_value_from_config_int @ @ <nl> fun opts v - > { opts with option_create_inout_wrapper_functions = ( v = 1 ) } ) ; <nl> ( set_value " hhvm . reffiness_invariance " get_value_from_config_int @ @ <nl> - fun opts v - > { opts with option_reffiness_invariance = ( v = 1 ) } ) ; <nl> + fun opts v - > { opts with option_reffiness_invariance = v } ) ; <nl> ( set_value " hhvm . hack_arr_compat_notices " get_value_from_config_int @ @ <nl> fun opts v - > { opts with option_hack_arr_compat_notices = ( v = 1 ) } ) ; <nl> ( set_value " hhvm . hack_arr_dv_arrs " get_value_from_config_int @ @ <nl> mmm a / hphp / runtime / base / runtime - option . h <nl> ppp b / hphp / runtime / base / runtime - option . 
h <nl> struct RuntimeOption { <nl> F ( bool , NoticeOnReadDynamicProp , false ) \ <nl> F ( bool , NoticeOnImplicitInvokeToString , false ) \ <nl> F ( bool , FatalOnConvertObjectToString , false ) \ <nl> - F ( bool , ReffinessInvariance , false ) \ <nl> + / * Indicates whether parameters of overridden methods must match the <nl> + reffiness of the parent method . When set to enforcing mode , inout <nl> + wrappers will be created for methods . <nl> + 0 - do nothing <nl> + 1 - raise a warning on reffiness mismatch <nl> + 2 - raise a fatal on reffiness mismatch * / \ <nl> + F ( uint32_t , ReffinessInvariance , 0 ) \ <nl> F ( bool , NoticeOnBuiltinDynamicCalls , false ) \ <nl> F ( bool , RxPretendIsEnabled , false ) \ <nl> F ( bool , NoArrayAccessInIdx , false ) \ <nl> mmm a / hphp / runtime / base / unit - cache . cpp <nl> ppp b / hphp / runtime / base / unit - cache . cpp <nl> std : : string mangleUnitSha1 ( const std : : string & fileSha1 , <nl> + ( RuntimeOption : : EvalHackCompilerVerboseErrors ? ' 1 ' : ' 0 ' ) <nl> + ( RuntimeOption : : EvalJitEnableRenameFunction ? ' 1 ' : ' 0 ' ) <nl> + ( RuntimeOption : : EvalLoadFilepathFromUnitCache ? ' 1 ' : ' 0 ' ) <nl> - + ( RuntimeOption : : EvalReffinessInvariance ? ' 1 ' : ' 0 ' ) <nl> + + std : : to_string ( RuntimeOption : : EvalReffinessInvariance ) <nl> + std : : to_string ( RuntimeOption : : EvalForbidDynamicCalls ) <nl> + ( RuntimeOption : : EvalNoticeOnBuiltinDynamicCalls ? ' 1 ' : ' 0 ' ) <nl> + ( RuntimeOption : : EvalHackArrDVArrs ? ' 1 ' : ' 0 ' ) <nl> mmm a / hphp / runtime / vm / class . cpp <nl> ppp b / hphp / runtime / vm / class . cpp <nl> inline void checkRefCompat ( const char * kind , const Func * self , <nl> / / When reffiness invariance is disabled we cannot create wrappers for ref <nl> / / functions , as those wrappers would violate our invariance rules for inout <nl> / / functions . <nl> - assertx ( RuntimeOption : : EvalReffinessInvariance | | ! self - > isInOutWrapper ( ) ) ; <nl> + assertx ( RuntimeOption : : EvalReffinessInvariance = = 2 | | <nl> + ! self - > isInOutWrapper ( ) ) ; <nl> return ; <nl> } <nl> <nl> inline void checkRefCompat ( const char * kind , const Func * self , <nl> if ( ! self - > anyByRef ( ) & & ! inherit - > anyByRef ( ) ) return ; <nl> } <nl> <nl> + auto const fatal = <nl> + RuntimeOption : : EvalReffinessInvariance = = 2 | | <nl> + ( self - > isInOutWrapper ( ) | | inherit - > isInOutWrapper ( ) ) ; <nl> + <nl> auto const sname = self - > fullDisplayName ( ) - > data ( ) ; <nl> auto const iname = inherit - > fullDisplayName ( ) - > data ( ) ; <nl> auto const max = std : : max ( <nl> inline void checkRefCompat ( const char * kind , const Func * self , <nl> auto const smode = self - > byRef ( i ) ; <nl> auto const imode = inherit - > byRef ( i ) ; <nl> if ( smode ! = imode | | ( smode & & ! both_wrap ) ) { <nl> - if ( smode & & ( ! imode | | self - > isInOutWrapper ( ) | | both_wrap ) ) { <nl> - auto const sdecl = self - > isInOutWrapper ( ) ? " inout " : " ' & ' " ; <nl> - auto const idecl = i > = inherit - > numNonVariadicParams ( ) ? " " : sdecl ; <nl> - raise_error ( " Parameter % i on function % s was declared % sbut is not " <nl> - " declared % son % s function % s " , i + 1 , sname , sdecl , idecl , <nl> - kind , iname ) ; <nl> - } else { <nl> - auto const idecl = inherit - > isInOutWrapper ( ) ? " inout " : " ' & ' " ; <nl> - auto const sdecl = i > = self - > numNonVariadicParams ( ) ? 
" " : idecl ; <nl> - raise_error ( " Parameter % i on function % s was not declared % sbut is " <nl> - " declared % son % s function % s " , i + 1 , sname , sdecl , idecl , <nl> - kind , iname ) ; <nl> - } <nl> + auto const msg = [ & ] { <nl> + if ( smode & & ( ! imode | | self - > isInOutWrapper ( ) | | both_wrap ) ) { <nl> + auto const sdecl = self - > isInOutWrapper ( ) ? " inout " : " ' & ' " ; <nl> + auto const idecl = i > = inherit - > numNonVariadicParams ( ) ? " " : sdecl ; <nl> + return folly : : sformat ( <nl> + " Parameter { } on function { } was declared { } but is not " <nl> + " declared { } on { } function { } " , i + 1 , sname , sdecl , idecl , <nl> + kind , iname ) ; <nl> + } else { <nl> + auto const idecl = inherit - > isInOutWrapper ( ) ? " inout " : " ' & ' " ; <nl> + auto const sdecl = i > = self - > numNonVariadicParams ( ) ? " " : idecl ; <nl> + return folly : : sformat ( <nl> + " Parameter { } on function { } was not declared { } but is " <nl> + " declared { } on { } function { } " , i + 1 , sname , sdecl , idecl , <nl> + kind , iname ) ; <nl> + } <nl> + } ( ) ; <nl> + if ( fatal ) raise_error ( msg ) ; <nl> + else raise_warning ( msg ) ; <nl> } <nl> } <nl> } <nl> mmm a / hphp / runtime / vm / repo - global - data . h <nl> ppp b / hphp / runtime / vm / repo - global - data . h <nl> struct Repo : : GlobalData { <nl> * Should we enforce that reffiness annotations are invaraint in overridden <nl> * methods ? <nl> * / <nl> - bool ReffinessInvariance = false ; <nl> + uint32_t ReffinessInvariance = 0 ; <nl> <nl> / * <nl> * Should HHBBC do build time verification ? <nl> mmm a / hphp / test / slow / inout / abstract - method . php . hphp_opts <nl> ppp b / hphp / test / slow / inout / abstract - method . php . hphp_opts <nl> <nl> - - vEval . ReffinessInvariance = 1 <nl> + - vEval . ReffinessInvariance = 2 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / abstract - method . php . opts <nl> ppp b / hphp / test / slow / inout / abstract - method . php . opts <nl> <nl> - - vEval . ReffinessInvariance = 1 <nl> + - vEval . ReffinessInvariance = 2 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / method - interop - dynamic . php . hphp_opts <nl> ppp b / hphp / test / slow / inout / method - interop - dynamic . php . hphp_opts <nl> <nl> - - vRuntime . Eval . ReffinessInvariance = 1 - d hhvm . php7 . all = 0 <nl> + - vRuntime . Eval . ReffinessInvariance = 2 - d hhvm . php7 . all = 0 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / method - interop - dynamic . php . opts <nl> ppp b / hphp / test / slow / inout / method - interop - dynamic . php . opts <nl> <nl> - - vEval . ReffinessInvariance = 1 <nl> + - vEval . ReffinessInvariance = 2 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / method - interop - static . php . hphp_opts <nl> ppp b / hphp / test / slow / inout / method - interop - static . php . hphp_opts <nl> <nl> - - vRuntime . Eval . ReffinessInvariance = 1 - d hhvm . php7 . all = 0 <nl> + - vRuntime . Eval . ReffinessInvariance = 2 - d hhvm . php7 . all = 0 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / method - interop - static . php . opts <nl> ppp b / hphp / test / slow / inout / method - interop - static . php . opts <nl> <nl> - - vEval . ReffinessInvariance = 1 <nl> + - vEval . ReffinessInvariance = 2 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / trait - magic . php . hphp_opts <nl> ppp b / hphp / test / slow / inout / trait - magic . 
php . hphp_opts <nl> <nl> - - vRuntime . Eval . ReffinessInvariance = 1 - d hhvm . php7 . all = 0 <nl> + - vRuntime . Eval . ReffinessInvariance = 2 - d hhvm . php7 . all = 0 <nl> - vEval . EnablePHP = 1 <nl> mmm a / hphp / test / slow / inout / trait - magic . php . opts <nl> ppp b / hphp / test / slow / inout / trait - magic . php . opts <nl> <nl> - - vEval . ReffinessInvariance = 1 <nl> + - vEval . ReffinessInvariance = 2 <nl> - vEval . EnablePHP = 1 <nl> new file mode 100644 <nl> index 00000000000 . . 95d9a628ea4 <nl> mmm / dev / null <nl> ppp b / hphp / test / slow / reffy - warn . php <nl> <nl> + < ? hh <nl> + <nl> + class Foo { <nl> + function alpha ( $ one , & $ two , $ three , & $ four ) { } <nl> + } <nl> + <nl> + class Bar extends Foo { <nl> + function alpha ( & $ one , $ two , $ three , & $ four ) { } <nl> + } <nl> new file mode 100644 <nl> index 00000000000 . . a733cb6115b <nl> mmm / dev / null <nl> ppp b / hphp / test / slow / reffy - warn . php . expectf <nl> <nl> + <nl> + Warning : Parameter 1 on function alpha was declared ' & ' but is not declared ' & ' on parent function Foo : : alpha in % s / reffy - warn . php on line 7 <nl> + <nl> + Warning : Parameter 2 on function alpha was not declared ' & ' but is declared ' & ' on parent function Foo : : alpha in % s / reffy - warn . php on line 7 <nl> new file mode 100644 <nl> index 00000000000 . . d6e1ca709c3 <nl> mmm / dev / null <nl> ppp b / hphp / test / slow / reffy - warn . php . hphp_opts <nl> @ @ - 0 , 0 + 1 @ @ <nl> + - vRuntime . Eval . ReffinessInvariance = 1 <nl> new file mode 100644 <nl> index 00000000000 . . e824f80fa7a <nl> mmm / dev / null <nl> ppp b / hphp / test / slow / reffy - warn . php . opts <nl> @ @ - 0 , 0 + 1 @ @ <nl> + - vEval . ReffinessInvariance = 1 <nl> | Add a warn mode for reffiness invariance | facebook/hhvm | 07e6fd614c51b4344faf762c8c43c142b24a7821 | 2019-05-05T17:43:33Z |
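The HHVM row above turns `ReffinessInvariance` from a boolean into a three-level setting (0 = off, 1 = warn, 2 = fatal) and, in `checkRefCompat`, builds the diagnostic string once with an immediately-invoked lambda before deciding whether to raise a warning or an error. A rough standalone sketch of that shape (the option, names and check are simplified placeholders, not HHVM's actual logic):

```cpp
#include <cstdio>
#include <string>

// Tri-state enforcement level mirroring the 0/1/2 scheme in the diff above:
// 0 = do nothing, 1 = warn on mismatch, 2 = treat a mismatch as fatal.
static unsigned g_enforcement_level = 1;

void check_param_match(bool parent_by_ref, bool child_by_ref, int param_idx)
{
    if (g_enforcement_level == 0 || parent_by_ref == child_by_ref) return;

    // Build the message once with an immediately-invoked lambda, then pick
    // the severity afterwards -- the same structure as the class.cpp change.
    auto const msg = [&] {
        return "parameter " + std::to_string(param_idx) +
               " does not match the parent declaration";
    }();

    if (g_enforcement_level == 2) {
        std::fprintf(stderr, "fatal: %s\n", msg.c_str());
        // a real implementation would abort or throw here
    } else {
        std::fprintf(stderr, "warning: %s\n", msg.c_str());
    }
}

int main()
{
    check_param_match(/*parent_by_ref=*/true, /*child_by_ref=*/false, 1);
    return 0;
}
```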
mmm a / src / wallet / rpcwallet . cpp <nl> ppp b / src / wallet / rpcwallet . cpp <nl> UniValue addwitnessaddress ( const UniValue & params , bool fHelp ) <nl> throw JSONRPCError ( RPC_WALLET_ERROR , " Public key or redeemscript not known to wallet " ) ; <nl> } <nl> <nl> + pwalletMain - > SetAddressBook ( w . result , " " , " receive " ) ; <nl> + <nl> return CBitcoinAddress ( w . result ) . ToString ( ) ; <nl> } <nl> <nl> | add witness address to address book | bitcoin/bitcoin | 62ffbbdec30699941069baeae61716ff12155ba6 | 2016-09-09T16:15:08Z |
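The wallet change in the row above records the freshly created witness address in the wallet's address book with the "receive" purpose before returning it. A toy sketch of that create-then-register shape (types and names are illustrative, not Bitcoin Core's API):

```cpp
#include <iostream>
#include <map>
#include <string>

// Toy address book keyed by address, storing the purpose label.
std::map<std::string, std::string> g_address_book;

// Create an address and register it before handing it back to the caller,
// so that it is tracked alongside the wallet's other receiving addresses.
std::string create_witness_address(const std::string& base)
{
    std::string addr = "w-" + base;   // stand-in for the real script derivation
    g_address_book[addr] = "receive"; // register with its purpose label
    return addr;
}

int main()
{
    std::string addr = create_witness_address("pubkey123");
    std::cout << addr << " -> " << g_address_book[addr] << "\n";
    return 0;
}
```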
mmm a / fdbserver / RestoreWorker . actor . cpp <nl> ppp b / fdbserver / RestoreWorker . actor . cpp <nl> void initRestoreWorkerConfig ( ) ; <nl> ACTOR Future < Void > handlerTerminateWorkerRequest ( RestoreSimpleRequest req , Reference < RestoreWorkerData > self , <nl> RestoreWorkerInterface workerInterf , Database cx ) ; <nl> ACTOR Future < Void > monitorWorkerLiveness ( Reference < RestoreWorkerData > self ) ; <nl> - Future < Void > handleRecruitRoleRequest ( RestoreRecruitRoleRequest req , Reference < RestoreWorkerData > self , <nl> + void handleRecruitRoleRequest ( RestoreRecruitRoleRequest req , Reference < RestoreWorkerData > self , <nl> ActorCollection * actors , Database cx ) ; <nl> ACTOR Future < Void > collectRestoreWorkerInterface ( Reference < RestoreWorkerData > self , Database cx , <nl> int min_num_workers = 2 ) ; <nl> ACTOR Future < Void > handlerTerminateWorkerRequest ( RestoreSimpleRequest req , Refer <nl> <nl> / / Assume only 1 role on a restore worker . <nl> / / Future : Multiple roles in a restore worker <nl> - Future < Void > handleRecruitRoleRequest ( RestoreRecruitRoleRequest req , Reference < RestoreWorkerData > self , <nl> + void handleRecruitRoleRequest ( RestoreRecruitRoleRequest req , Reference < RestoreWorkerData > self , <nl> ActorCollection * actors , Database cx ) { <nl> / / Already recruited a role <nl> / / Future : Allow multiple restore roles on a restore worker . The design should easily allow this . <nl> if ( self - > loaderInterf . present ( ) ) { <nl> ASSERT ( req . role = = RestoreRole : : Loader ) ; <nl> req . reply . send ( RestoreRecruitRoleReply ( self - > id ( ) , RestoreRole : : Loader , self - > loaderInterf . get ( ) ) ) ; <nl> - return Void ( ) ; <nl> + return ; <nl> } else if ( self - > applierInterf . present ( ) ) { <nl> req . reply . send ( RestoreRecruitRoleReply ( self - > id ( ) , RestoreRole : : Applier , self - > applierInterf . get ( ) ) ) ; <nl> - return Void ( ) ; <nl> + return ; <nl> } <nl> <nl> if ( req . role = = RestoreRole : : Loader ) { <nl> Future < Void > handleRecruitRoleRequest ( RestoreRecruitRoleRequest req , Reference < R <nl> . detail ( " HandleRecruitRoleRequest " , " UnknownRole " ) ; / / . detail ( " Request " , req . printable ( ) ) ; <nl> } <nl> <nl> - return Void ( ) ; <nl> + return ; <nl> } <nl> <nl> / / Read restoreWorkersKeys from DB to get each restore worker ' s workerInterface and set it to self - > workerInterfaces ; <nl> ACTOR Future < Void > startRestoreWorker ( Reference < RestoreWorkerData > self , Restore <nl> } <nl> when ( RestoreRecruitRoleRequest req = waitNext ( interf . recruitRole . getFuture ( ) ) ) { <nl> requestTypeStr = " recruitRole " ; <nl> - actors . add ( handleRecruitRoleRequest ( req , self , & actors , cx ) ) ; <nl> + handleRecruitRoleRequest ( req , self , & actors , cx ) ; <nl> } <nl> when ( RestoreSimpleRequest req = waitNext ( interf . terminateWorker . getFuture ( ) ) ) { <nl> / / Destroy the worker at the end of the restore <nl> | FastRestore : Change handleRecruitRoleRequest return Void to void | apple/foundationdb | 1c2cfb2ca0e0086522d6064b5989b76379dc13ac | 2019-12-02T19:38:55Z |
deleted file mode 100644 <nl> index c158d817e09 . . 00000000000 <nl> mmm a / a . out . js <nl> ppp / dev / null <nl> <nl> - / / Copyright 2010 The Emscripten Authors . All rights reserved . <nl> - / / Emscripten is available under two separate licenses , the MIT license and the <nl> - / / University of Illinois / NCSA Open Source License . Both these licenses can be <nl> - / / found in the LICENSE file . <nl> - <nl> - / / The Module object : Our interface to the outside world . We import <nl> - / / and export values on it . There are various ways Module can be used : <nl> - / / 1 . Not defined . We create it here <nl> - / / 2 . A function parameter , function ( Module ) { . . generated code . . } <nl> - / / 3 . pre - run appended it , var Module = { } ; . . generated code . . <nl> - / / 4 . External script tag defines var Module . <nl> - / / We need to check if Module already exists ( e . g . case 3 above ) . <nl> - / / Substitution will be replaced with actual code on later stage of the build , <nl> - / / this way Closure Compiler will not mangle it ( e . g . case 4 . above ) . <nl> - / / Note that if you want to run closure , and also to use Module <nl> - / / after the generated code , you will need to define var Module = { } ; <nl> - / / before the code . Then that object will be used in the code , and you <nl> - / / can continue to use Module afterwards as well . <nl> - var Module = typeof Module ! = = ' undefined ' ? Module : { } ; <nl> - <nl> - / / - - pre - jses are emitted after the Module integration code , so that they can <nl> - / / refer to Module ( if they choose ; they can also define Module ) <nl> - <nl> - <nl> - <nl> - / / Sometimes an existing Module object exists with properties <nl> - / / meant to overwrite the default module functionality . Here <nl> - / / we collect those properties and reapply _after_ we configure <nl> - / / the current environment ' s defaults to avoid having to be so <nl> - / / defensive during initialization . <nl> - var moduleOverrides = { } ; <nl> - var key ; <nl> - for ( key in Module ) { <nl> - if ( Module . hasOwnProperty ( key ) ) { <nl> - moduleOverrides [ key ] = Module [ key ] ; <nl> - } <nl> - } <nl> - <nl> - var arguments_ = [ ] ; <nl> - var thisProgram = ' . / this . program ' ; <nl> - var quit_ = function ( status , toThrow ) { <nl> - throw toThrow ; <nl> - } ; <nl> - <nl> - / / Determine the runtime environment we are in . You can customize this by <nl> - / / setting the ENVIRONMENT setting at compile time ( see settings . js ) . <nl> - <nl> - var ENVIRONMENT_IS_WEB = false ; <nl> - var ENVIRONMENT_IS_WORKER = false ; <nl> - var ENVIRONMENT_IS_NODE = false ; <nl> - var ENVIRONMENT_HAS_NODE = false ; <nl> - var ENVIRONMENT_IS_SHELL = false ; <nl> - ENVIRONMENT_IS_WEB = typeof window = = = ' object ' ; <nl> - ENVIRONMENT_IS_WORKER = typeof importScripts = = = ' function ' ; <nl> - / / A web environment like Electron . js can have Node enabled , so we must <nl> - / / distinguish between Node - enabled environments and Node environments per se . <nl> - / / This will allow the former to do things like mount NODEFS . <nl> - / / Extended check using process . versions fixes issue # 8816 . <nl> - / / ( Also makes redundant the original check that ' require ' is a function . ) <nl> - ENVIRONMENT_HAS_NODE = typeof process = = = ' object ' & & typeof process . versions = = = ' object ' & & typeof process . versions . node = = = ' string ' ; <nl> - ENVIRONMENT_IS_NODE = ENVIRONMENT_HAS_NODE & & ! ENVIRONMENT_IS_WEB & & ! 
ENVIRONMENT_IS_WORKER ; <nl> - ENVIRONMENT_IS_SHELL = ! ENVIRONMENT_IS_WEB & & ! ENVIRONMENT_IS_NODE & & ! ENVIRONMENT_IS_WORKER ; <nl> - <nl> - if ( Module [ ' ENVIRONMENT ' ] ) { <nl> - throw new Error ( ' Module . ENVIRONMENT has been deprecated . To force the environment , use the ENVIRONMENT compile - time option ( for example , - s ENVIRONMENT = web or - s ENVIRONMENT = node ) ' ) ; <nl> - } <nl> - <nl> - <nl> - / / Three configurations we can be running in : <nl> - / / 1 ) We could be the application main ( ) thread running in the main JS UI thread . ( ENVIRONMENT_IS_WORKER = = false and ENVIRONMENT_IS_PTHREAD = = false ) <nl> - / / 2 ) We could be the application main ( ) thread proxied to worker . ( with Emscripten - s PROXY_TO_WORKER = 1 ) ( ENVIRONMENT_IS_WORKER = = true , ENVIRONMENT_IS_PTHREAD = = false ) <nl> - / / 3 ) We could be an application pthread running in a worker . ( ENVIRONMENT_IS_WORKER = = true and ENVIRONMENT_IS_PTHREAD = = true ) <nl> - <nl> - <nl> - <nl> - <nl> - / / ` / ` should be present at the end if ` scriptDirectory ` is not empty <nl> - var scriptDirectory = ' ' ; <nl> - function locateFile ( path ) { <nl> - if ( Module [ ' locateFile ' ] ) { <nl> - return Module [ ' locateFile ' ] ( path , scriptDirectory ) ; <nl> - } <nl> - return scriptDirectory + path ; <nl> - } <nl> - <nl> - / / Hooks that are implemented differently in different runtime environments . <nl> - var read_ , <nl> - readAsync , <nl> - readBinary , <nl> - setWindowTitle ; <nl> - <nl> - if ( ENVIRONMENT_IS_NODE ) { <nl> - scriptDirectory = __dirname + ' / ' ; <nl> - <nl> - / / Expose functionality in the same simple way that the shells work <nl> - / / Note that we pollute the global namespace here , otherwise we break in node <nl> - var nodeFS ; <nl> - var nodePath ; <nl> - <nl> - read_ = function shell_read ( filename , binary ) { <nl> - var ret ; <nl> - if ( ! nodeFS ) nodeFS = require ( ' fs ' ) ; <nl> - if ( ! nodePath ) nodePath = require ( ' path ' ) ; <nl> - filename = nodePath [ ' normalize ' ] ( filename ) ; <nl> - ret = nodeFS [ ' readFileSync ' ] ( filename ) ; <nl> - return binary ? ret : ret . toString ( ) ; <nl> - } ; <nl> - <nl> - readBinary = function readBinary ( filename ) { <nl> - var ret = read_ ( filename , true ) ; <nl> - if ( ! ret . buffer ) { <nl> - ret = new Uint8Array ( ret ) ; <nl> - } <nl> - assert ( ret . buffer ) ; <nl> - return ret ; <nl> - } ; <nl> - <nl> - if ( process [ ' argv ' ] . length > 1 ) { <nl> - thisProgram = process [ ' argv ' ] [ 1 ] . replace ( / \ \ / g , ' / ' ) ; <nl> - } <nl> - <nl> - arguments_ = process [ ' argv ' ] . slice ( 2 ) ; <nl> - <nl> - if ( typeof module ! = = ' undefined ' ) { <nl> - module [ ' exports ' ] = Module ; <nl> - } <nl> - <nl> - process [ ' on ' ] ( ' uncaughtException ' , function ( ex ) { <nl> - / / suppress ExitStatus exceptions from showing an error <nl> - if ( ! ( ex instanceof ExitStatus ) ) { <nl> - throw ex ; <nl> - } <nl> - } ) ; <nl> - <nl> - process [ ' on ' ] ( ' unhandledRejection ' , abort ) ; <nl> - <nl> - quit_ = function ( status ) { <nl> - process [ ' exit ' ] ( status ) ; <nl> - } ; <nl> - <nl> - Module [ ' inspect ' ] = function ( ) { return ' [ Emscripten Module object ] ' ; } ; <nl> - } else <nl> - if ( ENVIRONMENT_IS_SHELL ) { <nl> - <nl> - <nl> - if ( typeof read ! 
= ' undefined ' ) { <nl> - read_ = function shell_read ( f ) { <nl> - return read ( f ) ; <nl> - } ; <nl> - } <nl> - <nl> - readBinary = function readBinary ( f ) { <nl> - var data ; <nl> - if ( typeof readbuffer = = = ' function ' ) { <nl> - return new Uint8Array ( readbuffer ( f ) ) ; <nl> - } <nl> - data = read ( f , ' binary ' ) ; <nl> - assert ( typeof data = = = ' object ' ) ; <nl> - return data ; <nl> - } ; <nl> - <nl> - if ( typeof scriptArgs ! = ' undefined ' ) { <nl> - arguments_ = scriptArgs ; <nl> - } else if ( typeof arguments ! = ' undefined ' ) { <nl> - arguments_ = arguments ; <nl> - } <nl> - <nl> - if ( typeof quit = = = ' function ' ) { <nl> - quit_ = function ( status ) { <nl> - quit ( status ) ; <nl> - } ; <nl> - } <nl> - <nl> - if ( typeof print ! = = ' undefined ' ) { <nl> - / / Prefer to use print / printErr where they exist , as they usually work better . <nl> - if ( typeof console = = = ' undefined ' ) console = { } ; <nl> - console . log = print ; <nl> - console . warn = console . error = typeof printErr ! = = ' undefined ' ? printErr : print ; <nl> - } <nl> - } else <nl> - if ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) { <nl> - if ( ENVIRONMENT_IS_WORKER ) { / / Check worker , not web , since window could be polyfilled <nl> - scriptDirectory = self . location . href ; <nl> - } else if ( document . currentScript ) { / / web <nl> - scriptDirectory = document . currentScript . src ; <nl> - } <nl> - / / blob urls look like blob : http : / / site . com / etc / etc and we cannot infer anything from them . <nl> - / / otherwise , slice off the final part of the url to find the script directory . <nl> - / / if scriptDirectory does not contain a slash , lastIndexOf will return - 1 , <nl> - / / and scriptDirectory will correctly be replaced with an empty string . <nl> - if ( scriptDirectory . indexOf ( ' blob : ' ) ! = = 0 ) { <nl> - scriptDirectory = scriptDirectory . substr ( 0 , scriptDirectory . lastIndexOf ( ' / ' ) + 1 ) ; <nl> - } else { <nl> - scriptDirectory = ' ' ; <nl> - } <nl> - <nl> - <nl> - read_ = function shell_read ( url ) { <nl> - var xhr = new XMLHttpRequest ( ) ; <nl> - xhr . open ( ' GET ' , url , false ) ; <nl> - xhr . send ( null ) ; <nl> - return xhr . responseText ; <nl> - } ; <nl> - <nl> - if ( ENVIRONMENT_IS_WORKER ) { <nl> - readBinary = function readBinary ( url ) { <nl> - var xhr = new XMLHttpRequest ( ) ; <nl> - xhr . open ( ' GET ' , url , false ) ; <nl> - xhr . responseType = ' arraybuffer ' ; <nl> - xhr . send ( null ) ; <nl> - return new Uint8Array ( xhr . response ) ; <nl> - } ; <nl> - } <nl> - <nl> - readAsync = function readAsync ( url , onload , onerror ) { <nl> - var xhr = new XMLHttpRequest ( ) ; <nl> - xhr . open ( ' GET ' , url , true ) ; <nl> - xhr . responseType = ' arraybuffer ' ; <nl> - xhr . onload = function xhr_onload ( ) { <nl> - if ( xhr . status = = 200 | | ( xhr . status = = 0 & & xhr . response ) ) { / / file URLs can return 0 <nl> - onload ( xhr . response ) ; <nl> - return ; <nl> - } <nl> - onerror ( ) ; <nl> - } ; <nl> - xhr . onerror = onerror ; <nl> - xhr . send ( null ) ; <nl> - } ; <nl> - <nl> - setWindowTitle = function ( title ) { document . title = title } ; <nl> - } else <nl> - { <nl> - throw new Error ( ' environment detection error ' ) ; <nl> - } <nl> - <nl> - / / Set up the out ( ) and err ( ) hooks , which are how we can print to stdout or <nl> - / / stderr , respectively . <nl> - var out = Module [ ' print ' ] | | console . log . 
bind ( console ) ; <nl> - var err = Module [ ' printErr ' ] | | console . warn . bind ( console ) ; <nl> - <nl> - / / Merge back in the overrides <nl> - for ( key in moduleOverrides ) { <nl> - if ( moduleOverrides . hasOwnProperty ( key ) ) { <nl> - Module [ key ] = moduleOverrides [ key ] ; <nl> - } <nl> - } <nl> - / / Free the object hierarchy contained in the overrides , this lets the GC <nl> - / / reclaim data used e . g . in memoryInitializerRequest , which is a large typed array . <nl> - moduleOverrides = null ; <nl> - <nl> - / / Emit code to handle expected values on the Module object . This applies Module . x <nl> - / / to the proper local x . This has two benefits : first , we only emit it if it is <nl> - / / expected to arrive , and second , by using a local everywhere else that can be <nl> - / / minified . <nl> - if ( Module [ ' arguments ' ] ) arguments_ = Module [ ' arguments ' ] ; if ( ! Object . getOwnPropertyDescriptor ( Module , ' arguments ' ) ) Object . defineProperty ( Module , ' arguments ' , { get : function ( ) { abort ( ' Module . arguments has been replaced with plain arguments_ ' ) } } ) ; <nl> - if ( Module [ ' thisProgram ' ] ) thisProgram = Module [ ' thisProgram ' ] ; if ( ! Object . getOwnPropertyDescriptor ( Module , ' thisProgram ' ) ) Object . defineProperty ( Module , ' thisProgram ' , { get : function ( ) { abort ( ' Module . thisProgram has been replaced with plain thisProgram ' ) } } ) ; <nl> - if ( Module [ ' quit ' ] ) quit_ = Module [ ' quit ' ] ; if ( ! Object . getOwnPropertyDescriptor ( Module , ' quit ' ) ) Object . defineProperty ( Module , ' quit ' , { get : function ( ) { abort ( ' Module . quit has been replaced with plain quit_ ' ) } } ) ; <nl> - <nl> - / / perform assertions in shell . js after we set up out ( ) and err ( ) , as otherwise if an assertion fails it cannot print the message <nl> - / / Assertions on removed incoming Module JS APIs . <nl> - assert ( typeof Module [ ' memoryInitializerPrefixURL ' ] = = = ' undefined ' , ' Module . memoryInitializerPrefixURL option was removed , use Module . locateFile instead ' ) ; <nl> - assert ( typeof Module [ ' pthreadMainPrefixURL ' ] = = = ' undefined ' , ' Module . pthreadMainPrefixURL option was removed , use Module . locateFile instead ' ) ; <nl> - assert ( typeof Module [ ' cdInitializerPrefixURL ' ] = = = ' undefined ' , ' Module . cdInitializerPrefixURL option was removed , use Module . locateFile instead ' ) ; <nl> - assert ( typeof Module [ ' filePackagePrefixURL ' ] = = = ' undefined ' , ' Module . filePackagePrefixURL option was removed , use Module . locateFile instead ' ) ; <nl> - assert ( typeof Module [ ' read ' ] = = = ' undefined ' , ' Module . read option was removed ( modify read_ in JS ) ' ) ; <nl> - assert ( typeof Module [ ' readAsync ' ] = = = ' undefined ' , ' Module . readAsync option was removed ( modify readAsync in JS ) ' ) ; <nl> - assert ( typeof Module [ ' readBinary ' ] = = = ' undefined ' , ' Module . readBinary option was removed ( modify readBinary in JS ) ' ) ; <nl> - assert ( typeof Module [ ' setWindowTitle ' ] = = = ' undefined ' , ' Module . setWindowTitle option was removed ( modify setWindowTitle in JS ) ' ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , ' read ' ) ) Object . defineProperty ( Module , ' read ' , { get : function ( ) { abort ( ' Module . read has been replaced with plain read_ ' ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , ' readAsync ' ) ) Object . 
defineProperty ( Module , ' readAsync ' , { get : function ( ) { abort ( ' Module . readAsync has been replaced with plain readAsync ' ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , ' readBinary ' ) ) Object . defineProperty ( Module , ' readBinary ' , { get : function ( ) { abort ( ' Module . readBinary has been replaced with plain readBinary ' ) } } ) ; <nl> - / / TODO : add when SDL2 is fixed if ( ! Object . getOwnPropertyDescriptor ( Module , ' setWindowTitle ' ) ) Object . defineProperty ( Module , ' setWindowTitle ' , { get : function ( ) { abort ( ' Module . setWindowTitle has been replaced with plain setWindowTitle ' ) } } ) ; <nl> - <nl> - <nl> - / / TODO remove when SDL2 is fixed ( also see above ) <nl> - <nl> - <nl> - <nl> - / / Copyright 2017 The Emscripten Authors . All rights reserved . <nl> - / / Emscripten is available under two separate licenses , the MIT license and the <nl> - / / University of Illinois / NCSA Open Source License . Both these licenses can be <nl> - / / found in the LICENSE file . <nl> - <nl> - / / { { PREAMBLE_ADDITIONS } } <nl> - <nl> - var STACK_ALIGN = 16 ; <nl> - <nl> - / / stack management , and other functionality that is provided by the compiled code , <nl> - / / should not be used before it is ready <nl> - stackSave = stackRestore = stackAlloc = function ( ) { <nl> - abort ( ' cannot use the stack before compiled code is ready to run , and has provided stack access ' ) ; <nl> - } ; <nl> - <nl> - function staticAlloc ( size ) { <nl> - abort ( ' staticAlloc is no longer available at runtime ; instead , perform static allocations at compile time ( using makeStaticAlloc ) ' ) ; <nl> - } <nl> - <nl> - function dynamicAlloc ( size ) { <nl> - assert ( DYNAMICTOP_PTR ) ; <nl> - var ret = HEAP32 [ DYNAMICTOP_PTR > > 2 ] ; <nl> - var end = ( ret + size + 15 ) & - 16 ; <nl> - if ( end > _emscripten_get_heap_size ( ) ) { <nl> - abort ( ' failure to dynamicAlloc - memory growth etc . is not supported there , call malloc / sbrk directly ' ) ; <nl> - } <nl> - HEAP32 [ DYNAMICTOP_PTR > > 2 ] = end ; <nl> - return ret ; <nl> - } <nl> - <nl> - function alignMemory ( size , factor ) { <nl> - if ( ! factor ) factor = STACK_ALIGN ; / / stack alignment ( 16 - byte ) by default <nl> - return Math . ceil ( size / factor ) * factor ; <nl> - } <nl> - <nl> - function getNativeTypeSize ( type ) { <nl> - switch ( type ) { <nl> - case ' i1 ' : case ' i8 ' : return 1 ; <nl> - case ' i16 ' : return 2 ; <nl> - case ' i32 ' : return 4 ; <nl> - case ' i64 ' : return 8 ; <nl> - case ' float ' : return 4 ; <nl> - case ' double ' : return 8 ; <nl> - default : { <nl> - if ( type [ type . length - 1 ] = = = ' * ' ) { <nl> - return 4 ; / / A pointer <nl> - } else if ( type [ 0 ] = = = ' i ' ) { <nl> - var bits = parseInt ( type . substr ( 1 ) ) ; <nl> - assert ( bits % 8 = = = 0 , ' getNativeTypeSize invalid bits ' + bits + ' , type ' + type ) ; <nl> - return bits / 8 ; <nl> - } else { <nl> - return 0 ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> - function warnOnce ( text ) { <nl> - if ( ! warnOnce . shown ) warnOnce . shown = { } ; <nl> - if ( ! warnOnce . shown [ text ] ) { <nl> - warnOnce . 
shown [ text ] = 1 ; <nl> - err ( text ) ; <nl> - } <nl> - } <nl> - <nl> - var asm2wasmImports = { / / special asm2wasm imports <nl> - " f64 - rem " : function ( x , y ) { <nl> - return x % y ; <nl> - } , <nl> - " debugger " : function ( ) { <nl> - debugger ; <nl> - } <nl> - } ; <nl> - <nl> - <nl> - <nl> - <nl> - / / Wraps a JS function as a wasm function with a given signature . <nl> - / / In the future , we may get a WebAssembly . Function constructor . Until then , <nl> - / / we create a wasm module that takes the JS function as an import with a given <nl> - / / signature , and re - exports that as a wasm function . <nl> - function convertJsFunctionToWasm ( func , sig ) { <nl> - <nl> - / / The module is static , with the exception of the type section , which is <nl> - / / generated based on the signature passed in . <nl> - var typeSection = [ <nl> - 0x01 , / / id : section , <nl> - 0x00 , / / length : 0 ( placeholder ) <nl> - 0x01 , / / count : 1 <nl> - 0x60 , / / form : func <nl> - ] ; <nl> - var sigRet = sig . slice ( 0 , 1 ) ; <nl> - var sigParam = sig . slice ( 1 ) ; <nl> - var typeCodes = { <nl> - ' i ' : 0x7f , / / i32 <nl> - ' j ' : 0x7e , / / i64 <nl> - ' f ' : 0x7d , / / f32 <nl> - ' d ' : 0x7c , / / f64 <nl> - } ; <nl> - <nl> - / / Parameters , length + signatures <nl> - typeSection . push ( sigParam . length ) ; <nl> - for ( var i = 0 ; i < sigParam . length ; + + i ) { <nl> - typeSection . push ( typeCodes [ sigParam [ i ] ] ) ; <nl> - } <nl> - <nl> - / / Return values , length + signatures <nl> - / / With no multi - return in MVP , either 0 ( void ) or 1 ( anything else ) <nl> - if ( sigRet = = ' v ' ) { <nl> - typeSection . push ( 0x00 ) ; <nl> - } else { <nl> - typeSection = typeSection . concat ( [ 0x01 , typeCodes [ sigRet ] ] ) ; <nl> - } <nl> - <nl> - / / Write the overall length of the type section back into the section header <nl> - / / ( excepting the 2 bytes for the section id and length ) <nl> - typeSection [ 1 ] = typeSection . length - 2 ; <nl> - <nl> - / / Rest of the module is static <nl> - var bytes = new Uint8Array ( [ <nl> - 0x00 , 0x61 , 0x73 , 0x6d , / / magic ( " \ 0asm " ) <nl> - 0x01 , 0x00 , 0x00 , 0x00 , / / version : 1 <nl> - ] . concat ( typeSection , [ <nl> - 0x02 , 0x07 , / / import section <nl> - / / ( import " e " " f " ( func 0 ( type 0 ) ) ) <nl> - 0x01 , 0x01 , 0x65 , 0x01 , 0x66 , 0x00 , 0x00 , <nl> - 0x07 , 0x05 , / / export section <nl> - / / ( export " f " ( func 0 ( type 0 ) ) ) <nl> - 0x01 , 0x01 , 0x66 , 0x00 , 0x00 , <nl> - ] ) ) ; <nl> - <nl> - / / We can compile this wasm module synchronously because it is very small . <nl> - / / This accepts an import ( at " e . f " ) , that it reroutes to an export ( at " f " ) <nl> - var module = new WebAssembly . Module ( bytes ) ; <nl> - var instance = new WebAssembly . Instance ( module , { <nl> - e : { <nl> - f : func <nl> - } <nl> - } ) ; <nl> - var wrappedFunc = instance . exports . f ; <nl> - return wrappedFunc ; <nl> - } <nl> - <nl> - / / Add a wasm function to the table . <nl> - function addFunctionWasm ( func , sig ) { <nl> - var table = wasmTable ; <nl> - var ret = table . length ; <nl> - <nl> - / / Grow the table <nl> - try { <nl> - table . grow ( 1 ) ; <nl> - } catch ( err ) { <nl> - if ( ! err instanceof RangeError ) { <nl> - throw err ; <nl> - } <nl> - throw ' Unable to grow wasm table . Use a higher value for RESERVED_FUNCTION_POINTERS or set ALLOW_TABLE_GROWTH . 
' ; <nl> - } <nl> - <nl> - / / Insert new element <nl> - try { <nl> - / / Attempting to call this with JS function will cause of table . set ( ) to fail <nl> - table . set ( ret , func ) ; <nl> - } catch ( err ) { <nl> - if ( ! err instanceof TypeError ) { <nl> - throw err ; <nl> - } <nl> - assert ( typeof sig ! = = ' undefined ' , ' Missing signature argument to addFunction ' ) ; <nl> - var wrapped = convertJsFunctionToWasm ( func , sig ) ; <nl> - table . set ( ret , wrapped ) ; <nl> - } <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - function removeFunctionWasm ( index ) { <nl> - / / TODO ( sbc ) : Look into implementing this to allow re - using of table slots <nl> - } <nl> - <nl> - / / ' sig ' parameter is required for the llvm backend but only when func is not <nl> - / / already a WebAssembly function . <nl> - function addFunction ( func , sig ) { <nl> - <nl> - return addFunctionWasm ( func , sig ) ; <nl> - } <nl> - <nl> - function removeFunction ( index ) { <nl> - removeFunctionWasm ( index ) ; <nl> - } <nl> - <nl> - var funcWrappers = { } ; <nl> - <nl> - function getFuncWrapper ( func , sig ) { <nl> - if ( ! func ) return ; / / on null pointer , return undefined <nl> - assert ( sig ) ; <nl> - if ( ! funcWrappers [ sig ] ) { <nl> - funcWrappers [ sig ] = { } ; <nl> - } <nl> - var sigCache = funcWrappers [ sig ] ; <nl> - if ( ! sigCache [ func ] ) { <nl> - / / optimize away arguments usage in common cases <nl> - if ( sig . length = = = 1 ) { <nl> - sigCache [ func ] = function dynCall_wrapper ( ) { <nl> - return dynCall ( sig , func ) ; <nl> - } ; <nl> - } else if ( sig . length = = = 2 ) { <nl> - sigCache [ func ] = function dynCall_wrapper ( arg ) { <nl> - return dynCall ( sig , func , [ arg ] ) ; <nl> - } ; <nl> - } else { <nl> - / / general case <nl> - sigCache [ func ] = function dynCall_wrapper ( ) { <nl> - return dynCall ( sig , func , Array . prototype . slice . call ( arguments ) ) ; <nl> - } ; <nl> - } <nl> - } <nl> - return sigCache [ func ] ; <nl> - } <nl> - <nl> - <nl> - function makeBigInt ( low , high , unsigned ) { <nl> - return unsigned ? ( ( + ( ( low > > > 0 ) ) ) + ( ( + ( ( high > > > 0 ) ) ) * 4294967296 . 0 ) ) : ( ( + ( ( low > > > 0 ) ) ) + ( ( + ( ( high | 0 ) ) ) * 4294967296 . 0 ) ) ; <nl> - } <nl> - <nl> - function dynCall ( sig , ptr , args ) { <nl> - if ( args & & args . length ) { <nl> - assert ( args . length = = sig . length - 1 ) ; <nl> - assert ( ( ' dynCall_ ' + sig ) in Module , ' bad function pointer type - no table for sig \ ' ' + sig + ' \ ' ' ) ; <nl> - return Module [ ' dynCall_ ' + sig ] . apply ( null , [ ptr ] . concat ( args ) ) ; <nl> - } else { <nl> - assert ( sig . length = = 1 ) ; <nl> - assert ( ( ' dynCall_ ' + sig ) in Module , ' bad function pointer type - no table for sig \ ' ' + sig + ' \ ' ' ) ; <nl> - return Module [ ' dynCall_ ' + sig ] . call ( null , ptr ) ; <nl> - } <nl> - } <nl> - <nl> - var tempRet0 = 0 ; <nl> - <nl> - var setTempRet0 = function ( value ) { <nl> - tempRet0 = value ; <nl> - } ; <nl> - <nl> - var getTempRet0 = function ( ) { <nl> - return tempRet0 ; <nl> - } ; <nl> - <nl> - function getCompilerSetting ( name ) { <nl> - throw ' You must build with - s RETAIN_COMPILER_SETTINGS = 1 for getCompilerSetting or emscripten_get_compiler_setting to work ' ; <nl> - } <nl> - <nl> - var Runtime = { <nl> - / / helpful errors <nl> - getTempRet0 : function ( ) { abort ( ' getTempRet0 ( ) is now a top - level function , after removing the Runtime object . Remove " Runtime . 
" ' ) } , <nl> - staticAlloc : function ( ) { abort ( ' staticAlloc ( ) is now a top - level function , after removing the Runtime object . Remove " Runtime . " ' ) } , <nl> - stackAlloc : function ( ) { abort ( ' stackAlloc ( ) is now a top - level function , after removing the Runtime object . Remove " Runtime . " ' ) } , <nl> - } ; <nl> - <nl> - / / The address globals begin at . Very low in memory , for code size and optimization opportunities . <nl> - / / Above 0 is static memory , starting with globals . <nl> - / / Then the stack . <nl> - / / Then ' dynamic ' memory for sbrk . <nl> - var GLOBAL_BASE = 1024 ; <nl> - <nl> - <nl> - <nl> - <nl> - / / = = = Preamble library stuff = = = <nl> - <nl> - / / Documentation for the public APIs defined in this file must be updated in : <nl> - / / site / source / docs / api_reference / preamble . js . rst <nl> - / / A prebuilt local version of the documentation is available at : <nl> - / / site / build / text / docs / api_reference / preamble . js . txt <nl> - / / You can also build docs locally as HTML or other formats in site / <nl> - / / An online HTML version ( which may be of a different version of Emscripten ) <nl> - / / is up at http : / / kripken . github . io / emscripten - site / docs / api_reference / preamble . js . html <nl> - <nl> - <nl> - var wasmBinary ; if ( Module [ ' wasmBinary ' ] ) wasmBinary = Module [ ' wasmBinary ' ] ; if ( ! Object . getOwnPropertyDescriptor ( Module , ' wasmBinary ' ) ) Object . defineProperty ( Module , ' wasmBinary ' , { get : function ( ) { abort ( ' Module . wasmBinary has been replaced with plain wasmBinary ' ) } } ) ; <nl> - <nl> - <nl> - if ( typeof WebAssembly ! = = ' object ' ) { <nl> - abort ( ' No WebAssembly support found . Build with - s WASM = 0 to target JavaScript instead . ' ) ; <nl> - } <nl> - <nl> - <nl> - / / In MINIMAL_RUNTIME , setValue ( ) and getValue ( ) are only available when building with safe heap enabled , for heap safety checking . <nl> - / / In traditional runtime , setValue ( ) and getValue ( ) are always available ( although their use is highly discouraged due to perf penalties ) <nl> - <nl> - / * * @ type { function ( number , number , string , boolean = ) } * / <nl> - function setValue ( ptr , value , type , noSafe ) { <nl> - type = type | | ' i8 ' ; <nl> - if ( type . charAt ( type . length - 1 ) = = = ' * ' ) type = ' i32 ' ; / / pointers are 32 - bit <nl> - switch ( type ) { <nl> - case ' i1 ' : HEAP8 [ ( ( ptr ) > > 0 ) ] = value ; break ; <nl> - case ' i8 ' : HEAP8 [ ( ( ptr ) > > 0 ) ] = value ; break ; <nl> - case ' i16 ' : HEAP16 [ ( ( ptr ) > > 1 ) ] = value ; break ; <nl> - case ' i32 ' : HEAP32 [ ( ( ptr ) > > 2 ) ] = value ; break ; <nl> - case ' i64 ' : ( tempI64 = [ value > > > 0 , ( tempDouble = value , ( + ( Math_abs ( tempDouble ) ) ) > = 1 . 0 ? ( tempDouble > 0 . 0 ? ( ( Math_min ( ( + ( Math_floor ( ( tempDouble ) / 4294967296 . 0 ) ) ) , 4294967295 . 0 ) ) | 0 ) > > > 0 : ( ~ ~ ( ( + ( Math_ceil ( ( tempDouble - + ( ( ( ~ ~ ( tempDouble ) ) ) > > > 0 ) ) / 4294967296 . 
0 ) ) ) ) ) > > > 0 ) : 0 ) ] , HEAP32 [ ( ( ptr ) > > 2 ) ] = tempI64 [ 0 ] , HEAP32 [ ( ( ( ptr ) + ( 4 ) ) > > 2 ) ] = tempI64 [ 1 ] ) ; break ; <nl> - case ' float ' : HEAPF32 [ ( ( ptr ) > > 2 ) ] = value ; break ; <nl> - case ' double ' : HEAPF64 [ ( ( ptr ) > > 3 ) ] = value ; break ; <nl> - default : abort ( ' invalid type for setValue : ' + type ) ; <nl> - } <nl> - } <nl> - <nl> - / * * @ type { function ( number , string , boolean = ) } * / <nl> - function getValue ( ptr , type , noSafe ) { <nl> - type = type | | ' i8 ' ; <nl> - if ( type . charAt ( type . length - 1 ) = = = ' * ' ) type = ' i32 ' ; / / pointers are 32 - bit <nl> - switch ( type ) { <nl> - case ' i1 ' : return HEAP8 [ ( ( ptr ) > > 0 ) ] ; <nl> - case ' i8 ' : return HEAP8 [ ( ( ptr ) > > 0 ) ] ; <nl> - case ' i16 ' : return HEAP16 [ ( ( ptr ) > > 1 ) ] ; <nl> - case ' i32 ' : return HEAP32 [ ( ( ptr ) > > 2 ) ] ; <nl> - case ' i64 ' : return HEAP32 [ ( ( ptr ) > > 2 ) ] ; <nl> - case ' float ' : return HEAPF32 [ ( ( ptr ) > > 2 ) ] ; <nl> - case ' double ' : return HEAPF64 [ ( ( ptr ) > > 3 ) ] ; <nl> - default : abort ( ' invalid type for getValue : ' + type ) ; <nl> - } <nl> - return null ; <nl> - } <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - / / Wasm globals <nl> - <nl> - var wasmMemory ; <nl> - <nl> - / / Potentially used for direct table calls . <nl> - var wasmTable ; <nl> - <nl> - <nl> - / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - / / Runtime essentials <nl> - / / = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> - <nl> - / / whether we are quitting the application . no code should run after this . <nl> - / / set in exit ( ) and abort ( ) <nl> - var ABORT = false ; <nl> - <nl> - / / set by exit ( ) and abort ( ) . Passed to ' onExit ' handler . <nl> - / / NOTE : This is also used as the process return code code in shell environments <nl> - / / but only when noExitRuntime is false . <nl> - var EXITSTATUS = 0 ; <nl> - <nl> - / * * @ type { function ( * , string = ) } * / <nl> - function assert ( condition , text ) { <nl> - if ( ! condition ) { <nl> - abort ( ' Assertion failed : ' + text ) ; <nl> - } <nl> - } <nl> - <nl> - / / Returns the C function with a specified identifier ( for C + + , you need to do manual name mangling ) <nl> - function getCFunc ( ident ) { <nl> - var func = Module [ ' _ ' + ident ] ; / / closure exported function <nl> - assert ( func , ' Cannot call unknown function ' + ident + ' , make sure it is exported ' ) ; <nl> - return func ; <nl> - } <nl> - <nl> - / / C calling interface . <nl> - function ccall ( ident , returnType , argTypes , args , opts ) { <nl> - / / For fast lookup of conversion functions <nl> - var toC = { <nl> - ' string ' : function ( str ) { <nl> - var ret = 0 ; <nl> - if ( str ! = = null & & str ! = = undefined & & str ! = = 0 ) { / / null string <nl> - / / at most 4 bytes per UTF - 8 code point , + 1 for the trailing ' \ 0 ' <nl> - var len = ( str . length < < 2 ) + 1 ; <nl> - ret = stackAlloc ( len ) ; <nl> - stringToUTF8 ( str , ret , len ) ; <nl> - } <nl> - return ret ; <nl> - } , <nl> - ' array ' : function ( arr ) { <nl> - var ret = stackAlloc ( arr . 
length ) ; <nl> - writeArrayToMemory ( arr , ret ) ; <nl> - return ret ; <nl> - } <nl> - } ; <nl> - <nl> - function convertReturnValue ( ret ) { <nl> - if ( returnType = = = ' string ' ) return UTF8ToString ( ret ) ; <nl> - if ( returnType = = = ' boolean ' ) return Boolean ( ret ) ; <nl> - return ret ; <nl> - } <nl> - <nl> - var func = getCFunc ( ident ) ; <nl> - var cArgs = [ ] ; <nl> - var stack = 0 ; <nl> - assert ( returnType ! = = ' array ' , ' Return type should not be " array " . ' ) ; <nl> - if ( args ) { <nl> - for ( var i = 0 ; i < args . length ; i + + ) { <nl> - var converter = toC [ argTypes [ i ] ] ; <nl> - if ( converter ) { <nl> - if ( stack = = = 0 ) stack = stackSave ( ) ; <nl> - cArgs [ i ] = converter ( args [ i ] ) ; <nl> - } else { <nl> - cArgs [ i ] = args [ i ] ; <nl> - } <nl> - } <nl> - } <nl> - var ret = func . apply ( null , cArgs ) ; <nl> - assert ( ! ( opts & & opts . async ) , ' async call is only supported with Emterpretify for now , see # 9029 ' ) ; <nl> - <nl> - ret = convertReturnValue ( ret ) ; <nl> - if ( stack ! = = 0 ) stackRestore ( stack ) ; <nl> - return ret ; <nl> - } <nl> - <nl> - function cwrap ( ident , returnType , argTypes , opts ) { <nl> - return function ( ) { <nl> - return ccall ( ident , returnType , argTypes , arguments , opts ) ; <nl> - } <nl> - } <nl> - <nl> - var ALLOC_NORMAL = 0 ; / / Tries to use _malloc ( ) <nl> - var ALLOC_STACK = 1 ; / / Lives for the duration of the current function call <nl> - var ALLOC_DYNAMIC = 2 ; / / Cannot be freed except through sbrk <nl> - var ALLOC_NONE = 3 ; / / Do not allocate <nl> - <nl> - / / allocate ( ) : This is for internal use . You can use it yourself as well , but the interface <nl> - / / is a little tricky ( see docs right below ) . The reason is that it is optimized <nl> - / / for multiple syntaxes to save space in generated code . So you should <nl> - / / normally not use allocate ( ) , and instead allocate memory using _malloc ( ) , <nl> - / / initialize it with setValue ( ) , and so forth . <nl> - / / @ slab : An array of data , or a number . If a number , then the size of the block to allocate , <nl> - / / in * bytes * ( note that this is sometimes confusing : the next parameter does not <nl> - / / affect this ! ) <nl> - / / @ types : Either an array of types , one for each byte ( or 0 if no type at that position ) , <nl> - / / or a single type which is used for the entire block . This only matters if there <nl> - / / is initial data - if @ slab is a number , then this does not matter at all and is <nl> - / / ignored . <nl> - / / @ allocator : How to allocate memory , see ALLOC_ * <nl> - / * * @ type { function ( ( TypedArray | Array < number > | number ) , string , number , number = ) } * / <nl> - function allocate ( slab , types , allocator , ptr ) { <nl> - var zeroinit , size ; <nl> - if ( typeof slab = = = ' number ' ) { <nl> - zeroinit = true ; <nl> - size = slab ; <nl> - } else { <nl> - zeroinit = false ; <nl> - size = slab . length ; <nl> - } <nl> - <nl> - var singleType = typeof types = = = ' string ' ? types : null ; <nl> - <nl> - var ret ; <nl> - if ( allocator = = ALLOC_NONE ) { <nl> - ret = ptr ; <nl> - } else { <nl> - ret = [ _malloc , <nl> - stackAlloc , <nl> - dynamicAlloc ] [ allocator ] ( Math . max ( size , singleType ? 1 : types . 
length ) ) ; <nl> - } <nl> - <nl> - if ( zeroinit ) { <nl> - var stop ; <nl> - ptr = ret ; <nl> - assert ( ( ret & 3 ) = = 0 ) ; <nl> - stop = ret + ( size & ~ 3 ) ; <nl> - for ( ; ptr < stop ; ptr + = 4 ) { <nl> - HEAP32 [ ( ( ptr ) > > 2 ) ] = 0 ; <nl> - } <nl> - stop = ret + size ; <nl> - while ( ptr < stop ) { <nl> - HEAP8 [ ( ( ptr + + ) > > 0 ) ] = 0 ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - if ( singleType = = = ' i8 ' ) { <nl> - if ( slab . subarray | | slab . slice ) { <nl> - HEAPU8 . set ( / * * @ type { ! Uint8Array } * / ( slab ) , ret ) ; <nl> - } else { <nl> - HEAPU8 . set ( new Uint8Array ( slab ) , ret ) ; <nl> - } <nl> - return ret ; <nl> - } <nl> - <nl> - var i = 0 , type , typeSize , previousType ; <nl> - while ( i < size ) { <nl> - var curr = slab [ i ] ; <nl> - <nl> - type = singleType | | types [ i ] ; <nl> - if ( type = = = 0 ) { <nl> - i + + ; <nl> - continue ; <nl> - } <nl> - assert ( type , ' Must know what type to store in allocate ! ' ) ; <nl> - <nl> - if ( type = = ' i64 ' ) type = ' i32 ' ; / / special case : we have one i32 here , and one i32 later <nl> - <nl> - setValue ( ret + i , curr , type ) ; <nl> - <nl> - / / no need to look up size unless type changes , so cache it <nl> - if ( previousType ! = = type ) { <nl> - typeSize = getNativeTypeSize ( type ) ; <nl> - previousType = type ; <nl> - } <nl> - i + = typeSize ; <nl> - } <nl> - <nl> - return ret ; <nl> - } <nl> - <nl> - / / Allocate memory during any stage of startup - static memory early on , dynamic memory later , malloc when ready <nl> - function getMemory ( size ) { <nl> - if ( ! runtimeInitialized ) return dynamicAlloc ( size ) ; <nl> - return _malloc ( size ) ; <nl> - } <nl> - <nl> - <nl> - <nl> - <nl> - / * * @ type { function ( number , number = ) } * / <nl> - function Pointer_stringify ( ptr , length ) { <nl> - abort ( " this function has been removed - you should use UTF8ToString ( ptr , maxBytesToRead ) instead ! " ) ; <nl> - } <nl> - <nl> - / / Given a pointer ' ptr ' to a null - terminated ASCII - encoded string in the emscripten HEAP , returns <nl> - / / a copy of that string as a Javascript String object . <nl> - <nl> - function AsciiToString ( ptr ) { <nl> - var str = ' ' ; <nl> - while ( 1 ) { <nl> - var ch = HEAPU8 [ ( ( ptr + + ) > > 0 ) ] ; <nl> - if ( ! ch ) return str ; <nl> - str + = String . fromCharCode ( ch ) ; <nl> - } <nl> - } <nl> - <nl> - / / Copies the given Javascript String object ' str ' to the emscripten HEAP at address ' outPtr ' , <nl> - / / null - terminated and encoded in ASCII form . The copy will require at most str . length + 1 bytes of space in the HEAP . <nl> - <nl> - function stringToAscii ( str , outPtr ) { <nl> - return writeAsciiToMemory ( str , outPtr , false ) ; <nl> - } <nl> - <nl> - <nl> - / / Given a pointer ' ptr ' to a null - terminated UTF8 - encoded string in the given array that contains uint8 values , returns <nl> - / / a copy of that string as a Javascript String object . <nl> - <nl> - var UTF8Decoder = typeof TextDecoder ! = = ' undefined ' ? new TextDecoder ( ' utf8 ' ) : undefined ; <nl> - <nl> - / * * <nl> - * @ param { number } idx <nl> - * @ param { number = } maxBytesToRead <nl> - * @ return { string } <nl> - * / <nl> - function UTF8ArrayToString ( u8Array , idx , maxBytesToRead ) { <nl> - var endIdx = idx + maxBytesToRead ; <nl> - var endPtr = idx ; <nl> - / / TextDecoder needs to know the byte length in advance , it doesn ' t stop on null terminator by itself . 
<nl> - / / Also , use the length info to avoid running tiny strings through TextDecoder , since . subarray ( ) allocates garbage . <nl> - / / ( As a tiny code save trick , compare endPtr against endIdx using a negation , so that undefined means Infinity ) <nl> - while ( u8Array [ endPtr ] & & ! ( endPtr > = endIdx ) ) + + endPtr ; <nl> - <nl> - if ( endPtr - idx > 16 & & u8Array . subarray & & UTF8Decoder ) { <nl> - return UTF8Decoder . decode ( u8Array . subarray ( idx , endPtr ) ) ; <nl> - } else { <nl> - var str = ' ' ; <nl> - / / If building with TextDecoder , we have already computed the string length above , so test loop end condition against that <nl> - while ( idx < endPtr ) { <nl> - / / For UTF8 byte structure , see : <nl> - / / http : / / en . wikipedia . org / wiki / UTF - 8 # Description <nl> - / / https : / / www . ietf . org / rfc / rfc2279 . txt <nl> - / / https : / / tools . ietf . org / html / rfc3629 <nl> - var u0 = u8Array [ idx + + ] ; <nl> - if ( ! ( u0 & 0x80 ) ) { str + = String . fromCharCode ( u0 ) ; continue ; } <nl> - var u1 = u8Array [ idx + + ] & 63 ; <nl> - if ( ( u0 & 0xE0 ) = = 0xC0 ) { str + = String . fromCharCode ( ( ( u0 & 31 ) < < 6 ) | u1 ) ; continue ; } <nl> - var u2 = u8Array [ idx + + ] & 63 ; <nl> - if ( ( u0 & 0xF0 ) = = 0xE0 ) { <nl> - u0 = ( ( u0 & 15 ) < < 12 ) | ( u1 < < 6 ) | u2 ; <nl> - } else { <nl> - if ( ( u0 & 0xF8 ) ! = 0xF0 ) warnOnce ( ' Invalid UTF - 8 leading byte 0x ' + u0 . toString ( 16 ) + ' encountered when deserializing a UTF - 8 string on the asm . js / wasm heap to a JS string ! ' ) ; <nl> - u0 = ( ( u0 & 7 ) < < 18 ) | ( u1 < < 12 ) | ( u2 < < 6 ) | ( u8Array [ idx + + ] & 63 ) ; <nl> - } <nl> - <nl> - if ( u0 < 0x10000 ) { <nl> - str + = String . fromCharCode ( u0 ) ; <nl> - } else { <nl> - var ch = u0 - 0x10000 ; <nl> - str + = String . fromCharCode ( 0xD800 | ( ch > > 10 ) , 0xDC00 | ( ch & 0x3FF ) ) ; <nl> - } <nl> - } <nl> - } <nl> - return str ; <nl> - } <nl> - <nl> - / / Given a pointer ' ptr ' to a null - terminated UTF8 - encoded string in the emscripten HEAP , returns a <nl> - / / copy of that string as a Javascript String object . <nl> - / / maxBytesToRead : an optional length that specifies the maximum number of bytes to read . You can omit <nl> - / / this parameter to scan the string until the first \ 0 byte . If maxBytesToRead is <nl> - / / passed , and the string at [ ptr , ptr + maxBytesToReadr [ contains a null byte in the <nl> - / / middle , then the string will cut short at that byte index ( i . e . maxBytesToRead will <nl> - / / not produce a string of exact length [ ptr , ptr + maxBytesToRead [ ) <nl> - / / N . B . mixing frequent uses of UTF8ToString ( ) with and without maxBytesToRead may <nl> - / / throw JS JIT optimizations off , so it is worth to consider consistently using one <nl> - / / style or the other . <nl> - / * * <nl> - * @ param { number } ptr <nl> - * @ param { number = } maxBytesToRead <nl> - * @ return { string } <nl> - * / <nl> - function UTF8ToString ( ptr , maxBytesToRead ) { <nl> - return ptr ? UTF8ArrayToString ( HEAPU8 , ptr , maxBytesToRead ) : ' ' ; <nl> - } <nl> - <nl> - / / Copies the given Javascript String object ' str ' to the given byte array at address ' outIdx ' , <nl> - / / encoded in UTF8 form and null - terminated . The copy will require at most str . length * 4 + 1 bytes of space in the HEAP . <nl> - / / Use the function lengthBytesUTF8 to compute the exact number of bytes ( excluding null terminator ) that this function will write . 
<nl> - / / Parameters : <nl> - / / str : the Javascript string to copy . <nl> - / / outU8Array : the array to copy to . Each index in this array is assumed to be one 8 - byte element . <nl> - / / outIdx : The starting offset in the array to begin the copying . <nl> - / / maxBytesToWrite : The maximum number of bytes this function can write to the array . <nl> - / / This count should include the null terminator , <nl> - / / i . e . if maxBytesToWrite = 1 , only the null terminator will be written and nothing else . <nl> - / / maxBytesToWrite = 0 does not write any bytes to the output , not even the null terminator . <nl> - / / Returns the number of bytes written , EXCLUDING the null terminator . <nl> - <nl> - function stringToUTF8Array ( str , outU8Array , outIdx , maxBytesToWrite ) { <nl> - if ( ! ( maxBytesToWrite > 0 ) ) / / Parameter maxBytesToWrite is not optional . Negative values , 0 , null , undefined and false each don ' t write out any bytes . <nl> - return 0 ; <nl> - <nl> - var startIdx = outIdx ; <nl> - var endIdx = outIdx + maxBytesToWrite - 1 ; / / - 1 for string null terminator . <nl> - for ( var i = 0 ; i < str . length ; + + i ) { <nl> - / / Gotcha : charCodeAt returns a 16 - bit word that is a UTF - 16 encoded code unit , not a Unicode code point of the character ! So decode UTF16 - > UTF32 - > UTF8 . <nl> - / / See http : / / unicode . org / faq / utf_bom . html # utf16 - 3 <nl> - / / For UTF8 byte structure , see http : / / en . wikipedia . org / wiki / UTF - 8 # Description and https : / / www . ietf . org / rfc / rfc2279 . txt and https : / / tools . ietf . org / html / rfc3629 <nl> - var u = str . charCodeAt ( i ) ; / / possibly a lead surrogate <nl> - if ( u > = 0xD800 & & u < = 0xDFFF ) { <nl> - var u1 = str . charCodeAt ( + + i ) ; <nl> - u = 0x10000 + ( ( u & 0x3FF ) < < 10 ) | ( u1 & 0x3FF ) ; <nl> - } <nl> - if ( u < = 0x7F ) { <nl> - if ( outIdx > = endIdx ) break ; <nl> - outU8Array [ outIdx + + ] = u ; <nl> - } else if ( u < = 0x7FF ) { <nl> - if ( outIdx + 1 > = endIdx ) break ; <nl> - outU8Array [ outIdx + + ] = 0xC0 | ( u > > 6 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( u & 63 ) ; <nl> - } else if ( u < = 0xFFFF ) { <nl> - if ( outIdx + 2 > = endIdx ) break ; <nl> - outU8Array [ outIdx + + ] = 0xE0 | ( u > > 12 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( ( u > > 6 ) & 63 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( u & 63 ) ; <nl> - } else { <nl> - if ( outIdx + 3 > = endIdx ) break ; <nl> - if ( u > = 0x200000 ) warnOnce ( ' Invalid Unicode code point 0x ' + u . toString ( 16 ) + ' encountered when serializing a JS string to an UTF - 8 string on the asm . js / wasm heap ! ( Valid unicode code points should be in range 0 - 0x1FFFFF ) . ' ) ; <nl> - outU8Array [ outIdx + + ] = 0xF0 | ( u > > 18 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( ( u > > 12 ) & 63 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( ( u > > 6 ) & 63 ) ; <nl> - outU8Array [ outIdx + + ] = 0x80 | ( u & 63 ) ; <nl> - } <nl> - } <nl> - / / Null - terminate the pointer to the buffer . <nl> - outU8Array [ outIdx ] = 0 ; <nl> - return outIdx - startIdx ; <nl> - } <nl> - <nl> - / / Copies the given Javascript String object ' str ' to the emscripten HEAP at address ' outPtr ' , <nl> - / / null - terminated and encoded in UTF8 form . The copy will require at most str . length * 4 + 1 bytes of space in the HEAP . <nl> - / / Use the function lengthBytesUTF8 to compute the exact number of bytes ( excluding null terminator ) that this function will write . 
<nl> - / / Returns the number of bytes written , EXCLUDING the null terminator . <nl> - <nl> - function stringToUTF8 ( str , outPtr , maxBytesToWrite ) { <nl> - assert ( typeof maxBytesToWrite = = ' number ' , ' stringToUTF8 ( str , outPtr , maxBytesToWrite ) is missing the third parameter that specifies the length of the output buffer ! ' ) ; <nl> - return stringToUTF8Array ( str , HEAPU8 , outPtr , maxBytesToWrite ) ; <nl> - } <nl> - <nl> - / / Returns the number of bytes the given Javascript string takes if encoded as a UTF8 byte array , EXCLUDING the null terminator byte . <nl> - function lengthBytesUTF8 ( str ) { <nl> - var len = 0 ; <nl> - for ( var i = 0 ; i < str . length ; + + i ) { <nl> - / / Gotcha : charCodeAt returns a 16 - bit word that is a UTF - 16 encoded code unit , not a Unicode code point of the character ! So decode UTF16 - > UTF32 - > UTF8 . <nl> - / / See http : / / unicode . org / faq / utf_bom . html # utf16 - 3 <nl> - var u = str . charCodeAt ( i ) ; / / possibly a lead surrogate <nl> - if ( u > = 0xD800 & & u < = 0xDFFF ) u = 0x10000 + ( ( u & 0x3FF ) < < 10 ) | ( str . charCodeAt ( + + i ) & 0x3FF ) ; <nl> - if ( u < = 0x7F ) + + len ; <nl> - else if ( u < = 0x7FF ) len + = 2 ; <nl> - else if ( u < = 0xFFFF ) len + = 3 ; <nl> - else len + = 4 ; <nl> - } <nl> - return len ; <nl> - } <nl> - <nl> - <nl> - / / Given a pointer ' ptr ' to a null - terminated UTF16LE - encoded string in the emscripten HEAP , returns <nl> - / / a copy of that string as a Javascript String object . <nl> - <nl> - var UTF16Decoder = typeof TextDecoder ! = = ' undefined ' ? new TextDecoder ( ' utf - 16le ' ) : undefined ; <nl> - function UTF16ToString ( ptr ) { <nl> - assert ( ptr % 2 = = 0 , ' Pointer passed to UTF16ToString must be aligned to two bytes ! ' ) ; <nl> - var endPtr = ptr ; <nl> - / / TextDecoder needs to know the byte length in advance , it doesn ' t stop on null terminator by itself . <nl> - / / Also , use the length info to avoid running tiny strings through TextDecoder , since . subarray ( ) allocates garbage . <nl> - var idx = endPtr > > 1 ; <nl> - while ( HEAP16 [ idx ] ) + + idx ; <nl> - endPtr = idx < < 1 ; <nl> - <nl> - if ( endPtr - ptr > 32 & & UTF16Decoder ) { <nl> - return UTF16Decoder . decode ( HEAPU8 . subarray ( ptr , endPtr ) ) ; <nl> - } else { <nl> - var i = 0 ; <nl> - <nl> - var str = ' ' ; <nl> - while ( 1 ) { <nl> - var codeUnit = HEAP16 [ ( ( ( ptr ) + ( i * 2 ) ) > > 1 ) ] ; <nl> - if ( codeUnit = = 0 ) return str ; <nl> - + + i ; <nl> - / / fromCharCode constructs a character from a UTF - 16 code unit , so we can pass the UTF16 string right through . <nl> - str + = String . fromCharCode ( codeUnit ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / / Copies the given Javascript String object ' str ' to the emscripten HEAP at address ' outPtr ' , <nl> - / / null - terminated and encoded in UTF16 form . The copy will require at most str . length * 4 + 2 bytes of space in the HEAP . <nl> - / / Use the function lengthBytesUTF16 ( ) to compute the exact number of bytes ( excluding null terminator ) that this function will write . <nl> - / / Parameters : <nl> - / / str : the Javascript string to copy . <nl> - / / outPtr : Byte address in Emscripten HEAP where to write the string to . <nl> - / / maxBytesToWrite : The maximum number of bytes this function can write to the array . This count should include the null <nl> - / / terminator , i . e . if maxBytesToWrite = 2 , only the null terminator will be written and nothing else . 
<nl> - / / maxBytesToWrite < 2 does not write any bytes to the output , not even the null terminator . <nl> - / / Returns the number of bytes written , EXCLUDING the null terminator . <nl> - <nl> - function stringToUTF16 ( str , outPtr , maxBytesToWrite ) { <nl> - assert ( outPtr % 2 = = 0 , ' Pointer passed to stringToUTF16 must be aligned to two bytes ! ' ) ; <nl> - assert ( typeof maxBytesToWrite = = ' number ' , ' stringToUTF16 ( str , outPtr , maxBytesToWrite ) is missing the third parameter that specifies the length of the output buffer ! ' ) ; <nl> - / / Backwards compatibility : if max bytes is not specified , assume unsafe unbounded write is allowed . <nl> - if ( maxBytesToWrite = = = undefined ) { <nl> - maxBytesToWrite = 0x7FFFFFFF ; <nl> - } <nl> - if ( maxBytesToWrite < 2 ) return 0 ; <nl> - maxBytesToWrite - = 2 ; / / Null terminator . <nl> - var startPtr = outPtr ; <nl> - var numCharsToWrite = ( maxBytesToWrite < str . length * 2 ) ? ( maxBytesToWrite / 2 ) : str . length ; <nl> - for ( var i = 0 ; i < numCharsToWrite ; + + i ) { <nl> - / / charCodeAt returns a UTF - 16 encoded code unit , so it can be directly written to the HEAP . <nl> - var codeUnit = str . charCodeAt ( i ) ; / / possibly a lead surrogate <nl> - HEAP16 [ ( ( outPtr ) > > 1 ) ] = codeUnit ; <nl> - outPtr + = 2 ; <nl> - } <nl> - / / Null - terminate the pointer to the HEAP . <nl> - HEAP16 [ ( ( outPtr ) > > 1 ) ] = 0 ; <nl> - return outPtr - startPtr ; <nl> - } <nl> - <nl> - / / Returns the number of bytes the given Javascript string takes if encoded as a UTF16 byte array , EXCLUDING the null terminator byte . <nl> - <nl> - function lengthBytesUTF16 ( str ) { <nl> - return str . length * 2 ; <nl> - } <nl> - <nl> - function UTF32ToString ( ptr ) { <nl> - assert ( ptr % 4 = = 0 , ' Pointer passed to UTF32ToString must be aligned to four bytes ! ' ) ; <nl> - var i = 0 ; <nl> - <nl> - var str = ' ' ; <nl> - while ( 1 ) { <nl> - var utf32 = HEAP32 [ ( ( ( ptr ) + ( i * 4 ) ) > > 2 ) ] ; <nl> - if ( utf32 = = 0 ) <nl> - return str ; <nl> - + + i ; <nl> - / / Gotcha : fromCharCode constructs a character from a UTF - 16 encoded code ( pair ) , not from a Unicode code point ! So encode the code point to UTF - 16 for constructing . <nl> - / / See http : / / unicode . org / faq / utf_bom . html # utf16 - 3 <nl> - if ( utf32 > = 0x10000 ) { <nl> - var ch = utf32 - 0x10000 ; <nl> - str + = String . fromCharCode ( 0xD800 | ( ch > > 10 ) , 0xDC00 | ( ch & 0x3FF ) ) ; <nl> - } else { <nl> - str + = String . fromCharCode ( utf32 ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - / / Copies the given Javascript String object ' str ' to the emscripten HEAP at address ' outPtr ' , <nl> - / / null - terminated and encoded in UTF32 form . The copy will require at most str . length * 4 + 4 bytes of space in the HEAP . <nl> - / / Use the function lengthBytesUTF32 ( ) to compute the exact number of bytes ( excluding null terminator ) that this function will write . <nl> - / / Parameters : <nl> - / / str : the Javascript string to copy . <nl> - / / outPtr : Byte address in Emscripten HEAP where to write the string to . <nl> - / / maxBytesToWrite : The maximum number of bytes this function can write to the array . This count should include the null <nl> - / / terminator , i . e . if maxBytesToWrite = 4 , only the null terminator will be written and nothing else . <nl> - / / maxBytesToWrite < 4 does not write any bytes to the output , not even the null terminator . 
<nl> - / / Returns the number of bytes written , EXCLUDING the null terminator . <nl> - <nl> - function stringToUTF32 ( str , outPtr , maxBytesToWrite ) { <nl> - assert ( outPtr % 4 = = 0 , ' Pointer passed to stringToUTF32 must be aligned to four bytes ! ' ) ; <nl> - assert ( typeof maxBytesToWrite = = ' number ' , ' stringToUTF32 ( str , outPtr , maxBytesToWrite ) is missing the third parameter that specifies the length of the output buffer ! ' ) ; <nl> - / / Backwards compatibility : if max bytes is not specified , assume unsafe unbounded write is allowed . <nl> - if ( maxBytesToWrite = = = undefined ) { <nl> - maxBytesToWrite = 0x7FFFFFFF ; <nl> - } <nl> - if ( maxBytesToWrite < 4 ) return 0 ; <nl> - var startPtr = outPtr ; <nl> - var endPtr = startPtr + maxBytesToWrite - 4 ; <nl> - for ( var i = 0 ; i < str . length ; + + i ) { <nl> - / / Gotcha : charCodeAt returns a 16 - bit word that is a UTF - 16 encoded code unit , not a Unicode code point of the character ! We must decode the string to UTF - 32 to the heap . <nl> - / / See http : / / unicode . org / faq / utf_bom . html # utf16 - 3 <nl> - var codeUnit = str . charCodeAt ( i ) ; / / possibly a lead surrogate <nl> - if ( codeUnit > = 0xD800 & & codeUnit < = 0xDFFF ) { <nl> - var trailSurrogate = str . charCodeAt ( + + i ) ; <nl> - codeUnit = 0x10000 + ( ( codeUnit & 0x3FF ) < < 10 ) | ( trailSurrogate & 0x3FF ) ; <nl> - } <nl> - HEAP32 [ ( ( outPtr ) > > 2 ) ] = codeUnit ; <nl> - outPtr + = 4 ; <nl> - if ( outPtr + 4 > endPtr ) break ; <nl> - } <nl> - / / Null - terminate the pointer to the HEAP . <nl> - HEAP32 [ ( ( outPtr ) > > 2 ) ] = 0 ; <nl> - return outPtr - startPtr ; <nl> - } <nl> - <nl> - / / Returns the number of bytes the given Javascript string takes if encoded as a UTF16 byte array , EXCLUDING the null terminator byte . <nl> - <nl> - function lengthBytesUTF32 ( str ) { <nl> - var len = 0 ; <nl> - for ( var i = 0 ; i < str . length ; + + i ) { <nl> - / / Gotcha : charCodeAt returns a 16 - bit word that is a UTF - 16 encoded code unit , not a Unicode code point of the character ! We must decode the string to UTF - 32 to the heap . <nl> - / / See http : / / unicode . org / faq / utf_bom . html # utf16 - 3 <nl> - var codeUnit = str . charCodeAt ( i ) ; <nl> - if ( codeUnit > = 0xD800 & & codeUnit < = 0xDFFF ) + + i ; / / possibly a lead surrogate , so skip over the tail surrogate . <nl> - len + = 4 ; <nl> - } <nl> - <nl> - return len ; <nl> - } <nl> - <nl> - / / Allocate heap space for a JS string , and write it there . <nl> - / / It is the responsibility of the caller to free ( ) that memory . <nl> - function allocateUTF8 ( str ) { <nl> - var size = lengthBytesUTF8 ( str ) + 1 ; <nl> - var ret = _malloc ( size ) ; <nl> - if ( ret ) stringToUTF8Array ( str , HEAP8 , ret , size ) ; <nl> - return ret ; <nl> - } <nl> - <nl> - / / Allocate stack space for a JS string , and write it there . <nl> - function allocateUTF8OnStack ( str ) { <nl> - var size = lengthBytesUTF8 ( str ) + 1 ; <nl> - var ret = stackAlloc ( size ) ; <nl> - stringToUTF8Array ( str , HEAP8 , ret , size ) ; <nl> - return ret ; <nl> - } <nl> - <nl> - / / Deprecated : This function should not be called because it is unsafe and does not provide <nl> - / / a maximum length limit of how many bytes it is allowed to write . Prefer calling the <nl> - / / function stringToUTF8Array ( ) instead , which takes in a maximum length that can be used <nl> - / / to be secure from out of bounds writes . 
<nl> - / * * @ deprecated * / <nl> - function writeStringToMemory ( string , buffer , dontAddNull ) { <nl> - warnOnce ( ' writeStringToMemory is deprecated and should not be called ! Use stringToUTF8 ( ) instead ! ' ) ; <nl> - <nl> - var / * * @ type { number } * / lastChar , / * * @ type { number } * / end ; <nl> - if ( dontAddNull ) { <nl> - / / stringToUTF8Array always appends null . If we don ' t want to do that , remember the <nl> - / / character that existed at the location where the null will be placed , and restore <nl> - / / that after the write ( below ) . <nl> - end = buffer + lengthBytesUTF8 ( string ) ; <nl> - lastChar = HEAP8 [ end ] ; <nl> - } <nl> - stringToUTF8 ( string , buffer , Infinity ) ; <nl> - if ( dontAddNull ) HEAP8 [ end ] = lastChar ; / / Restore the value under the null character . <nl> - } <nl> - <nl> - function writeArrayToMemory ( array , buffer ) { <nl> - assert ( array . length > = 0 , ' writeArrayToMemory array must have a length ( should be an array or typed array ) ' ) <nl> - HEAP8 . set ( array , buffer ) ; <nl> - } <nl> - <nl> - function writeAsciiToMemory ( str , buffer , dontAddNull ) { <nl> - for ( var i = 0 ; i < str . length ; + + i ) { <nl> - assert ( str . charCodeAt ( i ) = = = str . charCodeAt ( i ) & 0xff ) ; <nl> - HEAP8 [ ( ( buffer + + ) > > 0 ) ] = str . charCodeAt ( i ) ; <nl> - } <nl> - / / Null - terminate the pointer to the HEAP . <nl> - if ( ! dontAddNull ) HEAP8 [ ( ( buffer ) > > 0 ) ] = 0 ; <nl> - } <nl> - <nl> - <nl> - <nl> - <nl> - / / Memory management <nl> - <nl> - var PAGE_SIZE = 16384 ; <nl> - var WASM_PAGE_SIZE = 65536 ; <nl> - var ASMJS_PAGE_SIZE = 16777216 ; <nl> - <nl> - function alignUp ( x , multiple ) { <nl> - if ( x % multiple > 0 ) { <nl> - x + = multiple - ( x % multiple ) ; <nl> - } <nl> - return x ; <nl> - } <nl> - <nl> - var HEAP , <nl> - / * * @ type { ArrayBuffer } * / <nl> - buffer , <nl> - / * * @ type { Int8Array } * / <nl> - HEAP8 , <nl> - / * * @ type { Uint8Array } * / <nl> - HEAPU8 , <nl> - / * * @ type { Int16Array } * / <nl> - HEAP16 , <nl> - / * * @ type { Uint16Array } * / <nl> - HEAPU16 , <nl> - / * * @ type { Int32Array } * / <nl> - HEAP32 , <nl> - / * * @ type { Uint32Array } * / <nl> - HEAPU32 , <nl> - / * * @ type { Float32Array } * / <nl> - HEAPF32 , <nl> - / * * @ type { Float64Array } * / <nl> - HEAPF64 ; <nl> - <nl> - function updateGlobalBufferViews ( ) { <nl> - Module [ ' HEAP8 ' ] = HEAP8 = new Int8Array ( buffer ) ; <nl> - Module [ ' HEAP16 ' ] = HEAP16 = new Int16Array ( buffer ) ; <nl> - Module [ ' HEAP32 ' ] = HEAP32 = new Int32Array ( buffer ) ; <nl> - Module [ ' HEAPU8 ' ] = HEAPU8 = new Uint8Array ( buffer ) ; <nl> - Module [ ' HEAPU16 ' ] = HEAPU16 = new Uint16Array ( buffer ) ; <nl> - Module [ ' HEAPU32 ' ] = HEAPU32 = new Uint32Array ( buffer ) ; <nl> - Module [ ' HEAPF32 ' ] = HEAPF32 = new Float32Array ( buffer ) ; <nl> - Module [ ' HEAPF64 ' ] = HEAPF64 = new Float64Array ( buffer ) ; <nl> - } <nl> - <nl> - <nl> - var STATIC_BASE = 1024 , <nl> - STACK_BASE = 5246672 , <nl> - STACKTOP = STACK_BASE , <nl> - STACK_MAX = 3792 , <nl> - DYNAMIC_BASE = 5246672 , <nl> - DYNAMICTOP_PTR = 3776 ; <nl> - <nl> - assert ( STACK_BASE % 16 = = = 0 , ' stack must start aligned ' ) ; <nl> - assert ( DYNAMIC_BASE % 16 = = = 0 , ' heap must start aligned ' ) ; <nl> - <nl> - <nl> - <nl> - var TOTAL_STACK = 5242880 ; <nl> - if ( Module [ ' TOTAL_STACK ' ] ) assert ( TOTAL_STACK = = = Module [ ' TOTAL_STACK ' ] , ' the stack size can no longer be determined at runtime ' ) <nl> - <nl> - var 
INITIAL_TOTAL_MEMORY = Module [ ' TOTAL_MEMORY ' ] | | 16777216 ; if ( ! Object . getOwnPropertyDescriptor ( Module , ' TOTAL_MEMORY ' ) ) Object . defineProperty ( Module , ' TOTAL_MEMORY ' , { get : function ( ) { abort ( ' Module . TOTAL_MEMORY has been replaced with plain INITIAL_TOTAL_MEMORY ' ) } } ) ; <nl> - <nl> - assert ( INITIAL_TOTAL_MEMORY > = TOTAL_STACK , ' TOTAL_MEMORY should be larger than TOTAL_STACK , was ' + INITIAL_TOTAL_MEMORY + ' ! ( TOTAL_STACK = ' + TOTAL_STACK + ' ) ' ) ; <nl> - <nl> - / / check for full engine support ( use string ' subarray ' to avoid closure compiler confusion ) <nl> - assert ( typeof Int32Array ! = = ' undefined ' & & typeof Float64Array ! = = ' undefined ' & & Int32Array . prototype . subarray ! = = undefined & & Int32Array . prototype . set ! = = undefined , <nl> - ' JS engine does not provide full typed array support ' ) ; <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - if ( Module [ ' wasmMemory ' ] ) { <nl> - wasmMemory = Module [ ' wasmMemory ' ] ; <nl> - } else <nl> - { <nl> - wasmMemory = new WebAssembly . Memory ( { <nl> - ' initial ' : INITIAL_TOTAL_MEMORY / WASM_PAGE_SIZE <nl> - , <nl> - ' maximum ' : INITIAL_TOTAL_MEMORY / WASM_PAGE_SIZE <nl> - } ) ; <nl> - } <nl> - <nl> - <nl> - if ( wasmMemory ) { <nl> - buffer = wasmMemory . buffer ; <nl> - } <nl> - <nl> - / / If the user provides an incorrect length , just use that length instead rather than providing the user to <nl> - / / specifically provide the memory length with Module [ ' TOTAL_MEMORY ' ] . <nl> - INITIAL_TOTAL_MEMORY = buffer . byteLength ; <nl> - assert ( INITIAL_TOTAL_MEMORY % WASM_PAGE_SIZE = = = 0 ) ; <nl> - updateGlobalBufferViews ( ) ; <nl> - <nl> - HEAP32 [ DYNAMICTOP_PTR > > 2 ] = DYNAMIC_BASE ; <nl> - <nl> - <nl> - / / Initializes the stack cookie . Called at the startup of main and at the startup of each thread in pthreads mode . <nl> - function writeStackCookie ( ) { <nl> - assert ( ( STACK_MAX & 3 ) = = 0 ) ; <nl> - / / The stack grows downwards <nl> - HEAPU32 [ ( STACK_MAX > > 2 ) + 1 ] = 0x02135467 ; <nl> - HEAPU32 [ ( STACK_MAX > > 2 ) + 2 ] = 0x89BACDFE ; <nl> - } <nl> - <nl> - function checkStackCookie ( ) { <nl> - var cookie1 = HEAPU32 [ ( STACK_MAX > > 2 ) + 1 ] ; <nl> - var cookie2 = HEAPU32 [ ( STACK_MAX > > 2 ) + 2 ] ; <nl> - if ( cookie1 ! = 0x02135467 | | cookie2 ! = 0x89BACDFE ) { <nl> - abort ( ' Stack overflow ! Stack cookie has been overwritten , expected hex dwords 0x89BACDFE and 0x02135467 , but received 0x ' + cookie2 . toString ( 16 ) + ' ' + cookie1 . toString ( 16 ) ) ; <nl> - } <nl> - / / Also test the global address 0 for integrity . <nl> - / / We don ' t do this with ASan because ASan does its own checks for this . <nl> - if ( HEAP32 [ 0 ] ! = = 0x63736d65 / * ' emsc ' * / ) abort ( ' Runtime error : The application has corrupted its heap memory area ( address zero ) ! ' ) ; <nl> - } <nl> - <nl> - function abortStackOverflow ( allocSize ) { <nl> - abort ( ' Stack overflow ! Attempted to allocate ' + allocSize + ' bytes on the stack , but stack has only ' + ( STACK_MAX - stackSave ( ) + allocSize ) + ' bytes available ! ' ) ; <nl> - } <nl> - <nl> - <nl> - HEAP32 [ 0 ] = 0x63736d65 ; / * ' emsc ' * / <nl> - <nl> - <nl> - <nl> - / / Endianness check ( note : assumes compiler arch was little - endian ) <nl> - HEAP16 [ 1 ] = 0x6373 ; <nl> - if ( HEAPU8 [ 2 ] ! = = 0x73 | | HEAPU8 [ 3 ] ! = = 0x63 ) throw ' Runtime error : expected the system to be little - endian ! 
' ; <nl> - <nl> - function abortFnPtrError ( ptr , sig ) { <nl> - abort ( " Invalid function pointer " + ptr + " called with signature ' " + sig + " ' . Perhaps this is an invalid value ( e . g . caused by calling a virtual method on a NULL pointer ) ? Or calling a function with an incorrect type , which will fail ? ( it is worth building your source files with - Werror ( warnings are errors ) , as warnings can indicate undefined behavior which can cause this ) . Build with ASSERTIONS = 2 for more info . " ) ; <nl> - } <nl> - <nl> - <nl> - <nl> - function callRuntimeCallbacks ( callbacks ) { <nl> - while ( callbacks . length > 0 ) { <nl> - var callback = callbacks . shift ( ) ; <nl> - if ( typeof callback = = ' function ' ) { <nl> - callback ( ) ; <nl> - continue ; <nl> - } <nl> - var func = callback . func ; <nl> - if ( typeof func = = = ' number ' ) { <nl> - if ( callback . arg = = = undefined ) { <nl> - Module [ ' dynCall_v ' ] ( func ) ; <nl> - } else { <nl> - Module [ ' dynCall_vi ' ] ( func , callback . arg ) ; <nl> - } <nl> - } else { <nl> - func ( callback . arg = = = undefined ? null : callback . arg ) ; <nl> - } <nl> - } <nl> - } <nl> - <nl> - var __ATPRERUN__ = [ ] ; / / functions called before the runtime is initialized <nl> - var __ATINIT__ = [ ] ; / / functions called during startup <nl> - var __ATMAIN__ = [ ] ; / / functions called when main ( ) is to be run <nl> - var __ATEXIT__ = [ ] ; / / functions called during shutdown <nl> - var __ATPOSTRUN__ = [ ] ; / / functions called after the main ( ) is called <nl> - <nl> - var runtimeInitialized = false ; <nl> - var runtimeExited = false ; <nl> - <nl> - <nl> - function preRun ( ) { <nl> - <nl> - if ( Module [ ' preRun ' ] ) { <nl> - if ( typeof Module [ ' preRun ' ] = = ' function ' ) Module [ ' preRun ' ] = [ Module [ ' preRun ' ] ] ; <nl> - while ( Module [ ' preRun ' ] . length ) { <nl> - addOnPreRun ( Module [ ' preRun ' ] . shift ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - callRuntimeCallbacks ( __ATPRERUN__ ) ; <nl> - } <nl> - <nl> - function initRuntime ( ) { <nl> - checkStackCookie ( ) ; <nl> - assert ( ! runtimeInitialized ) ; <nl> - runtimeInitialized = true ; <nl> - <nl> - callRuntimeCallbacks ( __ATINIT__ ) ; <nl> - } <nl> - <nl> - function preMain ( ) { <nl> - checkStackCookie ( ) ; <nl> - <nl> - callRuntimeCallbacks ( __ATMAIN__ ) ; <nl> - } <nl> - <nl> - function exitRuntime ( ) { <nl> - checkStackCookie ( ) ; <nl> - runtimeExited = true ; <nl> - } <nl> - <nl> - function postRun ( ) { <nl> - checkStackCookie ( ) ; <nl> - <nl> - if ( Module [ ' postRun ' ] ) { <nl> - if ( typeof Module [ ' postRun ' ] = = ' function ' ) Module [ ' postRun ' ] = [ Module [ ' postRun ' ] ] ; <nl> - while ( Module [ ' postRun ' ] . length ) { <nl> - addOnPostRun ( Module [ ' postRun ' ] . shift ( ) ) ; <nl> - } <nl> - } <nl> - <nl> - callRuntimeCallbacks ( __ATPOSTRUN__ ) ; <nl> - } <nl> - <nl> - function addOnPreRun ( cb ) { <nl> - __ATPRERUN__ . unshift ( cb ) ; <nl> - } <nl> - <nl> - function addOnInit ( cb ) { <nl> - __ATINIT__ . unshift ( cb ) ; <nl> - } <nl> - <nl> - function addOnPreMain ( cb ) { <nl> - __ATMAIN__ . unshift ( cb ) ; <nl> - } <nl> - <nl> - function addOnExit ( cb ) { <nl> - } <nl> - <nl> - function addOnPostRun ( cb ) { <nl> - __ATPOSTRUN__ . unshift ( cb ) ; <nl> - } <nl> - <nl> - function unSign ( value , bits , ignore ) { <nl> - if ( value > = 0 ) { <nl> - return value ; <nl> - } <nl> - return bits < = 32 ? 2 * Math . 
abs ( 1 < < ( bits - 1 ) ) + value / / Need some trickery , since if bits = = 32 , we are right at the limit of the bits JS uses in bitshifts <nl> - : Math . pow ( 2 , bits ) + value ; <nl> - } <nl> - function reSign ( value , bits , ignore ) { <nl> - if ( value < = 0 ) { <nl> - return value ; <nl> - } <nl> - var half = bits < = 32 ? Math . abs ( 1 < < ( bits - 1 ) ) / / abs is needed if bits = = 32 <nl> - : Math . pow ( 2 , bits - 1 ) ; <nl> - if ( value > = half & & ( bits < = 32 | | value > half ) ) { / / for huge values , we can hit the precision limit and always get true here . so don ' t do that <nl> - / / but , in general there is no perfect solution here . With 64 - bit ints , we get rounding and errors <nl> - / / TODO : In i64 mode 1 , resign the two parts separately and safely <nl> - value = - 2 * half + value ; / / Cannot bitshift half , as it may be at the limit of the bits JS uses in bitshifts <nl> - } <nl> - return value ; <nl> - } <nl> - <nl> - <nl> - assert ( Math . imul , ' This browser does not support Math . imul ( ) , build with LEGACY_VM_SUPPORT or POLYFILL_OLD_MATH_FUNCTIONS to add in a polyfill ' ) ; <nl> - assert ( Math . fround , ' This browser does not support Math . fround ( ) , build with LEGACY_VM_SUPPORT or POLYFILL_OLD_MATH_FUNCTIONS to add in a polyfill ' ) ; <nl> - assert ( Math . clz32 , ' This browser does not support Math . clz32 ( ) , build with LEGACY_VM_SUPPORT or POLYFILL_OLD_MATH_FUNCTIONS to add in a polyfill ' ) ; <nl> - assert ( Math . trunc , ' This browser does not support Math . trunc ( ) , build with LEGACY_VM_SUPPORT or POLYFILL_OLD_MATH_FUNCTIONS to add in a polyfill ' ) ; <nl> - <nl> - var Math_abs = Math . abs ; <nl> - var Math_cos = Math . cos ; <nl> - var Math_sin = Math . sin ; <nl> - var Math_tan = Math . tan ; <nl> - var Math_acos = Math . acos ; <nl> - var Math_asin = Math . asin ; <nl> - var Math_atan = Math . atan ; <nl> - var Math_atan2 = Math . atan2 ; <nl> - var Math_exp = Math . exp ; <nl> - var Math_log = Math . log ; <nl> - var Math_sqrt = Math . sqrt ; <nl> - var Math_ceil = Math . ceil ; <nl> - var Math_floor = Math . floor ; <nl> - var Math_pow = Math . pow ; <nl> - var Math_imul = Math . imul ; <nl> - var Math_fround = Math . fround ; <nl> - var Math_round = Math . round ; <nl> - var Math_min = Math . min ; <nl> - var Math_max = Math . max ; <nl> - var Math_clz32 = Math . clz32 ; <nl> - var Math_trunc = Math . trunc ; <nl> - <nl> - <nl> - <nl> - / / A counter of dependencies for calling run ( ) . If we need to <nl> - / / do asynchronous work before running , increment this and <nl> - / / decrement it . Incrementing must happen in a place like <nl> - / / Module . preRun ( used by emcc to add file preloading ) . <nl> - / / Note that you can add dependencies in preRun , even though <nl> - / / it happens right before run - run will be postponed until <nl> - / / the dependencies are met . <nl> - var runDependencies = 0 ; <nl> - var runDependencyWatcher = null ; <nl> - var dependenciesFulfilled = null ; / / overridden to take different actions when all run dependencies are fulfilled <nl> - var runDependencyTracking = { } ; <nl> - <nl> - function getUniqueRunDependency ( id ) { <nl> - var orig = id ; <nl> - while ( 1 ) { <nl> - if ( ! runDependencyTracking [ id ] ) return id ; <nl> - id = orig + Math . 
random ( ) ; <nl> - } <nl> - return id ; <nl> - } <nl> - <nl> - function addRunDependency ( id ) { <nl> - runDependencies + + ; <nl> - <nl> - if ( Module [ ' monitorRunDependencies ' ] ) { <nl> - Module [ ' monitorRunDependencies ' ] ( runDependencies ) ; <nl> - } <nl> - <nl> - if ( id ) { <nl> - assert ( ! runDependencyTracking [ id ] ) ; <nl> - runDependencyTracking [ id ] = 1 ; <nl> - if ( runDependencyWatcher = = = null & & typeof setInterval ! = = ' undefined ' ) { <nl> - / / Check for missing dependencies every few seconds <nl> - runDependencyWatcher = setInterval ( function ( ) { <nl> - if ( ABORT ) { <nl> - clearInterval ( runDependencyWatcher ) ; <nl> - runDependencyWatcher = null ; <nl> - return ; <nl> - } <nl> - var shown = false ; <nl> - for ( var dep in runDependencyTracking ) { <nl> - if ( ! shown ) { <nl> - shown = true ; <nl> - err ( ' still waiting on run dependencies : ' ) ; <nl> - } <nl> - err ( ' dependency : ' + dep ) ; <nl> - } <nl> - if ( shown ) { <nl> - err ( ' ( end of list ) ' ) ; <nl> - } <nl> - } , 10000 ) ; <nl> - } <nl> - } else { <nl> - err ( ' warning : run dependency added without ID ' ) ; <nl> - } <nl> - } <nl> - <nl> - function removeRunDependency ( id ) { <nl> - runDependencies - - ; <nl> - <nl> - if ( Module [ ' monitorRunDependencies ' ] ) { <nl> - Module [ ' monitorRunDependencies ' ] ( runDependencies ) ; <nl> - } <nl> - <nl> - if ( id ) { <nl> - assert ( runDependencyTracking [ id ] ) ; <nl> - delete runDependencyTracking [ id ] ; <nl> - } else { <nl> - err ( ' warning : run dependency removed without ID ' ) ; <nl> - } <nl> - if ( runDependencies = = 0 ) { <nl> - if ( runDependencyWatcher ! = = null ) { <nl> - clearInterval ( runDependencyWatcher ) ; <nl> - runDependencyWatcher = null ; <nl> - } <nl> - if ( dependenciesFulfilled ) { <nl> - var callback = dependenciesFulfilled ; <nl> - dependenciesFulfilled = null ; <nl> - callback ( ) ; / / can add another dependenciesFulfilled <nl> - } <nl> - } <nl> - } <nl> - <nl> - Module [ " preloadedImages " ] = { } ; / / maps url to image data <nl> - Module [ " preloadedAudios " ] = { } ; / / maps url to audio data <nl> - <nl> - <nl> - var memoryInitializer = null ; <nl> - <nl> - <nl> - / / Copyright 2015 The Emscripten Authors . All rights reserved . <nl> - / / Emscripten is available under two separate licenses , the MIT license and the <nl> - / / University of Illinois / NCSA Open Source License . Both these licenses can be <nl> - / / found in the LICENSE file . <nl> - <nl> - var emscriptenMemoryProfiler = { <nl> - / / If true , walks all allocated pointers at graphing time to print a detailed memory fragmentation map . If false , used <nl> - / / memory is only graphed in one block ( at the bottom of DYNAMIC memory space ) . Set this to false to improve performance at the expense of <nl> - / / accuracy . <nl> - detailedHeapUsage : true , <nl> - <nl> - / / Allocations of memory blocks larger than this threshold will get their detailed callstack captured and logged at runtime . <nl> - trackedCallstackMinSizeBytes : ( typeof new Error ( ) . stack = = = ' undefined ' ) ? Infinity : 16 * 1024 * 1024 , <nl> - <nl> - / / Allocations from call sites having more than this many outstanding allocated pointers will get their detailed callstack captured and logged at runtime . <nl> - trackedCallstackMinAllocCount : ( typeof new Error ( ) . stack = = = ' undefined ' ) ? Infinity : 10000 , <nl> - <nl> - / / If true , we hook into stackAlloc to be able to catch better estimate of the maximum used STACK space . 
<nl> - / / You might only ever want to set this to false for performance reasons . Since stack allocations may occur often , this might impact performance . <nl> - hookStackAlloc : true , <nl> - <nl> - / / How often the log page is refreshed . <nl> - uiUpdateIntervalMsecs : 2000 , <nl> - <nl> - / / Tracks data for the allocation statistics . <nl> - allocationsAtLoc : { } , <nl> - allocationSitePtrs : { } , <nl> - <nl> - / / Stores an associative array of records HEAP ptr - > size so that we can retrieve how much memory was freed in calls to <nl> - / / _free ( ) and decrement the tracked usage accordingly . <nl> - / / E . g . sizeOfAllocatedPtr [ address ] returns the size of the heap pointer starting at ' address ' . <nl> - sizeOfAllocatedPtr : { } , <nl> - <nl> - / / Conceptually same as the above array , except this one tracks only pointers that were allocated during the application preRun step , which <nl> - / / corresponds to the data added to the VFS with - - preload - file . <nl> - sizeOfPreRunAllocatedPtr : { } , <nl> - <nl> - / / Once set to true , preRun is finished and the above array is not touched anymore . <nl> - pagePreRunIsFinished : false , <nl> - <nl> - / / Grand total of memory currently allocated via malloc ( ) . Decremented on free ( ) s . <nl> - totalMemoryAllocated : 0 , <nl> - <nl> - / / The running count of the number of times malloc ( ) and free ( ) have been called in the app . Used to keep track of # of currently alive pointers . <nl> - / / TODO : Perhaps in the future give a statistic of allocations per second to see how trashing memory usage is . <nl> - totalTimesMallocCalled : 0 , <nl> - totalTimesFreeCalled : 0 , <nl> - <nl> - / / Tracks the highest seen location of the STACKTOP variable . <nl> - stackTopWatermark : 0 , <nl> - <nl> - / / The canvas DOM element to which to draw the allocation map . <nl> - canvas : null , <nl> - <nl> - / / The 2D drawing context on the canvas . <nl> - drawContext : null , <nl> - <nl> - / / Converts number f to string with at most two decimals , without redundant trailing zeros . <nl> - truncDec : function truncDec ( f ) { <nl> - f = f | | 0 ; <nl> - var str = f . toFixed ( 2 ) ; <nl> - if ( str . indexOf ( ' . 00 ' , str . length - 3 ) ! = = - 1 ) return str . substr ( 0 , str . length - 3 ) ; <nl> - else if ( str . indexOf ( ' 0 ' , str . length - 1 ) ! = = - 1 ) return str . substr ( 0 , str . length - 1 ) ; <nl> - else return str ; <nl> - } , <nl> - <nl> - / / Converts a number of bytes pretty - formatted as a string . <nl> - formatBytes : function formatBytes ( bytes ) { <nl> - if ( bytes > = 1000 * 1024 * 1024 ) return emscriptenMemoryProfiler . truncDec ( bytes / ( 1024 * 1024 * 1024 ) ) + ' GB ' ; <nl> - else if ( bytes > = 1000 * 1024 ) return emscriptenMemoryProfiler . truncDec ( bytes / ( 1024 * 1024 ) ) + ' MB ' ; <nl> - else if ( bytes > = 1000 ) return emscriptenMemoryProfiler . truncDec ( bytes / 1024 ) + ' KB ' ; <nl> - else return emscriptenMemoryProfiler . truncDec ( bytes ) + ' B ' ; <nl> - } , <nl> - <nl> - onMalloc : function onMalloc ( ptr , size ) { <nl> - if ( ! ptr ) return ; <nl> - if ( emscriptenMemoryProfiler . sizeOfAllocatedPtr [ ptr ] ) <nl> - { <nl> - / / Uncomment to debug internal workings of tracing : <nl> - / / console . error ( ' Allocation error in onMalloc ! Pointer ' + ptr + ' had already been tracked as allocated ! ' ) ; <nl> - / / console . error ( ' Previous site of allocation : ' + emscriptenMemoryProfiler . allocationSitePtrs [ ptr ] ) ; <nl> - / / console . 
error ( ' This doubly attempted site of allocation : ' + new Error ( ) . stack . toString ( ) ) ; <nl> - / / throw ' malloc internal inconsistency ! ' ; <nl> - return ; <nl> - } <nl> - var self = emscriptenMemoryProfiler ; <nl> - / / Gather global stats . <nl> - self . totalMemoryAllocated + = size ; <nl> - + + self . totalTimesMallocCalled ; <nl> - self . stackTopWatermark = Math . max ( self . stackTopWatermark , STACKTOP ) ; <nl> - <nl> - / / Remember the size of the allocated block to know how much will be _free ( ) d later . <nl> - self . sizeOfAllocatedPtr [ ptr ] = size ; <nl> - / / Also track if this was a _malloc performed at preRun time . <nl> - if ( ! self . pagePreRunIsFinished ) self . sizeOfPreRunAllocatedPtr [ ptr ] = size ; <nl> - <nl> - var loc = new Error ( ) . stack . toString ( ) ; <nl> - if ( ! self . allocationsAtLoc [ loc ] ) self . allocationsAtLoc [ loc ] = [ 0 , 0 , self . filterCallstack ( loc ) ] ; <nl> - self . allocationsAtLoc [ loc ] [ 0 ] + = 1 ; <nl> - self . allocationsAtLoc [ loc ] [ 1 ] + = size ; <nl> - self . allocationSitePtrs [ ptr ] = loc ; <nl> - } , <nl> - <nl> - onFree : function onFree ( ptr ) { <nl> - if ( ! ptr ) return ; <nl> - <nl> - var self = emscriptenMemoryProfiler ; <nl> - <nl> - / / Decrement global stats . <nl> - var sz = self . sizeOfAllocatedPtr [ ptr ] ; <nl> - if ( ! isNaN ( sz ) ) self . totalMemoryAllocated - = sz ; <nl> - else <nl> - { <nl> - / / Uncomment to debug internal workings of tracing : <nl> - / / console . error ( ' Detected double free of pointer ' + ptr + ' at location : \ n ' + new Error ( ) . stack . toString ( ) ) ; <nl> - / / throw ' double free ! ' ; <nl> - return ; <nl> - } <nl> - <nl> - self . stackTopWatermark = Math . max ( self . stackTopWatermark , STACKTOP ) ; <nl> - <nl> - var loc = self . allocationSitePtrs [ ptr ] ; <nl> - if ( loc ) { <nl> - var allocsAtThisLoc = self . allocationsAtLoc [ loc ] ; <nl> - if ( allocsAtThisLoc ) { <nl> - allocsAtThisLoc [ 0 ] - = 1 ; <nl> - allocsAtThisLoc [ 1 ] - = sz ; <nl> - if ( allocsAtThisLoc [ 0 ] < = 0 ) delete self . allocationsAtLoc [ loc ] ; <nl> - } <nl> - } <nl> - delete self . allocationSitePtrs [ ptr ] ; <nl> - delete self . sizeOfAllocatedPtr [ ptr ] ; <nl> - delete self . sizeOfPreRunAllocatedPtr [ ptr ] ; / / Also free if this happened to be a _malloc performed at preRun time . <nl> - + + self . totalTimesFreeCalled ; <nl> - } , <nl> - <nl> - onRealloc : function onRealloc ( oldAddress , newAddress , size ) { <nl> - emscriptenMemoryProfiler . onFree ( oldAddress ) ; <nl> - emscriptenMemoryProfiler . onMalloc ( newAddress , size ) ; <nl> - } , <nl> - <nl> - onPreloadComplete : function onPreloadComplete ( ) { <nl> - emscriptenMemoryProfiler . pagePreRunIsFinished = true ; <nl> - / / It is common to set ' overflow : hidden ; ' on canvas pages that do WebGL . When MemoryProfiler is being used , there will be a long block of text on the page , so force - enable scrolling . <nl> - document . body . style . overflow = ' ' ; <nl> - } , <nl> - <nl> - / / Installs startup hook and periodic UI update timer . <nl> - initialize : function initialize ( ) { <nl> - / / Inject the memoryprofiler hooks . <nl> - Module [ ' onMalloc ' ] = function onMalloc ( ptr , size ) { emscriptenMemoryProfiler . onMalloc ( ptr , size ) ; } ; <nl> - Module [ ' onRealloc ' ] = function onRealloc ( oldAddress , newAddress , size ) { emscriptenMemoryProfiler . 
onRealloc ( oldAddress , newAddress , size ) ; } ; <nl> - Module [ ' onFree ' ] = function onFree ( ptr ) { emscriptenMemoryProfiler . onFree ( ptr ) ; } ; <nl> - <nl> - / / Add a tracking mechanism to detect when VFS loading is complete . <nl> - if ( ! Module [ ' preRun ' ] ) Module [ ' preRun ' ] = [ ] ; <nl> - Module [ ' preRun ' ] . push ( function ( ) { emscriptenMemoryProfiler . onPreloadComplete ( ) ; } ) ; <nl> - <nl> - if ( emscriptenMemoryProfiler . hookStackAlloc & & typeof stackAlloc = = = ' function ' ) { <nl> - / / Inject stack allocator . <nl> - var prevStackAlloc = stackAlloc ; <nl> - var hookedStackAlloc = function ( size ) { <nl> - emscriptenMemoryProfiler . stackTopWatermark = Math . max ( emscriptenMemoryProfiler . stackTopWatermark , STACKTOP + size ) ; <nl> - return prevStackAlloc ( size ) ; <nl> - } <nl> - stackAlloc = hookedStackAlloc ; <nl> - } <nl> - <nl> - if ( location . search . toLowerCase ( ) . indexOf ( ' trackbytes = ' ) ! = - 1 ) { <nl> - emscriptenMemoryProfiler . trackedCallstackMinSizeBytes = parseInt ( location . search . substr ( location . search . toLowerCase ( ) . indexOf ( ' trackbytes = ' ) + ' trackbytes = ' . length ) ) ; <nl> - } <nl> - if ( location . search . toLowerCase ( ) . indexOf ( ' trackcount = ' ) ! = - 1 ) { <nl> - emscriptenMemoryProfiler . trackedCallstackMinAllocCount = parseInt ( location . search . substr ( location . search . toLowerCase ( ) . indexOf ( ' trackcount = ' ) + ' trackcount = ' . length ) ) ; <nl> - } <nl> - <nl> - emscriptenMemoryProfiler . memoryprofiler_summary = document . getElementById ( ' memoryprofiler_summary ' ) ; <nl> - var div ; <nl> - if ( ! emscriptenMemoryProfiler . memoryprofiler_summary ) { <nl> - div = document . createElement ( " div " ) ; <nl> - div . innerHTML = " < div style = ' border : 2px solid black ; padding : 2px ; ' > < canvas style = ' border : 1px solid black ; margin - left : auto ; margin - right : auto ; display : block ; ' id = ' memoryprofiler_canvas ' width = ' 100 % ' height = ' 50 ' > < / canvas > Track all allocation sites larger than < input id = ' memoryprofiler_min_tracked_alloc_size ' type = number value = " + emscriptenMemoryProfiler . trackedCallstackMinSizeBytes + " > < / input > bytes , and all allocation sites with more than < input id = ' memoryprofiler_min_tracked_alloc_count ' type = number value = " + emscriptenMemoryProfiler . trackedCallstackMinAllocCount + " > < / input > outstanding allocations . ( visit this page via URL query params foo . html ? trackbytes = 1000 & trackcount = 100 to apply custom thresholds starting from page load ) < br / > < div id = ' memoryprofiler_summary ' > < / div > < input id = ' memoryprofiler_clear_alloc_stats ' type = ' button ' value = ' Clear alloc stats ' > < / input > < br / > Sort allocations by : < select id = ' memoryProfilerSort ' > < option value = ' bytes ' > Bytes < / option > < option value = ' count ' > Count < / option > < option value = ' fixed ' > Fixed < / option > < / select > < div id = ' memoryprofiler_ptrs ' > < / div > " ; <nl> - } <nl> - var populateHtmlBody = function ( ) { <nl> - if ( div ) document . body . appendChild ( div ) ; <nl> - var self = emscriptenMemoryProfiler ; <nl> - self . memoryprofiler_summary = document . getElementById ( ' memoryprofiler_summary ' ) ; <nl> - self . memoryprofiler_ptrs = document . getElementById ( ' memoryprofiler_ptrs ' ) ; <nl> - <nl> - document . getElementById ( ' memoryprofiler_min_tracked_alloc_size ' ) . addEventListener ( " change " , function ( e ) { self . 
trackedCallstackMinSizeBytes = parseInt ( this . value ) ; } ) ; <nl> - document . getElementById ( ' memoryprofiler_min_tracked_alloc_count ' ) . addEventListener ( " change " , function ( e ) { self . trackedCallstackMinAllocCount = parseInt ( this . value ) ; } ) ; <nl> - document . getElementById ( ' memoryprofiler_clear_alloc_stats ' ) . addEventListener ( " click " , function ( e ) { self . allocationsAtLoc = { } ; self . allocationSitePtrs = { } ; } ) ; <nl> - self . canvas = document . getElementById ( ' memoryprofiler_canvas ' ) ; <nl> - self . canvas . width = document . documentElement . clientWidth - 32 ; <nl> - self . drawContext = self . canvas . getContext ( ' 2d ' ) ; <nl> - <nl> - self . updateUi ( ) ; <nl> - setInterval ( function ( ) { emscriptenMemoryProfiler . updateUi ( ) } , self . uiUpdateIntervalMsecs ) ; <nl> - <nl> - } ; <nl> - / / User might initialize memoryprofiler in the < head > of a page , when document . body does not yet exist . In that case , delay initialization <nl> - / / of the memoryprofiler UI until page has loaded <nl> - if ( document . body ) populateHtmlBody ( ) ; <nl> - else setTimeout ( populateHtmlBody , 1000 ) ; <nl> - } , <nl> - <nl> - / / Given a pointer ' bytes ' , compute the linear 1D position on the graph as pixels , rounding down for start address of a block . <nl> - bytesToPixelsRoundedDown : function bytesToPixelsRoundedDown ( bytes ) { <nl> - return ( bytes * emscriptenMemoryProfiler . canvas . width * emscriptenMemoryProfiler . canvas . height / HEAP8 . length ) | 0 ; <nl> - } , <nl> - <nl> - / / Same as bytesToPixelsRoundedDown , but rounds up for the end address of a block . The different rounding will <nl> - / / guarantee that even ' thin ' allocations should get at least one pixel dot in the graph . <nl> - bytesToPixelsRoundedUp : function bytesToPixelsRoundedUp ( bytes ) { <nl> - return ( ( bytes * emscriptenMemoryProfiler . canvas . width * emscriptenMemoryProfiler . canvas . height + HEAP8 . length - 1 ) / HEAP8 . length ) | 0 ; <nl> - } , <nl> - <nl> - / / Graphs a range of allocated memory . The memory range will be drawn as a top - to - bottom , left - to - right stripes or columns of pixels . <nl> - fillLine : function fillLine ( startBytes , endBytes ) { <nl> - var self = emscriptenMemoryProfiler ; <nl> - var startPixels = self . bytesToPixelsRoundedDown ( startBytes ) ; <nl> - var endPixels = self . bytesToPixelsRoundedUp ( endBytes ) ; <nl> - <nl> - / / Starting pos ( top - left corner ) of this allocation on the graph . <nl> - var x0 = ( startPixels / self . canvas . height ) | 0 ; <nl> - var y0 = startPixels - x0 * self . canvas . height ; <nl> - / / Ending pos ( bottom - right corner ) of this allocation on the graph . <nl> - var x1 = ( endPixels / self . canvas . height ) | 0 ; <nl> - var y1 = endPixels - x1 * self . canvas . height ; <nl> - <nl> - / / Draw the left side partial column of the allocation block . <nl> - if ( y0 > 0 & & x0 < x1 ) { <nl> - self . drawContext . fillRect ( x0 , y0 , 1 , self . canvas . height - y0 ) ; <nl> - / / Proceed to the start of the next full column . <nl> - y0 = 0 ; <nl> - + + x0 ; <nl> - } <nl> - / / Draw the right side partial column . <nl> - if ( y1 < self . canvas . height & & x0 < x1 ) { <nl> - self . drawContext . fillRect ( x1 , 0 , 1 , y1 ) ; <nl> - / / Decrement to the previous full column . <nl> - y1 = self . canvas . 
height - 1 ; <nl> - - - x1 ; <nl> - } <nl> - / / After filling the previous leftovers with one - pixel - wide lines , we are only left with a rectangular shape of full columns to blit . <nl> - self . drawContext . fillRect ( x0 , 0 , x1 - x0 + 1 , self . canvas . height ) ; <nl> - } , <nl> - <nl> - countOpenALAudioDataSize : function countOpenALAudioDataSize ( ) { <nl> - if ( typeof AL = = " undefined " | | ! AL . currentContext ) return 0 ; <nl> - <nl> - var totalMemory = 0 ; <nl> - <nl> - for ( var i in AL . currentContext . buf ) { <nl> - var buffer = AL . currentContext . buf [ i ] ; <nl> - for ( var channel = 0 ; channel < buffer . numberOfChannels ; + + channel ) totalMemory + = buffer . getChannelData ( channel ) . length * 4 ; <nl> - } <nl> - return totalMemory ; <nl> - } , <nl> - <nl> - / / Print accurate map of individual allocations . This will show information about <nl> - / / memory fragmentation and allocation sizes . <nl> - / / Warning : This will walk through all allocations , so it is slow ! <nl> - printAllocsWithCyclingColors : function printAllocsWithCyclingColors ( colors , allocs ) { <nl> - var colorIndex = 0 ; <nl> - for ( var i in allocs ) { <nl> - emscriptenMemoryProfiler . drawContext . fillStyle = colors [ colorIndex ] ; <nl> - colorIndex = ( colorIndex + 1 ) % colors . length ; <nl> - var start = i | 0 ; <nl> - var sz = allocs [ start ] | 0 ; <nl> - emscriptenMemoryProfiler . fillLine ( start , start + sz ) ; <nl> - } <nl> - } , <nl> - <nl> - filterCallstack : function ( callstack ) { <nl> - / / Do not show Memoryprofiler ' s own callstacks in the callstack prints . <nl> - var i = callstack . indexOf ( ' emscripten_trace_record_ ' ) ; <nl> - if ( i ! = - 1 ) { <nl> - callstack = callstack . substr ( callstack . indexOf ( ' \ n ' , i ) + 1 ) ; <nl> - } <nl> - / / Hide paths from URLs to make the log more readable <nl> - callstack = callstack . replace ( / @ ( ( file ) | ( http ) ) [ \ w : \ / \ . ] * \ / ( [ \ w \ . ] * ) / g , ' @ $ 4 ' ) ; <nl> - callstack = callstack . replace ( / \ n / g , ' < br / > ' ) ; <nl> - return callstack ; <nl> - } , <nl> - <nl> - / / Main UI update entry point . <nl> - updateUi : function updateUi ( ) { <nl> - function colorBar ( color ) { <nl> - return ' < span style = " padding : 0px ; border : solid 1px black ; width : 28px ; height : 14px ; vertical - align : middle ; display : inline - block ; background - color : ' + color + ' ; " > < / span > ' ; <nl> - } <nl> - <nl> - / / Naive function to compute how many bits will be needed to represent the number ' n ' in binary . This will be our pointer ' word width ' in the UI . <nl> - function nBits ( n ) { <nl> - var i = 0 ; <nl> - while ( n > = 1 ) { <nl> - + + i ; <nl> - n / = 2 ; <nl> - } <nl> - return i ; <nl> - } <nl> - <nl> - / / Returns i formatted to string as fixed - width hexadecimal . <nl> - function toHex ( i , width ) { <nl> - var str = i . toString ( 16 ) ; <nl> - while ( str . length < width ) str = ' 0 ' + str ; <nl> - return ' 0x ' + str ; <nl> - } <nl> - <nl> - var self = emscriptenMemoryProfiler ; <nl> - <nl> - / / Poll whether user as changed the browser window , and if so , resize the profiler window and redraw it . <nl> - if ( self . canvas . width ! = document . documentElement . clientWidth - 32 ) { <nl> - self . canvas . width = document . documentElement . clientWidth - 32 ; <nl> - } <nl> - <nl> - var width = ( nBits ( HEAP8 . length ) + 3 ) / 4 ; / / Pointer ' word width ' <nl> - var html = ' Total HEAP size : ' + self . formatBytes ( HEAP8 . length ) + ' . 
' ; <nl> - html + = ' < br / > ' + colorBar ( ' # 202020 ' ) + ' STATIC memory area size : ' + self . formatBytes ( STACK_BASE - STATIC_BASE ) ; <nl> - html + = ' . STATIC_BASE : ' + toHex ( STATIC_BASE , width ) ; <nl> - <nl> - html + = ' < br / > ' + colorBar ( ' # FF8080 ' ) + ' STACK memory area size : ' + self . formatBytes ( STACK_MAX - STACK_BASE ) ; <nl> - html + = ' . STACK_BASE : ' + toHex ( STACK_BASE , width ) ; <nl> - html + = ' . STACKTOP : ' + toHex ( STACKTOP , width ) ; <nl> - html + = ' . STACK_MAX : ' + toHex ( STACK_MAX , width ) + ' . ' ; <nl> - html + = ' < br / > STACK memory area used now ( should be zero ) : ' + self . formatBytes ( STACKTOP - STACK_BASE ) + ' . ' + colorBar ( ' # FFFF00 ' ) + ' STACK watermark highest seen usage ( approximate lower - bound ! ) : ' + self . formatBytes ( self . stackTopWatermark - STACK_BASE ) ; <nl> - <nl> - var DYNAMIC_BASE = 5246672 ; <nl> - var DYNAMICTOP = HEAP32 [ DYNAMICTOP_PTR > > 2 ] ; <nl> - html + = " < br / > DYNAMIC memory area size : " + self . formatBytes ( DYNAMICTOP - DYNAMIC_BASE ) ; <nl> - html + = " . DYNAMIC_BASE : " + toHex ( DYNAMIC_BASE , width ) ; <nl> - html + = " . DYNAMICTOP : " + toHex ( DYNAMICTOP , width ) + " . " ; <nl> - html + = " < br / > " + colorBar ( " # 6699CC " ) + colorBar ( " # 003366 " ) + colorBar ( " # 0000FF " ) + " DYNAMIC memory area used : " + self . formatBytes ( self . totalMemoryAllocated ) + " ( " + ( self . totalMemoryAllocated * 100 / ( HEAP8 . length - DYNAMIC_BASE ) ) . toFixed ( 2 ) + " % of all dynamic memory and unallocated heap ) " ; <nl> - html + = " < br / > Free memory : " + colorBar ( " # 70FF70 " ) + " DYNAMIC : " + self . formatBytes ( DYNAMICTOP - DYNAMIC_BASE - self . totalMemoryAllocated ) + " , " + colorBar ( ' # FFFFFF ' ) + ' Unallocated HEAP : ' + self . formatBytes ( HEAP8 . length - DYNAMICTOP ) + " ( " + ( ( HEAP8 . length - DYNAMIC_BASE - self . totalMemoryAllocated ) * 100 / ( HEAP8 . length - DYNAMIC_BASE ) ) . toFixed ( 2 ) + " % of all dynamic memory and unallocated heap ) " ; <nl> - <nl> - var preloadedMemoryUsed = 0 ; <nl> - for ( i in self . sizeOfPreRunAllocatedPtr ) preloadedMemoryUsed + = self . sizeOfPreRunAllocatedPtr [ i ] | 0 ; <nl> - html + = ' < br / > ' + colorBar ( ' # FF9900 ' ) + colorBar ( ' # FFDD33 ' ) + ' Preloaded memory used , most likely memory reserved by files in the virtual filesystem : ' + self . formatBytes ( preloadedMemoryUsed ) ; <nl> - <nl> - html + = ' < br / > OpenAL audio data : ' + self . formatBytes ( self . countOpenALAudioDataSize ( ) ) + ' ( outside HEAP ) ' ; <nl> - html + = ' < br / > # of total malloc ( ) s / free ( ) s performed in app lifetime : ' + self . totalTimesMallocCalled + ' / ' + self . totalTimesFreeCalled + ' ( currently alive pointers : ' + ( self . totalTimesMallocCalled - self . totalTimesFreeCalled ) + ' ) ' ; <nl> - <nl> - / / Background clear <nl> - self . drawContext . fillStyle = " # FFFFFF " ; <nl> - self . drawContext . fillRect ( 0 , 0 , self . canvas . width , self . canvas . height ) ; <nl> - <nl> - self . drawContext . fillStyle = " # FF8080 " ; <nl> - self . fillLine ( STACK_BASE , STACK_MAX ) ; <nl> - <nl> - self . drawContext . fillStyle = " # FFFF00 " ; <nl> - self . fillLine ( STACK_BASE , self . stackTopWatermark ) ; <nl> - <nl> - self . drawContext . fillStyle = " # FF0000 " ; <nl> - self . fillLine ( STACK_BASE , STACKTOP ) ; <nl> - <nl> - self . drawContext . fillStyle = " # 70FF70 " ; <nl> - self . fillLine ( DYNAMIC_BASE , DYNAMICTOP ) ; <nl> - <nl> - if ( self . 
detailedHeapUsage ) { <nl> - self . printAllocsWithCyclingColors ( [ " # 6699CC " , " # 003366 " , " # 0000FF " ] , self . sizeOfAllocatedPtr ) ; <nl> - self . printAllocsWithCyclingColors ( [ " # FF9900 " , " # FFDD33 " ] , self . sizeOfPreRunAllocatedPtr ) ; <nl> - } else { <nl> - / / Print only a single naive blob of individual allocations . This will not be accurate , but is constant - time . <nl> - self . drawContext . fillStyle = " # 0000FF " ; <nl> - self . fillLine ( DYNAMIC_BASE , DYNAMIC_BASE + self . totalMemoryAllocated ) ; <nl> - } <nl> - <nl> - self . memoryprofiler_summary . innerHTML = html ; <nl> - <nl> - var sort = document . getElementById ( ' memoryProfilerSort ' ) ; <nl> - var sortOrder = sort . options [ sort . selectedIndex ] . value ; <nl> - <nl> - var html = ' ' ; <nl> - / / Print out statistics of individual allocations if they were tracked . <nl> - if ( Object . keys ( self . allocationsAtLoc ) . length > 0 ) { <nl> - var calls = [ ] ; <nl> - for ( var i in self . allocationsAtLoc ) { <nl> - if ( self . allocationsAtLoc [ i ] [ 0 ] > = self . trackedCallstackMinAllocCount | | self . allocationsAtLoc [ i ] [ 1 ] > = self . trackedCallstackMinSizeBytes ) { <nl> - calls . push ( self . allocationsAtLoc [ i ] ) ; <nl> - } <nl> - } <nl> - if ( calls . length > 0 ) { <nl> - if ( sortOrder ! = ' fixed ' ) { <nl> - var sortIdx = ( sortOrder = = ' count ' ) ? 0 : 1 ; <nl> - calls . sort ( function ( a , b ) { return b [ sortIdx ] - a [ sortIdx ] ; } ) ; <nl> - } <nl> - html + = ' < h4 > Allocation sites with more than ' + self . formatBytes ( self . trackedCallstackMinSizeBytes ) + ' of accumulated allocations , or more than ' + self . trackedCallstackMinAllocCount + ' simultaneously outstanding allocations : < / h4 > ' <nl> - var demangler = typeof demangleAll ! = = ' undefined ' ? demangleAll : function ( x ) { return x ; } ; <nl> - for ( var i in calls ) { <nl> - if ( calls [ i ] . length = = 3 ) calls [ i ] = [ calls [ i ] [ 0 ] , calls [ i ] [ 1 ] , calls [ i ] [ 2 ] , demangler ( calls [ i ] [ 2 ] ) ] ; <nl> - html + = " < b > " + self . formatBytes ( calls [ i ] [ 1 ] ) + ' / ' + calls [ i ] [ 0 ] + " allocs < / b > : " + calls [ i ] [ 3 ] + " < br / > " ; <nl> - } <nl> - } <nl> - } <nl> - self . memoryprofiler_ptrs . innerHTML = html ; <nl> - } <nl> - } ; <nl> - <nl> - / / Backwards compatibility with previously compiled code . Don ' t call this anymore ! <nl> - function memoryprofiler_add_hooks ( ) { emscriptenMemoryProfiler . initialize ( ) ; } <nl> - <nl> - if ( typeof Module ! = = ' undefined ' & & typeof document ! = = ' undefined ' & & typeof window ! = = ' undefined ' & & typeof process = = = ' undefined ' ) emscriptenMemoryProfiler . initialize ( ) ; <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - / / show errors on likely calls to FS when it was not included <nl> - var FS = { <nl> - error : function ( ) { <nl> - abort ( ' Filesystem support ( FS ) was not included . The problem is that you are using files from JS , but files were not used from C / C + + , so filesystem support was not auto - included . You can force - include filesystem support with - s FORCE_FILESYSTEM = 1 ' ) ; <nl> - } , <nl> - init : function ( ) { FS . error ( ) } , <nl> - createDataFile : function ( ) { FS . error ( ) } , <nl> - createPreloadedFile : function ( ) { FS . error ( ) } , <nl> - createLazyFile : function ( ) { FS . error ( ) } , <nl> - open : function ( ) { FS . error ( ) } , <nl> - mkdev : function ( ) { FS . error ( ) } , <nl> - registerDevice : function ( ) { FS . 
error ( ) } , <nl> - analyzePath : function ( ) { FS . error ( ) } , <nl> - loadFilesFromDB : function ( ) { FS . error ( ) } , <nl> - <nl> - ErrnoError : function ErrnoError ( ) { FS . error ( ) } , <nl> - } ; <nl> - Module [ ' FS_createDataFile ' ] = FS . createDataFile ; <nl> - Module [ ' FS_createPreloadedFile ' ] = FS . createPreloadedFile ; <nl> - <nl> - <nl> - <nl> - / / Copyright 2017 The Emscripten Authors . All rights reserved . <nl> - / / Emscripten is available under two separate licenses , the MIT license and the <nl> - / / University of Illinois / NCSA Open Source License . Both these licenses can be <nl> - / / found in the LICENSE file . <nl> - <nl> - / / Prefix of data URIs emitted by SINGLE_FILE and related options . <nl> - var dataURIPrefix = ' data : application / octet - stream ; base64 , ' ; <nl> - <nl> - / / Indicates whether filename is a base64 data URI . <nl> - function isDataURI ( filename ) { <nl> - return String . prototype . startsWith ? <nl> - filename . startsWith ( dataURIPrefix ) : <nl> - filename . indexOf ( dataURIPrefix ) = = = 0 ; <nl> - } <nl> - <nl> - <nl> - <nl> - <nl> - var wasmBinaryFile = ' a . out . wasm ' ; <nl> - if ( ! isDataURI ( wasmBinaryFile ) ) { <nl> - wasmBinaryFile = locateFile ( wasmBinaryFile ) ; <nl> - } <nl> - <nl> - function getBinary ( ) { <nl> - try { <nl> - if ( wasmBinary ) { <nl> - return new Uint8Array ( wasmBinary ) ; <nl> - } <nl> - <nl> - if ( readBinary ) { <nl> - return readBinary ( wasmBinaryFile ) ; <nl> - } else { <nl> - throw " both async and sync fetching of the wasm failed " ; <nl> - } <nl> - } <nl> - catch ( err ) { <nl> - abort ( err ) ; <nl> - } <nl> - } <nl> - <nl> - function getBinaryPromise ( ) { <nl> - / / if we don ' t have the binary yet , and have the Fetch api , use that <nl> - / / in some environments , like Electron ' s render process , Fetch api may be present , but have a different context than expected , let ' s only use it on the Web <nl> - if ( ! wasmBinary & & ( ENVIRONMENT_IS_WEB | | ENVIRONMENT_IS_WORKER ) & & typeof fetch = = = ' function ' ) { <nl> - return fetch ( wasmBinaryFile , { credentials : ' same - origin ' } ) . then ( function ( response ) { <nl> - if ( ! response [ ' ok ' ] ) { <nl> - throw " failed to load wasm binary file at ' " + wasmBinaryFile + " ' " ; <nl> - } <nl> - return response [ ' arrayBuffer ' ] ( ) ; <nl> - } ) . catch ( function ( ) { <nl> - return getBinary ( ) ; <nl> - } ) ; <nl> - } <nl> - / / Otherwise , getBinary should be able to get it synchronously <nl> - return new Promise ( function ( resolve , reject ) { <nl> - resolve ( getBinary ( ) ) ; <nl> - } ) ; <nl> - } <nl> - <nl> - <nl> - <nl> - / / Create the wasm instance . <nl> - / / Receives the wasm imports , returns the exports . <nl> - function createWasm ( env ) { <nl> - <nl> - / / prepare imports <nl> - var info = { <nl> - ' env ' : env <nl> - } ; <nl> - / / Load the wasm module and create an instance of using native support in the JS engine . <nl> - / / handle a generated wasm instance , receiving its exports and <nl> - / / performing other necessary setup <nl> - function receiveInstance ( instance , module ) { <nl> - var exports = instance . 
exports ; <nl> - Module [ ' asm ' ] = exports ; <nl> - removeRunDependency ( ' wasm - instantiate ' ) ; <nl> - } <nl> - / / we can ' t run yet ( except in a pthread , where we have a custom sync instantiator ) <nl> - addRunDependency ( ' wasm - instantiate ' ) ; <nl> - <nl> - <nl> - / / Async compilation can be confusing when an error on the page overwrites Module <nl> - / / ( for example , if the order of elements is wrong , and the one defining Module is <nl> - / / later ) , so we save Module and check it later . <nl> - var trueModule = Module ; <nl> - function receiveInstantiatedSource ( output ) { <nl> - / / ' output ' is a WebAssemblyInstantiatedSource object which has both the module and instance . <nl> - / / receiveInstance ( ) will swap in the exports ( to Module . asm ) so they can be called <nl> - assert ( Module = = = trueModule , ' the Module object should not be replaced during async compilation - perhaps the order of HTML elements is wrong ? ' ) ; <nl> - trueModule = null ; <nl> - / / TODO : Due to Closure regression https : / / github . com / google / closure - compiler / issues / 3193 , the above line no longer optimizes out down to the following line . <nl> - / / When the regression is fixed , can restore the above USE_PTHREADS - enabled path . <nl> - receiveInstance ( output [ ' instance ' ] ) ; <nl> - } <nl> - <nl> - <nl> - function instantiateArrayBuffer ( receiver ) { <nl> - return getBinaryPromise ( ) . then ( function ( binary ) { <nl> - return WebAssembly . instantiate ( binary , info ) ; <nl> - } ) . then ( receiver , function ( reason ) { <nl> - err ( ' failed to asynchronously prepare wasm : ' + reason ) ; <nl> - abort ( reason ) ; <nl> - } ) ; <nl> - } <nl> - <nl> - / / Prefer streaming instantiation if available . <nl> - function instantiateAsync ( ) { <nl> - if ( ! wasmBinary & & <nl> - typeof WebAssembly . instantiateStreaming = = = ' function ' & & <nl> - ! isDataURI ( wasmBinaryFile ) & & <nl> - typeof fetch = = = ' function ' ) { <nl> - fetch ( wasmBinaryFile , { credentials : ' same - origin ' } ) . then ( function ( response ) { <nl> - var result = WebAssembly . instantiateStreaming ( response , info ) ; <nl> - return result . then ( receiveInstantiatedSource , function ( reason ) { <nl> - / / We expect the most common failure cause to be a bad MIME type for the binary , <nl> - / / in which case falling back to ArrayBuffer instantiation should work . <nl> - err ( ' wasm streaming compile failed : ' + reason ) ; <nl> - err ( ' falling back to ArrayBuffer instantiation ' ) ; <nl> - instantiateArrayBuffer ( receiveInstantiatedSource ) ; <nl> - } ) ; <nl> - } ) ; <nl> - } else { <nl> - return instantiateArrayBuffer ( receiveInstantiatedSource ) ; <nl> - } <nl> - } <nl> - / / User shell pages can write their own Module . instantiateWasm = function ( imports , successCallback ) callback <nl> - / / to manually instantiate the Wasm module themselves . This allows pages to run the instantiation parallel <nl> - / / to any other async startup actions they are performing . <nl> - if ( Module [ ' instantiateWasm ' ] ) { <nl> - try { <nl> - var exports = Module [ ' instantiateWasm ' ] ( info , receiveInstance ) ; <nl> - return exports ; <nl> - } catch ( e ) { <nl> - err ( ' Module . instantiateWasm callback failed with error : ' + e ) ; <nl> - return false ; <nl> - } <nl> - } <nl> - <nl> - instantiateAsync ( ) ; <nl> - return { } ; / / no exports yet ; we ' ll fill them in later <nl> - } <nl> - <nl> - / / Provide an " asm . 
js function " for the application , called to " link " the asm . js module . We instantiate <nl> - / / the wasm module at that time , and it receives imports and provides exports and so forth , the app <nl> - / / doesn ' t need to care that it is wasm or asm . js . <nl> - <nl> - Module [ ' asm ' ] = function ( global , env , providedBuffer ) { <nl> - / / memory was already allocated ( so js could use the buffer ) <nl> - env [ ' memory ' ] = wasmMemory <nl> - ; <nl> - / / import table <nl> - env [ ' table ' ] = wasmTable = new WebAssembly . Table ( { <nl> - ' initial ' : 7 , <nl> - ' maximum ' : 7 + 0 , <nl> - ' element ' : ' anyfunc ' <nl> - } ) ; <nl> - / / With the wasm backend __memory_base and __table_base and only needed for <nl> - / / relocatable output . <nl> - <nl> - var exports = createWasm ( env ) ; <nl> - assert ( exports , ' binaryen setup failed ( no wasm support ? ) ' ) ; <nl> - return exports ; <nl> - } ; <nl> - <nl> - / / Globals used by JS i64 conversions <nl> - var tempDouble ; <nl> - var tempI64 ; <nl> - <nl> - / / = = = Body = = = <nl> - <nl> - var ASM_CONSTS = [ ] ; <nl> - <nl> - <nl> - <nl> - <nl> - / / STATICTOP = STATIC_BASE + 2768 ; <nl> - / * global initializers * / __ATINIT__ . push ( { func : function ( ) { ___wasm_call_ctors ( ) } } ) ; <nl> - <nl> - <nl> - <nl> - / * no memory initializer * / <nl> - / / { { PRE_LIBRARY } } <nl> - <nl> - <nl> - function ___lock ( ) { } <nl> - <nl> - <nl> - <nl> - var PATH = { splitPath : function ( filename ) { <nl> - var splitPathRe = / ^ ( \ / ? | ) ( [ \ s \ S ] * ? ) ( ( ? : \ . { 1 , 2 } | [ ^ \ / ] + ? | ) ( \ . [ ^ . \ / ] * | ) ) ( ? : [ \ / ] * ) $ / ; <nl> - return splitPathRe . exec ( filename ) . slice ( 1 ) ; <nl> - } , normalizeArray : function ( parts , allowAboveRoot ) { <nl> - / / if the path tries to go above the root , ` up ` ends up > 0 <nl> - var up = 0 ; <nl> - for ( var i = parts . length - 1 ; i > = 0 ; i - - ) { <nl> - var last = parts [ i ] ; <nl> - if ( last = = = ' . ' ) { <nl> - parts . splice ( i , 1 ) ; <nl> - } else if ( last = = = ' . . ' ) { <nl> - parts . splice ( i , 1 ) ; <nl> - up + + ; <nl> - } else if ( up ) { <nl> - parts . splice ( i , 1 ) ; <nl> - up - - ; <nl> - } <nl> - } <nl> - / / if the path is allowed to go above the root , restore leading . . s <nl> - if ( allowAboveRoot ) { <nl> - for ( ; up ; up - - ) { <nl> - parts . unshift ( ' . . ' ) ; <nl> - } <nl> - } <nl> - return parts ; <nl> - } , normalize : function ( path ) { <nl> - var isAbsolute = path . charAt ( 0 ) = = = ' / ' , <nl> - trailingSlash = path . substr ( - 1 ) = = = ' / ' ; <nl> - / / Normalize the path <nl> - path = PATH . normalizeArray ( path . split ( ' / ' ) . filter ( function ( p ) { <nl> - return ! ! p ; <nl> - } ) , ! isAbsolute ) . join ( ' / ' ) ; <nl> - if ( ! path & & ! isAbsolute ) { <nl> - path = ' . ' ; <nl> - } <nl> - if ( path & & trailingSlash ) { <nl> - path + = ' / ' ; <nl> - } <nl> - return ( isAbsolute ? ' / ' : ' ' ) + path ; <nl> - } , dirname : function ( path ) { <nl> - var result = PATH . splitPath ( path ) , <nl> - root = result [ 0 ] , <nl> - dir = result [ 1 ] ; <nl> - if ( ! root & & ! dir ) { <nl> - / / No dirname whatsoever <nl> - return ' . ' ; <nl> - } <nl> - if ( dir ) { <nl> - / / It has a dirname , strip trailing slash <nl> - dir = dir . substr ( 0 , dir . 
length - 1 ) ; <nl> - } <nl> - return root + dir ; <nl> - } , basename : function ( path ) { <nl> - / / EMSCRIPTEN return ' / ' ' for ' / ' , not an empty string <nl> - if ( path = = = ' / ' ) return ' / ' ; <nl> - var lastSlash = path . lastIndexOf ( ' / ' ) ; <nl> - if ( lastSlash = = = - 1 ) return path ; <nl> - return path . substr ( lastSlash + 1 ) ; <nl> - } , extname : function ( path ) { <nl> - return PATH . splitPath ( path ) [ 3 ] ; <nl> - } , join : function ( ) { <nl> - var paths = Array . prototype . slice . call ( arguments , 0 ) ; <nl> - return PATH . normalize ( paths . join ( ' / ' ) ) ; <nl> - } , join2 : function ( l , r ) { <nl> - return PATH . normalize ( l + ' / ' + r ) ; <nl> - } } ; var SYSCALLS = { buffers : [ null , [ ] , [ ] ] , printChar : function ( stream , curr ) { <nl> - var buffer = SYSCALLS . buffers [ stream ] ; <nl> - assert ( buffer ) ; <nl> - if ( curr = = = 0 | | curr = = = 10 ) { <nl> - ( stream = = = 1 ? out : err ) ( UTF8ArrayToString ( buffer , 0 ) ) ; <nl> - buffer . length = 0 ; <nl> - } else { <nl> - buffer . push ( curr ) ; <nl> - } <nl> - } , varargs : 0 , get : function ( varargs ) { <nl> - SYSCALLS . varargs + = 4 ; <nl> - var ret = HEAP32 [ ( ( ( SYSCALLS . varargs ) - ( 4 ) ) > > 2 ) ] ; <nl> - return ret ; <nl> - } , getStr : function ( ) { <nl> - var ret = UTF8ToString ( SYSCALLS . get ( ) ) ; <nl> - return ret ; <nl> - } , get64 : function ( ) { <nl> - var low = SYSCALLS . get ( ) , high = SYSCALLS . get ( ) ; <nl> - if ( low > = 0 ) assert ( high = = = 0 ) ; <nl> - else assert ( high = = = - 1 ) ; <nl> - return low ; <nl> - } , getZero : function ( ) { <nl> - assert ( SYSCALLS . get ( ) = = = 0 ) ; <nl> - } } ; function ___syscall140 ( which , varargs ) { SYSCALLS . varargs = varargs ; <nl> - try { <nl> - / / llseek <nl> - var stream = SYSCALLS . getStreamFromFD ( ) , offset_high = SYSCALLS . get ( ) , offset_low = SYSCALLS . get ( ) , result = SYSCALLS . get ( ) , whence = SYSCALLS . get ( ) ; <nl> - abort ( ' it should not be possible to operate on streams when ! SYSCALLS_REQUIRE_FILESYSTEM ' ) ; <nl> - return 0 ; <nl> - } catch ( e ) { <nl> - if ( typeof FS = = = ' undefined ' | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; <nl> - return - e . errno ; <nl> - } <nl> - } <nl> - <nl> - <nl> - function flush_NO_FILESYSTEM ( ) { <nl> - / / flush anything remaining in the buffers during shutdown <nl> - var fflush = Module [ " _fflush " ] ; <nl> - if ( fflush ) fflush ( 0 ) ; <nl> - var buffers = SYSCALLS . buffers ; <nl> - if ( buffers [ 1 ] . length ) SYSCALLS . printChar ( 1 , 10 ) ; <nl> - if ( buffers [ 2 ] . length ) SYSCALLS . printChar ( 2 , 10 ) ; <nl> - } function ___syscall146 ( which , varargs ) { SYSCALLS . varargs = varargs ; <nl> - try { <nl> - / / writev <nl> - / / hack to support printf in SYSCALLS_REQUIRE_FILESYSTEM = 0 <nl> - var stream = SYSCALLS . get ( ) , iov = SYSCALLS . get ( ) , iovcnt = SYSCALLS . get ( ) ; <nl> - var ret = 0 ; <nl> - for ( var i = 0 ; i < iovcnt ; i + + ) { <nl> - var ptr = HEAP32 [ ( ( ( iov ) + ( i * 8 ) ) > > 2 ) ] ; <nl> - var len = HEAP32 [ ( ( ( iov ) + ( i * 8 + 4 ) ) > > 2 ) ] ; <nl> - for ( var j = 0 ; j < len ; j + + ) { <nl> - SYSCALLS . printChar ( stream , HEAPU8 [ ptr + j ] ) ; <nl> - } <nl> - ret + = len ; <nl> - } <nl> - return ret ; <nl> - } catch ( e ) { <nl> - if ( typeof FS = = = ' undefined ' | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; <nl> - return - e . errno ; <nl> - } <nl> - } <nl> - <nl> - function ___syscall54 ( which , varargs ) { SYSCALLS . 
varargs = varargs ; <nl> - try { <nl> - / / ioctl <nl> - return 0 ; <nl> - } catch ( e ) { <nl> - if ( typeof FS = = = ' undefined ' | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; <nl> - return - e . errno ; <nl> - } <nl> - } <nl> - <nl> - function ___syscall6 ( which , varargs ) { SYSCALLS . varargs = varargs ; <nl> - try { <nl> - / / close <nl> - var stream = SYSCALLS . getStreamFromFD ( ) ; <nl> - abort ( ' it should not be possible to operate on streams when ! SYSCALLS_REQUIRE_FILESYSTEM ' ) ; <nl> - return 0 ; <nl> - } catch ( e ) { <nl> - if ( typeof FS = = = ' undefined ' | | ! ( e instanceof FS . ErrnoError ) ) abort ( e ) ; <nl> - return - e . errno ; <nl> - } <nl> - } <nl> - <nl> - function ___unlock ( ) { } <nl> - <nl> - function _emscripten_get_heap_size ( ) { <nl> - return HEAP8 . length ; <nl> - } <nl> - <nl> - function _emscripten_memcpy_big ( dest , src , num ) { <nl> - HEAPU8 . set ( HEAPU8 . subarray ( src , src + num ) , dest ) ; <nl> - } <nl> - <nl> - <nl> - <nl> - function _emscripten_trace_js_configure ( collector_url , application ) { <nl> - EmscriptenTrace . configure ( collector_url , application ) ; <nl> - } <nl> - <nl> - function _emscripten_trace_configure_for_google_wtf ( ) { <nl> - EmscriptenTrace . configureForGoogleWTF ( ) ; <nl> - } <nl> - <nl> - function _emscripten_trace_js_enter_context ( name ) { <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_ENTER_CONTEXT , <nl> - now , name ] ) ; <nl> - } <nl> - if ( EmscriptenTrace . googleWTFEnabled ) { <nl> - EmscriptenTrace . googleWTFEnterScope ( name ) ; <nl> - } <nl> - } <nl> - <nl> - function _emscripten_trace_exit_context ( ) { <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_EXIT_CONTEXT , now ] ) ; <nl> - } <nl> - if ( EmscriptenTrace . googleWTFEnabled ) { <nl> - EmscriptenTrace . googleWTFExitScope ( ) ; <nl> - } <nl> - } <nl> - <nl> - function _emscripten_trace_js_log_message ( channel , message ) { <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_LOG_MESSAGE , now , <nl> - channel , message ] ) ; <nl> - } <nl> - } <nl> - <nl> - function _emscripten_trace_js_mark ( message ) { <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_LOG_MESSAGE , now , <nl> - " MARK " , message ] ) ; <nl> - } <nl> - if ( EmscriptenTrace . googleWTFEnabled ) { <nl> - window . wtf . trace . 
mark ( message ) ; <nl> - } <nl> - } <nl> - <nl> - function _emscripten_get_now ( ) { abort ( ) } var EmscriptenTrace = { worker : null , collectorEnabled : false , googleWTFEnabled : false , testingEnabled : false , googleWTFData : { scopeStack : [ ] , cachedScopes : { } } , DATA_VERSION : 1 , EVENT_ALLOCATE : " allocate " , EVENT_ANNOTATE_TYPE : " annotate - type " , EVENT_APPLICATION_NAME : " application - name " , EVENT_ASSOCIATE_STORAGE_SIZE : " associate - storage - size " , EVENT_ENTER_CONTEXT : " enter - context " , EVENT_EXIT_CONTEXT : " exit - context " , EVENT_FRAME_END : " frame - end " , EVENT_FRAME_RATE : " frame - rate " , EVENT_FRAME_START : " frame - start " , EVENT_FREE : " free " , EVENT_LOG_MESSAGE : " log - message " , EVENT_MEMORY_LAYOUT : " memory - layout " , EVENT_OFF_HEAP : " off - heap " , EVENT_REALLOCATE : " reallocate " , EVENT_REPORT_ERROR : " report - error " , EVENT_SESSION_NAME : " session - name " , EVENT_TASK_ASSOCIATE_DATA : " task - associate - data " , EVENT_TASK_END : " task - end " , EVENT_TASK_RESUME : " task - resume " , EVENT_TASK_START : " task - start " , EVENT_TASK_SUSPEND : " task - suspend " , EVENT_USER_NAME : " user - name " , init : function ( ) { <nl> - Module [ ' emscripten_trace_configure ' ] = _emscripten_trace_js_configure ; <nl> - Module [ ' emscripten_trace_configure_for_google_wtf ' ] = _emscripten_trace_configure_for_google_wtf ; <nl> - Module [ ' emscripten_trace_enter_context ' ] = _emscripten_trace_js_enter_context ; <nl> - Module [ ' emscripten_trace_exit_context ' ] = _emscripten_trace_exit_context ; <nl> - Module [ ' emscripten_trace_log_message ' ] = _emscripten_trace_js_log_message ; <nl> - Module [ ' emscripten_trace_mark ' ] = _emscripten_trace_js_mark ; <nl> - } , loadWorkerViaXHR : function ( url , ready , scope ) { <nl> - var req = new XMLHttpRequest ( ) ; <nl> - req . addEventListener ( ' load ' , function ( ) { <nl> - var blob = new Blob ( [ this . responseText ] , { type : ' text / javascript ' } ) ; <nl> - var worker = new Worker ( window . URL . createObjectURL ( blob ) ) ; <nl> - if ( ready ) { <nl> - ready . call ( scope , worker ) ; <nl> - } <nl> - } , req ) ; <nl> - req . open ( " get " , url , false ) ; <nl> - req . send ( ) ; <nl> - } , configure : function ( collector_url , application ) { <nl> - EmscriptenTrace . now = _emscripten_get_now ; <nl> - var now = new Date ( ) ; <nl> - var session_id = now . getTime ( ) . toString ( ) + ' _ ' + <nl> - Math . floor ( ( Math . random ( ) * 100 ) + 1 ) . toString ( ) ; <nl> - EmscriptenTrace . loadWorkerViaXHR ( collector_url + ' worker . js ' , function ( worker ) { <nl> - EmscriptenTrace . worker = worker ; <nl> - EmscriptenTrace . worker . addEventListener ( ' error ' , function ( e ) { <nl> - console . log ( ' TRACE WORKER ERROR : ' ) ; <nl> - console . log ( e ) ; <nl> - } , false ) ; <nl> - EmscriptenTrace . worker . postMessage ( { ' cmd ' : ' configure ' , <nl> - ' data_version ' : EmscriptenTrace . DATA_VERSION , <nl> - ' session_id ' : session_id , <nl> - ' url ' : collector_url } ) ; <nl> - EmscriptenTrace . configured = true ; <nl> - EmscriptenTrace . collectorEnabled = true ; <nl> - EmscriptenTrace . postEnabled = true ; <nl> - } ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_APPLICATION_NAME , application ] ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_SESSION_NAME , now . toISOString ( ) ] ) ; <nl> - } , configureForTest : function ( ) { <nl> - EmscriptenTrace . postEnabled = true ; <nl> - EmscriptenTrace . 
testingEnabled = true ; <nl> - EmscriptenTrace . now = function ( ) { return 0 . 0 ; } ; <nl> - } , configureForGoogleWTF : function ( ) { <nl> - if ( window & & window . wtf ) { <nl> - EmscriptenTrace . googleWTFEnabled = true ; <nl> - } else { <nl> - console . log ( ' GOOGLE WTF NOT AVAILABLE TO ENABLE ' ) ; <nl> - } <nl> - } , post : function ( entry ) { <nl> - if ( EmscriptenTrace . postEnabled & & EmscriptenTrace . collectorEnabled ) { <nl> - EmscriptenTrace . worker . postMessage ( { ' cmd ' : ' post ' , <nl> - ' entry ' : entry } ) ; <nl> - } else if ( EmscriptenTrace . postEnabled & & EmscriptenTrace . testingEnabled ) { <nl> - out ( ' Tracing ' + entry ) ; <nl> - } <nl> - } , googleWTFEnterScope : function ( name ) { <nl> - var scopeEvent = EmscriptenTrace . googleWTFData [ ' cachedScopes ' ] [ name ] ; <nl> - if ( ! scopeEvent ) { <nl> - scopeEvent = window . wtf . trace . events . createScope ( name ) ; <nl> - EmscriptenTrace . googleWTFData [ ' cachedScopes ' ] [ name ] = scopeEvent ; <nl> - } <nl> - var scope = scopeEvent ( ) ; <nl> - EmscriptenTrace . googleWTFData [ ' scopeStack ' ] . push ( scope ) ; <nl> - } , googleWTFExitScope : function ( ) { <nl> - var scope = EmscriptenTrace . googleWTFData [ ' scopeStack ' ] . pop ( ) ; <nl> - window . wtf . trace . leaveScope ( scope ) ; <nl> - } } ; function _emscripten_trace_record_allocation ( address , size ) { <nl> - if ( typeof Module [ ' onMalloc ' ] = = = ' function ' ) Module [ ' onMalloc ' ] ( address , size ) ; <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_ALLOCATE , <nl> - now , address , size ] ) ; <nl> - } <nl> - } <nl> - <nl> - function _emscripten_trace_record_free ( address ) { <nl> - if ( typeof Module [ ' onFree ' ] = = = ' function ' ) Module [ ' onFree ' ] ( address ) ; <nl> - if ( EmscriptenTrace . postEnabled ) { <nl> - var now = EmscriptenTrace . now ( ) ; <nl> - EmscriptenTrace . post ( [ EmscriptenTrace . EVENT_FREE , <nl> - now , address ] ) ; <nl> - } <nl> - } <nl> - <nl> - <nl> - function _memcpy ( dest , src , num ) { <nl> - dest = dest | 0 ; src = src | 0 ; num = num | 0 ; <nl> - var ret = 0 ; <nl> - var aligned_dest_end = 0 ; <nl> - var block_aligned_dest_end = 0 ; <nl> - var dest_end = 0 ; <nl> - / / Test against a benchmarked cutoff limit for when HEAPU8 . set ( ) becomes faster to use . <nl> - if ( ( num | 0 ) > = 8192 ) { <nl> - _emscripten_memcpy_big ( dest | 0 , src | 0 , num | 0 ) | 0 ; <nl> - return dest | 0 ; <nl> - } <nl> - <nl> - ret = dest | 0 ; <nl> - dest_end = ( dest + num ) | 0 ; <nl> - if ( ( dest & 3 ) = = ( src & 3 ) ) { <nl> - / / The initial unaligned < 4 - byte front . 
<nl> - while ( dest & 3 ) { <nl> - if ( ( num | 0 ) = = 0 ) return ret | 0 ; <nl> - HEAP8 [ ( ( dest ) > > 0 ) ] = ( ( HEAP8 [ ( ( src ) > > 0 ) ] ) | 0 ) ; <nl> - dest = ( dest + 1 ) | 0 ; <nl> - src = ( src + 1 ) | 0 ; <nl> - num = ( num - 1 ) | 0 ; <nl> - } <nl> - aligned_dest_end = ( dest_end & - 4 ) | 0 ; <nl> - block_aligned_dest_end = ( aligned_dest_end - 64 ) | 0 ; <nl> - while ( ( dest | 0 ) < = ( block_aligned_dest_end | 0 ) ) { <nl> - HEAP32 [ ( ( dest ) > > 2 ) ] = ( ( HEAP32 [ ( ( src ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 4 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 4 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 8 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 8 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 12 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 12 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 16 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 16 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 20 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 20 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 24 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 24 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 28 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 28 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 32 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 32 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 36 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 36 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 40 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 40 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 44 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 44 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 48 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 48 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 52 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 52 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 56 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 56 ) ) > > 2 ) ] ) | 0 ) ; <nl> - HEAP32 [ ( ( ( dest ) + ( 60 ) ) > > 2 ) ] = ( ( HEAP32 [ ( ( ( src ) + ( 60 ) ) > > 2 ) ] ) | 0 ) ; <nl> - dest = ( dest + 64 ) | 0 ; <nl> - src = ( src + 64 ) | 0 ; <nl> - } <nl> - while ( ( dest | 0 ) < ( aligned_dest_end | 0 ) ) { <nl> - HEAP32 [ ( ( dest ) > > 2 ) ] = ( ( HEAP32 [ ( ( src ) > > 2 ) ] ) | 0 ) ; <nl> - dest = ( dest + 4 ) | 0 ; <nl> - src = ( src + 4 ) | 0 ; <nl> - } <nl> - } else { <nl> - / / In the unaligned copy case , unroll a bit as well . <nl> - aligned_dest_end = ( dest_end - 4 ) | 0 ; <nl> - while ( ( dest | 0 ) < ( aligned_dest_end | 0 ) ) { <nl> - HEAP8 [ ( ( dest ) > > 0 ) ] = ( ( HEAP8 [ ( ( src ) > > 0 ) ] ) | 0 ) ; <nl> - HEAP8 [ ( ( ( dest ) + ( 1 ) ) > > 0 ) ] = ( ( HEAP8 [ ( ( ( src ) + ( 1 ) ) > > 0 ) ] ) | 0 ) ; <nl> - HEAP8 [ ( ( ( dest ) + ( 2 ) ) > > 0 ) ] = ( ( HEAP8 [ ( ( ( src ) + ( 2 ) ) > > 0 ) ] ) | 0 ) ; <nl> - HEAP8 [ ( ( ( dest ) + ( 3 ) ) > > 0 ) ] = ( ( HEAP8 [ ( ( ( src ) + ( 3 ) ) > > 0 ) ] ) | 0 ) ; <nl> - dest = ( dest + 4 ) | 0 ; <nl> - src = ( src + 4 ) | 0 ; <nl> - } <nl> - } <nl> - / / The remaining unaligned < 4 byte tail . 
<nl> - while ( ( dest | 0 ) < ( dest_end | 0 ) ) { <nl> - HEAP8 [ ( ( dest ) > > 0 ) ] = ( ( HEAP8 [ ( ( src ) > > 0 ) ] ) | 0 ) ; <nl> - dest = ( dest + 1 ) | 0 ; <nl> - src = ( src + 1 ) | 0 ; <nl> - } <nl> - return ret | 0 ; <nl> - } <nl> - <nl> - function _memset ( ptr , value , num ) { <nl> - ptr = ptr | 0 ; value = value | 0 ; num = num | 0 ; <nl> - var end = 0 , aligned_end = 0 , block_aligned_end = 0 , value4 = 0 ; <nl> - end = ( ptr + num ) | 0 ; <nl> - <nl> - value = value & 0xff ; <nl> - if ( ( num | 0 ) > = 67 / * 64 bytes for an unrolled loop + 3 bytes for unaligned head * / ) { <nl> - while ( ( ptr & 3 ) ! = 0 ) { <nl> - HEAP8 [ ( ( ptr ) > > 0 ) ] = value ; <nl> - ptr = ( ptr + 1 ) | 0 ; <nl> - } <nl> - <nl> - aligned_end = ( end & - 4 ) | 0 ; <nl> - value4 = value | ( value < < 8 ) | ( value < < 16 ) | ( value < < 24 ) ; <nl> - <nl> - block_aligned_end = ( aligned_end - 64 ) | 0 ; <nl> - <nl> - while ( ( ptr | 0 ) < = ( block_aligned_end | 0 ) ) { <nl> - HEAP32 [ ( ( ptr ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 4 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 8 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 12 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 16 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 20 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 24 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 28 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 32 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 36 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 40 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 44 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 48 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 52 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 56 ) ) > > 2 ) ] = value4 ; <nl> - HEAP32 [ ( ( ( ptr ) + ( 60 ) ) > > 2 ) ] = value4 ; <nl> - ptr = ( ptr + 64 ) | 0 ; <nl> - } <nl> - <nl> - while ( ( ptr | 0 ) < ( aligned_end | 0 ) ) { <nl> - HEAP32 [ ( ( ptr ) > > 2 ) ] = value4 ; <nl> - ptr = ( ptr + 4 ) | 0 ; <nl> - } <nl> - } <nl> - / / The remaining bytes . <nl> - while ( ( ptr | 0 ) < ( end | 0 ) ) { <nl> - HEAP8 [ ( ( ptr ) > > 0 ) ] = value ; <nl> - ptr = ( ptr + 1 ) | 0 ; <nl> - } <nl> - return ( end - num ) | 0 ; <nl> - } <nl> - <nl> - <nl> - function ___setErrNo ( value ) { <nl> - if ( Module [ ' ___errno_location ' ] ) HEAP32 [ ( ( Module [ ' ___errno_location ' ] ( ) ) > > 2 ) ] = value ; <nl> - else err ( ' failed to set errno from JS ' ) ; <nl> - return value ; <nl> - } <nl> - <nl> - <nl> - function abortOnCannotGrowMemory ( requestedSize ) { <nl> - abort ( ' Cannot enlarge memory arrays to size ' + requestedSize + ' bytes ( OOM ) . Either ( 1 ) compile with - s TOTAL_MEMORY = X with X higher than the current value ' + HEAP8 . 
length + ' , ( 2 ) compile with - s ALLOW_MEMORY_GROWTH = 1 which allows increasing the size at runtime , or ( 3 ) if you want malloc to return NULL ( 0 ) instead of this abort , compile with - s ABORTING_MALLOC = 0 ' ) ; <nl> - } function _emscripten_resize_heap ( requestedSize ) { <nl> - abortOnCannotGrowMemory ( requestedSize ) ; <nl> - } function _sbrk ( increment ) { <nl> - increment = increment | 0 ; <nl> - var oldDynamicTop = 0 ; <nl> - var oldDynamicTopOnChange = 0 ; <nl> - var newDynamicTop = 0 ; <nl> - var totalMemory = 0 ; <nl> - totalMemory = _emscripten_get_heap_size ( ) | 0 ; <nl> - <nl> - oldDynamicTop = HEAP32 [ DYNAMICTOP_PTR > > 2 ] | 0 ; <nl> - newDynamicTop = oldDynamicTop + increment | 0 ; <nl> - <nl> - if ( ( ( increment | 0 ) > 0 & ( newDynamicTop | 0 ) < ( oldDynamicTop | 0 ) ) / / Detect and fail if we would wrap around signed 32 - bit int . <nl> - | ( newDynamicTop | 0 ) < 0 ) { / / Also underflow , sbrk ( ) should be able to be used to subtract . <nl> - abortOnCannotGrowMemory ( newDynamicTop | 0 ) | 0 ; <nl> - ___setErrNo ( 12 ) ; <nl> - return - 1 ; <nl> - } <nl> - <nl> - if ( ( newDynamicTop | 0 ) > ( totalMemory | 0 ) ) { <nl> - if ( _emscripten_resize_heap ( newDynamicTop | 0 ) | 0 ) { <nl> - / / We resized the heap . Start another loop iteration if we need to . <nl> - } else { <nl> - / / We failed to resize the heap . <nl> - ___setErrNo ( 12 ) ; <nl> - return - 1 ; <nl> - } <nl> - } <nl> - <nl> - HEAP32 [ DYNAMICTOP_PTR > > 2 ] = newDynamicTop | 0 ; <nl> - <nl> - return oldDynamicTop | 0 ; <nl> - } <nl> - <nl> - function _setTempRet0 ( $ i ) { <nl> - setTempRet0 ( ( $ i ) | 0 ) ; <nl> - } <nl> - EmscriptenTrace . init ( ) ; <nl> - if ( ENVIRONMENT_IS_NODE ) { <nl> - _emscripten_get_now = function _emscripten_get_now_actual ( ) { <nl> - var t = process [ ' hrtime ' ] ( ) ; <nl> - return t [ 0 ] * 1e3 + t [ 1 ] / 1e6 ; <nl> - } ; <nl> - } else if ( typeof dateNow ! = = ' undefined ' ) { <nl> - _emscripten_get_now = dateNow ; <nl> - } else if ( typeof performance = = = ' object ' & & performance & & typeof performance [ ' now ' ] = = = ' function ' ) { <nl> - _emscripten_get_now = function ( ) { return performance [ ' now ' ] ( ) ; } ; <nl> - } else { <nl> - _emscripten_get_now = Date . now ; <nl> - } ; <nl> - var ASSERTIONS = true ; <nl> - <nl> - / / Copyright 2017 The Emscripten Authors . All rights reserved . <nl> - / / Emscripten is available under two separate licenses , the MIT license and the <nl> - / / University of Illinois / NCSA Open Source License . Both these licenses can be <nl> - / / found in the LICENSE file . <nl> - <nl> - / * * @ type { function ( string , boolean = , number = ) } * / <nl> - function intArrayFromString ( stringy , dontAddNull , length ) { <nl> - var len = length > 0 ? length : lengthBytesUTF8 ( stringy ) + 1 ; <nl> - var u8array = new Array ( len ) ; <nl> - var numBytesWritten = stringToUTF8Array ( stringy , u8array , 0 , u8array . length ) ; <nl> - if ( dontAddNull ) u8array . length = numBytesWritten ; <nl> - return u8array ; <nl> - } <nl> - <nl> - function intArrayToString ( array ) { <nl> - var ret = [ ] ; <nl> - for ( var i = 0 ; i < array . length ; i + + ) { <nl> - var chr = array [ i ] ; <nl> - if ( chr > 0xFF ) { <nl> - if ( ASSERTIONS ) { <nl> - assert ( false , ' Character code ' + chr + ' ( ' + String . fromCharCode ( chr ) + ' ) at offset ' + i + ' not in 0x00 - 0xFF . ' ) ; <nl> - } <nl> - chr & = 0xFF ; <nl> - } <nl> - ret . push ( String . fromCharCode ( chr ) ) ; <nl> - } <nl> - return ret . 
join ( ' ' ) ; <nl> - } <nl> - <nl> - <nl> - / / ASM_LIBRARY EXTERN PRIMITIVES : Int8Array , Int32Array <nl> - <nl> - var asmGlobalArg = { } ; <nl> - var asmLibraryArg = { " DYNAMICTOP_PTR " : DYNAMICTOP_PTR , " __lock " : ___lock , " __setErrNo " : ___setErrNo , " __syscall140 " : ___syscall140 , " __syscall146 " : ___syscall146 , " __syscall54 " : ___syscall54 , " __syscall6 " : ___syscall6 , " __unlock " : ___unlock , " abortOnCannotGrowMemory " : abortOnCannotGrowMemory , " emscripten_get_heap_size " : _emscripten_get_heap_size , " emscripten_get_now " : _emscripten_get_now , " emscripten_memcpy_big " : _emscripten_memcpy_big , " emscripten_resize_heap " : _emscripten_resize_heap , " emscripten_trace_configure_for_google_wtf " : _emscripten_trace_configure_for_google_wtf , " emscripten_trace_exit_context " : _emscripten_trace_exit_context , " emscripten_trace_js_configure " : _emscripten_trace_js_configure , " emscripten_trace_js_enter_context " : _emscripten_trace_js_enter_context , " emscripten_trace_js_log_message " : _emscripten_trace_js_log_message , " emscripten_trace_js_mark " : _emscripten_trace_js_mark , " emscripten_trace_record_allocation " : _emscripten_trace_record_allocation , " emscripten_trace_record_free " : _emscripten_trace_record_free , " flush_NO_FILESYSTEM " : flush_NO_FILESYSTEM , " memcpy " : _memcpy , " memset " : _memset , " sbrk " : _sbrk , " setTempRet0 " : _setTempRet0 } ; <nl> - var asm = Module [ ' asm ' ] ( asmGlobalArg , asmLibraryArg , buffer ) ; <nl> - var real____wasm_call_ctors = asm [ " __wasm_call_ctors " ] ; <nl> - asm [ " __wasm_call_ctors " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real____wasm_call_ctors . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real__main = asm [ " main " ] ; <nl> - asm [ " main " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real__main . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real____errno_location = asm [ " __errno_location " ] ; <nl> - asm [ " __errno_location " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real____errno_location . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real__fflush = asm [ " fflush " ] ; <nl> - asm [ " fflush " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real__fflush . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real__malloc = asm [ " malloc " ] ; <nl> - asm [ " malloc " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! 
runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real__malloc . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real__free = asm [ " free " ] ; <nl> - asm [ " free " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real__free . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real__setThrew = asm [ " setThrew " ] ; <nl> - asm [ " setThrew " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real__setThrew . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_stackSave = asm [ " stackSave " ] ; <nl> - asm [ " stackSave " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_stackSave . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_stackAlloc = asm [ " stackAlloc " ] ; <nl> - asm [ " stackAlloc " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_stackAlloc . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_stackRestore = asm [ " stackRestore " ] ; <nl> - asm [ " stackRestore " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_stackRestore . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real___growWasmMemory = asm [ " __growWasmMemory " ] ; <nl> - asm [ " __growWasmMemory " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real___growWasmMemory . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_dynCall_iidiiii = asm [ " dynCall_iidiiii " ] ; <nl> - asm [ " dynCall_iidiiii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_dynCall_iidiiii . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_dynCall_vii = asm [ " dynCall_vii " ] ; <nl> - asm [ " dynCall_vii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! 
runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_dynCall_vii . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_dynCall_iiii = asm [ " dynCall_iiii " ] ; <nl> - asm [ " dynCall_iiii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_dynCall_iiii . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_dynCall_ii = asm [ " dynCall_ii " ] ; <nl> - asm [ " dynCall_ii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_dynCall_ii . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - var real_dynCall_jiji = asm [ " dynCall_jiji " ] ; <nl> - asm [ " dynCall_jiji " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return real_dynCall_jiji . apply ( null , arguments ) ; <nl> - } ; <nl> - <nl> - Module [ " asm " ] = asm ; <nl> - var ___wasm_call_ctors = Module [ " ___wasm_call_ctors " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " __wasm_call_ctors " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var _main = Module [ " _main " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " main " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var ___errno_location = Module [ " ___errno_location " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " __errno_location " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var _fflush = Module [ " _fflush " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " fflush " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var _malloc = Module [ " _malloc " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! 
runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " malloc " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var _free = Module [ " _free " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " free " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var _setThrew = Module [ " _setThrew " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " setThrew " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var stackSave = Module [ " stackSave " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " stackSave " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var stackAlloc = Module [ " stackAlloc " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " stackAlloc " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var stackRestore = Module [ " stackRestore " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " stackRestore " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var __growWasmMemory = Module [ " __growWasmMemory " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " __growWasmMemory " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var dynCall_iidiiii = Module [ " dynCall_iidiiii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " dynCall_iidiiii " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var dynCall_vii = Module [ " dynCall_vii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! 
runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " dynCall_vii " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var dynCall_iiii = Module [ " dynCall_iiii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " dynCall_iiii " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var dynCall_ii = Module [ " dynCall_ii " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " dynCall_ii " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - var dynCall_jiji = Module [ " dynCall_jiji " ] = function ( ) { <nl> - assert ( runtimeInitialized , ' you need to wait for the runtime to be ready ( e . g . wait for main ( ) to be called ) ' ) ; <nl> - assert ( ! runtimeExited , ' the runtime was exited ( use NO_EXIT_RUNTIME to keep it alive after main ( ) exits ) ' ) ; <nl> - return Module [ " asm " ] [ " dynCall_jiji " ] . apply ( null , arguments ) <nl> - } ; <nl> - <nl> - <nl> - <nl> - <nl> - / / = = = Auto - generated postamble setup entry stuff = = = <nl> - <nl> - Module [ ' asm ' ] = asm ; <nl> - <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " intArrayFromString " ) ) Module [ " intArrayFromString " ] = function ( ) { abort ( " ' intArrayFromString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " intArrayToString " ) ) Module [ " intArrayToString " ] = function ( ) { abort ( " ' intArrayToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " ccall " ) ) Module [ " ccall " ] = function ( ) { abort ( " ' ccall ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " cwrap " ) ) Module [ " cwrap " ] = function ( ) { abort ( " ' cwrap ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " setValue " ) ) Module [ " setValue " ] = function ( ) { abort ( " ' setValue ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getValue " ) ) Module [ " getValue " ] = function ( ) { abort ( " ' getValue ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " allocate " ) ) Module [ " allocate " ] = function ( ) { abort ( " ' allocate ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getMemory " ) ) Module [ " getMemory " ] = function ( ) { abort ( " ' getMemory ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . 
Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " AsciiToString " ) ) Module [ " AsciiToString " ] = function ( ) { abort ( " ' AsciiToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stringToAscii " ) ) Module [ " stringToAscii " ] = function ( ) { abort ( " ' stringToAscii ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " UTF8ArrayToString " ) ) Module [ " UTF8ArrayToString " ] = function ( ) { abort ( " ' UTF8ArrayToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " UTF8ToString " ) ) Module [ " UTF8ToString " ] = function ( ) { abort ( " ' UTF8ToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stringToUTF8Array " ) ) Module [ " stringToUTF8Array " ] = function ( ) { abort ( " ' stringToUTF8Array ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stringToUTF8 " ) ) Module [ " stringToUTF8 " ] = function ( ) { abort ( " ' stringToUTF8 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " lengthBytesUTF8 " ) ) Module [ " lengthBytesUTF8 " ] = function ( ) { abort ( " ' lengthBytesUTF8 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " UTF16ToString " ) ) Module [ " UTF16ToString " ] = function ( ) { abort ( " ' UTF16ToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stringToUTF16 " ) ) Module [ " stringToUTF16 " ] = function ( ) { abort ( " ' stringToUTF16 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " lengthBytesUTF16 " ) ) Module [ " lengthBytesUTF16 " ] = function ( ) { abort ( " ' lengthBytesUTF16 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " UTF32ToString " ) ) Module [ " UTF32ToString " ] = function ( ) { abort ( " ' UTF32ToString ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stringToUTF32 " ) ) Module [ " stringToUTF32 " ] = function ( ) { abort ( " ' stringToUTF32 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " lengthBytesUTF32 " ) ) Module [ " lengthBytesUTF32 " ] = function ( ) { abort ( " ' lengthBytesUTF32 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " allocateUTF8 " ) ) Module [ " allocateUTF8 " ] = function ( ) { abort ( " ' allocateUTF8 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . 
getOwnPropertyDescriptor ( Module , " stackTrace " ) ) Module [ " stackTrace " ] = function ( ) { abort ( " ' stackTrace ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addOnPreRun " ) ) Module [ " addOnPreRun " ] = function ( ) { abort ( " ' addOnPreRun ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addOnInit " ) ) Module [ " addOnInit " ] = function ( ) { abort ( " ' addOnInit ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addOnPreMain " ) ) Module [ " addOnPreMain " ] = function ( ) { abort ( " ' addOnPreMain ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addOnExit " ) ) Module [ " addOnExit " ] = function ( ) { abort ( " ' addOnExit ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addOnPostRun " ) ) Module [ " addOnPostRun " ] = function ( ) { abort ( " ' addOnPostRun ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " writeStringToMemory " ) ) Module [ " writeStringToMemory " ] = function ( ) { abort ( " ' writeStringToMemory ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " writeArrayToMemory " ) ) Module [ " writeArrayToMemory " ] = function ( ) { abort ( " ' writeArrayToMemory ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " writeAsciiToMemory " ) ) Module [ " writeAsciiToMemory " ] = function ( ) { abort ( " ' writeAsciiToMemory ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addRunDependency " ) ) Module [ " addRunDependency " ] = function ( ) { abort ( " ' addRunDependency ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " removeRunDependency " ) ) Module [ " removeRunDependency " ] = function ( ) { abort ( " ' removeRunDependency ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " ENV " ) ) Module [ " ENV " ] = function ( ) { abort ( " ' ENV ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS " ) ) Module [ " FS " ] = function ( ) { abort ( " ' FS ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createFolder " ) ) Module [ " FS_createFolder " ] = function ( ) { abort ( " ' FS_createFolder ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . 
Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createPath " ) ) Module [ " FS_createPath " ] = function ( ) { abort ( " ' FS_createPath ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createDataFile " ) ) Module [ " FS_createDataFile " ] = function ( ) { abort ( " ' FS_createDataFile ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createPreloadedFile " ) ) Module [ " FS_createPreloadedFile " ] = function ( ) { abort ( " ' FS_createPreloadedFile ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createLazyFile " ) ) Module [ " FS_createLazyFile " ] = function ( ) { abort ( " ' FS_createLazyFile ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createLink " ) ) Module [ " FS_createLink " ] = function ( ) { abort ( " ' FS_createLink ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_createDevice " ) ) Module [ " FS_createDevice " ] = function ( ) { abort ( " ' FS_createDevice ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " FS_unlink " ) ) Module [ " FS_unlink " ] = function ( ) { abort ( " ' FS_unlink ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " GL " ) ) Module [ " GL " ] = function ( ) { abort ( " ' GL ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " dynamicAlloc " ) ) Module [ " dynamicAlloc " ] = function ( ) { abort ( " ' dynamicAlloc ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " loadDynamicLibrary " ) ) Module [ " loadDynamicLibrary " ] = function ( ) { abort ( " ' loadDynamicLibrary ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " loadWebAssemblyModule " ) ) Module [ " loadWebAssemblyModule " ] = function ( ) { abort ( " ' loadWebAssemblyModule ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . 
getOwnPropertyDescriptor ( Module , " getLEB " ) ) Module [ " getLEB " ] = function ( ) { abort ( " ' getLEB ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getFunctionTables " ) ) Module [ " getFunctionTables " ] = function ( ) { abort ( " ' getFunctionTables ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " alignFunctionTables " ) ) Module [ " alignFunctionTables " ] = function ( ) { abort ( " ' alignFunctionTables ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " registerFunctions " ) ) Module [ " registerFunctions " ] = function ( ) { abort ( " ' registerFunctions ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " addFunction " ) ) Module [ " addFunction " ] = function ( ) { abort ( " ' addFunction ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " removeFunction " ) ) Module [ " removeFunction " ] = function ( ) { abort ( " ' removeFunction ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getFuncWrapper " ) ) Module [ " getFuncWrapper " ] = function ( ) { abort ( " ' getFuncWrapper ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " prettyPrint " ) ) Module [ " prettyPrint " ] = function ( ) { abort ( " ' prettyPrint ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " makeBigInt " ) ) Module [ " makeBigInt " ] = function ( ) { abort ( " ' makeBigInt ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " dynCall " ) ) Module [ " dynCall " ] = function ( ) { abort ( " ' dynCall ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getCompilerSetting " ) ) Module [ " getCompilerSetting " ] = function ( ) { abort ( " ' getCompilerSetting ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stackSave " ) ) Module [ " stackSave " ] = function ( ) { abort ( " ' stackSave ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stackRestore " ) ) Module [ " stackRestore " ] = function ( ) { abort ( " ' stackRestore ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " stackAlloc " ) ) Module [ " stackAlloc " ] = function ( ) { abort ( " ' stackAlloc ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " establishStackSpace " ) ) Module [ " establishStackSpace " ] = function ( ) { abort ( " ' establishStackSpace ' was not exported . 
add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " print " ) ) Module [ " print " ] = function ( ) { abort ( " ' print ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " printErr " ) ) Module [ " printErr " ] = function ( ) { abort ( " ' printErr ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " getTempRet0 " ) ) Module [ " getTempRet0 " ] = function ( ) { abort ( " ' getTempRet0 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " setTempRet0 " ) ) Module [ " setTempRet0 " ] = function ( ) { abort ( " ' setTempRet0 ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " callMain " ) ) Module [ " callMain " ] = function ( ) { abort ( " ' callMain ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " Pointer_stringify " ) ) Module [ " Pointer_stringify " ] = function ( ) { abort ( " ' Pointer_stringify ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " warnOnce " ) ) Module [ " warnOnce " ] = function ( ) { abort ( " ' warnOnce ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } ; if ( ! Object . getOwnPropertyDescriptor ( Module , " ALLOC_NORMAL " ) ) Object . defineProperty ( Module , " ALLOC_NORMAL " , { get : function ( ) { abort ( " ' ALLOC_NORMAL ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " ALLOC_STACK " ) ) Object . defineProperty ( Module , " ALLOC_STACK " , { get : function ( ) { abort ( " ' ALLOC_STACK ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " ALLOC_DYNAMIC " ) ) Object . defineProperty ( Module , " ALLOC_DYNAMIC " , { get : function ( ) { abort ( " ' ALLOC_DYNAMIC ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " ALLOC_NONE " ) ) Object . defineProperty ( Module , " ALLOC_NONE " , { get : function ( ) { abort ( " ' ALLOC_NONE ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) " ) } } ) ; <nl> - if ( ! Object . getOwnPropertyDescriptor ( Module , " calledRun " ) ) Object . defineProperty ( Module , " calledRun " , { get : function ( ) { abort ( " ' calledRun ' was not exported . add it to EXTRA_EXPORTED_RUNTIME_METHODS ( see the FAQ ) . Alternatively , forcing filesystem support ( - s FORCE_FILESYSTEM = 1 ) can export this for you " ) } } ) ; <nl> - <nl> - <nl> - <nl> - var calledRun ; <nl> - <nl> - <nl> - / * * <nl> - * @ constructor <nl> - * @ this { ExitStatus } <nl> - * / <nl> - function ExitStatus ( status ) { <nl> - this . name = " ExitStatus " ; <nl> - this . message = " Program terminated with exit ( " + status + " ) " ; <nl> - this . 
status = status ; <nl> - } <nl> - <nl> - var calledMain = false ; <nl> - <nl> - dependenciesFulfilled = function runCaller ( ) { <nl> - / / If run has never been called , and we should call run ( INVOKE_RUN is true , and Module . noInitialRun is not false ) <nl> - if ( ! calledRun ) run ( ) ; <nl> - if ( ! calledRun ) dependenciesFulfilled = runCaller ; / / try this again later , after new deps are fulfilled <nl> - } ; <nl> - <nl> - function callMain ( args ) { <nl> - assert ( runDependencies = = 0 , ' cannot call main when async dependencies remain ! ( listen on Module [ " onRuntimeInitialized " ] ) ' ) ; <nl> - assert ( __ATPRERUN__ . length = = 0 , ' cannot call main when preRun functions remain to be called ' ) ; <nl> - <nl> - <nl> - args = args | | [ ] ; <nl> - <nl> - var argc = args . length + 1 ; <nl> - var argv = stackAlloc ( ( argc + 1 ) * 4 ) ; <nl> - HEAP32 [ argv > > 2 ] = allocateUTF8OnStack ( thisProgram ) ; <nl> - for ( var i = 1 ; i < argc ; i + + ) { <nl> - HEAP32 [ ( argv > > 2 ) + i ] = allocateUTF8OnStack ( args [ i - 1 ] ) ; <nl> - } <nl> - HEAP32 [ ( argv > > 2 ) + argc ] = 0 ; <nl> - <nl> - <nl> - try { <nl> - <nl> - <nl> - var ret = Module [ ' _main ' ] ( argc , argv ) ; <nl> - <nl> - <nl> - / / if we ' re not running an evented main loop , it ' s time to exit <nl> - exit ( ret , / * implicit = * / true ) ; <nl> - } <nl> - catch ( e ) { <nl> - if ( e instanceof ExitStatus ) { <nl> - / / exit ( ) throws this once it ' s done to make sure execution <nl> - / / has been stopped completely <nl> - return ; <nl> - } else if ( e = = ' SimulateInfiniteLoop ' ) { <nl> - / / running an evented main loop , don ' t immediately exit <nl> - Module [ ' noExitRuntime ' ] = true ; <nl> - return ; <nl> - } else { <nl> - var toLog = e ; <nl> - if ( e & & typeof e = = = ' object ' & & e . stack ) { <nl> - toLog = [ e , e . stack ] ; <nl> - } <nl> - err ( ' exception thrown : ' + toLog ) ; <nl> - quit_ ( 1 , e ) ; <nl> - } <nl> - } finally { <nl> - calledMain = true ; <nl> - } <nl> - } <nl> - <nl> - <nl> - <nl> - <nl> - / * * @ type { function ( Array = ) } * / <nl> - function run ( args ) { <nl> - args = args | | arguments_ ; <nl> - <nl> - if ( runDependencies > 0 ) { <nl> - return ; <nl> - } <nl> - <nl> - writeStackCookie ( ) ; <nl> - <nl> - preRun ( ) ; <nl> - <nl> - if ( runDependencies > 0 ) return ; / / a preRun added a dependency , run will be called later <nl> - <nl> - function doRun ( ) { <nl> - / / run may have just been called through dependencies being fulfilled just in this very frame , <nl> - / / or while the async setStatus time below was happening <nl> - if ( calledRun ) return ; <nl> - calledRun = true ; <nl> - <nl> - if ( ABORT ) return ; <nl> - <nl> - initRuntime ( ) ; <nl> - <nl> - preMain ( ) ; <nl> - <nl> - if ( Module [ ' onRuntimeInitialized ' ] ) Module [ ' onRuntimeInitialized ' ] ( ) ; <nl> - <nl> - if ( shouldRunNow ) callMain ( args ) ; <nl> - <nl> - postRun ( ) ; <nl> - } <nl> - <nl> - if ( Module [ ' setStatus ' ] ) { <nl> - Module [ ' setStatus ' ] ( ' Running . . . ' ) ; <nl> - setTimeout ( function ( ) { <nl> - setTimeout ( function ( ) { <nl> - Module [ ' setStatus ' ] ( ' ' ) ; <nl> - } , 1 ) ; <nl> - doRun ( ) ; <nl> - } , 1 ) ; <nl> - } else <nl> - { <nl> - doRun ( ) ; <nl> - } <nl> - checkStackCookie ( ) ; <nl> - } <nl> - Module [ ' run ' ] = run ; <nl> - <nl> - function checkUnflushedContent ( ) { <nl> - / / Compiler settings do not allow exiting the runtime , so flushing <nl> - / / the streams is not possible . 
but in ASSERTIONS mode we check <nl> - / / if there was something to flush , and if so tell the user they <nl> - / / should request that the runtime be exitable . <nl> - / / Normally we would not even include flush ( ) at all , but in ASSERTIONS <nl> - / / builds we do so just for this check , and here we see if there is any <nl> - / / content to flush , that is , we check if there would have been <nl> - / / something a non - ASSERTIONS build would have not seen . <nl> - / / How we flush the streams depends on whether we are in SYSCALLS_REQUIRE_FILESYSTEM = 0 <nl> - / / mode ( which has its own special function for this ; otherwise , all <nl> - / / the code is inside libc ) <nl> - var print = out ; <nl> - var printErr = err ; <nl> - var has = false ; <nl> - out = err = function ( x ) { <nl> - has = true ; <nl> - } <nl> - try { / / it doesn ' t matter if it fails <nl> - var flush = flush_NO_FILESYSTEM ; <nl> - if ( flush ) flush ( 0 ) ; <nl> - } catch ( e ) { } <nl> - out = print ; <nl> - err = printErr ; <nl> - if ( has ) { <nl> - warnOnce ( ' stdio streams had content in them that was not flushed . you should set EXIT_RUNTIME to 1 ( see the FAQ ) , or make sure to emit a newline when you printf etc . ' ) ; <nl> - warnOnce ( ' ( this may also be due to not including full filesystem support - try building with - s FORCE_FILESYSTEM = 1 ) ' ) ; <nl> - } <nl> - } <nl> - <nl> - function exit ( status , implicit ) { <nl> - checkUnflushedContent ( ) ; <nl> - <nl> - / / if this is just main exit - ing implicitly , and the status is 0 , then we <nl> - / / don ' t need to do anything here and can just leave . if the status is <nl> - / / non - zero , though , then we need to report it . <nl> - / / ( we may have warned about this earlier , if a situation justifies doing so ) <nl> - if ( implicit & & Module [ ' noExitRuntime ' ] & & status = = = 0 ) { <nl> - return ; <nl> - } <nl> - <nl> - if ( Module [ ' noExitRuntime ' ] ) { <nl> - / / if exit ( ) was called , we may warn the user if the runtime isn ' t actually being shut down <nl> - if ( ! implicit ) { <nl> - err ( ' exit ( ' + status + ' ) called , but EXIT_RUNTIME is not set , so halting execution but not exiting the runtime or preventing further async execution ( build with EXIT_RUNTIME = 1 , if you want a true shutdown ) ' ) ; <nl> - } <nl> - } else { <nl> - <nl> - ABORT = true ; <nl> - EXITSTATUS = status ; <nl> - <nl> - exitRuntime ( ) ; <nl> - <nl> - if ( Module [ ' onExit ' ] ) Module [ ' onExit ' ] ( status ) ; <nl> - } <nl> - <nl> - quit_ ( status , new ExitStatus ( status ) ) ; <nl> - } <nl> - <nl> - var abortDecorators = [ ] ; <nl> - <nl> - function abort ( what ) { <nl> - if ( Module [ ' onAbort ' ] ) { <nl> - Module [ ' onAbort ' ] ( what ) ; <nl> - } <nl> - <nl> - what + = ' ' ; <nl> - out ( what ) ; <nl> - err ( what ) ; <nl> - <nl> - ABORT = true ; <nl> - EXITSTATUS = 1 ; <nl> - <nl> - var extra = ' ' ; <nl> - var output = ' abort ( ' + what + ' ) at ' + stackTrace ( ) + extra ; <nl> - if ( abortDecorators ) { <nl> - abortDecorators . forEach ( function ( decorator ) { <nl> - output = decorator ( output , what ) ; <nl> - } ) ; <nl> - } <nl> - throw output ; <nl> - } <nl> - Module [ ' abort ' ] = abort ; <nl> - <nl> - if ( Module [ ' preInit ' ] ) { <nl> - if ( typeof Module [ ' preInit ' ] = = ' function ' ) Module [ ' preInit ' ] = [ Module [ ' preInit ' ] ] ; <nl> - while ( Module [ ' preInit ' ] . length > 0 ) { <nl> - Module [ ' preInit ' ] . 
pop ( ) ( ) ; <nl> - } <nl> - } <nl> - <nl> - / / shouldRunNow refers to calling main ( ) , not run ( ) . <nl> - var shouldRunNow = true ; <nl> - <nl> - if ( Module [ ' noInitialRun ' ] ) shouldRunNow = false ; <nl> - <nl> - <nl> - Module [ " noExitRuntime " ] = true ; <nl> - <nl> - run ( ) ; <nl> - <nl> - <nl> - <nl> - <nl> - <nl> - / / { { MODULE_ADDITIONS } } <nl> - <nl> - <nl> - <nl> - <nl> deleted file mode 100644 <nl> index 59b59aeed90 . . 00000000000 <nl> Binary files a / a . out . wasm and / dev / null differ <nl> | Remove files accidentally committed in | emscripten-core/emscripten | 6d6cb961cf3264747bd3a4ead95fc631f7f0de66 | 2019-08-14T01:11:10Z |
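The row above records the removal of Emscripten-generated build output (auto-generated runtime glue JavaScript together with the a.out.wasm binary) that had been committed by accident. Purely as a readability aid, and not part of the commit, the C++ sketch below restates the control flow of the _sbrk shim visible near the start of the deleted glue: reject signed wrap-around or underflow, try to grow the heap when the new break exceeds the current size, otherwise advance the break and return the previous one. All names here (g_dynamic_top, try_resize_heap, sbrk_like) and the size limits are illustrative stand-ins, not Emscripten APIs.

    #include <cerrno>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins for the glue's DYNAMICTOP_PTR slot and heap-size query.
    static int32_t g_dynamic_top = 0;
    static int32_t g_heap_size   = 16 * 1024;

    // Pretend resize hook: grow the heap if the request stays under an arbitrary cap.
    static bool try_resize_heap(int32_t requested) {
      if (requested <= 64 * 1024) { g_heap_size = requested; return true; }
      return false;
    }

    // Same shape as the deleted _sbrk (which used asm.js-style |0 arithmetic):
    // fail on wrap-around or underflow, grow if needed, then bump the break.
    int32_t sbrk_like(int32_t increment) {
      const int32_t old_top = g_dynamic_top;
      const int32_t new_top = old_top + increment;   // demo values stay small, no overflow
      if ((increment > 0 && new_top < old_top) || new_top < 0) {
        errno = ENOMEM;                              // the glue called ___setErrNo(12)
        return -1;
      }
      if (new_top > g_heap_size && !try_resize_heap(new_top)) {
        errno = ENOMEM;
        return -1;
      }
      g_dynamic_top = new_top;
      return old_top;                                // sbrk returns the previous break
    }

    int main() {
      std::printf("old break: %d\n", sbrk_like(4096));
      std::printf("new break: %d\n", sbrk_like(0));
      return 0;
    }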
mmm a / stdlib / public / SwiftShims / LibcShims . h <nl> ppp b / stdlib / public / SwiftShims / LibcShims . h <nl> typedef int __swift_ssize_t ; <nl> # elif defined ( _WIN32 ) <nl> # if defined ( _M_ARM ) | | defined ( _M_IX86 ) <nl> typedef int __swift_ssize_t ; <nl> - # elif defined ( _M_X64 ) <nl> + # elif defined ( _M_X64 ) | | defined ( _M_ARM64 ) <nl> typedef long long int __swift_ssize_t ; <nl> # else <nl> # error unsupported machine type <nl> | shims : handle Windows aarch64 as well | apple/swift | 1ccb740676a39ef0ea8a44744d9a88a8886308d3 | 2018-03-01T00:15:21Z |
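The one-line change above adds _M_ARM64 to the branch that already covered _M_X64, so 64-bit Windows on ARM gets a 64-bit __swift_ssize_t instead of hitting the unsupported-machine #error. As a sketch of the invariant being preserved, here is a small compile-time check that mirrors the same #if ladder; the non-Windows fallback branch and the static_assert are assumptions added only so the snippet builds anywhere, they are not part of the Swift change.

    #include <cstddef>

    // Mirror of the MSVC target dispatch from the diff; the shim type should be
    // pointer-sized on every supported Windows architecture.
    #if defined(_M_ARM) || defined(_M_IX86)
    typedef int            shim_ssize_t;   // 32-bit Windows targets
    #elif defined(_M_X64) || defined(_M_ARM64)
    typedef long long int  shim_ssize_t;   // 64-bit Windows targets, now including ARM64
    #else
    typedef std::ptrdiff_t shim_ssize_t;   // fallback so this sketch builds off Windows
    #endif

    static_assert(sizeof(shim_ssize_t) == sizeof(void *),
                  "the ssize_t shim must match the platform pointer width");

    int main() { return 0; }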
mmm a / contracts / eosiolib / transaction . hpp <nl> ppp b / contracts / eosiolib / transaction . hpp <nl> namespace eosio { <nl> time expiration ; <nl> region_id region ; <nl> uint16_t ref_block_num ; <nl> - uint32_t ref_block_id ; <nl> + uint32_t ref_block_prefix ; <nl> + uint16_t packed_bandwidth_words = 0 ; / / / number of 8 byte words this transaction can compress into <nl> + uint16_t context_free_cpu_bandwidth = 0 ; / / / number of CPU usage units to bill transaction for <nl> <nl> + vector < action > context_free_actions ; <nl> vector < action > actions ; <nl> <nl> - EOSLIB_SERIALIZE ( transaction , ( expiration ) ( region ) ( ref_block_num ) ( ref_block_id ) ( actions ) ) ; <nl> + EOSLIB_SERIALIZE ( transaction , ( expiration ) ( region ) ( ref_block_num ) ( ref_block_prefix ) ( packed_bandwidth_words ) ( context_free_cpu_bandwidth ) ( context_free_actions ) ( actions ) ) ; <nl> } ; <nl> <nl> class deferred_transaction : public transaction { <nl> mmm a / libraries / chain / apply_context . cpp <nl> ppp b / libraries / chain / apply_context . cpp <nl> void apply_context : : update_db_usage ( const account_name & payer , int64_t delta ) <nl> } <nl> <nl> <nl> + int apply_context : : get_action ( uint32_t type , uint32_t index , char * buffer , size_t buffer_size ) const <nl> + { <nl> + const transaction & trx = trx_meta . trx ( ) ; <nl> + const action * act = nullptr ; <nl> + if ( type = = 0 ) { <nl> + if ( index > = trx . context_free_actions . size ( ) ) <nl> + return - 1 ; <nl> + act = & trx . context_free_actions [ index ] ; <nl> + } <nl> + else if ( type = = 1 ) { <nl> + if ( index > = trx . actions . size ( ) ) <nl> + return - 1 ; <nl> + act = & trx . actions [ index ] ; <nl> + } <nl> + <nl> + auto ps = fc : : raw : : pack_size ( * act ) ; <nl> + if ( ps < = buffer_size ) { <nl> + fc : : datastream < char * > ds ( buffer , buffer_size ) ; <nl> + fc : : raw : : pack ( ds , * act ) ; <nl> + } <nl> + return ps ; <nl> + } <nl> + <nl> + int apply_context : : get_context_free_data ( uint32_t index , char * buffer , size_t buffer_size ) const { <nl> + if ( index > = trx_meta . context_free_data . size ( ) ) return - 1 ; <nl> + <nl> + auto s = trx_meta . context_free_data [ index ] . size ( ) ; <nl> + <nl> + if ( buffer_size = = 0 ) return s ; <nl> + <nl> + if ( buffer_size < s ) <nl> + memcpy ( buffer , trx_meta . context_free_data . data ( ) , buffer_size ) ; <nl> + else <nl> + memcpy ( buffer , trx_meta . context_free_data . data ( ) , s ) ; <nl> + <nl> + return s ; <nl> + } <nl> + <nl> + <nl> int apply_context : : db_store_i64 ( uint64_t scope , uint64_t table , const account_name & payer , uint64_t id , const char * buffer , size_t buffer_size ) { <nl> require_write_lock ( scope ) ; <nl> const auto & tab = find_or_create_table ( receiver , scope , table ) ; <nl> mmm a / libraries / chain / chain_controller . cpp <nl> ppp b / libraries / chain / chain_controller . cpp <nl> static void log_handled_exceptions ( const transaction & trx ) { <nl> <nl> transaction_trace chain_controller : : __apply_transaction ( transaction_metadata & meta ) { <nl> transaction_trace result ( meta . id ) ; <nl> + <nl> + for ( const auto & act : meta . trx ( ) . context_free_actions ) { <nl> + FC_ASSERT ( act . authorization . size ( ) = = 0 , " context free actions cannot require authorization " ) ; <nl> + apply_context context ( * this , _db , act , meta ) ; <nl> + context . context_free = true ; <nl> + context . exec ( ) ; <nl> + fc : : move_append ( result . action_traces , std : : move ( context . 
results . applied_actions ) ) ; <nl> + FC_ASSERT ( result . deferred_transactions . size ( ) = = 0 ) ; <nl> + FC_ASSERT ( result . canceled_deferred . size ( ) = = 0 ) ; <nl> + } <nl> + <nl> for ( const auto & act : meta . trx ( ) . actions ) { <nl> apply_context context ( * this , _db , act , meta ) ; <nl> context . exec ( ) ; <nl> + context . used_context_free_api | = act . authorization . size ( ) ; <nl> + <nl> + FC_ASSERT ( context . used_context_free_api , " action did not reference database state , it should be moved to context_free_actions " , ( " act " , act ) ) ; <nl> fc : : move_append ( result . action_traces , std : : move ( context . results . applied_actions ) ) ; <nl> fc : : move_append ( result . deferred_transactions , std : : move ( context . results . generated_transactions ) ) ; <nl> fc : : move_append ( result . canceled_deferred , std : : move ( context . results . canceled_deferred ) ) ; <nl> mmm a / libraries / chain / include / eosio / chain / apply_context . hpp <nl> ppp b / libraries / chain / include / eosio / chain / apply_context . hpp <nl> class apply_context { <nl> const chainbase : : database & db ; / / / < database where state is stored <nl> const action & act ; / / / < message being applied <nl> account_name receiver ; / / / < the code that is currently running <nl> - bool privileged = false ; <nl> + bool privileged = false ; <nl> + bool context_free = false ; <nl> + bool used_context_free_api = false ; <nl> <nl> chain_controller & mutable_controller ; <nl> chainbase : : database & mutable_db ; <nl> class apply_context { <nl> <nl> void checktime ( uint32_t instruction_count ) const ; <nl> <nl> + int get_action ( uint32_t type , uint32_t index , char * buffer , size_t buffer_size ) const ; <nl> + int get_context_free_data ( uint32_t index , char * buffer , size_t buffer_size ) const ; <nl> + <nl> void update_db_usage ( const account_name & payer , int64_t delta ) ; <nl> int db_store_i64 ( uint64_t scope , uint64_t table , const account_name & payer , uint64_t id , const char * buffer , size_t buffer_size ) ; <nl> void db_update_i64 ( int iterator , account_name payer , const char * buffer , size_t buffer_size ) ; <nl> mmm a / libraries / chain / include / eosio / chain / transaction . hpp <nl> ppp b / libraries / chain / include / eosio / chain / transaction . hpp <nl> namespace eosio { namespace chain { <nl> uint16_t region = 0 ; / / / < the computational memory region this transaction applies to . <nl> uint16_t ref_block_num = 0 ; / / / < specifies a block num in the last 2 ^ 16 blocks . <nl> uint32_t ref_block_prefix = 0 ; / / / < specifies the lower 32 bits of the blockid at get_ref_blocknum <nl> + uint16_t packed_bandwidth_words = 0 ; / / / number of 8 byte words this transaction can compress into <nl> + uint16_t context_free_cpu_bandwidth = 0 ; / / / number of CPU usage units to bill transaction for <nl> <nl> / * * <nl> * @ return the absolute block number given the relative ref_block_num <nl> namespace eosio { namespace chain { <nl> * read and write scopes . 
<nl> * / <nl> struct transaction : public transaction_header { <nl> + vector < action > context_free_actions ; <nl> vector < action > actions ; <nl> <nl> transaction_id_type id ( ) const ; <nl> namespace eosio { namespace chain { <nl> } <nl> <nl> vector < signature_type > signatures ; <nl> + vector < vector < char > > context_free_data ; / / / < for each context - free action , there is an entry here <nl> <nl> const signature_type & sign ( const private_key_type & key , const chain_id_type & chain_id ) ; <nl> signature_type sign ( const private_key_type & key , const chain_id_type & chain_id ) const ; <nl> namespace eosio { namespace chain { <nl> <nl> FC_REFLECT ( eosio : : chain : : permission_level , ( actor ) ( permission ) ) <nl> FC_REFLECT ( eosio : : chain : : action , ( account ) ( name ) ( authorization ) ( data ) ) <nl> - FC_REFLECT ( eosio : : chain : : transaction_header , ( expiration ) ( region ) ( ref_block_num ) ( ref_block_prefix ) ) <nl> - FC_REFLECT_DERIVED ( eosio : : chain : : transaction , ( eosio : : chain : : transaction_header ) , ( actions ) ) <nl> - FC_REFLECT_DERIVED ( eosio : : chain : : signed_transaction , ( eosio : : chain : : transaction ) , ( signatures ) ) <nl> + FC_REFLECT ( eosio : : chain : : transaction_header , ( expiration ) ( region ) ( ref_block_num ) ( ref_block_prefix ) ( packed_bandwidth_words ) ( context_free_cpu_bandwidth ) ) <nl> + FC_REFLECT_DERIVED ( eosio : : chain : : transaction , ( eosio : : chain : : transaction_header ) , ( context_free_actions ) ( actions ) ) <nl> + FC_REFLECT_DERIVED ( eosio : : chain : : signed_transaction , ( eosio : : chain : : transaction ) , ( signatures ) ( context_free_data ) ) <nl> FC_REFLECT_ENUM ( eosio : : chain : : packed_transaction : : compression_type , ( none ) ( zlib ) ) <nl> FC_REFLECT ( eosio : : chain : : packed_transaction , ( signatures ) ( compression ) ( data ) ) <nl> FC_REFLECT_DERIVED ( eosio : : chain : : deferred_transaction , ( eosio : : chain : : transaction ) , ( sender_id ) ( sender ) ( execute_after ) ) <nl> mmm a / libraries / chain / include / eosio / chain / transaction_metadata . hpp <nl> ppp b / libraries / chain / include / eosio / chain / transaction_metadata . hpp <nl> class transaction_metadata { <nl> / / things for packed_transaction <nl> optional < bytes > raw_trx ; <nl> optional < transaction > decompressed_trx ; <nl> + vector < vector < char > > context_free_data ; <nl> <nl> / / things for signed / packed transactions <nl> optional < flat_set < public_key_type > > signing_keys ; <nl> mmm a / libraries / chain / wasm_interface . cpp <nl> ppp b / libraries / chain / wasm_interface . cpp <nl> namespace eosio { namespace chain { <nl> <nl> class context_aware_api { <nl> public : <nl> - context_aware_api ( wasm_interface & wasm ) <nl> - : context ( intrinsics_accessor : : get_context ( wasm ) . context ) , code ( intrinsics_accessor : : get_context ( wasm ) . code ) , <nl> - sbrk_bytes ( intrinsics_accessor : : get_context ( wasm ) . sbrk_bytes ) <nl> - { } <nl> + context_aware_api ( wasm_interface & wasm , bool context_free = false ) <nl> + : sbrk_bytes ( intrinsics_accessor : : get_context ( wasm ) . sbrk_bytes ) , <nl> + code ( intrinsics_accessor : : get_context ( wasm ) . code ) , <nl> + context ( intrinsics_accessor : : get_context ( wasm ) . context ) <nl> + { <nl> + if ( context . context_free ) <nl> + FC_ASSERT ( context_free , " only context free api ' s can be used in this context " ) ; <nl> + context . used_context_free_api | = ! 
context_free ; <nl> + } <nl> <nl> protected : <nl> uint32_t & sbrk_bytes ; <nl> class context_aware_api { <nl> apply_context & context ; <nl> } ; <nl> <nl> + class context_free_api : public context_aware_api { <nl> + public : <nl> + context_free_api ( wasm_interface & wasm ) <nl> + : context_aware_api ( wasm , true ) { <nl> + / * the context_free_data is not available during normal application because it is prunable * / <nl> + FC_ASSERT ( context . context_free , " this API may only be called from context_free apply " ) ; <nl> + } <nl> + <nl> + int get_context_free_data ( uint32_t index , array_ptr < char > buffer , size_t buffer_size ) const { <nl> + return context . get_context_free_data ( index , buffer , buffer_size ) ; <nl> + } <nl> + } ; <nl> class privileged_api : public context_aware_api { <nl> public : <nl> privileged_api ( wasm_interface & wasm ) <nl> class db_index_api : public context_aware_api { <nl> <nl> class memory_api : public context_aware_api { <nl> public : <nl> - using context_aware_api : : context_aware_api ; <nl> + memory_api ( wasm_interface & wasm ) <nl> + : context_aware_api ( wasm , true ) { } <nl> <nl> char * memcpy ( array_ptr < char > dest , array_ptr < const char > src , size_t length ) { <nl> return ( char * ) : : memcpy ( dest , src , length ) ; <nl> class transaction_api : public context_aware_api { <nl> public : <nl> using context_aware_api : : context_aware_api ; <nl> <nl> + void send_inline ( array_ptr < char > data , size_t data_len ) { <nl> + / / TODO : use global properties object for dynamic configuration of this default_max_gen_trx_size <nl> + FC_ASSERT ( data_len < config : : default_max_inline_action_size , " inline action too big " ) ; <nl> + <nl> + action act ; <nl> + fc : : raw : : unpack < action > ( data , data_len , act ) ; <nl> + context . execute_inline ( std : : move ( act ) ) ; <nl> + } <nl> + <nl> + void send_deferred ( uint32_t sender_id , const fc : : time_point_sec & execute_after , array_ptr < char > data , size_t data_len ) { <nl> + try { <nl> + / / TODO : use global properties object for dynamic configuration of this default_max_gen_trx_size <nl> + FC_ASSERT ( data_len < config : : default_max_gen_trx_size , " generated transaction too big " ) ; <nl> + <nl> + deferred_transaction dtrx ; <nl> + fc : : raw : : unpack < transaction > ( data , data_len , dtrx ) ; <nl> + dtrx . sender = context . receiver ; <nl> + dtrx . sender_id = sender_id ; <nl> + dtrx . execute_after = execute_after ; <nl> + context . execute_deferred ( std : : move ( dtrx ) ) ; <nl> + } FC_CAPTURE_AND_RETHROW ( ( fc : : to_hex ( data , data_len ) ) ) ; <nl> + } <nl> + } ; <nl> + <nl> + <nl> + class context_free_transaction_api : public context_aware_api { <nl> + public : <nl> + context_free_transaction_api ( wasm_interface & wasm ) <nl> + : context_aware_api ( wasm , true ) { } <nl> + <nl> int read_transaction ( array_ptr < char > data , size_t data_len ) { <nl> bytes trx = context . get_packed_transaction ( ) ; <nl> if ( data_len > = trx . size ( ) ) { <nl> class transaction_api : public context_aware_api { <nl> return context . trx_meta . trx ( ) . 
ref_block_prefix ; <nl> } <nl> <nl> - void send_inline ( array_ptr < char > data , size_t data_len ) { <nl> - / / TODO : use global properties object for dynamic configuration of this default_max_gen_trx_size <nl> - FC_ASSERT ( data_len < config : : default_max_inline_action_size , " inline action too big " ) ; <nl> - <nl> - action act ; <nl> - fc : : raw : : unpack < action > ( data , data_len , act ) ; <nl> - context . execute_inline ( std : : move ( act ) ) ; <nl> - } <nl> - <nl> - <nl> - void send_deferred ( uint32_t sender_id , const fc : : time_point_sec & execute_after , array_ptr < char > data , size_t data_len ) { <nl> - try { <nl> - / / TODO : use global properties object for dynamic configuration of this default_max_gen_trx_size <nl> - FC_ASSERT ( data_len < config : : default_max_gen_trx_size , " generated transaction too big " ) ; <nl> - <nl> - deferred_transaction dtrx ; <nl> - fc : : raw : : unpack < transaction > ( data , data_len , dtrx ) ; <nl> - dtrx . sender = context . receiver ; <nl> - dtrx . sender_id = sender_id ; <nl> - dtrx . execute_after = execute_after ; <nl> - context . execute_deferred ( std : : move ( dtrx ) ) ; <nl> - } FC_CAPTURE_AND_RETHROW ( ( fc : : to_hex ( data , data_len ) ) ) ; <nl> + int get_action ( uint32_t type , uint32_t index , array_ptr < char > buffer , size_t buffer_size ) const { <nl> + return context . get_action ( type , index , buffer , buffer_size ) ; <nl> } <nl> <nl> } ; <nl> REGISTER_INTRINSICS ( console_api , <nl> ( printhex , void ( int , int ) ) <nl> ) ; <nl> <nl> - REGISTER_INTRINSICS ( transaction_api , <nl> + REGISTER_INTRINSICS ( context_free_transaction_api , <nl> ( read_transaction , int ( int , int ) ) <nl> ( transaction_size , int ( ) ) <nl> ( expiration , int ( ) ) <nl> ( tapos_block_prefix , int ( ) ) <nl> ( tapos_block_num , int ( ) ) <nl> + ( get_action , int ( int , int , int , int ) ) <nl> + ) ; <nl> + <nl> + REGISTER_INTRINSICS ( transaction_api , <nl> ( send_inline , void ( int , int ) ) <nl> ( send_deferred , void ( int , int , int , int ) ) <nl> ) ; <nl> <nl> + REGISTER_INTRINSICS ( context_free_api , <nl> + ( get_context_free_data , int ( int , int , int ) ) <nl> + ) <nl> + <nl> REGISTER_INTRINSICS ( memory_api , <nl> ( memcpy , int ( int , int , int ) ) <nl> ( memmove , int ( int , int , int ) ) <nl> | Merge pull request from EOSIO / contextfree | EOSIO/eos | 9a8c2c4db6f10d50fe7d1c9278db38c8a4eb458f | 2018-02-16T16:56:57Z |
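The row above introduces context-free actions plus two read-only intrinsics, get_action(type, index, buffer, size) and get_context_free_data(index, buffer, size). Both return the required byte count (or -1 for an out-of-range index) and only copy into the caller's buffer when it is large enough, which implies a size-query-then-fill calling pattern. The self-contained C++ sketch below demonstrates that pattern against a stand-in store; the storage, function name and demo bytes are invented for illustration (and the type selector distinguishing context-free from ordinary actions is dropped for brevity), so none of this is EOSIO code.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Stand-in for the host-side store the intrinsic reads from.
    static std::vector<std::vector<char>> g_packed_actions = {
        {'a', 'c', 't', '0'},
        {'a', 'c', 't', '1', '!'}};

    // Same contract as apply_context::get_action above: -1 when the index is out
    // of range, otherwise return the packed size and copy only when it fits.
    int get_action_like(uint32_t index, char* buffer, size_t buffer_size) {
      if (index >= g_packed_actions.size()) return -1;
      const auto& bytes = g_packed_actions[index];
      if (bytes.size() <= buffer_size)
        std::memcpy(buffer, bytes.data(), bytes.size());
      return static_cast<int>(bytes.size());
    }

    int main() {
      int needed = get_action_like(1, nullptr, 0);            // first call: query the size
      if (needed < 0) return 1;
      std::vector<char> buf(static_cast<size_t>(needed));
      get_action_like(1, buf.data(), buf.size());             // second call: fill the buffer
      std::printf("action 1 packs into %d bytes\n", needed);  // caller would now unpack it
      return 0;
    }

A real contract would make the same pair of calls through whatever eosiolib wrappers sit on top of the registered intrinsics and then unpack the buffer with the serialization helpers; those wrapper names are not shown in the diff, so they are left out here.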
mmm a / modules / tools / mapshow / map . py <nl> ppp b / modules / tools / mapshow / map . py <nl> def draw_lanes ( self , ax , is_show_lane_ids , laneids ) : <nl> for lane in self . map_pb . lane : <nl> color_val = self . colors [ cnt % len ( self . colors ) ] <nl> self . _draw_lane_boundary ( lane , ax , color_val ) <nl> + self . _draw_lane_central ( lane , ax , color_val ) <nl> if is_show_lane_ids : <nl> self . _draw_lane_id ( lane , ax , color_val ) <nl> elif lane . id . id in laneids : <nl> def _draw_lane_boundary ( lane , ax , color_val ) : <nl> px . append ( float ( p . x ) ) <nl> py . append ( float ( p . y ) ) <nl> ax . plot ( px , py , ls = ' - ' , c = color_val , alpha = 0 . 5 ) <nl> + <nl> + @ staticmethod <nl> + def _draw_lane_central ( lane , ax , color_val ) : <nl> + " " " draw boundary " " " <nl> + for curve in lane . central_curve . segment : <nl> + if curve . HasField ( ' line_segment ' ) : <nl> + px = [ ] <nl> + py = [ ] <nl> + for p in curve . line_segment . point : <nl> + px . append ( float ( p . x ) ) <nl> + py . append ( float ( p . y ) ) <nl> + ax . plot ( px , py , ls = ' : ' , c = color_val , alpha = 0 . 5 ) <nl> | [ tools ] updated mapshow to add central lines | ApolloAuto/apollo | f67b1fe3afefb7a77383296217277d709766fe04 | 2017-08-21T23:15:57Z |
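The new _draw_lane_central above walks lane.central_curve.segment[*].line_segment.point[*] and plots the x/y values as a dotted center line. For readers working in C++ against the same HD-map schema, here is a rough sketch of the equivalent traversal; the struct definitions are simplified stand-ins for the protobuf-generated types (which expose accessor functions rather than public fields), so treat every name below as an assumption.

    #include <cstdio>
    #include <utility>
    #include <vector>

    // Simplified stand-ins for the map message fields used by the Python tool.
    struct PointENU     { double x = 0.0, y = 0.0; };
    struct LineSegment  { std::vector<PointENU> point; };
    struct CurveSegment { bool has_line_segment = false; LineSegment line_segment; };
    struct Curve        { std::vector<CurveSegment> segment; };
    struct Lane         { Curve central_curve; };

    // Mirror of the loop in _draw_lane_central: skip segments without a
    // line_segment and flatten the rest into one (x, y) polyline.
    std::vector<std::pair<double, double>> central_polyline(const Lane& lane) {
      std::vector<std::pair<double, double>> pts;
      for (const auto& seg : lane.central_curve.segment) {
        if (!seg.has_line_segment) continue;
        for (const auto& p : seg.line_segment.point)
          pts.emplace_back(p.x, p.y);
      }
      return pts;
    }

    int main() {
      Lane lane;
      CurveSegment seg;
      seg.has_line_segment = true;
      seg.line_segment.point = {{0.0, 0.0}, {1.0, 0.5}};
      lane.central_curve.segment.push_back(seg);
      for (const auto& xy : central_polyline(lane))
        std::printf("(%.1f, %.1f)\n", xy.first, xy.second);
      return 0;
    }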
mmm a / lib / Sema / TypeCheckStorage . cpp <nl> ppp b / lib / Sema / TypeCheckStorage . cpp <nl> static Expr * buildStorageReference ( AccessorDecl * accessor , <nl> } <nl> } <nl> <nl> + / / If the base is not ' self ' , default get access to nonmutating and set access to mutating . <nl> + bool getterMutatesBase = selfDecl & & storage - > isGetterMutating ( ) ; <nl> + bool setterMutatesBase = ! selfDecl | | storage - > isSetterMutating ( ) ; <nl> + / / If we ' re not accessing via a property wrapper , we don ' t need to adjust <nl> + / / the mutability . <nl> + if ( target = = TargetImpl : : Wrapper | | target = = TargetImpl : : WrapperStorage ) { <nl> + auto var = cast < VarDecl > ( accessor - > getStorage ( ) ) ; <nl> + auto mutability = var - > getPropertyWrapperMutability ( ) ; <nl> + / / Only adjust mutability if it ' s possible to mutate the base . <nl> + if ( mutability & & ! var - > isStatic ( ) & & <nl> + ! ( selfDecl & & selfTypeForAccess - > hasReferenceSemantics ( ) ) ) { <nl> + getterMutatesBase = ( mutability - > Getter = = PropertyWrapperMutability : : Mutating ) ; <nl> + setterMutatesBase = ( mutability - > Setter = = PropertyWrapperMutability : : Mutating ) ; <nl> + } <nl> + } <nl> + <nl> + / / If the accessor is mutating , then the base should be referred as an l - value <nl> + bool isBaseLValue = ( getterMutatesBase & & isUsedForGetAccess ) | | <nl> + ( setterMutatesBase & & isUsedForSetAccess ) ; <nl> + <nl> if ( ! selfDecl ) { <nl> assert ( target ! = TargetImpl : : Super ) ; <nl> auto * storageDRE = new ( ctx ) DeclRefExpr ( storage , DeclNameLoc ( ) , <nl> / * IsImplicit = * / true , semantics ) ; <nl> auto type = storage - > getValueInterfaceType ( ) . subst ( subs ) ; <nl> - if ( isLValue ) <nl> + if ( isBaseLValue ) <nl> type = LValueType : : get ( type ) ; <nl> storageDRE - > setType ( type ) ; <nl> <nl> static Expr * buildStorageReference ( AccessorDecl * accessor , <nl> } <nl> <nl> / / Build self <nl> - <nl> - bool isGetterMutating = storage - > isGetterMutating ( ) ; <nl> - bool isSetterMutating = storage - > isSetterMutating ( ) ; <nl> - / / If we ' re not accessing via a property wrapper , we don ' t need to adjust <nl> - / / the mutability . <nl> - if ( target = = TargetImpl : : Wrapper | | target = = TargetImpl : : WrapperStorage ) { <nl> - auto var = cast < VarDecl > ( accessor - > getStorage ( ) ) ; <nl> - if ( auto mutability = var - > getPropertyWrapperMutability ( ) ) { <nl> - / / We consider the storage ' s mutability too because the wrapped property <nl> - / / might be part of a class , in case of which nothing is mutating . <nl> - isGetterMutating = ( mutability - > Getter = = PropertyWrapperMutability : : Mutating ) <nl> - ? ( storage - > isGetterMutating ( ) | | storage - > isSetterMutating ( ) ) <nl> - : storage - > isGetterMutating ( ) ; <nl> - isSetterMutating = ( mutability - > Setter = = PropertyWrapperMutability : : Mutating ) <nl> - ? 
( storage - > isGetterMutating ( ) | | storage - > isSetterMutating ( ) ) <nl> - : storage - > isGetterMutating ( ) ; <nl> - } <nl> - } <nl> - <nl> - / / If the accessor is mutating , then self should be referred as an l - value <nl> - bool isSelfLValue = ( isGetterMutating & & isUsedForGetAccess ) | | <nl> - ( isSetterMutating & & isUsedForSetAccess ) ; <nl> - <nl> - Expr * selfDRE = buildSelfReference ( selfDecl , selfAccessKind , isSelfLValue , <nl> + Expr * selfDRE = buildSelfReference ( selfDecl , selfAccessKind , isBaseLValue , <nl> / * convertTy * / selfTypeForAccess ) ; <nl> - if ( isSelfLValue ) <nl> + if ( isBaseLValue ) <nl> selfTypeForAccess = LValueType : : get ( selfTypeForAccess ) ; <nl> <nl> if ( ! selfDRE - > getType ( ) - > isEqual ( selfTypeForAccess ) ) { <nl> synthesizeTrivialGetterBody ( AccessorDecl * getter , TargetImpl target , <nl> body . push_back ( returnStmt ) ; <nl> } <nl> <nl> - / / Don ' t mark local accessors as type - checked - captures still need to be computed . <nl> return { BraceStmt : : create ( ctx , loc , body , loc , true ) , <nl> - / * isTypeChecked = * / ! getter - > getDeclContext ( ) - > isLocalContext ( ) } ; <nl> + / * isTypeChecked = * / true } ; <nl> } <nl> <nl> / / / Synthesize the body of a getter which just directly accesses the <nl> synthesizeTrivialSetterBodyWithStorage ( AccessorDecl * setter , <nl> <nl> createPropertyStoreOrCallSuperclassSetter ( setter , valueDRE , storageToUse , <nl> target , setterBody , ctx ) ; <nl> - / / Don ' t mark local accessors as type - checked - captures still need to be computed . <nl> return { BraceStmt : : create ( ctx , loc , setterBody , loc , true ) , <nl> - / * isTypeChecked = * / ! setter - > getDeclContext ( ) - > isLocalContext ( ) } ; <nl> + / * isTypeChecked = * / true } ; <nl> } <nl> <nl> static std : : pair < BraceStmt * , bool > <nl> | [ Sema ] Adjust mutability in buildStorageRef appropriately for accessors | apple/swift | 456a50527c5948d0acfb2a155c6be3e414a8bfd0 | 2020-10-07T23:06:49Z |
mmm a / lib / TBDGen / tapi / YAML . cpp <nl> ppp b / lib / TBDGen / tapi / YAML . cpp <nl> <nl> namespace llvm { <nl> namespace yaml { <nl> <nl> - using Impl = ScalarTraits < StringRef > ; <nl> - void ScalarTraits < FlowStringRef > : : output ( const FlowStringRef & value , void * ctx , <nl> - raw_ostream & os ) { <nl> - Impl : : output ( value , ctx , os ) ; <nl> - } <nl> - StringRef ScalarTraits < FlowStringRef > : : input ( StringRef value , void * ctx , <nl> - FlowStringRef & out ) { <nl> - return Impl : : input ( value , ctx , out . value ) ; <nl> - } <nl> - QuotingType ScalarTraits < FlowStringRef > : : mustQuote ( StringRef name ) { <nl> - return Impl : : mustQuote ( name ) ; <nl> - } <nl> - <nl> using tapi : : ObjCConstraint ; <nl> void ScalarEnumerationTraits < ObjCConstraint > : : enumeration ( <nl> IO & io , ObjCConstraint & constraint ) { <nl> QuotingType ScalarTraits < PackedVersion > : : mustQuote ( StringRef ) { <nl> return QuotingType : : None ; <nl> } <nl> <nl> - void ScalarTraits < SwiftVersion > : : output ( const SwiftVersion & value , void * , <nl> - raw_ostream & os ) { <nl> - switch ( value ) { <nl> - case 1 : <nl> - os < < " 1 . 0 " ; <nl> - break ; <nl> - case 2 : <nl> - os < < " 1 . 1 " ; <nl> - break ; <nl> - case 3 : <nl> - os < < " 2 . 0 " ; <nl> - break ; <nl> - case 4 : <nl> - os < < " 3 . 0 " ; <nl> - break ; <nl> - default : <nl> - os < < ( unsigned ) value ; <nl> - break ; <nl> - } <nl> - } <nl> - StringRef ScalarTraits < SwiftVersion > : : input ( StringRef scalar , void * , <nl> - SwiftVersion & value ) { <nl> - value = StringSwitch < SwiftVersion > ( scalar ) <nl> - . Case ( " 1 . 0 " , 1 ) <nl> - . Case ( " 1 . 1 " , 2 ) <nl> - . Case ( " 2 . 0 " , 3 ) <nl> - . Case ( " 3 . 0 " , 4 ) <nl> - . Default ( 0 ) ; <nl> - if ( value ! = SwiftVersion ( 0 ) ) <nl> - return { } ; <nl> - <nl> - if ( scalar . getAsInteger ( 10 , value ) ) <nl> - return " invalid Swift ABI version . " ; <nl> - <nl> - return StringRef ( ) ; <nl> - } <nl> - QuotingType ScalarTraits < SwiftVersion > : : mustQuote ( StringRef ) { <nl> - return QuotingType : : None ; <nl> - } <nl> - <nl> using TAPI_INTERNAL : : AvailabilityInfo ; <nl> void ScalarTraits < AvailabilityInfo > : : output ( const AvailabilityInfo & value , <nl> void * , raw_ostream & os ) { <nl> | Merge pull request from gmittert / Vector | apple/swift | b326f735821bc4d1074a6b79a4c3471d5d227288 | 2019-08-22T19:27:50Z |
mmm a / js / common / bootstrap / modules . js <nl> ppp b / js / common / bootstrap / modules . js <nl> function require ( path ) { <nl> libpath = fs . join ( this . _root , this . _path , this . _manifest [ type ] ) ; <nl> } <nl> else { <nl> - libpath = fs . join ( this . _root , this . _path , type ) ; <nl> + libpath = fs . join ( this . _root , this . _path ) ; <nl> } <nl> <nl> var pkg = new Package ( " application " , <nl> | Merge pull request from pluma / manifest - lib - default - local | arangodb/arangodb | 13d1e5e5ab2f4a50b296b050ad586fe7fcb6f6a9 | 2014-01-11T11:02:25Z |
mmm a / modules / planning / scenarios / side_pass / side_pass_stop_on_wait_point . cc <nl> ppp b / modules / planning / scenarios / side_pass / side_pass_stop_on_wait_point . cc <nl> constexpr double kExtraMarginforStopOnWaitPointStage = 3 . 0 ; <nl> <nl> Stage : : StageStatus SidePassStopOnWaitPoint : : Process ( <nl> const TrajectoryPoint & planning_start_point , Frame * frame ) { <nl> + <nl> const ReferenceLineInfo & reference_line_info = <nl> frame - > reference_line_info ( ) . front ( ) ; <nl> const ReferenceLine & reference_line = reference_line_info . reference_line ( ) ; <nl> Stage : : StageStatus SidePassStopOnWaitPoint : : Process ( <nl> <nl> / / If the nearest obstacle , provided it exists , is moving , <nl> / / then quit the side_pass stage . <nl> - if ( ! nearest_obstacle ) { <nl> + if ( nearest_obstacle ) { <nl> if ( nearest_obstacle - > speed ( ) > <nl> GetContext ( ) - > scenario_config_ . block_obstacle_min_speed ( ) ) { <nl> next_stage_ = ScenarioConfig : : NO_STAGE ; <nl> | Planning : minor bug fix on null ptr issue . | ApolloAuto/apollo | c576a82178dec1a303482bcb1c3efc43c5cc3a21 | 2018-12-13T23:19:49Z |
mmm a / Marlin / Makefile <nl> ppp b / Marlin / Makefile <nl> LDFLAGS = - lm <nl> AVRDUDE_PORT = $ ( UPLOAD_PORT ) <nl> AVRDUDE_WRITE_FLASH = - Uflash : w : $ ( BUILD_DIR ) / $ ( TARGET ) . hex : i <nl> ifeq ( $ ( shell uname - s ) , Linux ) <nl> - AVRDUDE_CONF = $ ( ARDUINO_INSTALL_DIR ) / hardware / tools / avrdude . conf <nl> + AVRDUDE_CONF = / etc / avrdude / avrdude . conf <nl> else <nl> AVRDUDE_CONF = $ ( ARDUINO_INSTALL_DIR ) / hardware / tools / avr / etc / avrdude . conf <nl> endif <nl> | makefile : use / etc / avrdude / avrdude . conf on linux | MarlinFirmware/Marlin | 0e064357f4d0012d0e3386ef19e2f110c79a20a1 | 2015-07-27T01:31:31Z |
mmm a / tools / distrib / python / grpcio_tools / protoc_lib_deps . py <nl> ppp b / tools / distrib / python / grpcio_tools / protoc_lib_deps . py <nl> <nl> CC_INCLUDE = ' third_party / protobuf / src ' <nl> PROTO_INCLUDE = ' third_party / protobuf / src ' <nl> <nl> - PROTOBUF_SUBMODULE_VERSION = " 678da4f76eb9168c9965afc2149944a66cd48546 " <nl> + PROTOBUF_SUBMODULE_VERSION = " fde7cf7358ec7cd69e8db9be4f1fa6a5c431386a " <nl> | run tools / distrib / python / make_grpcio_tools . py | grpc/grpc | 2e86c865406c82d1345ee4faf34afe7b22ddfac8 | 2020-08-25T10:57:21Z |
mmm a / test / SILOptimizer / OSLogMandatoryOptTest . swift <nl> ppp b / test / SILOptimizer / OSLogMandatoryOptTest . swift <nl> func testSimpleInterpolation ( ) { <nl> / / CHECK - DAG is used here as it is easier to perform the checks backwards <nl> / / from uses to the definitions . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / We need to wade through some borrows and copy values here . <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Minimum integer value : % ld " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Minimum integer value : % ld " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testInterpolationWithFormatOptions ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Maximum unsigned integer value : % lx " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Maximum unsigned integer value : % lx " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testInterpolationWithFormatOptionsAndPrivacy ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Private Identifier : % { private } lx " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Private Identifier : % { private } lx " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testInterpolationWithMultipleArguments ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . 
<nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Access prevented : process % { public } ld initiated by user : % { private } ld attempted resetting permissions to % lo " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Access prevented : process % { public } ld initiated by user : % { private } ld attempted resetting permissions to % lo " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testLogMessageWithoutData ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " A message with no data " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " A message with no data " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testLogMessageWithoutData ( ) { <nl> / / CHECK - LABEL : @ $ { { . * } } testEscapingOfPercentsyy <nl> func testEscapingOfPercents ( ) { <nl> _osLogTestHelper ( " Process failed after 99 % completion " ) <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Process failed after 99 % % completion " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Process failed after 99 % % completion " <nl> } <nl> <nl> / / CHECK - LABEL : @ $ { { . 
* } } testDoublePercentsyy <nl> func testDoublePercents ( ) { <nl> _osLogTestHelper ( " Double percents : % % " ) <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Double percents : % % % % " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Double percents : % % % % " <nl> } <nl> <nl> / / CHECK - LABEL : @ $ { { . * } } testSmallFormatStringsyy <nl> func testSmallFormatStrings ( ) { <nl> _osLogTestHelper ( " a " ) <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " a " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " a " <nl> } <nl> <nl> / / / A stress test that checks whether the optimizer handle messages with more <nl> func testMessageWithTooManyArguments ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld % ld " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testInt32Interpolation ( ) { <nl> <nl> / / Check if there is a call to _os_log_impl with a literal format string . 
<nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " 32 - bit integer value : % d " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " 32 - bit integer value : % d " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testDynamicStringArguments ( ) { <nl> / / CHECK - DAG is used here as it is easier to perform the checks backwards <nl> / / from uses to the definitions . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " concat : % { public } s interpolated : % { private } s " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " concat : % { public } s interpolated : % { private } s " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testNSObjectInterpolation ( ) { <nl> / / CHECK - DAG is used here as it is easier to perform the checks backwards <nl> / / from uses to the definitions . <nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " NSArray : % { public } @ NSDictionary : % { private } @ " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " NSArray : % { public } @ NSDictionary : % { private } @ " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> func testDoubleInterpolation ( ) { <nl> / / CHECK - DAG is used here as it is easier to perform the checks backwards <nl> / / from uses to the definitions . 
<nl> <nl> - / / CHECK - DAG : builtin " globalStringTablePointer " ( [ [ STRING : % [ 0 - 9 ] + ] ] : $ String ) <nl> - / / CHECK - DAG : [ [ STRING ] ] = begin_borrow [ [ STRING2 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING2 ] ] = copy_value [ [ STRING3 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING3 ] ] = begin_borrow [ [ STRING4 : % [ 0 - 9 ] + ] ] <nl> - / / CHECK - DAG : [ [ STRING4 ] ] = apply [ [ STRING_INIT : % [ 0 - 9 ] + ] ] ( [ [ LIT : % [ 0 - 9 ] + ] ] , <nl> - / / CHECK - DAG : [ [ STRING_INIT ] ] = function_ref @ $ sSS21_builtinStringLiteral17utf8CodeUnitCount7isASCIISSBp_BwBi1_tcfC <nl> - / / CHECK - DAG : [ [ LIT ] ] = string_literal utf8 " Tau = % f " <nl> + / / Match the format string first . <nl> + / / CHECK : string_literal utf8 " Tau = % f " <nl> <nl> / / Check if the size of the argument buffer is a constant . <nl> <nl> | [ OSLogOptimization ] [ Tests ] Make the OSLogMandatoryOptTest more liberal in admitting SIL changes . | apple/swift | 3698aa844262f9df256ed4f7c0d001b7b9c8c487 | 2020-07-11T00:57:19Z |
mmm a / Code / CryEngine / Cry3DEngine / 3dEngineLoad . cpp <nl> ppp b / Code / CryEngine / Cry3DEngine / 3dEngineLoad . cpp <nl> void C3DEngine : : UnloadLevel ( ) <nl> <nl> if ( ! gEnv - > IsDedicated ( ) ) <nl> { <nl> - SAFE_RELEASE_FORCE ( m_ptexIconLowMemoryUsage ) ; <nl> - SAFE_RELEASE_FORCE ( m_ptexIconAverageMemoryUsage ) ; <nl> - SAFE_RELEASE_FORCE ( m_ptexIconHighMemoryUsage ) ; <nl> - SAFE_RELEASE_FORCE ( m_ptexIconEditorConnectedToConsole ) ; <nl> - <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / Releases loaded default loaded textures . <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - { <nl> - SAFE_RELEASE ( m_ptexIconAverageMemoryUsage ) ; <nl> - SAFE_RELEASE ( m_ptexIconLowMemoryUsage ) ; <nl> - SAFE_RELEASE ( m_ptexIconHighMemoryUsage ) ; <nl> - SAFE_RELEASE ( m_ptexIconEditorConnectedToConsole ) ; <nl> - } <nl> + SAFE_RELEASE ( m_ptexIconAverageMemoryUsage ) ; <nl> + SAFE_RELEASE ( m_ptexIconLowMemoryUsage ) ; <nl> + SAFE_RELEASE ( m_ptexIconHighMemoryUsage ) ; <nl> + SAFE_RELEASE ( m_ptexIconEditorConnectedToConsole ) ; <nl> } <nl> else <nl> { <nl> mmm a / Code / Sandbox / EditorQt / GameEngine . cpp <nl> ppp b / Code / Sandbox / EditorQt / GameEngine . cpp <nl> <nl> # include " Particles / ParticleManager . h " <nl> # include " Prefabs / PrefabEvents . h " <nl> # include " Prefabs / PrefabManager . h " <nl> - # include " ProjectManagement / UI / SelectProjectDialog . h " <nl> + # include " ProjectManagement / Utils . h " <nl> # include " Terrain / Heightmap . h " <nl> # include " Terrain / SurfaceType . h " <nl> # include " Terrain / TerrainGrid . h " <nl> bool CGameEngine : : Init ( bool bPreviewMode , bool bTestMode , bool bShaderCacheGen , <nl> <nl> if ( strstr ( sInCmdLine , " - project " ) = = 0 ) <nl> { <nl> - CSelectProjectDialog dlg ( SplashScreen : : GetSplashScreen ( ) , true ) ; <nl> - if ( dlg . exec ( ) ! = QDialog : : Accepted ) <nl> + const string engineFolder = FindCryEngineRootFolder ( ) ; <nl> + if ( IsProjectSpecifiedInSystemConfig ( engineFolder ) ) <nl> { <nl> - return false ; <nl> + / / 1 . it is responsibility of a user to check correctness of system . cfg file <nl> + / / 2 . It is engine responsibility to run itself with this information ( it will parse and use system . cfg as game_launcher ) <nl> } <nl> - <nl> - const string projPath = dlg . GetPathToProject ( ) ; <nl> - if ( projPath . empty ( ) ) <nl> + else <nl> { <nl> - CRY_ASSERT_MESSAGE ( false , " Expected non - empty path to a project " ) ; <nl> - return false ; <nl> - } <nl> + string projPath = FindProjectInFolder ( engineFolder ) ; <nl> + if ( projPath . empty ( ) ) <nl> + { <nl> + projPath = AskUserToSpecifyProject ( SplashScreen : : GetSplashScreen ( ) , true ) ; <nl> + } <nl> <nl> - cry_strcat ( startupParams . szSystemCmdLine , " - project " ) ; <nl> - cry_strcat ( startupParams . szSystemCmdLine , projPath ) ; <nl> + if ( projPath . empty ( ) ) <nl> + { <nl> + / / Exit Sandbox <nl> + return false ; <nl> + } <nl> + <nl> + cry_strcat ( startupParams . szSystemCmdLine , " - project " ) ; <nl> + cry_strcat ( startupParams . szSystemCmdLine , projPath ) ; <nl> + } <nl> } <nl> <nl> if ( ! CryInitializeEngine ( startupParams , true ) ) <nl> mmm a / Code / Sandbox / EditorQt / ProjectManagement / Utils . 
cpp <nl> ppp b / Code / Sandbox / EditorQt / ProjectManagement / Utils . cpp <nl> <nl> # include " StdAfx . h " <nl> # include " Utils . h " <nl> <nl> + # include " ProjectManagement / UI / SelectProjectDialog . h " <nl> + <nl> # include < CryIcon . h > <nl> # include < FileUtils . h > <nl> # include < PathUtils . h > <nl> + # include < QtUtil . h > <nl> <nl> # include < CrySerialization / yasli / JSONIArchive . h > <nl> <nl> + # include < QApplication > <nl> + # include < QDirIterator > <nl> + # include < QFileInfo > <nl> # include < QStandardPaths > <nl> + # include < QTextStream > <nl> + <nl> + string FindCryEngineRootFolder ( ) <nl> + { <nl> + QDir dir ( qApp - > applicationDirPath ( ) ) ; <nl> + while ( dir . exists ( ) ) <nl> + { <nl> + <nl> + QDirIterator iterator ( dir . absolutePath ( ) , QStringList ( ) < < " cryengine . cryengine " , QDir : : Files ) ; <nl> + if ( iterator . hasNext ( ) ) <nl> + { <nl> + return QtUtil : : ToString ( dir . absolutePath ( ) ) ; <nl> + } <nl> + <nl> + if ( ! dir . cdUp ( ) ) <nl> + { <nl> + break ; <nl> + } <nl> + } <nl> + <nl> + return " " ; <nl> + } <nl> + <nl> + bool IsProjectSpecifiedInSystemConfig ( const string & engineFolder ) <nl> + { <nl> + const string fileName = PathUtil : : Make ( engineFolder , " system . cfg " ) ; <nl> + <nl> + QFile configFile ( fileName . c_str ( ) ) ; <nl> + if ( ! configFile . open ( QIODevice : : ReadOnly ) ) <nl> + { <nl> + return false ; <nl> + } <nl> + <nl> + QTextStream stream ( & configFile ) ; <nl> + while ( ! stream . atEnd ( ) ) <nl> + { <nl> + QString line = stream . readLine ( ) ; <nl> + <nl> + / / Simplest check : the line should be started from it , and not commented <nl> + if ( 0 = = line . indexOf ( " sys_project " ) ) <nl> + { <nl> + return true ; <nl> + } <nl> + } <nl> + <nl> + return false ; <nl> + } <nl> + <nl> + string FindProjectInFolder ( const string & folder ) <nl> + { <nl> + QFileInfo fileInfo ( PathUtil : : AddSlash ( folder ) . c_str ( ) ) ; <nl> + QDirIterator iterator ( fileInfo . absolutePath ( ) , QStringList ( ) < < " * . cryproject " , QDir : : Files ) ; <nl> + if ( ! iterator . hasNext ( ) ) <nl> + { <nl> + return " " ; <nl> + } <nl> + <nl> + iterator . next ( ) ; <nl> + return QtUtil : : ToString ( iterator . fileInfo ( ) . absoluteFilePath ( ) ) ; <nl> + } <nl> + <nl> + string AskUserToSpecifyProject ( QWidget * pParent , bool runOnSandboxInit ) <nl> + { <nl> + CSelectProjectDialog dlg ( pParent , runOnSandboxInit ) ; <nl> + if ( dlg . exec ( ) ! = QDialog : : Accepted ) <nl> + { <nl> + return " " ; <nl> + } <nl> + <nl> + return dlg . GetPathToProject ( ) ; <nl> + } <nl> <nl> string GetCryEngineProgramDataFolder ( ) <nl> { <nl> bool SCryEngineVersion : : IsValid ( ) const <nl> <nl> SCryEngineVersion GetCurrentCryEngineVersion ( ) <nl> { <nl> - constexpr unsigned int buffSize = 2048 ; <nl> - char buff [ buffSize ] ; <nl> - CryGetExecutableFolder ( buffSize , buff ) ; <nl> - string strPathToConfig = buff ; <nl> - strPathToConfig + = " . . / . . / cryengine . cryengine " ; <nl> + const string strPathToConfig = PathUtil : : Make ( FindCryEngineRootFolder ( ) , " cryengine . cryengine " ) ; <nl> <nl> yasli : : JSONIArchive ia ; <nl> ia . load ( strPathToConfig ) ; <nl> mmm a / Code / Sandbox / EditorQt / ProjectManagement / Utils . h <nl> ppp b / Code / Sandbox / EditorQt / ProjectManagement / Utils . h <nl> <nl> # include < CrySerialization / Forward . h > <nl> <nl> class CryIcon ; <nl> + class QWidget ; <nl> + <nl> + / / Searches for cryengine . 
cryengine from this EXE - folder , and moving higher <nl> + string FindCryEngineRootFolder ( ) ; <nl> + <nl> + / / In the engineFolder , find system . cfg ; <nl> + / / Return true , if any line of this file starts with " sys_project " <nl> + bool IsProjectSpecifiedInSystemConfig ( const string & engineFolder ) ; <nl> + <nl> + / / Find first * . cryproject file in the folder ( without search in subfolders ) <nl> + string FindProjectInFolder ( const string & folder ) ; <nl> + <nl> + / / Ask user to select a project <nl> + string AskUserToSpecifyProject ( QWidget * pParent , bool runOnSandboxInit ) ; <nl> <nl> / / Engine version description , taken from " cryengine . cryengine " <nl> struct SCryEngineVersion <nl> | ! I integrate from / / ce / main . . . | CRYTEK/CRYENGINE | 86543ca153c8217f740b3e84d5fdbb91754e080f | 2018-12-18T17:02:11Z |
new file mode 100644 <nl> index 00000000000 . . 4a66842c0d1 <nl> mmm / dev / null <nl> ppp b / ports / graphqlparser / CONTROL <nl> <nl> + Source : graphqlparser <nl> + Version : v0 . 7 . 0 <nl> + Description : A GraphQL query parser in C + + with C and C + + APIs <nl> new file mode 100644 <nl> index 00000000000 . . 0e1414583d4 <nl> mmm / dev / null <nl> ppp b / ports / graphqlparser / portfile . cmake <nl> <nl> + # Common Ambient Variables : <nl> + # CURRENT_BUILDTREES_DIR = $ { VCPKG_ROOT_DIR } \ buildtrees \ $ { PORT } <nl> + # CURRENT_PACKAGES_DIR = $ { VCPKG_ROOT_DIR } \ packages \ $ { PORT } _ $ { TARGET_TRIPLET } <nl> + # CURRENT_PORT_DIR = $ { VCPKG_ROOT_DIR } \ ports \ $ { PORT } <nl> + # PORT = current port name ( zlib , etc ) <nl> + # TARGET_TRIPLET = current triplet ( x86 - windows , x64 - windows - static , etc ) <nl> + # VCPKG_CRT_LINKAGE = C runtime linkage type ( static , dynamic ) <nl> + # VCPKG_LIBRARY_LINKAGE = target library linkage type ( static , dynamic ) <nl> + # VCPKG_ROOT_DIR = < C : \ path \ to \ current \ vcpkg > <nl> + # VCPKG_TARGET_ARCHITECTURE = target architecture ( x64 , x86 , arm ) <nl> + # <nl> + <nl> + include ( vcpkg_common_functions ) <nl> + <nl> + vcpkg_from_github ( <nl> + OUT_SOURCE_PATH SOURCE_PATH <nl> + REPO graphql / libgraphqlparser <nl> + REF v0 . 7 . 0 <nl> + SHA512 973292b164d0d2cfe453a2f01559dbdb1b9d22b6304f6a3aabf71e2c0a3e24ab69dfd72a086764ad5befecf0005620f8e86f552dacc324f9615a05f31de7cede <nl> + HEAD_REF master <nl> + PATCHES $ { CMAKE_CURRENT_LIST_DIR } / win - cmake . patch <nl> + ) <nl> + <nl> + if ( UNIX ) <nl> + vcpkg_configure_cmake ( <nl> + SOURCE_PATH $ { SOURCE_PATH } <nl> + PREFER_NINJA <nl> + ) <nl> + elseif ( WIN32 ) <nl> + vcpkg_find_acquire_program ( PYTHON2 ) <nl> + vcpkg_find_acquire_program ( FLEX ) <nl> + vcpkg_find_acquire_program ( BISON ) <nl> + <nl> + get_filename_component ( VCPKG_DOWNLOADS_PYTHON2_DIR " $ { PYTHON2 } " DIRECTORY ) <nl> + get_filename_component ( VCPKG_DOWNLOADS_FLEX_DIR " $ { FLEX } " DIRECTORY ) <nl> + get_filename_component ( VCPKG_DOWNLOADS_BISON_DIR " $ { BISON } " DIRECTORY ) <nl> + <nl> + vcpkg_configure_cmake ( <nl> + SOURCE_PATH $ { SOURCE_PATH } <nl> + PREFER_NINJA <nl> + OPTIONS <nl> + - DVCPKG_DOWNLOADS_PYTHON2_DIR = $ { VCPKG_DOWNLOADS_PYTHON2_DIR } <nl> + - DVCPKG_DOWNLOADS_FLEX_DIR = $ { VCPKG_DOWNLOADS_FLEX_DIR } <nl> + - DVCPKG_DOWNLOADS_BISON_DIR = $ { VCPKG_DOWNLOADS_BISON_DIR } <nl> + ) <nl> + endif ( ) <nl> + <nl> + vcpkg_install_cmake ( ) <nl> + <nl> + # Handle copyright <nl> + file ( INSTALL $ { SOURCE_PATH } / LICENSE DESTINATION $ { CURRENT_PACKAGES_DIR } / share / graphqlparser RENAME copyright ) <nl> new file mode 100644 <nl> index 00000000000 . . c71ba97ff07 <nl> mmm / dev / null <nl> ppp b / ports / graphqlparser / win - cmake . patch <nl> <nl> pppmmm a / CMakeLists . txt <nl> ppp + b / CMakeLists . txt <nl> + PROJECT ( libgraphqlparser C CXX ) <nl> + <nl> + SET ( CMAKE_MODULE_PATH " $ { PROJECT_SOURCE_DIR } / cmake " " $ { CMAKE_MODULE_PATH } " ) <nl> + <nl> + - INCLUDE ( version ) <nl> + + IF ( UNIX ) <nl> + + SET ( CMAKE_CXX_FLAGS " $ { CMAKE_CXX_FLAGS } - std = gnu + + 11 " ) <nl> + + SET ( FLEX_COMPILE_FLAGS " - - header - file = lexer . h " ) <nl> + + ELSEIF ( WIN32 ) <nl> + + # If we ' re building this with vcpkg on Windows , let portfile . cmake tell us where it <nl> + + # stored these tools . Otherwise these variables should be empty and we ' ll fall back <nl> + + # to the normal CMake FIND_PACKAGE logic for each of these programs . 
<nl> + + SET ( CMAKE_PROGRAM_PATH <nl> + + " $ { VCPKG_DOWNLOADS_PYTHON2_DIR } " <nl> + + " $ { VCPKG_DOWNLOADS_FLEX_DIR } " <nl> + + " $ { VCPKG_DOWNLOADS_BISON_DIR } " <nl> + + " $ { CMAKE_PROGRAM_PATH } " ) <nl> + + <nl> + + SET ( FLEX_COMPILE_FLAGS " - - header - file = lexer . h - - wincompat " ) <nl> + + <nl> + + # Let CMake figure out the exports for the SHARED library ( DLL ) on Windows . <nl> + + SET ( CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS TRUE ) <nl> + + ENDIF ( ) <nl> + <nl> + - SET ( CMAKE_CXX_FLAGS " $ { CMAKE_CXX_FLAGS } - std = gnu + + 11 " ) <nl> + + INCLUDE ( version ) <nl> + <nl> + FIND_PACKAGE ( PythonInterp 2 REQUIRED ) <nl> + IF ( NOT PYTHON_VERSION_MAJOR EQUAL 2 ) <nl> + MESSAGE ( FATAL_ERROR " Python 2 is required . " ) <nl> + ENDIF ( ) <nl> + <nl> + - FIND_PROGRAM ( CTYPESGEN_FOUND ctypesgen . py ) <nl> + + IF ( UNIX ) <nl> + + SET ( FLEX_COMPILE_FLAGS " - - header - file = lexer . h " ) <nl> + + ELSEIF ( WIN32 ) <nl> + + SET ( FLEX_COMPILE_FLAGS " - - header - file = lexer . h - - wincompat " ) <nl> + + ENDIF ( ) <nl> + <nl> + FIND_PACKAGE ( BISON 3 ) <nl> + FIND_PACKAGE ( FLEX ) <nl> + IF ( BISON_FOUND ) <nl> + ENDIF ( ) <nl> + <nl> + IF ( FLEX_FOUND ) <nl> + - FLEX_TARGET ( GraphQLScanner lexer . lpp $ { CMAKE_CURRENT_SOURCE_DIR } / lexer . cpp COMPILE_FLAGS " - - header - file = lexer . h " ) <nl> + + FLEX_TARGET ( GraphQLScanner lexer . lpp $ { CMAKE_CURRENT_SOURCE_DIR } / lexer . cpp COMPILE_FLAGS $ { FLEX_COMPILE_FLAGS } ) <nl> + IF ( BISON_FOUND ) <nl> + ADD_FLEX_BISON_DEPENDENCY ( GraphQLScanner graphqlparser ) <nl> + ENDIF ( ) <nl> + GENERATE_AST_FILE ( cxx_json_visitor_header JsonVisitor . h . inc ) <nl> + <nl> + GENERATE_AST_FILE ( cxx_json_visitor_impl JsonVisitor . cpp . inc ) <nl> + <nl> + - ADD_SUBDIRECTORY ( python ) <nl> + - <nl> + - OPTION ( test " Build tests . " OFF ) <nl> + - <nl> + INSTALL ( DIRECTORY c $ { CMAKE_CURRENT_BINARY_DIR } / c DESTINATION include / graphqlparser <nl> + + CONFIGURATIONS Release <nl> + FILES_MATCHING PATTERN " * . h " <nl> + PATTERN " build " EXCLUDE ) <nl> + <nl> + INSTALL ( FILES <nl> + position . hh <nl> + stack . hh <nl> + syntaxdefs . h <nl> + - DESTINATION include / graphqlparser ) <nl> + + DESTINATION include / graphqlparser <nl> + + CONFIGURATIONS Release ) <nl> + + <nl> + INSTALL ( TARGETS graphqlparser <nl> + - LIBRARY DESTINATION lib ) <nl> + + LIBRARY DESTINATION lib <nl> + + RUNTIME DESTINATION bin ) <nl> + <nl> + if ( UNIX ) <nl> + # generate pkgconfig file <nl> + if ( UNIX ) <nl> + install ( FILES " $ { CMAKE_CURRENT_BINARY_DIR } / libgraphqlparser . pc " <nl> + DESTINATION " $ { CMAKE_INSTALL_PREFIX } / lib / pkgconfig " ) <nl> + endif ( ) <nl> + + elseif ( WIN32 ) <nl> + + INSTALL ( FILES $ { CMAKE_CURRENT_BINARY_DIR } / graphqlparser . lib <nl> + + DESTINATION lib ) <nl> + + INSTALL ( FILES $ { CMAKE_CURRENT_BINARY_DIR } / graphqlparser . pdb <nl> + + DESTINATION bin ) <nl> + endif ( ) <nl> + - <nl> + - IF ( test ) <nl> + - ADD_SUBDIRECTORY ( test ) <nl> + - <nl> + - if ( UNIX ) <nl> + - # setup valgrind <nl> + - ADD_CUSTOM_TARGET ( memcheck <nl> + - valgrind - - leak - check = full - - suppressions = . / test / valgrind . supp - - dsymutil = yes - - error - exitcode = 1 . / test / runTests > / dev / null <nl> + - ) <nl> + - endif ( ) <nl> + - <nl> + - ENDIF ( ) <nl> | Add a port for https : / / github . com / graphql / libgraphqlparser ( ) | microsoft/vcpkg | 09ccd053dbb0b48833fa82331d1fbdccd9cb054a | 2018-08-03T07:11:55Z |
mmm a / tests / runner . py <nl> ppp b / tests / runner . py <nl> def find_files ( * ext_list ) : <nl> # # Does a complete test - builds , runs , checks output , etc . <nl> def do_run ( self , src , expected_output , args = [ ] , output_nicerizer = None , <nl> no_build = False , <nl> - js_engines = None , post_build = None , basename = ' src . cpp ' , libraries = [ ] , <nl> + js_engines = None , post_build = None , libraries = [ ] , <nl> includes = [ ] , force_c = False , <nl> assert_returncode = 0 , assert_identical = False , assert_all = False , <nl> check_for_error = True ) : <nl> if force_c : <nl> basename = ' src . c ' <nl> + else : <nl> + basename = ' src . cpp ' <nl> <nl> if no_build : <nl> if src : <nl> mmm a / tests / test_core . py <nl> ppp b / tests / test_core . py <nl> def test_template_class_deduction ( self ) : <nl> def test_asan_no_error ( self , name ) : <nl> self . emcc_args + = [ ' - fsanitize = address ' , ' - s ' , ' ALLOW_MEMORY_GROWTH = 1 ' ] <nl> self . do_run ( open ( path_from_root ( ' tests ' , ' core ' , name ) ) . read ( ) , <nl> - basename = name , expected_output = [ ' ' ] , assert_returncode = NON_ZERO ) <nl> + force_c = name . endswith ( ' . c ' ) , expected_output = [ ' ' ] , assert_returncode = NON_ZERO ) <nl> <nl> # note : these tests have things like - fno - builtin - memset in order to avoid <nl> # clang optimizing things away . for example , a memset might be optimized into <nl> def test_asan ( self , name , expected_output , cflags = None ) : <nl> if cflags : <nl> self . emcc_args + = cflags <nl> self . do_run ( open ( path_from_root ( ' tests ' , ' core ' , name ) ) . read ( ) , <nl> - basename = ' src . c ' if name . endswith ( ' . c ' ) else ' src . cpp ' , <nl> + force_c = name . endswith ( ' . c ' ) , <nl> expected_output = expected_output , assert_all = True , <nl> check_for_error = False , assert_returncode = NON_ZERO ) <nl> <nl> def test_asan ( self , name , expected_output , cflags = None ) : <nl> def test_asan_js_stack_op ( self ) : <nl> self . emcc_args + = [ ' - fsanitize = address ' , ' - s ' , ' ALLOW_MEMORY_GROWTH = 1 ' ] <nl> self . do_run ( open ( path_from_root ( ' tests ' , ' core ' , ' test_asan_js_stack_op . c ' ) ) . read ( ) , <nl> - basename = ' src . c ' , expected_output = ' Hello , World ! ' ) <nl> + force_c = True , expected_output = ' Hello , World ! ' ) <nl> <nl> @ no_wasm2js ( ' TODO : ASAN in wasm2js ' ) <nl> def test_asan_api ( self ) : <nl> | tests : Remove another unneeded argument from Runner . do_build ( ) | emscripten-core/emscripten | 8cadca88dd4f1752a6cac08d1d0c595f5b055fad | 2020-08-18T10:07:56Z |
mmm a / lib / ClangImporter / ImportDecl . cpp <nl> ppp b / lib / ClangImporter / ImportDecl . cpp <nl> static void finishInheritedConformances ( <nl> } <nl> } <nl> <nl> + / / / A stripped - down version of Type : : subst that only works on non - generic <nl> + / / / associated types . <nl> + / / / <nl> + / / / This is used to finish a conformance for a concrete imported type that may <nl> + / / / rely on default associated types defined in protocol extensions . . . without <nl> + / / / having to do all the work of gathering conformances from scratch . <nl> static Type <nl> recursivelySubstituteBaseType ( const NormalProtocolConformance * conformance , <nl> DependentMemberType * depMemTy ) { <nl> | [ ClangImporter ] Add a comment to ' recursivelySubstituteBaseType ' ( ) | apple/swift | 0c8ad65027419314f6749422b1b78cf671c02370 | 2018-03-20T23:07:43Z |
mmm a / dbms / src / Functions / flatten . cpp <nl> ppp b / dbms / src / Functions / flatten . cpp <nl> <nl> # include < Functions / IFunction . h > <nl> # include < Functions / FunctionFactory . h > <nl> - # include < Interpreters / castColumn . h > <nl> - # include < DataTypes / DataTypesNumber . h > <nl> - # include < Columns / ColumnsNumber . h > <nl> - # include < Functions / FunctionArrayMapped . h > <nl> - # include < Functions / FunctionFactory . h > <nl> - # include < ext / range . h > <nl> + # include < Functions / FunctionHelpers . h > <nl> + # include < DataTypes / DataTypeArray . h > <nl> + # include < Columns / ColumnArray . h > <nl> <nl> namespace DB <nl> { <nl> <nl> - / / / flatten ( [ [ 1 , 2 , 3 ] , [ 4 , 5 ] ] ) = [ 1 , 2 , 3 , 4 , 5 ] - flatten array . <nl> - class FunctionFlatten : public IFunction <nl> - { <nl> - public : <nl> - static constexpr auto name = " flatten " ; <nl> - static FunctionPtr create ( const Context & context ) <nl> - { <nl> - return std : : make_shared < FunctionFlatten > ( context ) ; <nl> - } <nl> + / / / flatten ( [ [ 1 , 2 , 3 ] , [ 4 , 5 ] ] ) = [ 1 , 2 , 3 , 4 , 5 ] - flatten array . <nl> + class FunctionFlatten : public IFunction <nl> + { <nl> + public : <nl> + static constexpr auto name = " flatten " ; <nl> <nl> - FunctionFlatten ( const Context & context ) <nl> - : context ( context ) <nl> - { <nl> - } <nl> + static FunctionPtr create ( const Context & context ) { return std : : make_shared < FunctionFlatten > ( context ) ; } <nl> <nl> - size_t getNumberOfArguments ( ) const override { return 1 ; } <nl> + size_t getNumberOfArguments ( ) const override { return 1 ; } <nl> <nl> - DataTypePtr getReturnTypeImpl ( const DataTypes & arguments ) const override <nl> - { <nl> - if ( ! isArray ( arguments [ 0 ] ) ) <nl> - throw Exception ( " Illegal type " + arguments [ 0 ] - > getName ( ) + <nl> - " of argument of function " + getName ( ) + <nl> - " , expected Array " , ErrorCodes : : ILLEGAL_TYPE_OF_ARGUMENT ) ; <nl> + DataTypePtr getReturnTypeImpl ( const DataTypes & arguments ) const override <nl> + { <nl> + if ( ! isArray ( arguments [ 0 ] ) ) <nl> + throw Exception ( " Illegal type " + arguments [ 0 ] - > getName ( ) + <nl> + " of argument of function " + getName ( ) + <nl> + " , expected Array " , ErrorCodes : : ILLEGAL_TYPE_OF_ARGUMENT ) ; <nl> <nl> - DataTypePtr nested_type = arguments [ 0 ] ; <nl> - while ( isArray ( nested_type ) ) { <nl> - nested_type = checkAndGetDataType < DataTypeArray > ( nested_type . get ( ) ) - > getNestedType ( ) ; <nl> - } <nl> + DataTypePtr nested_type = arguments [ 0 ] ; <nl> + while ( isArray ( nested_type ) ) <nl> + nested_type = checkAndGetDataType < DataTypeArray > ( nested_type . get ( ) ) - > getNestedType ( ) ; <nl> <nl> - return std : : make_shared < DataTypeArray > ( nested_type ) ; <nl> - } <nl> + return std : : make_shared < DataTypeArray > ( nested_type ) ; <nl> + } <nl> <nl> - void executeImpl ( Block & block , const ColumnNumbers & arguments , size_t result , size_t input_rows_count ) override <nl> - { <nl> - const auto & array_with_type_and_name = block . getByPosition ( arguments [ 0 ] ) ; <nl> - ColumnPtr preprocessed_column = array_with_type_and_name . column ; <nl> - preprocessed_column = preprocessed_column - > convertToFullColumnIfConst ( ) ; <nl> - const auto * arg_col = checkAndGetColumn < ColumnArray > ( preprocessed_column . 
get ( ) ) ; <nl> - const IColumn & arg_data = arg_col - > getData ( ) ; <nl> - const IColumn : : Offsets & arg_offsets = arg_col - > getOffsets ( ) ; <nl> - <nl> - const DataTypePtr & result_type = block . getByPosition ( result ) . type ; <nl> - const DataTypePtr & result_nested_type = dynamic_cast < const DataTypeArray & > ( * result_type ) . getNestedType ( ) ; <nl> - auto result_col = ColumnArray : : create ( result_nested_type - > createColumn ( ) ) ; <nl> - IColumn & result_data = result_col - > getData ( ) ; <nl> - IColumn : : Offsets & result_offsets = result_col - > getOffsets ( ) ; <nl> - <nl> - result_data . reserve ( input_rows_count * 10 ) ; <nl> - result_offsets . resize ( input_rows_count ) ; <nl> - <nl> - IColumn : : Offset current_offset = 0 ; <nl> - for ( size_t i = 0 ; i < input_rows_count ; + + i ) <nl> - { <nl> - for ( size_t j = current_offset ; j < arg_offsets [ i ] ; + + j ) <nl> - result_data . insertFrom ( arg_data , j ) ; <nl> - <nl> - current_offset + = arg_offsets [ i ] ; <nl> - result_offsets [ i ] = current_offset ; <nl> - } <nl> - <nl> - block . getByPosition ( result ) . column = std : : move ( result_col ) ; <nl> - } <nl> + void executeImpl ( Block & block , const ColumnNumbers & arguments , size_t result , size_t input_rows_count ) override <nl> + { <nl> + const auto * arg_col = checkAndGetColumn < ColumnArray > ( block . getByPosition ( arguments [ 0 ] ) . column . get ( ) ) ; <nl> + const IColumn & arg_data = arg_col - > getData ( ) ; <nl> + const IColumn : : Offsets & arg_offsets = arg_col - > getOffsets ( ) ; <nl> + <nl> + const DataTypePtr & result_type = block . getByPosition ( result ) . type ; <nl> + const DataTypePtr & result_nested_type = dynamic_cast < const DataTypeArray & > ( * result_type ) . getNestedType ( ) ; <nl> + auto result_col = ColumnArray : : create ( result_nested_type - > createColumn ( ) ) ; <nl> + IColumn & result_data = result_col - > getData ( ) ; <nl> + IColumn : : Offsets & result_offsets = result_col - > getOffsets ( ) ; <nl> <nl> - private : <nl> - String getName ( ) const override <nl> + result_data . reserve ( input_rows_count * 10 ) ; <nl> + result_offsets . resize ( input_rows_count ) ; <nl> + <nl> + IColumn : : Offset current_offset = 0 ; <nl> + for ( size_t i = 0 ; i < input_rows_count ; + + i ) <nl> { <nl> - return name ; <nl> + const auto flatten_data = flatten ( arg_data , current_offset , arg_offsets [ i ] ) ; <nl> + result_data . insertRangeFrom ( flatten_data , 0 , flatten_data . size ( ) ) ; <nl> + current_offset + = flatten_data . size ( ) ; <nl> + result_offsets [ i ] = current_offset ; <nl> } <nl> <nl> - bool addField ( DataTypePtr type_res , const Field & f , Array & arr ) const ; <nl> + block . getByPosition ( result ) . column = std : : move ( result_col ) ; <nl> + } <nl> <nl> - private : <nl> - const Context & context ; <nl> - } ; <nl> + private : <nl> + String getName ( ) const override <nl> + { <nl> + return name ; <nl> + } <nl> <nl> + bool addField ( DataTypePtr type_res , const Field & f , Array & arr ) const ; <nl> <nl> - void registerFunctionFlatten ( FunctionFactory & factory ) <nl> + const ColumnArray flatten ( const IColumn & / * data * / , size_t / * from * / , size_t / * to * / ) const <nl> { <nl> - factory . registerFunction < FunctionFlatten > ( ) ; <nl> + / / todo <nl> } <nl> + } ; <nl> + <nl> + <nl> + void registerFunctionFlatten ( FunctionFactory & factory ) <nl> + { <nl> + factory . 
registerFunction < FunctionFlatten > ( ) ; <nl> + } <nl> <nl> } <nl> | Basic implementation | ClickHouse/ClickHouse | 5cb04155d7a9964460000ae67cd1e0e90ca1b357 | 2019-02-15T05:47:49Z |
mmm a / dlib / image_processing / box_overlap_testing_abstract . h <nl> ppp b / dlib / image_processing / box_overlap_testing_abstract . h <nl> namespace dlib <nl> <nl> class test_box_overlap <nl> { <nl> + / * ! <nl> + WHAT THIS OBJECT REPRESENTS <nl> + This object is a simple function object for determining if two rectangles <nl> + overlap . <nl> + ! * / <nl> + <nl> public : <nl> test_box_overlap ( <nl> - ) : overlap_thresh ( 0 . 5 ) <nl> - { } <nl> + ) ; <nl> + / * ! <nl> + ensures <nl> + - # get_overlap_thresh ( ) = = 0 . 5 <nl> + ! * / <nl> <nl> test_box_overlap ( <nl> - double overlap_thresh_ <nl> - ) : overlap_thresh ( overlap_thresh_ ) { } <nl> + double overlap_thresh <nl> + ) ; <nl> + / * ! <nl> + requires <nl> + - 0 < = overlap_thresh < = 1 <nl> + ensures <nl> + - # get_overlap_thresh ( ) = = overlap_thresh <nl> + ! * / <nl> <nl> bool operator ( ) ( <nl> const dlib : : rectangle & a , <nl> const dlib : : rectangle & b <nl> - ) const <nl> - { <nl> - const double inner = a . intersect ( b ) . area ( ) ; <nl> - const double outer = ( a + b ) . area ( ) ; <nl> - if ( inner / outer > overlap_thresh ) <nl> - return true ; <nl> - else <nl> - return false ; <nl> - } <nl> + ) const ; <nl> + / * ! <nl> + ensures <nl> + - returns true if a . intersect ( b ) . area ( ) / ( a + b ) . area > get_overlap_thresh ( ) <nl> + and false otherwise . ( i . e . returns true if a and b overlap enough ) <nl> + ! * / <nl> <nl> double get_overlap_thresh ( <nl> ) const ; <nl> + / * ! <nl> + ensures <nl> + - returns the threshold used to determine if two rectangles overlap . <nl> + <nl> + ! * / <nl> <nl> } ; <nl> <nl> | filled out spec | davisking/dlib | 91dbcb33af3e91d1dbd96371041dace33852827e | 2011-09-08T23:14:26Z |
mmm a / include / gtest / internal / gtest - port . h <nl> ppp b / include / gtest / internal / gtest - port . h <nl> <nl> / / Brings in definitions for functions used in the testing : : internal : : posix <nl> / / namespace ( read , write , close , chdir , isatty , stat ) . We do not currently <nl> / / use them on Windows Mobile . <nl> - # if ! GTEST_OS_WINDOWS <nl> + # if GTEST_OS_WINDOWS <nl> + # if ! GTEST_OS_WINDOWS_MOBILE <nl> + # include < direct . h > <nl> + # include < io . h > <nl> + # endif <nl> + / / In order to avoid having to include < windows . h > , use forward declaration <nl> + / / assuming CRITICAL_SECTION is a typedef of _RTL_CRITICAL_SECTION . <nl> + / / This assumption is verified by <nl> + / / WindowsTypesTest . CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION . <nl> + struct _RTL_CRITICAL_SECTION ; <nl> + # else <nl> / / This assumes that non - Windows OSes provide unistd . h . For OSes where this <nl> / / is not the case , we need to include headers that provide the functions <nl> / / mentioned above . <nl> # include < unistd . h > <nl> # include < strings . h > <nl> - # elif ! GTEST_OS_WINDOWS_MOBILE <nl> - # include < direct . h > <nl> - # include < io . h > <nl> - # endif <nl> + # endif / / GTEST_OS_WINDOWS <nl> <nl> # if GTEST_OS_LINUX_ANDROID <nl> / / Used to define __ANDROID_API__ matching the target NDK API level . <nl> using : : std : : tuple_size ; <nl> # define GTEST_HAS_SEH 0 <nl> # endif <nl> <nl> + # define GTEST_IS_THREADSAFE \ <nl> + ( GTEST_OS_WINDOWS | | GTEST_HAS_PTHREAD ) <nl> + <nl> # endif / / GTEST_HAS_SEH <nl> <nl> # ifdef _MSC_VER <nl> extern : : std : : vector < testing : : internal : : string > g_argvs ; <nl> # endif / / GTEST_HAS_DEATH_TEST <nl> <nl> / / Defines synchronization primitives . <nl> - <nl> - # if GTEST_HAS_PTHREAD <nl> - <nl> - / / Sleeps for ( roughly ) n milli - seconds . This function is only for <nl> - / / testing Google Test ' s own constructs . Don ' t use it in user tests , <nl> - / / either directly or indirectly . <nl> + # if GTEST_IS_THREADSAFE <nl> + # if GTEST_HAS_PTHREAD <nl> + / / Sleeps for ( roughly ) n milliseconds . This function is only for testing <nl> + / / Google Test ' s own constructs . Don ' t use it in user tests , either <nl> + / / directly or indirectly . <nl> inline void SleepMilliseconds ( int n ) { <nl> const timespec time = { <nl> 0 , / / 0 seconds . <nl> inline void SleepMilliseconds ( int n ) { <nl> } ; <nl> nanosleep ( & time , NULL ) ; <nl> } <nl> + # endif / / GTEST_HAS_PTHREAD <nl> <nl> + # if 0 / / OS detection <nl> + # elif GTEST_HAS_PTHREAD <nl> / / Allows a controller thread to pause execution of newly created <nl> / / threads until notified . Instances of this class must be created <nl> / / and destroyed in the controller thread . <nl> class Notification { <nl> GTEST_DISALLOW_COPY_AND_ASSIGN_ ( Notification ) ; <nl> } ; <nl> <nl> + # elif GTEST_OS_WINDOWS <nl> + <nl> + GTEST_API_ void SleepMilliseconds ( int n ) ; <nl> + <nl> + / / Provides leak - safe Windows kernel handle ownership . <nl> + / / Used in death tests and in threading support . <nl> + class GTEST_API_ AutoHandle { <nl> + public : <nl> + / / Assume that Win32 HANDLE type is equivalent to void * . Doing so allows us to <nl> + / / avoid including < windows . h > in this header file . Including < windows . h > is <nl> + / / undesirable because it defines a lot of symbols and macros that tend to <nl> + / / conflict with client code . This assumption is verified by <nl> + / / WindowsTypesTest . HANDLEIsVoidStar . 
<nl> + typedef void * Handle ; <nl> + AutoHandle ( ) ; <nl> + explicit AutoHandle ( Handle handle ) ; <nl> + <nl> + ~ AutoHandle ( ) ; <nl> + <nl> + Handle Get ( ) const ; <nl> + void Reset ( ) ; <nl> + void Reset ( Handle handle ) ; <nl> + <nl> + private : <nl> + / / Returns true iff the handle is a valid handle object that can be closed . <nl> + bool IsCloseable ( ) const ; <nl> + <nl> + Handle handle_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( AutoHandle ) ; <nl> + } ; <nl> + <nl> + / / Allows a controller thread to pause execution of newly created <nl> + / / threads until notified . Instances of this class must be created <nl> + / / and destroyed in the controller thread . <nl> + / / <nl> + / / This class is only for testing Google Test ' s own constructs . Do not <nl> + / / use it in user tests , either directly or indirectly . <nl> + class GTEST_API_ Notification { <nl> + public : <nl> + Notification ( ) ; <nl> + void Notify ( ) ; <nl> + void WaitForNotification ( ) ; <nl> + <nl> + private : <nl> + AutoHandle event_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( Notification ) ; <nl> + } ; <nl> + # endif / / OS detection <nl> + <nl> + / / On MinGW , we can have both GTEST_OS_WINDOWS and GTEST_HAS_PTHREAD <nl> + / / defined , but we don ' t want to use MinGW ' s pthreads implementation , which <nl> + / / has conformance problems with some versions of the POSIX standard . <nl> + # if GTEST_HAS_PTHREAD & & ! GTEST_OS_WINDOWS_MINGW <nl> + <nl> / / As a C - function , ThreadFuncWithCLinkage cannot be templated itself . <nl> / / Consequently , it cannot select a correct instantiation of ThreadWithParam <nl> / / in order to call its Run ( ) . Introducing ThreadWithParamBase as a <nl> extern " C " inline void * ThreadFuncWithCLinkage ( void * thread ) { <nl> template < typename T > <nl> class ThreadWithParam : public ThreadWithParamBase { <nl> public : <nl> - typedef void ( * UserThreadFunc ) ( T ) ; <nl> + typedef void UserThreadFunc ( T ) ; <nl> <nl> - ThreadWithParam ( <nl> - UserThreadFunc func , T param , Notification * thread_can_start ) <nl> + ThreadWithParam ( UserThreadFunc * func , T param , Notification * thread_can_start ) <nl> : func_ ( func ) , <nl> param_ ( param ) , <nl> thread_can_start_ ( thread_can_start ) , <nl> class ThreadWithParam : public ThreadWithParamBase { <nl> } <nl> <nl> private : <nl> - const UserThreadFunc func_ ; / / User - supplied thread function . <nl> + UserThreadFunc * const func_ ; / / User - supplied thread function . <nl> const T param_ ; / / User - supplied parameter to the thread function . <nl> / / When non - NULL , used to block execution until the controller thread <nl> / / notifies . <nl> class ThreadWithParam : public ThreadWithParamBase { <nl> <nl> GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadWithParam ) ; <nl> } ; <nl> + # endif / / GTEST_HAS_PTHREAD & & ! GTEST_OS_WINDOWS_MINGW <nl> <nl> - / / MutexBase and Mutex implement mutex on pthreads - based platforms . They <nl> - / / are used in conjunction with class MutexLock : <nl> + # if 0 / / OS detection <nl> + # elif GTEST_OS_WINDOWS <nl> + <nl> + / / Mutex implements mutex on Windows platforms . It is used in conjunction <nl> + / / with class MutexLock : <nl> / / <nl> / / Mutex mutex ; <nl> / / . . . <nl> - / / MutexLock lock ( & mutex ) ; / / Acquires the mutex and releases it at the end <nl> - / / / / of the current scope . <nl> - / / <nl> - / / MutexBase implements behavior for both statically and dynamically <nl> - / / allocated mutexes . Do not use MutexBase directly . 
Instead , write <nl> - / / the following to define a static mutex : <nl> + / / MutexLock lock ( & mutex ) ; / / Acquires the mutex and releases it at the <nl> + / / / / end of the current scope . <nl> / / <nl> + / / A static Mutex * must * be defined or declared using one of the following <nl> + / / macros : <nl> / / GTEST_DEFINE_STATIC_MUTEX_ ( g_some_mutex ) ; <nl> + / / GTEST_DECLARE_STATIC_MUTEX_ ( g_some_mutex ) ; <nl> + / / <nl> + / / ( A non - static Mutex is defined / declared in the usual way ) . <nl> + class GTEST_API_ Mutex { <nl> + public : <nl> + enum MutexType { kStatic = 0 , kDynamic = 1 } ; <nl> + / / We rely on kStaticMutex being 0 as it is to what the linker initializes <nl> + / / type_ in static mutexes . critical_section_ will be initialized lazily <nl> + / / in ThreadSafeLazyInit ( ) . <nl> + enum StaticConstructorSelector { kStaticMutex = 0 } ; <nl> + <nl> + / / This constructor intentionally does nothing . It relies on type_ being <nl> + / / statically initialized to 0 ( effectively setting it to kStatic ) and on <nl> + / / ThreadSafeLazyInit ( ) to lazily initialize the rest of the members . <nl> + explicit Mutex ( StaticConstructorSelector / * dummy * / ) { } <nl> + <nl> + Mutex ( ) ; <nl> + ~ Mutex ( ) ; <nl> + <nl> + void Lock ( ) ; <nl> + <nl> + void Unlock ( ) ; <nl> + <nl> + / / Does nothing if the current thread holds the mutex . Otherwise , crashes <nl> + / / with high probability . <nl> + void AssertHeld ( ) ; <nl> + <nl> + private : <nl> + / / Initializes owner_thread_id_ and critical_section_ in static mutexes . <nl> + void ThreadSafeLazyInit ( ) ; <nl> + <nl> + / / Per http : / / blogs . msdn . com / b / oldnewthing / archive / 2004 / 02 / 23 / 78395 . aspx , <nl> + / / we assume that 0 is an invalid value for thread IDs . <nl> + unsigned int owner_thread_id_ ; <nl> + <nl> + / / For static mutexes , we rely on these members being initialized to zeros <nl> + / / by the linker . <nl> + MutexType type_ ; <nl> + long critical_section_init_phase_ ; / / NOLINT <nl> + _RTL_CRITICAL_SECTION * critical_section_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( Mutex ) ; <nl> + } ; <nl> + <nl> + # define GTEST_DECLARE_STATIC_MUTEX_ ( mutex ) \ <nl> + extern : : testing : : internal : : Mutex mutex <nl> + <nl> + # define GTEST_DEFINE_STATIC_MUTEX_ ( mutex ) \ <nl> + : : testing : : internal : : Mutex mutex ( : : testing : : internal : : Mutex : : kStaticMutex ) <nl> + <nl> + / / We cannot name this class MutexLock because the ctor declaration would <nl> + / / conflict with a macro named MutexLock , which is defined on some <nl> + / / platforms . That macro is used as a defensive measure to prevent against <nl> + / / inadvertent misuses of MutexLock like " MutexLock ( & mu ) " rather than <nl> + / / " MutexLock l ( & mu ) " . Hence the typedef trick below . <nl> + class GTestMutexLock { <nl> + public : <nl> + explicit GTestMutexLock ( Mutex * mutex ) <nl> + : mutex_ ( mutex ) { mutex_ - > Lock ( ) ; } <nl> + <nl> + ~ GTestMutexLock ( ) { mutex_ - > Unlock ( ) ; } <nl> + <nl> + private : <nl> + Mutex * const mutex_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( GTestMutexLock ) ; <nl> + } ; <nl> + <nl> + typedef GTestMutexLock MutexLock ; <nl> + <nl> + / / Base class for ValueHolder < T > . Allows a caller to hold and delete a value <nl> + / / without knowing its type . 
<nl> + class ThreadLocalValueHolderBase { <nl> + public : <nl> + virtual ~ ThreadLocalValueHolderBase ( ) { } <nl> + } ; <nl> + <nl> + / / Provides a way for a thread to send notifications to a ThreadLocal <nl> + / / regardless of its parameter type . <nl> + class ThreadLocalBase { <nl> + public : <nl> + / / Creates a new ValueHolder < T > object holding a default value passed to <nl> + / / this ThreadLocal < T > ' s constructor and returns it . It is the caller ' s <nl> + / / responsibility not to call this when the ThreadLocal < T > instance already <nl> + / / has a value on the current thread . <nl> + virtual ThreadLocalValueHolderBase * NewValueForCurrentThread ( ) const = 0 ; <nl> + <nl> + protected : <nl> + ThreadLocalBase ( ) { } <nl> + virtual ~ ThreadLocalBase ( ) { } <nl> + <nl> + private : <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadLocalBase ) ; <nl> + } ; <nl> + <nl> + / / Maps a thread to a set of ThreadLocals that have values instantiated on that <nl> + / / thread and notifies them when the thread exits . A ThreadLocal instance is <nl> + / / expected to persist until all threads it has values on have terminated . <nl> + class GTEST_API_ ThreadLocalRegistry { <nl> + public : <nl> + / / Registers thread_local_instance as having value on the current thread . <nl> + / / Returns a value that can be used to identify the thread from other threads . <nl> + static ThreadLocalValueHolderBase * GetValueOnCurrentThread ( <nl> + const ThreadLocalBase * thread_local_instance ) ; <nl> + <nl> + / / Invoked when a ThreadLocal instance is destroyed . <nl> + static void OnThreadLocalDestroyed ( <nl> + const ThreadLocalBase * thread_local_instance ) ; <nl> + } ; <nl> + <nl> + class GTEST_API_ ThreadWithParamBase { <nl> + public : <nl> + void Join ( ) ; <nl> + <nl> + protected : <nl> + class Runnable { <nl> + public : <nl> + virtual ~ Runnable ( ) { } <nl> + virtual void Run ( ) = 0 ; <nl> + } ; <nl> + <nl> + ThreadWithParamBase ( Runnable * runnable , Notification * thread_can_start ) ; <nl> + virtual ~ ThreadWithParamBase ( ) ; <nl> + <nl> + private : <nl> + AutoHandle thread_ ; <nl> + } ; <nl> + <nl> + / / Helper class for testing Google Test ' s multi - threading constructs . <nl> + template < typename T > <nl> + class ThreadWithParam : public ThreadWithParamBase { <nl> + public : <nl> + typedef void UserThreadFunc ( T ) ; <nl> + <nl> + ThreadWithParam ( UserThreadFunc * func , T param , Notification * thread_can_start ) <nl> + : ThreadWithParamBase ( new RunnableImpl ( func , param ) , thread_can_start ) { <nl> + } <nl> + virtual ~ ThreadWithParam ( ) { } <nl> + <nl> + private : <nl> + class RunnableImpl : public Runnable { <nl> + public : <nl> + RunnableImpl ( UserThreadFunc * func , T param ) <nl> + : func_ ( func ) , <nl> + param_ ( param ) { <nl> + } <nl> + virtual ~ RunnableImpl ( ) { } <nl> + virtual void Run ( ) { <nl> + func_ ( param_ ) ; <nl> + } <nl> + <nl> + private : <nl> + UserThreadFunc * const func_ ; <nl> + const T param_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( RunnableImpl ) ; <nl> + } ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadWithParam ) ; <nl> + } ; <nl> + <nl> + / / Implements thread - local storage on Windows systems . <nl> / / <nl> - / / You can forward declare a static mutex like this : <nl> + / / / / Thread 1 <nl> + / / ThreadLocal < int > tl ( 100 ) ; / / 100 is the default value for each thread . <nl> / / <nl> - / / GTEST_DECLARE_STATIC_MUTEX_ ( g_some_mutex ) ; <nl> + / / / / Thread 2 <nl> + / / tl . 
set ( 150 ) ; / / Changes the value for thread 2 only . <nl> + / / EXPECT_EQ ( 150 , tl . get ( ) ) ; <nl> / / <nl> - / / To create a dynamic mutex , just define an object of type Mutex . <nl> + / / / / Thread 1 <nl> + / / EXPECT_EQ ( 100 , tl . get ( ) ) ; / / In thread 1 , tl has the original value . <nl> + / / tl . set ( 200 ) ; <nl> + / / EXPECT_EQ ( 200 , tl . get ( ) ) ; <nl> + / / <nl> + / / The template type argument T must have a public copy constructor . <nl> + / / In addition , the default ThreadLocal constructor requires T to have <nl> + / / a public default constructor . <nl> + / / <nl> + / / The users of a TheadLocal instance have to make sure that all but one <nl> + / / threads ( including the main one ) using that instance have exited before <nl> + / / destroying it . Otherwise , the per - thread objects managed for them by the <nl> + / / ThreadLocal instance are not guaranteed to be destroyed on all platforms . <nl> + / / <nl> + / / Google Test only uses global ThreadLocal objects . That means they <nl> + / / will die after main ( ) has returned . Therefore , no per - thread <nl> + / / object managed by Google Test will be leaked as long as all threads <nl> + / / using Google Test have exited when main ( ) returns . <nl> + template < typename T > <nl> + class ThreadLocal : public ThreadLocalBase { <nl> + public : <nl> + ThreadLocal ( ) : default_ ( ) { } <nl> + explicit ThreadLocal ( const T & value ) : default_ ( value ) { } <nl> + <nl> + ~ ThreadLocal ( ) { ThreadLocalRegistry : : OnThreadLocalDestroyed ( this ) ; } <nl> + <nl> + T * pointer ( ) { return GetOrCreateValue ( ) ; } <nl> + const T * pointer ( ) const { return GetOrCreateValue ( ) ; } <nl> + const T & get ( ) const { return * pointer ( ) ; } <nl> + void set ( const T & value ) { * pointer ( ) = value ; } <nl> + <nl> + private : <nl> + / / Holds a value of T . Can be deleted via its base class without the caller <nl> + / / knowing the type of T . <nl> + class ValueHolder : public ThreadLocalValueHolderBase { <nl> + public : <nl> + explicit ValueHolder ( const T & value ) : value_ ( value ) { } <nl> + <nl> + T * pointer ( ) { return & value_ ; } <nl> + <nl> + private : <nl> + T value_ ; <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ValueHolder ) ; <nl> + } ; <nl> + <nl> + <nl> + T * GetOrCreateValue ( ) const { <nl> + return static_cast < ValueHolder * > ( <nl> + ThreadLocalRegistry : : GetValueOnCurrentThread ( this ) ) - > pointer ( ) ; <nl> + } <nl> + <nl> + virtual ThreadLocalValueHolderBase * NewValueForCurrentThread ( ) const { <nl> + return new ValueHolder ( default_ ) ; <nl> + } <nl> + <nl> + const T default_ ; / / The default value for each thread . <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadLocal ) ; <nl> + } ; <nl> + <nl> + # elif GTEST_HAS_PTHREAD <nl> + <nl> + / / MutexBase and Mutex implement mutex on pthreads - based platforms . <nl> class MutexBase { <nl> public : <nl> / / Acquires this mutex . <nl> class MutexBase { <nl> } ; <nl> <nl> / / Forward - declares a static mutex . <nl> - # define GTEST_DECLARE_STATIC_MUTEX_ ( mutex ) \ <nl> - extern : : testing : : internal : : MutexBase mutex <nl> + # define GTEST_DECLARE_STATIC_MUTEX_ ( mutex ) \ <nl> + extern : : testing : : internal : : MutexBase mutex <nl> <nl> / / Defines and statically ( i . e . at link time ) initializes a static mutex . <nl> / / The initialization list here does not explicitly initialize each field , <nl> class MutexBase { <nl> / / particular , the owner_ field ( a pthread_t ) is not explicitly initialized . 
<nl> / / This allows initialization to work whether pthread_t is a scalar or struct . <nl> / / The flag - Wmissing - field - initializers must not be specified for this to work . <nl> - # define GTEST_DEFINE_STATIC_MUTEX_ ( mutex ) \ <nl> - : : testing : : internal : : MutexBase mutex = { PTHREAD_MUTEX_INITIALIZER , false } <nl> + # define GTEST_DEFINE_STATIC_MUTEX_ ( mutex ) \ <nl> + : : testing : : internal : : MutexBase mutex = { PTHREAD_MUTEX_INITIALIZER , false } <nl> <nl> / / The Mutex class can only be used for mutexes created at runtime . It <nl> / / shares its API with MutexBase otherwise . <nl> class Mutex : public MutexBase { <nl> GTEST_DISALLOW_COPY_AND_ASSIGN_ ( Mutex ) ; <nl> } ; <nl> <nl> - / / We cannot name this class MutexLock as the ctor declaration would <nl> + / / We cannot name this class MutexLock because the ctor declaration would <nl> / / conflict with a macro named MutexLock , which is defined on some <nl> - / / platforms . Hence the typedef trick below . <nl> + / / platforms . That macro is used as a defensive measure to prevent against <nl> + / / inadvertent misuses of MutexLock like " MutexLock ( & mu ) " rather than <nl> + / / " MutexLock l ( & mu ) " . Hence the typedef trick below . <nl> class GTestMutexLock { <nl> public : <nl> explicit GTestMutexLock ( MutexBase * mutex ) <nl> extern " C " inline void DeleteThreadLocalValue ( void * value_holder ) { <nl> } <nl> <nl> / / Implements thread - local storage on pthreads - based systems . <nl> - / / <nl> - / / / / Thread 1 <nl> - / / ThreadLocal < int > tl ( 100 ) ; / / 100 is the default value for each thread . <nl> - / / <nl> - / / / / Thread 2 <nl> - / / tl . set ( 150 ) ; / / Changes the value for thread 2 only . <nl> - / / EXPECT_EQ ( 150 , tl . get ( ) ) ; <nl> - / / <nl> - / / / / Thread 1 <nl> - / / EXPECT_EQ ( 100 , tl . get ( ) ) ; / / In thread 1 , tl has the original value . <nl> - / / tl . set ( 200 ) ; <nl> - / / EXPECT_EQ ( 200 , tl . get ( ) ) ; <nl> - / / <nl> - / / The template type argument T must have a public copy constructor . <nl> - / / In addition , the default ThreadLocal constructor requires T to have <nl> - / / a public default constructor . <nl> - / / <nl> - / / An object managed for a thread by a ThreadLocal instance is deleted <nl> - / / when the thread exits . Or , if the ThreadLocal instance dies in <nl> - / / that thread , when the ThreadLocal dies . It ' s the user ' s <nl> - / / responsibility to ensure that all other threads using a ThreadLocal <nl> - / / have exited when it dies , or the per - thread objects for those <nl> - / / threads will not be deleted . <nl> - / / <nl> - / / Google Test only uses global ThreadLocal objects . That means they <nl> - / / will die after main ( ) has returned . Therefore , no per - thread <nl> - / / object managed by Google Test will be leaked as long as all threads <nl> - / / using Google Test have exited when main ( ) returns . <nl> template < typename T > <nl> class ThreadLocal { <nl> public : <nl> class ThreadLocal { <nl> GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadLocal ) ; <nl> } ; <nl> <nl> - # define GTEST_IS_THREADSAFE 1 <nl> + # endif / / OS detection <nl> <nl> - # else / / GTEST_HAS_PTHREAD <nl> + # else / / GTEST_IS_THREADSAFE <nl> <nl> / / A dummy implementation of synchronization primitives ( mutex , lock , <nl> / / and thread - local variable ) . 
Necessary for compiling Google Test where <nl> class Mutex { <nl> <nl> # define GTEST_DEFINE_STATIC_MUTEX_ ( mutex ) : : testing : : internal : : Mutex mutex <nl> <nl> + / / We cannot name this class MutexLock because the ctor declaration would <nl> + / / conflict with a macro named MutexLock , which is defined on some <nl> + / / platforms . That macro is used as a defensive measure to prevent against <nl> + / / inadvertent misuses of MutexLock like " MutexLock ( & mu ) " rather than <nl> + / / " MutexLock l ( & mu ) " . Hence the typedef trick below . <nl> class GTestMutexLock { <nl> public : <nl> explicit GTestMutexLock ( Mutex * ) { } / / NOLINT <nl> class ThreadLocal { <nl> T value_ ; <nl> } ; <nl> <nl> - / / The above synchronization primitives have dummy implementations . <nl> - / / Therefore Google Test is not thread - safe . <nl> - # define GTEST_IS_THREADSAFE 0 <nl> - <nl> - # endif / / GTEST_HAS_PTHREAD <nl> + # endif / / GTEST_IS_THREADSAFE <nl> <nl> / / Returns the number of threads running in the process , or 0 to indicate that <nl> / / we cannot detect it . <nl> mmm a / src / gtest - internal - inl . h <nl> ppp b / src / gtest - internal - inl . h <nl> GTEST_API_ void ParseGoogleTestFlagsOnly ( int * argc , wchar_t * * argv ) ; <nl> / / platform . <nl> GTEST_API_ std : : string GetLastErrnoDescription ( ) ; <nl> <nl> - # if GTEST_OS_WINDOWS <nl> - / / Provides leak - safe Windows kernel handle ownership . <nl> - class AutoHandle { <nl> - public : <nl> - AutoHandle ( ) : handle_ ( INVALID_HANDLE_VALUE ) { } <nl> - explicit AutoHandle ( HANDLE handle ) : handle_ ( handle ) { } <nl> - <nl> - ~ AutoHandle ( ) { Reset ( ) ; } <nl> - <nl> - HANDLE Get ( ) const { return handle_ ; } <nl> - void Reset ( ) { Reset ( INVALID_HANDLE_VALUE ) ; } <nl> - void Reset ( HANDLE handle ) { <nl> - if ( handle ! = handle_ ) { <nl> - if ( handle_ ! = INVALID_HANDLE_VALUE ) <nl> - : : CloseHandle ( handle_ ) ; <nl> - handle_ = handle ; <nl> - } <nl> - } <nl> - <nl> - private : <nl> - HANDLE handle_ ; <nl> - <nl> - GTEST_DISALLOW_COPY_AND_ASSIGN_ ( AutoHandle ) ; <nl> - } ; <nl> - # endif / / GTEST_OS_WINDOWS <nl> - <nl> / / Attempts to parse a string into a positive integer pointed to by the <nl> / / number parameter . Returns true if that is possible . <nl> / / GTEST_HAS_DEATH_TEST implies that we have : : std : : string , so we can use <nl> mmm a / src / gtest - port . cc <nl> ppp b / src / gtest - port . cc <nl> <nl> # include < stdio . h > <nl> # include < string . h > <nl> <nl> - # if GTEST_OS_WINDOWS_MOBILE <nl> - # include < windows . h > / / For TerminateProcess ( ) <nl> - # elif GTEST_OS_WINDOWS <nl> + # if GTEST_OS_WINDOWS <nl> + # include < windows . h > <nl> # include < io . h > <nl> # include < sys / stat . h > <nl> + # include < map > / / Used in ThreadLocal . <nl> # else <nl> # include < unistd . h > <nl> - # endif / / GTEST_OS_WINDOWS_MOBILE <nl> + # endif / / GTEST_OS_WINDOWS <nl> <nl> # if GTEST_OS_MAC <nl> # include < mach / mach_init . 
h > <nl> size_t GetThreadCount ( ) { <nl> <nl> # endif / / GTEST_OS_MAC <nl> <nl> + # if GTEST_IS_THREADSAFE & & GTEST_OS_WINDOWS <nl> + <nl> + void SleepMilliseconds ( int n ) { <nl> + : : Sleep ( n ) ; <nl> + } <nl> + <nl> + AutoHandle : : AutoHandle ( ) <nl> + : handle_ ( INVALID_HANDLE_VALUE ) { } <nl> + <nl> + AutoHandle : : AutoHandle ( Handle handle ) <nl> + : handle_ ( handle ) { } <nl> + <nl> + AutoHandle : : ~ AutoHandle ( ) { <nl> + Reset ( ) ; <nl> + } <nl> + <nl> + AutoHandle : : Handle AutoHandle : : Get ( ) const { <nl> + return handle_ ; <nl> + } <nl> + <nl> + void AutoHandle : : Reset ( ) { <nl> + Reset ( INVALID_HANDLE_VALUE ) ; <nl> + } <nl> + <nl> + void AutoHandle : : Reset ( HANDLE handle ) { <nl> + / / Resetting with the same handle we already own is invalid . <nl> + if ( handle_ ! = handle ) { <nl> + if ( IsCloseable ( ) ) { <nl> + : : CloseHandle ( handle_ ) ; <nl> + } <nl> + handle_ = handle ; <nl> + } else { <nl> + GTEST_CHECK_ ( ! IsCloseable ( ) ) <nl> + < < " Resetting a valid handle to itself is likely a programmer error " <nl> + " and thus not allowed . " ; <nl> + } <nl> + } <nl> + <nl> + bool AutoHandle : : IsCloseable ( ) const { <nl> + / / Different Windows APIs may use either of these values to represent an <nl> + / / invalid handle . <nl> + return handle_ ! = NULL & & handle_ ! = INVALID_HANDLE_VALUE ; <nl> + } <nl> + <nl> + Notification : : Notification ( ) <nl> + : event_ ( : : CreateEvent ( NULL , / / Default security attributes . <nl> + TRUE , / / Do not reset automatically . <nl> + FALSE , / / Initially unset . <nl> + NULL ) ) { / / Anonymous event . <nl> + GTEST_CHECK_ ( event_ . Get ( ) ! = NULL ) ; <nl> + } <nl> + <nl> + void Notification : : Notify ( ) { <nl> + GTEST_CHECK_ ( : : SetEvent ( event_ . Get ( ) ) ! = FALSE ) ; <nl> + } <nl> + <nl> + void Notification : : WaitForNotification ( ) { <nl> + GTEST_CHECK_ ( <nl> + : : WaitForSingleObject ( event_ . Get ( ) , INFINITE ) = = WAIT_OBJECT_0 ) ; <nl> + } <nl> + <nl> + Mutex : : Mutex ( ) <nl> + : type_ ( kDynamic ) , <nl> + owner_thread_id_ ( 0 ) , <nl> + critical_section_init_phase_ ( 0 ) , <nl> + critical_section_ ( new CRITICAL_SECTION ) { <nl> + : : InitializeCriticalSection ( critical_section_ ) ; <nl> + } <nl> + <nl> + Mutex : : ~ Mutex ( ) { <nl> + / / Static mutexes are leaked intentionally . It is not thread - safe to try <nl> + / / to clean them up . <nl> + / / TODO ( yukawa ) : Switch to Slim Reader / Writer ( SRW ) Locks , which requires <nl> + / / nothing to clean it up but is available only on Vista and later . <nl> + / / http : / / msdn . microsoft . com / en - us / library / windows / desktop / aa904937 . aspx <nl> + if ( type_ = = kDynamic ) { <nl> + : : DeleteCriticalSection ( critical_section_ ) ; <nl> + delete critical_section_ ; <nl> + critical_section_ = NULL ; <nl> + } <nl> + } <nl> + <nl> + void Mutex : : Lock ( ) { <nl> + ThreadSafeLazyInit ( ) ; <nl> + : : EnterCriticalSection ( critical_section_ ) ; <nl> + owner_thread_id_ = : : GetCurrentThreadId ( ) ; <nl> + } <nl> + <nl> + void Mutex : : Unlock ( ) { <nl> + ThreadSafeLazyInit ( ) ; <nl> + / / We don ' t protect writing to owner_thread_id_ here , as it ' s the <nl> + / / caller ' s responsibility to ensure that the current thread holds the <nl> + / / mutex when this is called . <nl> + owner_thread_id_ = 0 ; <nl> + : : LeaveCriticalSection ( critical_section_ ) ; <nl> + } <nl> + <nl> + / / Does nothing if the current thread holds the mutex . Otherwise , crashes <nl> + / / with high probability . 
<nl> + void Mutex : : AssertHeld ( ) { <nl> + ThreadSafeLazyInit ( ) ; <nl> + GTEST_CHECK_ ( owner_thread_id_ = = : : GetCurrentThreadId ( ) ) <nl> + < < " The current thread is not holding the mutex @ " < < this ; <nl> + } <nl> + <nl> + / / Initializes owner_thread_id_ and critical_section_ in static mutexes . <nl> + void Mutex : : ThreadSafeLazyInit ( ) { <nl> + / / Dynamic mutexes are initialized in the constructor . <nl> + if ( type_ = = kStatic ) { <nl> + switch ( <nl> + : : InterlockedCompareExchange ( & critical_section_init_phase_ , 1L , 0L ) ) { <nl> + case 0 : <nl> + / / If critical_section_init_phase_ was 0 before the exchange , we <nl> + / / are the first to test it and need to perform the initialization . <nl> + owner_thread_id_ = 0 ; <nl> + critical_section_ = new CRITICAL_SECTION ; <nl> + : : InitializeCriticalSection ( critical_section_ ) ; <nl> + / / Updates the critical_section_init_phase_ to 2 to signal <nl> + / / initialization complete . <nl> + GTEST_CHECK_ ( : : InterlockedCompareExchange ( <nl> + & critical_section_init_phase_ , 2L , 1L ) = = <nl> + 1L ) ; <nl> + break ; <nl> + case 1 : <nl> + / / Somebody else is already initializing the mutex ; spin until they <nl> + / / are done . <nl> + while ( : : InterlockedCompareExchange ( & critical_section_init_phase_ , <nl> + 2L , <nl> + 2L ) ! = 2L ) { <nl> + / / Possibly yields the rest of the thread ' s time slice to other <nl> + / / threads . <nl> + : : Sleep ( 0 ) ; <nl> + } <nl> + break ; <nl> + <nl> + case 2 : <nl> + break ; / / The mutex is already initialized and ready for use . <nl> + <nl> + default : <nl> + GTEST_CHECK_ ( false ) <nl> + < < " Unexpected value of critical_section_init_phase_ " <nl> + < < " while initializing a static mutex . " ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + namespace { <nl> + <nl> + class ThreadWithParamSupport : public ThreadWithParamBase { <nl> + public : <nl> + static HANDLE CreateThread ( Runnable * runnable , <nl> + Notification * thread_can_start ) { <nl> + ThreadMainParam * param = new ThreadMainParam ( runnable , thread_can_start ) ; <nl> + DWORD thread_id ; <nl> + / / TODO ( yukawa ) : Consider to use _beginthreadex instead . <nl> + HANDLE thread_handle = : : CreateThread ( <nl> + NULL , / / Default security . <nl> + 0 , / / Default stack size . <nl> + & ThreadWithParamSupport : : ThreadMain , <nl> + param , / / Parameter to ThreadMainStatic <nl> + 0x0 , / / Default creation flags . <nl> + & thread_id ) ; / / Need a valid pointer for the call to work under Win98 . <nl> + GTEST_CHECK_ ( thread_handle ! = NULL ) < < " CreateThread failed with error " <nl> + < < : : GetLastError ( ) < < " . " ; <nl> + if ( thread_handle = = NULL ) { <nl> + delete param ; <nl> + } <nl> + return thread_handle ; <nl> + } <nl> + <nl> + private : <nl> + struct ThreadMainParam { <nl> + ThreadMainParam ( Runnable * runnable , Notification * thread_can_start ) <nl> + : runnable_ ( runnable ) , <nl> + thread_can_start_ ( thread_can_start ) { <nl> + } <nl> + scoped_ptr < Runnable > runnable_ ; <nl> + / / Does not own . <nl> + Notification * thread_can_start_ ; <nl> + } ; <nl> + <nl> + static DWORD WINAPI ThreadMain ( void * ptr ) { <nl> + / / Transfers ownership . <nl> + scoped_ptr < ThreadMainParam > param ( static_cast < ThreadMainParam * > ( ptr ) ) ; <nl> + if ( param - > thread_can_start_ ! = NULL ) <nl> + param - > thread_can_start_ - > WaitForNotification ( ) ; <nl> + param - > runnable_ - > Run ( ) ; <nl> + return 0 ; <nl> + } <nl> + <nl> + / / Prohibit instantiation . 
<nl> + ThreadWithParamSupport ( ) ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( ThreadWithParamSupport ) ; <nl> + } ; <nl> + <nl> + } / / namespace <nl> + <nl> + ThreadWithParamBase : : ThreadWithParamBase ( Runnable * runnable , <nl> + Notification * thread_can_start ) <nl> + : thread_ ( ThreadWithParamSupport : : CreateThread ( runnable , <nl> + thread_can_start ) ) { <nl> + } <nl> + <nl> + ThreadWithParamBase : : ~ ThreadWithParamBase ( ) { <nl> + Join ( ) ; <nl> + } <nl> + <nl> + void ThreadWithParamBase : : Join ( ) { <nl> + GTEST_CHECK_ ( : : WaitForSingleObject ( thread_ . Get ( ) , INFINITE ) = = WAIT_OBJECT_0 ) <nl> + < < " Failed to join the thread with error " < < : : GetLastError ( ) < < " . " ; <nl> + } <nl> + <nl> + / / Maps a thread to a set of ThreadIdToThreadLocals that have values <nl> + / / instantiated on that thread and notifies them when the thread exits . A <nl> + / / ThreadLocal instance is expected to persist until all threads it has <nl> + / / values on have terminated . <nl> + class ThreadLocalRegistryImpl { <nl> + public : <nl> + / / Registers thread_local_instance as having value on the current thread . <nl> + / / Returns a value that can be used to identify the thread from other threads . <nl> + static ThreadLocalValueHolderBase * GetValueOnCurrentThread ( <nl> + const ThreadLocalBase * thread_local_instance ) { <nl> + DWORD current_thread = : : GetCurrentThreadId ( ) ; <nl> + MutexLock lock ( & mutex_ ) ; <nl> + ThreadIdToThreadLocals * const thread_to_thread_locals = <nl> + GetThreadLocalsMapLocked ( ) ; <nl> + ThreadIdToThreadLocals : : iterator thread_local_pos = <nl> + thread_to_thread_locals - > find ( current_thread ) ; <nl> + if ( thread_local_pos = = thread_to_thread_locals - > end ( ) ) { <nl> + thread_local_pos = thread_to_thread_locals - > insert ( <nl> + std : : make_pair ( current_thread , ThreadLocalValues ( ) ) ) . first ; <nl> + StartWatcherThreadFor ( current_thread ) ; <nl> + } <nl> + ThreadLocalValues & thread_local_values = thread_local_pos - > second ; <nl> + ThreadLocalValues : : iterator value_pos = <nl> + thread_local_values . find ( thread_local_instance ) ; <nl> + if ( value_pos = = thread_local_values . end ( ) ) { <nl> + value_pos = <nl> + thread_local_values <nl> + . insert ( std : : make_pair ( <nl> + thread_local_instance , <nl> + linked_ptr < ThreadLocalValueHolderBase > ( <nl> + thread_local_instance - > NewValueForCurrentThread ( ) ) ) ) <nl> + . first ; <nl> + } <nl> + return value_pos - > second . get ( ) ; <nl> + } <nl> + <nl> + static void OnThreadLocalDestroyed ( <nl> + const ThreadLocalBase * thread_local_instance ) { <nl> + std : : vector < linked_ptr < ThreadLocalValueHolderBase > > value_holders ; <nl> + / / Clean up the ThreadLocalValues data structure while holding the lock , but <nl> + / / defer the destruction of the ThreadLocalValueHolderBases . <nl> + { <nl> + MutexLock lock ( & mutex_ ) ; <nl> + ThreadIdToThreadLocals * const thread_to_thread_locals = <nl> + GetThreadLocalsMapLocked ( ) ; <nl> + for ( ThreadIdToThreadLocals : : iterator it = <nl> + thread_to_thread_locals - > begin ( ) ; <nl> + it ! = thread_to_thread_locals - > end ( ) ; <nl> + + + it ) { <nl> + ThreadLocalValues & thread_local_values = it - > second ; <nl> + ThreadLocalValues : : iterator value_pos = <nl> + thread_local_values . find ( thread_local_instance ) ; <nl> + if ( value_pos ! = thread_local_values . end ( ) ) { <nl> + value_holders . push_back ( value_pos - > second ) ; <nl> + thread_local_values . 
erase ( value_pos ) ; <nl> + / / This ' if ' can only be successful at most once , so theoretically we <nl> + / / could break out of the loop here , but we don ' t bother doing so . <nl> + } <nl> + } <nl> + } <nl> + / / Outside the lock , let the destructor for ' value_holders ' deallocate the <nl> + / / ThreadLocalValueHolderBases . <nl> + } <nl> + <nl> + static void OnThreadExit ( DWORD thread_id ) { <nl> + GTEST_CHECK_ ( thread_id ! = 0 ) < < : : GetLastError ( ) ; <nl> + std : : vector < linked_ptr < ThreadLocalValueHolderBase > > value_holders ; <nl> + / / Clean up the ThreadIdToThreadLocals data structure while holding the <nl> + / / lock , but defer the destruction of the ThreadLocalValueHolderBases . <nl> + { <nl> + MutexLock lock ( & mutex_ ) ; <nl> + ThreadIdToThreadLocals * const thread_to_thread_locals = <nl> + GetThreadLocalsMapLocked ( ) ; <nl> + ThreadIdToThreadLocals : : iterator thread_local_pos = <nl> + thread_to_thread_locals - > find ( thread_id ) ; <nl> + if ( thread_local_pos ! = thread_to_thread_locals - > end ( ) ) { <nl> + ThreadLocalValues & thread_local_values = thread_local_pos - > second ; <nl> + for ( ThreadLocalValues : : iterator value_pos = <nl> + thread_local_values . begin ( ) ; <nl> + value_pos ! = thread_local_values . end ( ) ; <nl> + + + value_pos ) { <nl> + value_holders . push_back ( value_pos - > second ) ; <nl> + } <nl> + thread_to_thread_locals - > erase ( thread_local_pos ) ; <nl> + } <nl> + } <nl> + / / Outside the lock , let the destructor for ' value_holders ' deallocate the <nl> + / / ThreadLocalValueHolderBases . <nl> + } <nl> + <nl> + private : <nl> + / / In a particular thread , maps a ThreadLocal object to its value . <nl> + typedef std : : map < const ThreadLocalBase * , <nl> + linked_ptr < ThreadLocalValueHolderBase > > ThreadLocalValues ; <nl> + / / Stores all ThreadIdToThreadLocals having values in a thread , indexed by <nl> + / / thread ' s ID . <nl> + typedef std : : map < DWORD , ThreadLocalValues > ThreadIdToThreadLocals ; <nl> + <nl> + / / Holds the thread id and thread handle that we pass from <nl> + / / StartWatcherThreadFor to WatcherThreadFunc . <nl> + typedef std : : pair < DWORD , HANDLE > ThreadIdAndHandle ; <nl> + <nl> + static void StartWatcherThreadFor ( DWORD thread_id ) { <nl> + / / The returned handle will be kept in thread_map and closed by <nl> + / / watcher_thread in WatcherThreadFunc . <nl> + HANDLE thread = : : OpenThread ( SYNCHRONIZE | THREAD_QUERY_INFORMATION , <nl> + FALSE , <nl> + thread_id ) ; <nl> + GTEST_CHECK_ ( thread ! = NULL ) ; <nl> + / / We need to to pass a valid thread ID pointer into CreateThread for it <nl> + / / to work correctly under Win98 . <nl> + DWORD watcher_thread_id ; <nl> + HANDLE watcher_thread = : : CreateThread ( <nl> + NULL , / / Default security . <nl> + 0 , / / Default stack size <nl> + & ThreadLocalRegistryImpl : : WatcherThreadFunc , <nl> + reinterpret_cast < LPVOID > ( new ThreadIdAndHandle ( thread_id , thread ) ) , <nl> + CREATE_SUSPENDED , <nl> + & watcher_thread_id ) ; <nl> + GTEST_CHECK_ ( watcher_thread ! = NULL ) ; <nl> + / / Give the watcher thread the same priority as ours to avoid being <nl> + / / blocked by it . <nl> + : : SetThreadPriority ( watcher_thread , <nl> + : : GetThreadPriority ( : : GetCurrentThread ( ) ) ) ; <nl> + : : ResumeThread ( watcher_thread ) ; <nl> + : : CloseHandle ( watcher_thread ) ; <nl> + } <nl> + <nl> + / / Monitors exit from a given thread and notifies those <nl> + / / ThreadIdToThreadLocals about thread termination . 
<nl> + static DWORD WINAPI WatcherThreadFunc ( LPVOID param ) { <nl> + const ThreadIdAndHandle * tah = <nl> + reinterpret_cast < const ThreadIdAndHandle * > ( param ) ; <nl> + GTEST_CHECK_ ( <nl> + : : WaitForSingleObject ( tah - > second , INFINITE ) = = WAIT_OBJECT_0 ) ; <nl> + OnThreadExit ( tah - > first ) ; <nl> + : : CloseHandle ( tah - > second ) ; <nl> + delete tah ; <nl> + return 0 ; <nl> + } <nl> + <nl> + / / Returns map of thread local instances . <nl> + static ThreadIdToThreadLocals * GetThreadLocalsMapLocked ( ) { <nl> + mutex_ . AssertHeld ( ) ; <nl> + static ThreadIdToThreadLocals * map = new ThreadIdToThreadLocals ; <nl> + return map ; <nl> + } <nl> + <nl> + / / Protects access to GetThreadLocalsMapLocked ( ) and its return value . <nl> + static Mutex mutex_ ; <nl> + / / Protects access to GetThreadMapLocked ( ) and its return value . <nl> + static Mutex thread_map_mutex_ ; <nl> + } ; <nl> + <nl> + Mutex ThreadLocalRegistryImpl : : mutex_ ( Mutex : : kStaticMutex ) ; <nl> + Mutex ThreadLocalRegistryImpl : : thread_map_mutex_ ( Mutex : : kStaticMutex ) ; <nl> + <nl> + ThreadLocalValueHolderBase * ThreadLocalRegistry : : GetValueOnCurrentThread ( <nl> + const ThreadLocalBase * thread_local_instance ) { <nl> + return ThreadLocalRegistryImpl : : GetValueOnCurrentThread ( <nl> + thread_local_instance ) ; <nl> + } <nl> + <nl> + void ThreadLocalRegistry : : OnThreadLocalDestroyed ( <nl> + const ThreadLocalBase * thread_local_instance ) { <nl> + ThreadLocalRegistryImpl : : OnThreadLocalDestroyed ( thread_local_instance ) ; <nl> + } <nl> + <nl> + # endif / / GTEST_IS_THREADSAFE & & GTEST_OS_WINDOWS <nl> + <nl> # if GTEST_USES_POSIX_RE <nl> <nl> / / Implements RE . Currently only needed for death tests . <nl> mmm a / src / gtest . cc <nl> ppp b / src / gtest . cc <nl> std : : string FormatTimeInMillisAsSeconds ( TimeInMillis ms ) { <nl> / / Converts the given epoch time in milliseconds to a date string in the ISO <nl> / / 8601 format , without the timezone information . <nl> std : : string FormatEpochTimeInMillisAsIso8601 ( TimeInMillis ms ) { <nl> - / / Using non - reentrant version as localtime_r is not portable . <nl> time_t seconds = static_cast < time_t > ( ms / 1000 ) ; <nl> + struct tm time_struct ; <nl> # ifdef _MSC_VER <nl> - # pragma warning ( push ) / / Saves the current warning state . <nl> - # pragma warning ( disable : 4996 ) / / Temporarily disables warning 4996 <nl> - / / ( function or variable may be unsafe ) . <nl> - const struct tm * const time_struct = localtime ( & seconds ) ; / / NOLINT <nl> - # pragma warning ( pop ) / / Restores the warning state again . <nl> + if ( localtime_s ( & time_struct , & seconds ) ! = 0 ) <nl> + return " " ; / / Invalid ms value <nl> # else <nl> - const struct tm * const time_struct = localtime ( & seconds ) ; / / NOLINT <nl> - # endif <nl> - if ( time_struct = = NULL ) <nl> + if ( localtime_r ( & seconds , & time_struct ) = = NULL ) <nl> return " " ; / / Invalid ms value <nl> + # endif <nl> <nl> / / YYYY - MM - DDThh : mm : ss <nl> - return StreamableToString ( time_struct - > tm_year + 1900 ) + " - " + <nl> - String : : FormatIntWidth2 ( time_struct - > tm_mon + 1 ) + " - " + <nl> - String : : FormatIntWidth2 ( time_struct - > tm_mday ) + " T " + <nl> - String : : FormatIntWidth2 ( time_struct - > tm_hour ) + " : " + <nl> - String : : FormatIntWidth2 ( time_struct - > tm_min ) + " : " + <nl> - String : : FormatIntWidth2 ( time_struct - > tm_sec ) ; <nl> + return StreamableToString ( time_struct . 
tm_year + 1900 ) + " - " + <nl> + String : : FormatIntWidth2 ( time_struct . tm_mon + 1 ) + " - " + <nl> + String : : FormatIntWidth2 ( time_struct . tm_mday ) + " T " + <nl> + String : : FormatIntWidth2 ( time_struct . tm_hour ) + " : " + <nl> + String : : FormatIntWidth2 ( time_struct . tm_min ) + " : " + <nl> + String : : FormatIntWidth2 ( time_struct . tm_sec ) ; <nl> } <nl> <nl> / / Streams an XML CDATA section , escaping invalid CDATA sequences as needed . <nl> mmm a / test / gtest - death - test_test . cc <nl> ppp b / test / gtest - death - test_test . cc <nl> TEST_F ( TestForDeathTest , ExpectDebugDeathDoesNotAbort ) { <nl> <nl> void AssertDebugDeathHelper ( bool * aborted ) { <nl> * aborted = true ; <nl> - ASSERT_DEBUG_DEATH ( return , " " ) < < " This is expected to fail . " ; <nl> + GTEST_LOG_ ( INFO ) < < " Before ASSERT_DEBUG_DEATH " ; <nl> + ASSERT_DEBUG_DEATH ( GTEST_LOG_ ( INFO ) < < " In ASSERT_DEBUG_DEATH " ; return , " " ) <nl> + < < " This is expected to fail . " ; <nl> + GTEST_LOG_ ( INFO ) < < " After ASSERT_DEBUG_DEATH " ; <nl> * aborted = false ; <nl> } <nl> <nl> TEST_F ( TestForDeathTest , AssertDebugDeathAborts ) { <nl> EXPECT_TRUE ( aborted ) ; <nl> } <nl> <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts2 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts3 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts4 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts5 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts6 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts7 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts8 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts9 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> + TEST_F ( TestForDeathTest , AssertDebugDeathAborts10 ) { <nl> + static bool aborted ; <nl> + aborted = false ; <nl> + EXPECT_FATAL_FAILURE ( AssertDebugDeathHelper ( & aborted ) , " " ) ; <nl> + EXPECT_TRUE ( aborted ) ; <nl> + } <nl> + <nl> # endif / / _NDEBUG <nl> <nl> / / Tests the * _EXIT family of macros , using a variety of predicates . <nl> mmm a / test / gtest - port_test . 
cc <nl> ppp b / test / gtest - port_test . cc <nl> class AtomicCounterWithMutex { <nl> MutexLock lock ( mutex_ ) ; <nl> int temp = value_ ; <nl> { <nl> - / / Locking a mutex puts up a memory barrier , preventing reads and <nl> - / / writes to value_ rearranged when observed from other threads . <nl> - / / <nl> - / / We cannot use Mutex and MutexLock here or rely on their memory <nl> - / / barrier functionality as we are testing them here . <nl> + / / We need to put up a memory barrier to prevent reads and writes to <nl> + / / value_ rearranged with the call to SleepMilliseconds when observed <nl> + / / from other threads . <nl> + # if GTEST_HAS_PTHREAD <nl> + / / On POSIX , locking a mutex puts up a memory barrier . We cannot use <nl> + / / Mutex and MutexLock here or rely on their memory barrier <nl> + / / functionality as we are testing them here . <nl> pthread_mutex_t memory_barrier_mutex ; <nl> GTEST_CHECK_POSIX_SUCCESS_ ( <nl> pthread_mutex_init ( & memory_barrier_mutex , NULL ) ) ; <nl> class AtomicCounterWithMutex { <nl> <nl> GTEST_CHECK_POSIX_SUCCESS_ ( pthread_mutex_unlock ( & memory_barrier_mutex ) ) ; <nl> GTEST_CHECK_POSIX_SUCCESS_ ( pthread_mutex_destroy ( & memory_barrier_mutex ) ) ; <nl> + # elif GTEST_OS_WINDOWS <nl> + / / On Windows , performing an interlocked access puts up a memory barrier . <nl> + volatile LONG dummy = 0 ; <nl> + : : InterlockedIncrement ( & dummy ) ; <nl> + SleepMilliseconds ( random_ . Generate ( 30 ) ) ; <nl> + : : InterlockedIncrement ( & dummy ) ; <nl> + # else <nl> + # error " Memory barrier not implemented on this platform . " <nl> + # endif / / GTEST_HAS_PTHREAD <nl> } <nl> value_ = temp + 1 ; <nl> } <nl> TEST ( ThreadLocalTest , ParameterizedConstructorSetsDefault ) { <nl> EXPECT_STREQ ( " foo " , result . c_str ( ) ) ; <nl> } <nl> <nl> + / / Keeps track of whether of destructors being called on instances of <nl> + / / DestructorTracker . On Windows , waits for the destructor call reports . <nl> + class DestructorCall { <nl> + public : <nl> + DestructorCall ( ) { <nl> + invoked_ = false ; <nl> + # if GTEST_OS_WINDOWS <nl> + wait_event_ . Reset ( : : CreateEvent ( NULL , TRUE , FALSE , NULL ) ) ; <nl> + GTEST_CHECK_ ( wait_event_ . Get ( ) ! = NULL ) ; <nl> + # endif <nl> + } <nl> + <nl> + bool CheckDestroyed ( ) const { <nl> + # if GTEST_OS_WINDOWS <nl> + if ( : : WaitForSingleObject ( wait_event_ . Get ( ) , 1000 ) ! = WAIT_OBJECT_0 ) <nl> + return false ; <nl> + # endif <nl> + return invoked_ ; <nl> + } <nl> + <nl> + void ReportDestroyed ( ) { <nl> + invoked_ = true ; <nl> + # if GTEST_OS_WINDOWS <nl> + : : SetEvent ( wait_event_ . Get ( ) ) ; <nl> + # endif <nl> + } <nl> + <nl> + static std : : vector < DestructorCall * > & List ( ) { return * list_ ; } <nl> + <nl> + static void ResetList ( ) { <nl> + for ( size_t i = 0 ; i < list_ - > size ( ) ; + + i ) { <nl> + delete list_ - > at ( i ) ; <nl> + } <nl> + list_ - > clear ( ) ; <nl> + } <nl> + <nl> + private : <nl> + bool invoked_ ; <nl> + # if GTEST_OS_WINDOWS <nl> + AutoHandle wait_event_ ; <nl> + # endif <nl> + static std : : vector < DestructorCall * > * const list_ ; <nl> + <nl> + GTEST_DISALLOW_COPY_AND_ASSIGN_ ( DestructorCall ) ; <nl> + } ; <nl> + <nl> + std : : vector < DestructorCall * > * const DestructorCall : : list_ = <nl> + new std : : vector < DestructorCall * > ; <nl> + <nl> / / DestructorTracker keeps track of whether its instances have been <nl> / / destroyed . 
<nl> - static std : : vector < bool > g_destroyed ; <nl> - <nl> class DestructorTracker { <nl> public : <nl> DestructorTracker ( ) : index_ ( GetNewIndex ( ) ) { } <nl> DestructorTracker ( const DestructorTracker & / * rhs * / ) <nl> : index_ ( GetNewIndex ( ) ) { } <nl> ~ DestructorTracker ( ) { <nl> - / / We never access g_destroyed concurrently , so we don ' t need to <nl> - / / protect the write operation under a mutex . <nl> - g_destroyed [ index_ ] = true ; <nl> + / / We never access DestructorCall : : List ( ) concurrently , so we don ' t need <nl> + / / to protect this acccess with a mutex . <nl> + DestructorCall : : List ( ) [ index_ ] - > ReportDestroyed ( ) ; <nl> } <nl> <nl> private : <nl> static int GetNewIndex ( ) { <nl> - g_destroyed . push_back ( false ) ; <nl> - return g_destroyed . size ( ) - 1 ; <nl> + DestructorCall : : List ( ) . push_back ( new DestructorCall ) ; <nl> + return DestructorCall : : List ( ) . size ( ) - 1 ; <nl> } <nl> const int index_ ; <nl> + <nl> + GTEST_DISALLOW_ASSIGN_ ( DestructorTracker ) ; <nl> } ; <nl> <nl> typedef ThreadLocal < DestructorTracker > * ThreadParam ; <nl> void CallThreadLocalGet ( ThreadParam thread_local_param ) { <nl> / / Tests that when a ThreadLocal object dies in a thread , it destroys <nl> / / the managed object for that thread . <nl> TEST ( ThreadLocalTest , DestroysManagedObjectForOwnThreadWhenDying ) { <nl> - g_destroyed . clear ( ) ; <nl> + DestructorCall : : ResetList ( ) ; <nl> <nl> { <nl> / / The next line default constructs a DestructorTracker object as <nl> / / the default value of objects managed by thread_local_tracker . <nl> ThreadLocal < DestructorTracker > thread_local_tracker ; <nl> - ASSERT_EQ ( 1U , g_destroyed . size ( ) ) ; <nl> - ASSERT_FALSE ( g_destroyed [ 0 ] ) ; <nl> + ASSERT_EQ ( 1U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + ASSERT_FALSE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> <nl> / / This creates another DestructorTracker object for the main thread . <nl> thread_local_tracker . get ( ) ; <nl> - ASSERT_EQ ( 2U , g_destroyed . size ( ) ) ; <nl> - ASSERT_FALSE ( g_destroyed [ 0 ] ) ; <nl> - ASSERT_FALSE ( g_destroyed [ 1 ] ) ; <nl> + ASSERT_EQ ( 2U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + ASSERT_FALSE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> + ASSERT_FALSE ( DestructorCall : : List ( ) [ 1 ] - > CheckDestroyed ( ) ) ; <nl> } <nl> <nl> / / Now thread_local_tracker has died . It should have destroyed both the <nl> / / default value shared by all threads and the value for the main <nl> / / thread . <nl> - ASSERT_EQ ( 2U , g_destroyed . size ( ) ) ; <nl> - EXPECT_TRUE ( g_destroyed [ 0 ] ) ; <nl> - EXPECT_TRUE ( g_destroyed [ 1 ] ) ; <nl> + ASSERT_EQ ( 2U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + EXPECT_TRUE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> + EXPECT_TRUE ( DestructorCall : : List ( ) [ 1 ] - > CheckDestroyed ( ) ) ; <nl> <nl> - g_destroyed . clear ( ) ; <nl> + DestructorCall : : ResetList ( ) ; <nl> } <nl> <nl> / / Tests that when a thread exits , the thread - local object for that <nl> / / thread is destroyed . <nl> TEST ( ThreadLocalTest , DestroysManagedObjectAtThreadExit ) { <nl> - g_destroyed . clear ( ) ; <nl> + DestructorCall : : ResetList ( ) ; <nl> <nl> { <nl> / / The next line default constructs a DestructorTracker object as <nl> / / the default value of objects managed by thread_local_tracker . 
<nl> ThreadLocal < DestructorTracker > thread_local_tracker ; <nl> - ASSERT_EQ ( 1U , g_destroyed . size ( ) ) ; <nl> - ASSERT_FALSE ( g_destroyed [ 0 ] ) ; <nl> + ASSERT_EQ ( 1U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + ASSERT_FALSE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> <nl> / / This creates another DestructorTracker object in the new thread . <nl> ThreadWithParam < ThreadParam > thread ( <nl> & CallThreadLocalGet , & thread_local_tracker , NULL ) ; <nl> thread . Join ( ) ; <nl> <nl> - / / Now the new thread has exited . The per - thread object for it <nl> - / / should have been destroyed . <nl> - ASSERT_EQ ( 2U , g_destroyed . size ( ) ) ; <nl> - ASSERT_FALSE ( g_destroyed [ 0 ] ) ; <nl> - ASSERT_TRUE ( g_destroyed [ 1 ] ) ; <nl> + / / The thread has exited , and we should have another DestroyedTracker <nl> + / / instance created for it . But it may not have been destroyed yet . <nl> + / / The instance for the main thread should still persist . <nl> + ASSERT_EQ ( 2U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + ASSERT_FALSE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> } <nl> <nl> - / / Now thread_local_tracker has died . The default value should have been <nl> - / / destroyed too . <nl> - ASSERT_EQ ( 2U , g_destroyed . size ( ) ) ; <nl> - EXPECT_TRUE ( g_destroyed [ 0 ] ) ; <nl> - EXPECT_TRUE ( g_destroyed [ 1 ] ) ; <nl> + / / The thread has exited and thread_local_tracker has died . The default <nl> + / / value should have been destroyed too . <nl> + ASSERT_EQ ( 2U , DestructorCall : : List ( ) . size ( ) ) ; <nl> + EXPECT_TRUE ( DestructorCall : : List ( ) [ 0 ] - > CheckDestroyed ( ) ) ; <nl> + EXPECT_TRUE ( DestructorCall : : List ( ) [ 1 ] - > CheckDestroyed ( ) ) ; <nl> <nl> - g_destroyed . clear ( ) ; <nl> + DestructorCall : : ResetList ( ) ; <nl> } <nl> <nl> TEST ( ThreadLocalTest , ThreadLocalMutationsAffectOnlyCurrentThread ) { <nl> TEST ( ThreadLocalTest , ThreadLocalMutationsAffectOnlyCurrentThread ) { <nl> <nl> # endif / / GTEST_IS_THREADSAFE <nl> <nl> + # if GTEST_OS_WINDOWS <nl> + TEST ( WindowsTypesTest , HANDLEIsVoidStar ) { <nl> + StaticAssertTypeEq < HANDLE , void * > ( ) ; <nl> + } <nl> + <nl> + TEST ( WindowsTypesTest , CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION ) { <nl> + StaticAssertTypeEq < CRITICAL_SECTION , _RTL_CRITICAL_SECTION > ( ) ; <nl> + } <nl> + # endif / / GTEST_OS_WINDOWS <nl> + <nl> } / / namespace internal <nl> } / / namespace testing <nl> mmm a / test / gtest_output_test . py <nl> ppp b / test / gtest_output_test . py <nl> def GetOutputOfAllCommands ( ) : <nl> <nl> CAN_GENERATE_GOLDEN_FILE = ( SUPPORTS_DEATH_TESTS and <nl> SUPPORTS_TYPED_TESTS and <nl> - SUPPORTS_THREADS ) <nl> - <nl> + SUPPORTS_THREADS and <nl> + not IS_WINDOWS ) <nl> <nl> class GTestOutputTest ( gtest_test_utils . TestCase ) : <nl> def RemoveUnsupportedTests ( self , test_output ) : <nl> | Implement threading support for gtest on Windows . | google/googletest | a6340420b9cee27f77c5b91bea807121914a5831 | 2014-03-24T21:58:25Z |
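The googletest commit above supplies Windows-native versions of the internal threading helpers (AutoHandle, Notification, Mutex, ThreadLocal, ThreadWithParam). A minimal sketch of how those pieces combine, using the interfaces declared in the diff: CountingFunc, g_counter and RunWorker are invented names for this illustration, and per the header comments these classes are only meant for testing Google Test's own constructs, not for user tests.

    #include "gtest/internal/gtest-port.h"

    using testing::internal::Notification;
    using testing::internal::ThreadLocal;
    using testing::internal::ThreadWithParam;

    // Every thread that touches g_counter gets its own copy, defaulted to 0.
    static ThreadLocal<int> g_counter(0);

    void CountingFunc(int increment) {
      // set()/get() operate on the calling thread's slot only.
      g_counter.set(g_counter.get() + increment);
    }

    void RunWorker() {
      Notification can_start;
      // The new thread is held back until can_start is notified.
      ThreadWithParam<int> worker(&CountingFunc, 5, &can_start);
      can_start.Notify();
      worker.Join();  // on the Windows path, WaitForSingleObject on the AutoHandle-owned thread
      // The main thread's slot is untouched: g_counter.get() still returns 0 here.
    }

On Windows the per-thread slots are cleaned up by the watcher thread that ThreadLocalRegistryImpl::StartWatcherThreadFor() spawns, which is why the revised DestroysManagedObjectAtThreadExit test above can only assert that a second tracker entry exists rather than that it has already been destroyed.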
mmm a / src / bittorrent . cpp <nl> ppp b / src / bittorrent . cpp <nl> bool bittorrent : : pauseTorrent ( QString hash ) { <nl> change = true ; <nl> qDebug ( " Torrent paused successfully " ) ; <nl> } else { <nl> - qDebug ( " Could not pause torrent , invalid or already paused . " ) ; <nl> + if ( ! h . is_valid ( ) ) { <nl> + qDebug ( " Could not pause torrent % s , reason : invalid " , ( const char * ) hash . toUtf8 ( ) ) ; <nl> + } else { <nl> + qDebug ( " Could not pause torrent % s , reason : already paused " , ( const char * ) hash . toUtf8 ( ) ) ; <nl> + } <nl> } <nl> / / Create . paused file if necessary <nl> if ( ! QFile : : exists ( misc : : qBittorrentPath ( ) + " BT_backup " + QDir : : separator ( ) + hash + " . paused " ) ) { <nl> mmm a / src / deleteThread . h <nl> ppp b / src / deleteThread . h <nl> class deleteThread : public QThread { <nl> mutex . unlock ( ) ; <nl> subDeleteThread * st = new subDeleteThread ( 0 , path ) ; <nl> subThreads < < st ; <nl> - connect ( st , SIGNAL ( deletionSuccessST ( subDownloadThread * , QString , QString ) ) , this , SLOT ( propagateDeletionSuccess ( subDeleteThread * , QString ) ) ) ; <nl> - connect ( st , SIGNAL ( deletionFailureST ( subDownloadThread * , QString , QString ) ) , this , SLOT ( propagateDeletionFailure ( subDeleteThread * , QString ) ) ) ; <nl> + connect ( st , SIGNAL ( deletionSuccessST ( subDownloadThread * , QString ) ) , this , SLOT ( propagateDeletionSuccess ( subDeleteThread * , QString ) ) ) ; <nl> + connect ( st , SIGNAL ( deletionFailureST ( subDownloadThread * , QString ) ) , this , SLOT ( propagateDeletionFailure ( subDeleteThread * , QString ) ) ) ; <nl> st - > start ( ) ; <nl> } else { <nl> condition . wait ( & mutex ) ; <nl> | - Fixed some slots connects in new deleteThread | qbittorrent/qBittorrent | 58dc75fbcfc7812c8c43a0887e8af19f04b48024 | 2007-07-31T14:28:17Z |
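The one-line qBittorrent fix above matters because Qt's string-based connect() resolves the SIGNAL()/SLOT() text against declared signatures at runtime; an argument list that does not correspond to an existing signal means the connection is never made and only a console warning is emitted. A hypothetical sketch of that failure mode (Worker, Owner and their members are invented for the example, and the usual moc processing for Q_OBJECT classes is assumed):

    #include <QObject>
    #include <QString>

    class Worker : public QObject {
        Q_OBJECT
    signals:
        void finished(Worker *who, QString path);   // declared with two parameters
    };

    class Owner : public QObject {
        Q_OBJECT
    public slots:
        void onFinished(Worker *who, QString path) { /* react to the result */ }
    public:
        void hookUp(Worker *worker) {
            // Matches the declaration: the connection is established.
            connect(worker, SIGNAL(finished(Worker*,QString)),
                    this,   SLOT(onFinished(Worker*,QString)));

            // One parameter too many: no such signal exists, so connect() returns
            // false and merely prints a "No such signal" warning at runtime.
            // This is the kind of mismatch the deleteThread.h change removes.
            connect(worker, SIGNAL(finished(Worker*,QString,QString)),
                    this,   SLOT(onFinished(Worker*,QString)));
        }
    };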
mmm a / editor / editor_file_system . cpp <nl> ppp b / editor / editor_file_system . cpp <nl> bool EditorFileSystem : : _update_scan_actions ( ) { <nl> bool fs_changed = false ; <nl> <nl> Vector < String > reimports ; <nl> + Vector < String > reloads ; <nl> <nl> for ( List < ItemAction > : : Element * E = scan_actions . front ( ) ; E ; E = E - > next ( ) ) { <nl> <nl> bool EditorFileSystem : : _update_scan_actions ( ) { <nl> <nl> fs_changed = true ; <nl> } break ; <nl> + case ItemAction : : ACTION_FILE_RELOAD : { <nl> + <nl> + int idx = ia . dir - > find_file_index ( ia . file ) ; <nl> + ERR_CONTINUE ( idx = = - 1 ) ; <nl> + String full_path = ia . dir - > get_file_path ( idx ) ; <nl> + <nl> + reloads . push_back ( full_path ) ; <nl> + <nl> + } break ; <nl> } <nl> } <nl> <nl> if ( reimports . size ( ) ) { <nl> reimport_files ( reimports ) ; <nl> } <nl> + <nl> + if ( reloads . size ( ) ) { <nl> + emit_signal ( " resources_reload " , reloads ) ; <nl> + } <nl> scan_actions . clear ( ) ; <nl> <nl> return fs_changed ; <nl> void EditorFileSystem : : _scan_fs_changes ( EditorFileSystemDirectory * p_dir , const <nl> continue ; <nl> } <nl> <nl> + String path = cd . plus_file ( p_dir - > files [ i ] - > file ) ; <nl> + <nl> if ( import_extensions . has ( p_dir - > files [ i ] - > file . get_extension ( ) . to_lower ( ) ) ) { <nl> / / check here if file must be imported or not <nl> <nl> - String path = cd . plus_file ( p_dir - > files [ i ] - > file ) ; <nl> - <nl> uint64_t mt = FileAccess : : get_modified_time ( path ) ; <nl> <nl> bool reimport = false ; <nl> void EditorFileSystem : : _scan_fs_changes ( EditorFileSystemDirectory * p_dir , const <nl> ia . file = p_dir - > files [ i ] - > file ; <nl> scan_actions . push_back ( ia ) ; <nl> } <nl> + } else if ( ResourceCache : : has ( path ) ) { / / test for potential reload <nl> + <nl> + uint64_t mt = FileAccess : : get_modified_time ( path ) ; <nl> + <nl> + if ( mt ! = p_dir - > files [ i ] - > modified_time ) { <nl> + <nl> + p_dir - > files [ i ] - > modified_time = mt ; / / save new time , but test for reload <nl> + <nl> + ItemAction ia ; <nl> + ia . action = ItemAction : : ACTION_FILE_RELOAD ; <nl> + ia . dir = p_dir ; <nl> + ia . file = p_dir - > files [ i ] - > file ; <nl> + scan_actions . push_back ( ia ) ; <nl> + } <nl> } <nl> } <nl> <nl> void EditorFileSystem : : _bind_methods ( ) { <nl> ADD_SIGNAL ( MethodInfo ( " filesystem_changed " ) ) ; <nl> ADD_SIGNAL ( MethodInfo ( " sources_changed " , PropertyInfo ( Variant : : BOOL , " exist " ) ) ) ; <nl> ADD_SIGNAL ( MethodInfo ( " resources_reimported " , PropertyInfo ( Variant : : POOL_STRING_ARRAY , " resources " ) ) ) ; <nl> + ADD_SIGNAL ( MethodInfo ( " resources_reload " , PropertyInfo ( Variant : : POOL_STRING_ARRAY , " resources " ) ) ) ; <nl> } <nl> <nl> void EditorFileSystem : : _update_extensions ( ) { <nl> mmm a / editor / editor_file_system . h <nl> ppp b / editor / editor_file_system . h <nl> class EditorFileSystem : public Node { <nl> ACTION_DIR_REMOVE , <nl> ACTION_FILE_ADD , <nl> ACTION_FILE_REMOVE , <nl> - ACTION_FILE_TEST_REIMPORT <nl> + ACTION_FILE_TEST_REIMPORT , <nl> + ACTION_FILE_RELOAD <nl> } ; <nl> <nl> Action action ; <nl> mmm a / editor / editor_node . cpp <nl> ppp b / editor / editor_node . cpp <nl> void EditorNode : : _on_plugin_ready ( Object * p_script , const String & p_activate_nam <nl> push_item ( script . 
operator - > ( ) ) ; <nl> } <nl> <nl> - void EditorNode : : _fs_changed ( ) { <nl> + void EditorNode : : _resources_changed ( const PoolVector < String > & p_resources ) { <nl> <nl> - for ( Set < FileDialog * > : : Element * E = file_dialogs . front ( ) ; E ; E = E - > next ( ) ) { <nl> + List < Ref < Resource > > changed ; <nl> <nl> - E - > get ( ) - > invalidate ( ) ; <nl> - } <nl> + int rc = p_resources . size ( ) ; <nl> + for ( int i = 0 ; i < rc ; i + + ) { <nl> <nl> - for ( Set < EditorFileDialog * > : : Element * E = editor_file_dialogs . front ( ) ; E ; E = E - > next ( ) ) { <nl> + Ref < Resource > res ( ResourceCache : : get ( p_resources . get ( i ) ) ) ; <nl> + if ( res . is_null ( ) ) { <nl> + continue ; <nl> + } <nl> <nl> - E - > get ( ) - > invalidate ( ) ; <nl> - } <nl> + if ( ! res - > editor_can_reload_from_file ( ) ) <nl> + continue ; <nl> + if ( ! res - > get_path ( ) . is_resource_file ( ) & & ! res - > get_path ( ) . is_abs_path ( ) ) <nl> + continue ; <nl> + if ( ! FileAccess : : exists ( res - > get_path ( ) ) ) <nl> + continue ; <nl> <nl> - { <nl> - / / reload changed resources <nl> - List < Ref < Resource > > changed ; <nl> + if ( res - > get_import_path ( ) ! = String ( ) ) { <nl> + / / this is an imported resource , will be reloaded if reimported via the _resources_reimported ( ) callback <nl> + continue ; <nl> + } <nl> <nl> - List < Ref < Resource > > cached ; <nl> - ResourceCache : : get_cached_resources ( & cached ) ; <nl> - / / FIXME : This should be done in a thread . <nl> - for ( List < Ref < Resource > > : : Element * E = cached . front ( ) ; E ; E = E - > next ( ) ) { <nl> + changed . push_back ( res ) ; <nl> + } <nl> <nl> - if ( ! E - > get ( ) - > editor_can_reload_from_file ( ) ) <nl> - continue ; <nl> - if ( ! E - > get ( ) - > get_path ( ) . is_resource_file ( ) & & ! E - > get ( ) - > get_path ( ) . is_abs_path ( ) ) <nl> - continue ; <nl> - if ( ! FileAccess : : exists ( E - > get ( ) - > get_path ( ) ) ) <nl> - continue ; <nl> + if ( changed . size ( ) ) { <nl> + for ( List < Ref < Resource > > : : Element * E = changed . front ( ) ; E ; E = E - > next ( ) ) { <nl> + E - > get ( ) - > reload_from_file ( ) ; <nl> + } <nl> + } <nl> + } <nl> <nl> - if ( E - > get ( ) - > get_import_path ( ) ! = String ( ) ) { <nl> - / / this is an imported resource , will be reloaded if reimported via the _resources_reimported ( ) callback <nl> - continue ; <nl> - } <nl> + void EditorNode : : _fs_changed ( ) { <nl> <nl> - uint64_t mt = FileAccess : : get_modified_time ( E - > get ( ) - > get_path ( ) ) ; <nl> + for ( Set < FileDialog * > : : Element * E = file_dialogs . front ( ) ; E ; E = E - > next ( ) ) { <nl> <nl> - if ( mt ! = E - > get ( ) - > get_last_modified_time ( ) ) { <nl> - changed . push_back ( E - > get ( ) ) ; <nl> - } <nl> - } <nl> + E - > get ( ) - > invalidate ( ) ; <nl> + } <nl> <nl> - if ( changed . size ( ) ) { <nl> - for ( List < Ref < Resource > > : : Element * E = changed . front ( ) ; E ; E = E - > next ( ) ) { <nl> - E - > get ( ) - > reload_from_file ( ) ; <nl> - } <nl> - } <nl> + for ( Set < EditorFileDialog * > : : Element * E = editor_file_dialogs . 
front ( ) ; E ; E = E - > next ( ) ) { <nl> + <nl> + E - > get ( ) - > invalidate ( ) ; <nl> } <nl> <nl> _mark_unsaved_scenes ( ) ; <nl> void EditorNode : : _bind_methods ( ) { <nl> <nl> ClassDB : : bind_method ( D_METHOD ( " _video_driver_selected " ) , & EditorNode : : _video_driver_selected ) ; <nl> <nl> + ClassDB : : bind_method ( D_METHOD ( " _resources_changed " ) , & EditorNode : : _resources_changed ) ; <nl> + <nl> ADD_SIGNAL ( MethodInfo ( " play_pressed " ) ) ; <nl> ADD_SIGNAL ( MethodInfo ( " pause_pressed " ) ) ; <nl> ADD_SIGNAL ( MethodInfo ( " stop_pressed " ) ) ; <nl> EditorNode : : EditorNode ( ) { <nl> EditorFileSystem : : get_singleton ( ) - > connect ( " sources_changed " , this , " _sources_changed " ) ; <nl> EditorFileSystem : : get_singleton ( ) - > connect ( " filesystem_changed " , this , " _fs_changed " ) ; <nl> EditorFileSystem : : get_singleton ( ) - > connect ( " resources_reimported " , this , " _resources_reimported " ) ; <nl> + EditorFileSystem : : get_singleton ( ) - > connect ( " resources_reload " , this , " _resources_changed " ) ; <nl> <nl> _build_icon_type_cache ( ) ; <nl> <nl> mmm a / editor / editor_node . h <nl> ppp b / editor / editor_node . h <nl> class EditorNode : public Node { <nl> static void _resource_saved ( RES p_resource , const String & p_path ) ; <nl> static void _resource_loaded ( RES p_resource , const String & p_path ) ; <nl> <nl> + void _resources_changed ( const PoolVector < String > & p_resources ) ; <nl> + <nl> protected : <nl> void _notification ( int p_what ) ; <nl> static void _bind_methods ( ) ; <nl> | Reworked how non - imported resources are reloaded on change , fixes | godotengine/godot | da0ec37aa9c6fd80becc9d8ffa8fd064445d8023 | 2018-11-21T00:48:48Z |
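The Godot change above routes on-disk modifications of cached, non-imported resources through a new "resources_reload" signal on EditorFileSystem, instead of walking the entire ResourceCache and comparing modification times on every filesystem event. A sketch of how another editor component could subscribe to it; MyDock and _on_resources_reload are invented names, while the signal name, the PoolVector<String> payload and the ResourceCache lookup mirror the EditorNode wiring in the diff:

    void MyDock::_bind_methods() {
        // The target method must be bound for the string-based connect() to reach it.
        ClassDB::bind_method(D_METHOD("_on_resources_reload"), &MyDock::_on_resources_reload);
    }

    MyDock::MyDock() {
        // Assumes EditorFileSystem::get_singleton() is already valid at this point,
        // as it is when EditorNode connects its own _resources_changed callback.
        EditorFileSystem::get_singleton()->connect("resources_reload", this, "_on_resources_reload");
    }

    void MyDock::_on_resources_reload(const PoolVector<String> &p_resources) {
        for (int i = 0; i < p_resources.size(); i++) {
            Ref<Resource> res(ResourceCache::get(p_resources.get(i)));
            if (res.is_null())
                continue;
            // The file changed on disk but was not reimported; refresh any cached
            // preview or editor state that depends on res here.
        }
    }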
mmm a / src / EditTableDialog . cpp <nl> ppp b / src / EditTableDialog . cpp <nl> EditTableDialog : : EditTableDialog ( DBBrowserDB & db , const sqlb : : ObjectIdentifier & <nl> connect ( ui - > actionAddCheckConstraint , & QAction : : triggered , [ this ] ( ) { addConstraint ( sqlb : : Constraint : : CheckConstraintType ) ; } ) ; <nl> ui - > buttonAddConstraint - > setMenu ( constraint_menu ) ; <nl> <nl> + / / Get list of all collations <nl> + db . executeSQL ( " PRAGMA collation_list ; " , false , true , [ this ] ( int column_count , QStringList columns , QStringList ) - > bool { <nl> + if ( column_count > = 2 ) <nl> + m_collationList . push_back ( columns . at ( 1 ) ) ; <nl> + return false ; <nl> + } ) ; <nl> + if ( ! m_collationList . contains ( " " ) ) <nl> + m_collationList . push_back ( " " ) ; <nl> + m_collationList . sort ( ) ; <nl> + <nl> / / Editing an existing table ? <nl> if ( m_bNewTable = = false ) <nl> { <nl> void EditTableDialog : : populateFields ( ) <nl> } <nl> typeBox - > setCurrentIndex ( index ) ; <nl> typeBox - > installEventFilter ( this ) ; <nl> - connect ( typeBox , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypes ( ) ) ) ; <nl> + connect ( typeBox , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypeAndCollation ( ) ) ) ; <nl> ui - > treeWidget - > setItemWidget ( tbitem , kType , typeBox ) ; <nl> <nl> tbitem - > setCheckState ( kNotNull , f . notnull ( ) ? Qt : : Checked : Qt : : Unchecked ) ; <nl> void EditTableDialog : : populateFields ( ) <nl> <nl> tbitem - > setText ( kCheck , QString : : fromStdString ( f . check ( ) ) ) ; <nl> <nl> + QComboBox * collationBox = new QComboBox ( ui - > treeWidget ) ; <nl> + collationBox - > setProperty ( " column " , QString : : fromStdString ( f . name ( ) ) ) ; <nl> + collationBox - > addItems ( m_collationList ) ; <nl> + index = collationBox - > findText ( QString : : fromStdString ( f . collation ( ) ) , Qt : : MatchCaseSensitive ) ; <nl> + if ( index = = - 1 ) <nl> + { <nl> + / / some non - existing collation <nl> + collationBox - > addItem ( QString : : fromStdString ( f . collation ( ) ) ) ; <nl> + index = collationBox - > count ( ) - 1 ; <nl> + } <nl> + collationBox - > setCurrentIndex ( index ) ; <nl> + collationBox - > installEventFilter ( this ) ; <nl> + connect ( collationBox , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypeAndCollation ( ) ) ) ; <nl> + ui - > treeWidget - > setItemWidget ( tbitem , kCollation , collationBox ) ; <nl> + <nl> auto fk = std : : dynamic_pointer_cast < sqlb : : ForeignKeyClause > ( m_table . constraint ( { f . name ( ) } , sqlb : : Constraint : : ForeignKeyConstraintType ) ) ; <nl> if ( fk ) <nl> tbitem - > setText ( kForeignKey , QString : : fromStdString ( fk - > toString ( ) ) ) ; <nl> void EditTableDialog : : checkInput ( ) <nl> ui - > buttonBox - > button ( QDialogButtonBox : : Ok ) - > setEnabled ( valid ) ; <nl> } <nl> <nl> - void EditTableDialog : : updateTypes ( QObject * object ) <nl> + void EditTableDialog : : updateTypeAndCollation ( QObject * object ) <nl> { <nl> - QComboBox * typeBox = qobject_cast < QComboBox * > ( object ) ; <nl> - if ( typeBox ) <nl> + / / Get sender combo box and retrieve field name from it <nl> + QComboBox * combo = qobject_cast < QComboBox * > ( object ) ; <nl> + if ( ! combo ) <nl> + return ; <nl> + QString column = combo - > property ( " column " ) . 
toString ( ) ; <nl> + <nl> + / / Get type * and * collation combo box for this field <nl> + auto item = ui - > treeWidget - > findItems ( column , Qt : : MatchExactly , kName ) ; <nl> + if ( item . size ( ) ! = 1 ) <nl> + return ; <nl> + QComboBox * typeBox = qobject_cast < QComboBox * > ( ui - > treeWidget - > itemWidget ( item . front ( ) , kType ) ) ; <nl> + QComboBox * collationBox = qobject_cast < QComboBox * > ( ui - > treeWidget - > itemWidget ( item . front ( ) , kCollation ) ) ; <nl> + <nl> + / / Update table <nl> + if ( typeBox & & collationBox ) <nl> { <nl> QString type = typeBox - > currentText ( ) ; <nl> - std : : string column = typeBox - > property ( " column " ) . toString ( ) . toStdString ( ) ; <nl> + QString collation = collationBox - > currentText ( ) ; <nl> <nl> for ( size_t index = 0 ; index < m_table . fields . size ( ) ; + + index ) <nl> { <nl> - if ( m_table . fields . at ( index ) . name ( ) = = column ) <nl> + if ( m_table . fields . at ( index ) . name ( ) = = column . toStdString ( ) ) <nl> { <nl> m_table . fields . at ( index ) . setType ( type . toStdString ( ) ) ; <nl> + m_table . fields . at ( index ) . setCollation ( collation . toStdString ( ) ) ; <nl> break ; <nl> } <nl> } <nl> void EditTableDialog : : updateTypes ( QObject * object ) <nl> } <nl> } <nl> <nl> - void EditTableDialog : : updateTypes ( ) <nl> + void EditTableDialog : : updateTypeAndCollation ( ) <nl> { <nl> - updateTypes ( sender ( ) ) ; <nl> + updateTypeAndCollation ( sender ( ) ) ; <nl> } <nl> <nl> bool EditTableDialog : : eventFilter ( QObject * object , QEvent * event ) <nl> { <nl> if ( event - > type ( ) = = QEvent : : FocusOut ) <nl> { <nl> - updateTypes ( object ) ; <nl> + updateTypeAndCollation ( object ) ; <nl> } <nl> return false ; <nl> } <nl> void EditTableDialog : : fieldItemChanged ( QTreeWidgetItem * item , int column ) <nl> populateConstraints ( ) ; <nl> } break ; <nl> case kType : <nl> - / / see updateTypes ( ) SLOT <nl> + case kCollation : <nl> + / / see updateTypeAndCollation ( ) SLOT <nl> break ; <nl> case kPrimaryKey : <nl> { <nl> void EditTableDialog : : addField ( ) <nl> <nl> ui - > treeWidget - > setItemWidget ( tbitem , kType , typeBox ) ; <nl> typeBox - > installEventFilter ( this ) ; <nl> - connect ( typeBox , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypes ( ) ) ) ; <nl> + connect ( typeBox , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypeAndCollation ( ) ) ) ; <nl> <nl> tbitem - > setCheckState ( kNotNull , Qt : : Unchecked ) ; <nl> tbitem - > setCheckState ( kPrimaryKey , Qt : : Unchecked ) ; <nl> void EditTableDialog : : moveCurrentField ( bool down ) <nl> int currentRow = ui - > treeWidget - > currentIndex ( ) . row ( ) ; <nl> int newRow = currentRow + ( down ? 
1 : - 1 ) ; <nl> <nl> - / / Save the combobox first by making a copy <nl> - QComboBox * oldCombo = qobject_cast < QComboBox * > ( ui - > treeWidget - > itemWidget ( ui - > treeWidget - > topLevelItem ( currentRow ) , kType ) ) ; <nl> - QComboBox * newCombo = new QComboBox ( ui - > treeWidget ) ; <nl> - newCombo - > setProperty ( " column " , oldCombo - > property ( " column " ) ) ; <nl> - newCombo - > installEventFilter ( this ) ; <nl> - connect ( newCombo , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypes ( ) ) ) ; <nl> - newCombo - > setEditable ( true ) ; <nl> - for ( int i = 0 ; i < oldCombo - > count ( ) ; + + i ) <nl> - newCombo - > addItem ( oldCombo - > itemText ( i ) ) ; <nl> - newCombo - > setCurrentIndex ( oldCombo - > currentIndex ( ) ) ; <nl> + / / Save the comboboxes first by making copies <nl> + QComboBox * newCombo [ 2 ] ; <nl> + for ( int c = 0 ; c < 2 ; c + + ) <nl> + { <nl> + int column = ( c = = 0 ? kType : kCollation ) ; <nl> + <nl> + QComboBox * oldCombo = qobject_cast < QComboBox * > ( ui - > treeWidget - > itemWidget ( ui - > treeWidget - > topLevelItem ( currentRow ) , column ) ) ; <nl> + newCombo [ c ] = new QComboBox ( ui - > treeWidget ) ; <nl> + newCombo [ c ] - > setProperty ( " column " , oldCombo - > property ( " column " ) ) ; <nl> + newCombo [ c ] - > installEventFilter ( this ) ; <nl> + connect ( newCombo [ c ] , SIGNAL ( currentIndexChanged ( int ) ) , this , SLOT ( updateTypeAndCollation ( ) ) ) ; <nl> + newCombo [ c ] - > setEditable ( oldCombo - > isEditable ( ) ) ; <nl> + for ( int i = 0 ; i < oldCombo - > count ( ) ; + + i ) <nl> + newCombo [ c ] - > addItem ( oldCombo - > itemText ( i ) ) ; <nl> + newCombo [ c ] - > setCurrentIndex ( oldCombo - > currentIndex ( ) ) ; <nl> + } <nl> <nl> / / Now , just remove the item and insert it at it ' s new position , then restore the combobox <nl> QTreeWidgetItem * item = ui - > treeWidget - > takeTopLevelItem ( currentRow ) ; <nl> ui - > treeWidget - > insertTopLevelItem ( newRow , item ) ; <nl> - ui - > treeWidget - > setItemWidget ( item , kType , newCombo ) ; <nl> + ui - > treeWidget - > setItemWidget ( item , kType , newCombo [ 0 ] ) ; <nl> + ui - > treeWidget - > setItemWidget ( item , kCollation , newCombo [ 1 ] ) ; <nl> <nl> / / Select the old item at its new position <nl> ui - > treeWidget - > setCurrentIndex ( ui - > treeWidget - > currentIndex ( ) . sibling ( newRow , 0 ) ) ; <nl> mmm a / src / EditTableDialog . h <nl> ppp b / src / EditTableDialog . h <nl> class EditTableDialog : public QDialog <nl> kUnique = 5 , <nl> kDefault = 6 , <nl> kCheck = 7 , <nl> - kForeignKey = 8 <nl> + kCollation = 8 , <nl> + kForeignKey = 9 <nl> } ; <nl> <nl> enum ConstraintColumns { <nl> private slots : <nl> void checkInput ( ) ; <nl> void fieldItemChanged ( QTreeWidgetItem * item , int column ) ; <nl> void constraintItemChanged ( QTableWidgetItem * item ) ; <nl> - void updateTypes ( QObject * object ) ; <nl> + void updateTypeAndCollation ( QObject * object ) ; <nl> bool eventFilter ( QObject * object , QEvent * event ) override ; <nl> - void updateTypes ( ) ; <nl> + void updateTypeAndCollation ( ) ; <nl> void moveUp ( ) ; <nl> void moveDown ( ) ; <nl> void setWithoutRowid ( bool without_rowid ) ; <nl> private slots : <nl> sqlb : : Table m_table ; <nl> bool m_bNewTable ; <nl> QString m_sRestorePointName ; <nl> + QStringList m_collationList ; <nl> } ; <nl> <nl> # endif <nl> mmm a / src / EditTableDialog . ui <nl> ppp b / src / EditTableDialog . 
ui <nl> <nl> < string > Check constraint < / string > <nl> < / property > <nl> < / column > <nl> + < column > <nl> + < property name = " text " > <nl> + < string > Collation < / string > <nl> + < / property > <nl> + < / column > <nl> < column > <nl> < property name = " text " > <nl> < string > Foreign Key < / string > <nl> | Add column for editing collations to Edit Table dialog | sqlitebrowser/sqlitebrowser | 0768ccd1dbb608a75812512ad47891ff44bc2e8a | 2019-08-21T21:37:16Z |
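The dialog above fills its new collation combo boxes from PRAGMA collation_list, keeping column 1 (the collation name) of each result row. A self-contained sketch of the same enumeration against the raw sqlite3 C API, assuming sqlite3 is available; note that the application's own executeSQL wrapper uses a different callback convention (returning false to keep iterating) than sqlite3_exec's return value of 0:

#include <sqlite3.h>
#include <iostream>
#include <string>
#include <vector>

int main() {
    sqlite3 *db = nullptr;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;

    std::vector<std::string> collations;
    // Non-capturing lambda converts to the plain C callback sqlite3_exec expects.
    auto cb = [](void *ctx, int argc, char **argv, char **) -> int {
        auto *out = static_cast<std::vector<std::string> *>(ctx);
        if (argc >= 2 && argv[1]) out->push_back(argv[1]);  // each row is (seq, name)
        return 0;                                           // 0 = keep iterating
    };
    sqlite3_exec(db, "PRAGMA collation_list;", cb, &collations, nullptr);

    for (const auto &name : collations)
        std::cout << name << "\n";  // typically BINARY, NOCASE, RTRIM
    sqlite3_close(db);
    return 0;
}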
mmm a / tensorflow / compiler / mlir / lite / transforms / unroll_batch_matmul . cc <nl> ppp b / tensorflow / compiler / mlir / lite / transforms / unroll_batch_matmul . cc <nl> See the License for the specific language governing permissions and <nl> limitations under the License . <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * / <nl> <nl> - / / This transformation pass prepares for legalization to the TFLite dialect by <nl> - / / converting operations in TensorFlow dialect into operations that can be <nl> - / / legalized to TensorFlow Lite dialect with simple replacements . The newly <nl> - / / created operations are in the TensorFlow dialect if the operation can be <nl> - / / represented using a TensorFlow op . Otherwise , TensorFlow Lite dialect op is <nl> - / / used . For example , Conv2D in TFLite which uses OHWI data format for filters <nl> - / / is not supported in TensorFlow because TensorFlow requires filters in the <nl> - / / HWIO data format . <nl> - / / <nl> - / / Motivation to prepare for the TFLite legalization before the actual <nl> - / / legalization is to exploit constant folding opportunities in any newly <nl> - / / created ops by leveraging constant folding support for the TensorFlow ops . <nl> - / / This way TFLite can be used as a serialization format only and does not <nl> - / / require access to the TFLite runtime for optimizations as required by the <nl> - / / TFLite team . <nl> - <nl> # include " tensorflow / compiler / mlir / lite / transforms / unroll_batch_matmul . h " <nl> <nl> # include < climits > <nl> | Remove cloned prepare_tf pass description from unroll_batch_matmul pass | tensorflow/tensorflow | c923edca9d0d54360fa52a1da36d303822d63330 | 2019-09-11T22:27:28Z |
mmm a / libs / libmysqlxx / include / mysqlxx / Connection . h <nl> ppp b / libs / libmysqlxx / include / mysqlxx / Connection . h <nl> <nl> # include < mysqlxx / Query . h > <nl> <nl> # define MYSQLXX_DEFAULT_TIMEOUT 60 <nl> + # define MYSQLXX_DEFAULT_RW_TIMEOUT 1800 <nl> <nl> <nl> namespace mysqlxx <nl> class Connection : private boost : : noncopyable <nl> const char * password = 0 , <nl> unsigned port = 0 , <nl> unsigned timeout = MYSQLXX_DEFAULT_TIMEOUT , <nl> - unsigned rw_timeout = MYSQLXX_DEFAULT_TIMEOUT ) ; <nl> + unsigned rw_timeout = MYSQLXX_DEFAULT_RW_TIMEOUT ) ; <nl> <nl> / * * Конструктор - помошник . Создать соединение , считав все параметры из секции config_name конфигурации . <nl> * Можно использовать , если вы используете Poco : : Util : : Application из библиотеки Poco . <nl> class Connection : private boost : : noncopyable <nl> const char * password , <nl> unsigned port , <nl> unsigned timeout = MYSQLXX_DEFAULT_TIMEOUT , <nl> - unsigned rw_timeout = MYSQLXX_DEFAULT_TIMEOUT ) ; <nl> + unsigned rw_timeout = MYSQLXX_DEFAULT_RW_TIMEOUT ) ; <nl> <nl> void connect ( const std : : string & config_name ) <nl> { <nl> class Connection : private boost : : noncopyable <nl> unsigned rw_timeout = <nl> cfg . getInt ( config_name + " . rw_timeout " , <nl> cfg . getInt ( " mysql_rw_timeout " , <nl> - MYSQLXX_DEFAULT_TIMEOUT ) ) ; <nl> + MYSQLXX_DEFAULT_RW_TIMEOUT ) ) ; <nl> <nl> connect ( db . c_str ( ) , server . c_str ( ) , user . c_str ( ) , password . c_str ( ) , port , timeout , rw_timeout ) ; <nl> } <nl> mmm a / libs / libmysqlxx / include / mysqlxx / Pool . h <nl> ppp b / libs / libmysqlxx / include / mysqlxx / Pool . h <nl> class Pool <nl> rw_timeout = <nl> cfg . getInt ( config_name + " . rw_timeout " , <nl> cfg . getInt ( " mysql_rw_timeout " , <nl> - MYSQLXX_DEFAULT_TIMEOUT ) ) ; <nl> + MYSQLXX_DEFAULT_RW_TIMEOUT ) ) ; <nl> } <nl> <nl> / * * <nl> class Pool <nl> const std : : string & password_ = " " , <nl> unsigned port_ = 0 , <nl> unsigned connect_timeout_ = MYSQLXX_DEFAULT_TIMEOUT , <nl> - unsigned rw_timeout_ = MYSQLXX_DEFAULT_TIMEOUT , <nl> + unsigned rw_timeout_ = MYSQLXX_DEFAULT_RW_TIMEOUT , <nl> unsigned default_connections_ = MYSQLXX_POOL_DEFAULT_START_CONNECTIONS , <nl> unsigned max_connections_ = MYSQLXX_POOL_DEFAULT_MAX_CONNECTIONS ) <nl> : default_connections ( default_connections_ ) , max_connections ( max_connections_ ) , <nl> | mysqlxx : set default rw_timeout to half - hour [ # CONV - 7420 ] . | ClickHouse/ClickHouse | 8ad2255fa9d1c67314897a5d631e994b2b93f1a5 | 2013-04-24T21:50:19Z |
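The header change above gives read/write timeouts their own default (1800 s) while connect timeouts keep 60 s, and both are resolved with the precedence per-connection key, then global key, then compiled-in default. A small illustrative sketch of that layered lookup, using a plain std::map in place of Poco's configuration classes:

#include <iostream>
#include <map>
#include <string>

static constexpr unsigned kDefaultConnectTimeout = 60;   // seconds
static constexpr unsigned kDefaultRwTimeout = 1800;      // seconds

// Per-connection key wins, then the global key, then the compiled-in default.
unsigned resolve(const std::map<std::string, unsigned> &cfg,
                 const std::string &section_key,
                 const std::string &global_key,
                 unsigned fallback) {
    if (auto it = cfg.find(section_key); it != cfg.end()) return it->second;
    if (auto it = cfg.find(global_key); it != cfg.end()) return it->second;
    return fallback;
}

int main() {
    std::map<std::string, unsigned> cfg{{"mysql_rw_timeout", 600}};
    // Global key overrides the compiled default:
    std::cout << resolve(cfg, "db1.rw_timeout", "mysql_rw_timeout", kDefaultRwTimeout) << "\n";                // 600
    // Nothing configured for connects: fall back to 60 s:
    std::cout << resolve(cfg, "db1.connect_timeout", "mysql_connect_timeout", kDefaultConnectTimeout) << "\n"; // 60
    return 0;
}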
mmm a / filament / src / Texture . cpp <nl> ppp b / filament / src / Texture . cpp <nl> void FTexture : : generatePrefilterMipmap ( FEngine & engine , <nl> for ( ssize_t i = baseExp ; i > = 0 ; - - i ) { <nl> const size_t dim = 1U < < i ; <nl> const size_t level = baseExp - i ; <nl> - const float lod = saturate ( level / ( numLevels - 1 . 0 ) ) ; <nl> + const float lod = saturate ( level / ( numLevels - 1 . 0f ) ) ; <nl> const float linearRoughness = lod * lod ; <nl> <nl> Image image ; <nl> mmm a / libs / ibl / src / CubemapIBL . cpp <nl> ppp b / libs / ibl / src / CubemapIBL . cpp <nl> void CubemapIBL : : roughnessFilter ( JobSystem & js , Cubemap & dst , const std : : vector < <nl> std : : atomic_uint progress = { 0 } ; <nl> <nl> if ( linearRoughness = = 0 ) { <nl> - CubemapUtils : : process < CubemapUtils : : EmptyState > ( dst , js , [ & ] <nl> + auto scanline = [ & ] <nl> ( CubemapUtils : : EmptyState & , size_t y , Cubemap : : Face f , Cubemap : : Texel * data , size_t dim ) { <nl> if ( UTILS_UNLIKELY ( updater ) ) { <nl> size_t p = progress . fetch_add ( 1 , std : : memory_order_relaxed ) + 1 ; <nl> void CubemapIBL : : roughnessFilter ( JobSystem & js , Cubemap & dst , const std : : vector < <nl> / / FIXME : we should pick the proper LOD here and do trilinear filtering <nl> Cubemap : : writeAt ( data , cm . sampleAt ( N ) ) ; <nl> } <nl> - } ) ; <nl> + } ; <nl> + / / at least 256 pixel cubemap before we use multithreading - - the overhead of launching <nl> + / / jobs is too large compared to the work above . <nl> + if ( dst . getDimensions ( ) < = 256 ) { <nl> + CubemapUtils : : processSingleThreaded < CubemapUtils : : EmptyState > ( dst , js , std : : ref ( scanline ) ) ; <nl> + } else { <nl> + CubemapUtils : : process < CubemapUtils : : EmptyState > ( dst , js , std : : ref ( scanline ) ) ; <nl> + } <nl> return ; <nl> } <nl> <nl> - <nl> / / be careful w / the size of this structure , the smaller the better <nl> struct CacheEntry { <nl> float3 L ; <nl> void CubemapIBL : : roughnessFilter ( JobSystem & js , Cubemap & dst , const std : : vector < <nl> return lhs . brdf_NoL < rhs . brdf_NoL ; <nl> } ) ; <nl> <nl> - CubemapUtils : : process < CubemapUtils : : EmptyState > ( dst , js , <nl> - [ & ] ( CubemapUtils : : EmptyState & , size_t y , <nl> - Cubemap : : Face f , Cubemap : : Texel * data , size_t dim ) { <nl> - <nl> + auto scanline = [ & ] ( CubemapUtils : : EmptyState & , size_t y , <nl> + Cubemap : : Face f , Cubemap : : Texel * data , size_t dim ) { <nl> if ( UTILS_UNLIKELY ( updater ) ) { <nl> size_t p = progress . fetch_add ( 1 , std : : memory_order_relaxed ) + 1 ; <nl> updater ( 0 , ( float ) p / ( dim * 6 ) ) ; <nl> } <nl> - <nl> mat3 R ; <nl> const size_t numSamples = cache . size ( ) ; <nl> for ( size_t x = 0 ; x < dim ; + + x , + + data ) { <nl> void CubemapIBL : : roughnessFilter ( JobSystem & js , Cubemap & dst , const std : : vector < <nl> } <nl> Cubemap : : writeAt ( data , Cubemap : : Texel ( Li ) ) ; <nl> } <nl> - } ) ; <nl> + } ; <nl> + <nl> + / / don ' t use the jobsystem unless we have enough work per scanline - - or the overhead of <nl> + / / launching jobs will prevail . <nl> + if ( dst . getDimensions ( ) * maxNumSamples < = 256 ) { <nl> + CubemapUtils : : processSingleThreaded < CubemapUtils : : EmptyState > ( dst , js , std : : ref ( scanline ) ) ; <nl> + } else { <nl> + CubemapUtils : : process < CubemapUtils : : EmptyState > ( dst , js , std : : ref ( scanline ) ) ; <nl> + } <nl> } <nl> <nl> / * <nl> mmm a / libs / ibl / src / CubemapUtils . 
cpp <nl> ppp b / libs / ibl / src / CubemapUtils . cpp <nl> void CubemapUtils : : generateUVGrid ( JobSystem & js , Cubemap & cml , size_t gridFreque <nl> const float uvGridHDRIntensity = 5 . 0f ; <nl> size_t gridSizeX = cml . getDimensions ( ) / gridFrequencyX ; <nl> size_t gridSizeY = cml . getDimensions ( ) / gridFrequencyY ; <nl> - CubemapUtils : : process < CubemapUtils : : EmptyState > ( cml , js , <nl> - [ & ] ( CubemapUtils : : EmptyState & , <nl> + CubemapUtils : : process < EmptyState > ( cml , js , <nl> + [ & ] ( EmptyState & , <nl> size_t y , Cubemap : : Face f , Cubemap : : Texel * data , size_t dim ) { <nl> for ( size_t x = 0 ; x < dim ; + + x , + + data ) { <nl> bool grid = bool ( ( ( x / gridSizeX ) ^ ( y / gridSizeY ) ) & 1 ) ; <nl> mmm a / libs / ibl / src / CubemapUtilsImpl . h <nl> ppp b / libs / ibl / src / CubemapUtilsImpl . h <nl> <nl> <nl> # include < ibl / CubemapUtils . h > <nl> <nl> + # include < utils / compiler . h > <nl> # include < utils / JobSystem . h > <nl> <nl> namespace filament { <nl> void CubemapUtils : : process ( <nl> <nl> JobSystem : : Job * parent = js . createJob ( ) ; <nl> for ( size_t faceIndex = 0 ; faceIndex < 6 ; faceIndex + + ) { <nl> - const Cubemap : : Face f = ( Cubemap : : Face ) faceIndex ; <nl> - JobSystem : : Job * face = jobs : : createJob ( js , parent , <nl> - [ faceIndex , & states , f , & cm , & dim , & proc ] <nl> - ( utils : : JobSystem & js , utils : : JobSystem : : Job * parent ) { <nl> - STATE & s = states [ faceIndex ] ; <nl> - Image & image ( cm . getImageForFace ( f ) ) ; <nl> - <nl> - auto parallelJobTask = [ & image , & proc , & s , dim , f ] ( size_t y0 , size_t c ) { <nl> - for ( size_t y = y0 ; y < y0 + c ; y + + ) { <nl> - Cubemap : : Texel * data = <nl> - static_cast < Cubemap : : Texel * > ( image . getPixelRef ( 0 , y ) ) ; <nl> - proc ( s , y , f , data , dim ) ; <nl> - } <nl> - } ; <nl> - <nl> - if ( std : : is_same < STATE , CubemapUtils : : EmptyState > : : value ) { <nl> - auto job = jobs : : parallel_for ( js , parent , 0 , uint32_t ( dim ) , <nl> - std : : ref ( parallelJobTask ) , jobs : : CountSplitter < 64 , 8 > ( ) ) ; <nl> - <nl> - / / we need to wait here because parallelJobTask is passed by reference <nl> - js . runAndWait ( job ) ; <nl> - } else { <nl> - / / if we have a per - thread STATE , we can ' t parallel_for ( ) <nl> - parallelJobTask ( 0 , dim ) ; <nl> - } <nl> - } , std : : ref ( js ) , parent ) ; <nl> - js . run ( face ) ; <nl> + <nl> + auto perFaceJob = [ faceIndex , & states , & cm , dim , & proc ] <nl> + ( utils : : JobSystem & js , utils : : JobSystem : : Job * parent ) { <nl> + STATE & s = states [ faceIndex ] ; <nl> + Image & image ( cm . getImageForFace ( ( Cubemap : : Face ) faceIndex ) ) ; <nl> + <nl> + / / here we must limit how much we capture so we can use this closure <nl> + / / by value . <nl> + auto parallelJobTask = [ & s , & image , & proc , dim = uint16_t ( dim ) , <nl> + faceIndex = uint8_t ( faceIndex ) ] ( size_t y0 , size_t c ) { <nl> + for ( size_t y = y0 ; y < y0 + c ; y + + ) { <nl> + Cubemap : : Texel * data = <nl> + static_cast < Cubemap : : Texel * > ( image . 
getPixelRef ( 0 , y ) ) ; <nl> + proc ( s , y , ( Cubemap : : Face ) faceIndex , data , dim ) ; <nl> + } <nl> + } ; <nl> + <nl> + constexpr bool isStateLess = std : : is_same < STATE , CubemapUtils : : EmptyState > : : value ; <nl> + if ( UTILS_LIKELY ( isStateLess ) ) { <nl> + / / create the job , copying it by value <nl> + auto job = jobs : : parallel_for ( js , parent , 0 , uint32_t ( dim ) , <nl> + parallelJobTask , jobs : : CountSplitter < 64 , 8 > ( ) ) ; <nl> + / / not need to signal here , since we ' re just scheduling work <nl> + js . run ( job , JobSystem : : DONT_SIGNAL ) ; <nl> + } else { <nl> + / / if we have a per - thread STATE , we can ' t parallel_for ( ) <nl> + parallelJobTask ( 0 , dim ) ; <nl> + } <nl> + } ; <nl> + <nl> + / / not need to signal here , since we ' re just scheduling work <nl> + js . run ( jobs : : createJob ( js , parent , perFaceJob , std : : ref ( js ) , parent ) , JobSystem : : DONT_SIGNAL ) ; <nl> } <nl> + <nl> / / wait for all our threads to finish <nl> js . runAndWait ( parent ) ; <nl> <nl> mmm a / libs / utils / src / JobSystem . cpp <nl> ppp b / libs / utils / src / JobSystem . cpp <nl> void JobSystem : : release ( JobSystem : : Job * & job ) noexcept { <nl> void JobSystem : : run ( JobSystem : : Job * & job , uint32_t flags ) noexcept { <nl> # if HEAVY_SYSTRACE <nl> SYSTRACE_CALL ( ) ; <nl> + # else <nl> + SYSTRACE_CONTEXT ( ) ; <nl> # endif <nl> <nl> ThreadState & state ( getState ( ) ) ; <nl> void JobSystem : : run ( JobSystem : : Job * & job , uint32_t flags ) noexcept { <nl> <nl> put ( state . workQueue , job ) ; <nl> <nl> - SYSTRACE_CONTEXT ( ) ; <nl> SYSTRACE_VALUE32 ( " JobSystem : : activeJobs " , activeJobs + 1 ) ; <nl> <nl> / / wake - up a thread if needed . . . <nl> if ( ! ( flags & DONT_SIGNAL ) ) { <nl> - / / wake - up a queue <nl> + / / wake - up multiple queues because there could be multiple jobs queued <nl> + / / especially if DONT_SIGNAL was used <nl> { std : : lock_guard < Mutex > lock ( mLooperLock ) ; } <nl> - mLooperCondition . notify_one ( ) ; <nl> + mLooperCondition . notify_all ( ) ; <nl> } <nl> <nl> / / after run ( ) returns , the job is virtually invalid ( it ' ll die on its own ) <nl> | mitigate overhead of jobsystem | google/filament | 921c2bcd61ef92c8131a5b13053dd9509793984e | 2019-06-26T23:05:30Z |
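The CubemapIBL change above is a work-size heuristic: when the cubemap (or cubemap size times sample count) is small, the filtering runs single-threaded because scheduling jobs would cost more than the work itself. A stripped-down sketch of that serial-below-a-threshold pattern using std::thread rather than Filament's JobSystem; the 256 cut-off mirrors the patch but is only a tuning constant:

#include <algorithm>
#include <functional>
#include <iostream>
#include <numeric>
#include <thread>
#include <vector>

void process_rows(std::vector<int> &rows, size_t begin, size_t end) {
    for (size_t y = begin; y < end; ++y)
        rows[y] = static_cast<int>(y) * 2;   // stand-in for per-scanline filtering work
}

void process(std::vector<int> &rows, size_t work_per_row) {
    const size_t kParallelThreshold = 256;            // below this, launching jobs is not worth it
    if (rows.size() * work_per_row <= kParallelThreshold) {
        process_rows(rows, 0, rows.size());           // run inline, single-threaded
        return;
    }
    const size_t n = std::max<size_t>(1, std::thread::hardware_concurrency());
    const size_t chunk = (rows.size() + n - 1) / n;
    std::vector<std::thread> workers;
    for (size_t t = 0; t < n; ++t) {
        const size_t b = t * chunk;
        const size_t e = std::min(rows.size(), b + chunk);
        if (b < e) workers.emplace_back(process_rows, std::ref(rows), b, e);
    }
    for (auto &w : workers) w.join();
}

int main() {
    std::vector<int> small_map(16), large_map(4096);
    process(small_map, 1);   // stays serial
    process(large_map, 64);  // fans out across hardware threads
    std::cout << std::accumulate(large_map.begin(), large_map.end(), 0LL) << "\n";
    return 0;
}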
mmm a / doc / classes / AnimationPlayer . xml <nl> ppp b / doc / classes / AnimationPlayer . xml <nl> <nl> Get the actual playing speed of current animation or 0 if not playing . This speed is the [ code ] playback_speed [ / code ] property multiplied by [ code ] custom_speed [ / code ] argument specified when calling the [ code ] play [ / code ] method . <nl> < / description > <nl> < / method > <nl> + < method name = " get_queue " > <nl> + < return type = " PoolStringArray " > <nl> + < / return > <nl> + < description > <nl> + < / description > <nl> + < / method > <nl> < method name = " has_animation " qualifiers = " const " > <nl> < return type = " bool " > <nl> < / return > <nl> mmm a / doc / classes / CPUParticles . xml <nl> ppp b / doc / classes / CPUParticles . xml <nl> <nl> < / member > <nl> < member name = " randomness " type = " float " setter = " set_randomness_ratio " getter = " get_randomness_ratio " > <nl> < / member > <nl> - < member name = " scale " type = " float " setter = " set_param " getter = " get_param " > <nl> + < member name = " scale_amount " type = " float " setter = " set_param " getter = " get_param " > <nl> < / member > <nl> - < member name = " scale_curve " type = " Curve " setter = " set_param_curve " getter = " get_param_curve " > <nl> + < member name = " scale_amount_curve " type = " Curve " setter = " set_param_curve " getter = " get_param_curve " > <nl> < / member > <nl> - < member name = " scale_random " type = " float " setter = " set_param_randomness " getter = " get_param_randomness " > <nl> + < member name = " scale_amount_random " type = " float " setter = " set_param_randomness " getter = " get_param_randomness " > <nl> < / member > <nl> < member name = " speed_scale " type = " float " setter = " set_speed_scale " getter = " get_speed_scale " > <nl> < / member > <nl> mmm a / doc / classes / CPUParticles2D . xml <nl> ppp b / doc / classes / CPUParticles2D . xml <nl> <nl> < / member > <nl> < member name = " randomness " type = " float " setter = " set_randomness_ratio " getter = " get_randomness_ratio " > <nl> < / member > <nl> - < member name = " scale " type = " float " setter = " set_param " getter = " get_param " > <nl> + < member name = " scale_amount " type = " float " setter = " set_param " getter = " get_param " > <nl> < / member > <nl> - < member name = " scale_curve " type = " Curve " setter = " set_param_curve " getter = " get_param_curve " > <nl> + < member name = " scale_amount_curve " type = " Curve " setter = " set_param_curve " getter = " get_param_curve " > <nl> < / member > <nl> - < member name = " scale_random " type = " float " setter = " set_param_randomness " getter = " get_param_randomness " > <nl> + < member name = " scale_amount_random " type = " float " setter = " set_param_randomness " getter = " get_param_randomness " > <nl> < / member > <nl> < member name = " speed_scale " type = " float " setter = " set_speed_scale " getter = " get_speed_scale " > <nl> < / member > <nl> mmm a / doc / classes / Environment . xml <nl> ppp b / doc / classes / Environment . xml <nl> <nl> < member name = " glow_enabled " type = " bool " setter = " set_glow_enabled " getter = " is_glow_enabled " > <nl> Enables glow rendering . 
<nl> < / member > <nl> + < member name = " glow_hdr_luminance_cap " type = " float " setter = " set_glow_hdr_luminance_cap " getter = " get_glow_hdr_luminance_cap " > <nl> + < / member > <nl> < member name = " glow_hdr_scale " type = " float " setter = " set_glow_hdr_bleed_scale " getter = " get_glow_hdr_bleed_scale " > <nl> Bleed scale of the HDR glow . <nl> < / member > <nl> mmm a / doc / classes / Generic6DOFJoint . xml <nl> ppp b / doc / classes / Generic6DOFJoint . xml <nl> <nl> < / member > <nl> < member name = " linear_spring_z / stiffness " type = " float " setter = " set_param_z " getter = " get_param_z " > <nl> < / member > <nl> + < member name = " precision " type = " int " setter = " set_precision " getter = " get_precision " > <nl> + < / member > <nl> < / members > <nl> < constants > <nl> < constant name = " PARAM_LINEAR_LOWER_LIMIT " value = " 0 " enum = " Param " > <nl> mmm a / doc / classes / TextEdit . xml <nl> ppp b / doc / classes / TextEdit . xml <nl> <nl> < constant name = " MENU_UNDO " value = " 5 " enum = " MenuItems " > <nl> Undoes the previous action . <nl> < / constant > <nl> - < constant name = " MENU_MAX " value = " 6 " enum = " MenuItems " > <nl> + < constant name = " MENU_REDO " value = " 6 " enum = " MenuItems " > <nl> + < / constant > <nl> + < constant name = " MENU_MAX " value = " 7 " enum = " MenuItems " > <nl> < / constant > <nl> < / constants > <nl> < theme_items > <nl> mmm a / doc / classes / Theme . xml <nl> ppp b / doc / classes / Theme . xml <nl> <nl> < demos > <nl> < / demos > <nl> < methods > <nl> + < method name = " clear " > <nl> + < return type = " void " > <nl> + < / return > <nl> + < description > <nl> + < / description > <nl> + < / method > <nl> < method name = " clear_color " > <nl> < return type = " void " > <nl> < / return > <nl> mmm a / doc / classes / VisualServer . xml <nl> ppp b / doc / classes / VisualServer . xml <nl> <nl> < / argument > <nl> < argument index = " 8 " name = " hdr_bleed_scale " type = " float " > <nl> < / argument > <nl> - < argument index = " 9 " name = " bicubic_upscale " type = " bool " > <nl> + < argument index = " 9 " name = " hdr_luminance_cap " type = " float " > <nl> + < / argument > <nl> + < argument index = " 10 " name = " bicubic_upscale " type = " bool " > <nl> < / argument > <nl> < description > <nl> < / description > <nl> mmm a / servers / visual_server . cpp <nl> ppp b / servers / visual_server . cpp <nl> void VisualServer : : _bind_methods ( ) { <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_ambient_light " , " env " , " color " , " energy " , " sky_contibution " ) , & VisualServer : : environment_set_ambient_light , DEFVAL ( 1 . 0 ) , DEFVAL ( 0 . 
0 ) ) ; <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_dof_blur_near " , " env " , " enable " , " distance " , " transition " , " far_amount " , " quality " ) , & VisualServer : : environment_set_dof_blur_near ) ; <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_dof_blur_far " , " env " , " enable " , " distance " , " transition " , " far_amount " , " quality " ) , & VisualServer : : environment_set_dof_blur_far ) ; <nl> - ClassDB : : bind_method ( D_METHOD ( " environment_set_glow " , " env " , " enable " , " level_flags " , " intensity " , " strength " , " bloom_threshold " , " blend_mode " , " hdr_bleed_threshold " , " hdr_bleed_scale " , " bicubic_upscale " ) , & VisualServer : : environment_set_glow ) ; <nl> + ClassDB : : bind_method ( D_METHOD ( " environment_set_glow " , " env " , " enable " , " level_flags " , " intensity " , " strength " , " bloom_threshold " , " blend_mode " , " hdr_bleed_threshold " , " hdr_bleed_scale " , " hdr_luminance_cap " , " bicubic_upscale " ) , & VisualServer : : environment_set_glow ) ; <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_tonemap " , " env " , " tone_mapper " , " exposure " , " white " , " auto_exposure " , " min_luminance " , " max_luminance " , " auto_exp_speed " , " auto_exp_grey " ) , & VisualServer : : environment_set_tonemap ) ; <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_adjustment " , " env " , " enable " , " brightness " , " contrast " , " saturation " , " ramp " ) , & VisualServer : : environment_set_adjustment ) ; <nl> ClassDB : : bind_method ( D_METHOD ( " environment_set_ssr " , " env " , " enable " , " max_steps " , " fade_in " , " fade_out " , " depth_tolerance " , " roughness " ) , & VisualServer : : environment_set_ssr ) ; <nl> | doc : Sync classref with current source | godotengine/godot | 8fda8769d1071c3afa2e9f3836e9c7615d0c2dab | 2018-12-04T20:57:07Z |
mmm a / shell / collection . js <nl> ppp b / shell / collection . js <nl> DBCollection . prototype . _validateObject = function ( o ) { <nl> throw " can ' t save a DBQuery object " ; <nl> } <nl> <nl> - DBCollection . _allowedFields = { $ id : 1 , $ ref : 1 } ; <nl> + DBCollection . _allowedFields = { $ id : 1 , $ ref : 1 , $ db : 1 } ; <nl> <nl> DBCollection . prototype . _validateForStorage = function ( o ) { <nl> this . _validateObject ( o ) ; <nl> | Added $ db to allowed fields with names starting with ' $ ' | mongodb/mongo | aefc2167f4e0ce488e39be27e7c3e01c66216492 | 2011-06-21T20:21:09Z |
mmm a / tools / make - package / git - archive - all <nl> ppp b / tools / make - package / git - archive - all <nl> import traceback <nl> from os import path , extsep <nl> from subprocess import Popen , PIPE , CalledProcessError <nl> <nl> + def os_is_win32 ( ) : <nl> + return sys . platform = = ' win32 ' <nl> + <nl> class UnrecognizedFormat : <nl> def __init__ ( self , prompt ) : <nl> self . _prompt = prompt <nl> class GitArchiver ( object ) : <nl> <nl> # Raises an exception if there is no repo under main_repo_abspath . <nl> try : <nl> - self . run_shell ( " [ - d . git ] | | git rev - parse - - git - dir > / dev / null 2 > & 1 " , main_repo_abspath ) <nl> + self . run_shell ( " git rev - parse " , main_repo_abspath ) <nl> except Exception as e : <nl> raise ValueError ( " Not a git repository ( or any of the parent directories ) . " . format ( path = main_repo_abspath ) ) <nl> <nl> class GitArchiver ( object ) : <nl> if self . verbose : <nl> print ( " Compressing { f } = > { a } . . . " . format ( f = file_path , <nl> a = path . join ( self . prefix , file_path ) ) ) <nl> - add ( file_path , file_path ) <nl> + try : <nl> + add ( file_path , file_path ) <nl> + except : <nl> + print ( ' add % s failed . ' % file_path ) <nl> + pass <nl> else : <nl> print ( " { f } = > { a } " . format ( f = file_path , <nl> a = path . join ( self . prefix , file_path ) ) ) <nl> class GitArchiver ( object ) : <nl> print ( " Compressing { f } = > { a } . . . " . format ( f = path . join ( self . main_repo_abspath , file_path ) , <nl> a = path . join ( self . prefix , file_path ) ) ) <nl> <nl> - add ( path . join ( self . main_repo_abspath , file_path ) , file_path ) <nl> + try : <nl> + add ( path . join ( self . main_repo_abspath , file_path ) , file_path ) <nl> + except : <nl> + print ( ' add % s failed . ' % file_path ) <nl> + pass <nl> else : <nl> print ( " { f } = > { a } " . format ( f = path . join ( self . main_repo_abspath , file_path ) , <nl> a = path . join ( self . prefix , file_path ) ) ) <nl> class GitArchiver ( object ) : <nl> raise Exception ( " Couldn ' t find extra folder path ( % s ) in ( % s ) ! " % ( extra_folder_path , file_path ) ) <nl> <nl> path_in_zip = extra_to_zip_file + file_path [ ( len ( extra_folder_path ) ) : ] <nl> - add ( file_path , path_in_zip ) <nl> + <nl> + try : <nl> + add ( file_path , path_in_zip ) <nl> + except : <nl> + print ( ' add % s failed . ' % file_path ) <nl> + pass <nl> <nl> outfile_name , outfile_ext = path . splitext ( output_path ) <nl> for extra_dir in config_data [ " extra_dirs " ] : <nl> class GitArchiver ( object ) : <nl> for f in files : <nl> file_path = path . join ( root , f ) <nl> path_in_zip = file_path [ ( len ( self . main_repo_abspath ) + 1 ) : ] <nl> - add ( file_path , path_in_zip ) <nl> + try : <nl> + add ( file_path , path_in_zip ) <nl> + except : <nl> + print ( ' add % s failed . ' % file_path ) <nl> + pass <nl> <nl> if not dry_run : <nl> archive . close ( ) <nl> class GitArchiver ( object ) : <nl> " " " <nl> components = [ ] <nl> <nl> - while not path . samefile ( abspath , repo_abspath ) : <nl> - abspath , tail = path . split ( abspath ) <nl> + if os_is_win32 ( ) : <nl> + abspath = os . path . normpath ( abspath ) <nl> + repo_abspath = os . path . normpath ( repo_abspath ) <nl> + while abspath ! = repo_abspath : <nl> + abspath , tail = path . split ( abspath ) <nl> + <nl> + if len ( tail ) : <nl> + components . insert ( 0 , tail ) <nl> + else : <nl> + while not path . samefile ( abspath , repo_abspath ) : <nl> + abspath , tail = path . 
split ( abspath ) <nl> <nl> - if len ( tail ) : <nl> - components . insert ( 0 , tail ) <nl> + if len ( tail ) : <nl> + components . insert ( 0 , tail ) <nl> <nl> components . insert ( 0 , path . relpath ( repo_abspath , repo_abspath ) ) <nl> return components <nl> | Merge pull request from natural - law / v3 | cocos2d/cocos2d-x | 50a71fec81cd4822a91a36ac5cfe3048fc25c155 | 2015-06-23T10:25:45Z |
mmm a / lib / BasicsC / linked - list . c <nl> ppp b / lib / BasicsC / linked - list . c <nl> bool EqualElement ( TRI_associative_pointer_t * array , void const * left , void cons <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> - / / / @ brief equal element <nl> + / / / @ brief inserts an entry at the front or end of a linked array <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> + static int AddLinkedArray ( TRI_linked_array_t * array , <nl> + void const * data , <nl> + const bool front ) { <nl> + TRI_linked_list_entry_t * entry ; <nl> + TRI_linked_list_entry_t * found ; <nl> + <nl> + / / create entry <nl> + entry = TRI_Allocate ( array - > _memoryZone , sizeof ( TRI_linked_list_entry_t ) , false ) ; <nl> + <nl> + if ( entry = = NULL ) { <nl> + return TRI_ERROR_OUT_OF_MEMORY ; <nl> + } <nl> + <nl> + entry - > _data = data ; <nl> + <nl> + / / insert to lookup table <nl> + found = TRI_InsertElementAssociativePointer ( & array - > _array , entry , true ) ; <nl> + <nl> + if ( TRI_errno ( ) = = TRI_ERROR_OUT_OF_MEMORY ) { <nl> + TRI_Free ( array - > _memoryZone , entry ) ; <nl> + return TRI_set_errno ( TRI_ERROR_OUT_OF_MEMORY ) ; <nl> + } <nl> + <nl> + / / this should not happen <nl> + if ( found ! = NULL ) { <nl> + TRI_RemoveLinkedList ( & array - > _list , found ) ; <nl> + TRI_Free ( array - > _memoryZone , found ) ; <nl> + } <nl> + <nl> + if ( front ) { <nl> + / / add element at the beginning <nl> + TRI_AddFrontLinkedList ( & array - > _list , entry ) ; <nl> + } <nl> + else { <nl> + / / add element at the end <nl> + TRI_AddLinkedList ( & array - > _list , entry ) ; <nl> + } <nl> + <nl> + return TRI_ERROR_NO_ERROR ; <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ } <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void TRI_AddLinkedList ( TRI_linked_list_t * list , TRI_linked_list_entry_t * entry ) <nl> } <nl> } <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief inserts an entry at the front of a linked list <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + void TRI_AddFrontLinkedList ( TRI_linked_list_t * list , TRI_linked_list_entry_t * entry ) { <nl> + TRI_linked_list_entry_t * begin = list - > _begin ; <nl> + <nl> + if ( begin = = NULL ) { <nl> + / / list is empty <nl> + TRI_AddLinkedList ( list , entry ) ; <nl> + } <nl> + else { <nl> + / / list is not empty <nl> + begin - > _prev = entry ; <nl> + entry - > _prev = NULL ; <nl> + entry - > _next = begin ; <nl> + <nl> + list - > _begin = entry ; <nl> + / / end does not change <nl> + } <nl> + } <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief removes an entry from a linked list <nl> / / / / / / / / / / / / / / / / / / 
/ / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void TRI_RemoveLinkedList ( TRI_linked_list_t * list , TRI_linked_list_entry_t * ent <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> int TRI_AddLinkedArray ( TRI_linked_array_t * array , void const * data ) { <nl> - TRI_linked_list_entry_t * entry ; <nl> - TRI_linked_list_entry_t * found ; <nl> - <nl> - / / create entry <nl> - entry = TRI_Allocate ( array - > _memoryZone , sizeof ( TRI_linked_list_entry_t ) , false ) ; <nl> - <nl> - if ( entry = = NULL ) { <nl> - return TRI_ERROR_OUT_OF_MEMORY ; <nl> - } <nl> - <nl> - entry - > _data = data ; <nl> - <nl> - / / insert to lookup table <nl> - found = TRI_InsertElementAssociativePointer ( & array - > _array , entry , true ) ; <nl> - <nl> - if ( TRI_errno ( ) = = TRI_ERROR_OUT_OF_MEMORY ) { <nl> - TRI_Free ( array - > _memoryZone , entry ) ; <nl> - return TRI_set_errno ( TRI_ERROR_OUT_OF_MEMORY ) ; <nl> - } <nl> - <nl> - / / this should not happen <nl> - if ( found ! = NULL ) { <nl> - TRI_RemoveLinkedList ( & array - > _list , found ) ; <nl> - TRI_Free ( array - > _memoryZone , found ) ; <nl> - } <nl> + return AddLinkedArray ( array , data , false ) ; <nl> + } <nl> <nl> - / / add element at the beginning <nl> - TRI_AddLinkedList ( & array - > _list , entry ) ; <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief inserts an entry at the beginning of a linked array <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> <nl> - return TRI_ERROR_NO_ERROR ; <nl> + int TRI_AddFrontLinkedArray ( TRI_linked_array_t * array , void const * data ) { <nl> + return AddLinkedArray ( array , data , true ) ; <nl> } <nl> <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> mmm a / lib / BasicsC / linked - list . h <nl> ppp b / lib / BasicsC / linked - list . 
h <nl> void TRI_FreeLinkedArray ( TRI_memory_zone_t * , TRI_linked_array_t * ) ; <nl> <nl> void TRI_AddLinkedList ( TRI_linked_list_t * , TRI_linked_list_entry_t * ) ; <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief inserts an entry at the beginning of a linked list <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + void TRI_AddFrontLinkedList ( TRI_linked_list_t * , TRI_linked_list_entry_t * ) ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief removes an entry from a linked list <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> void TRI_RemoveLinkedList ( TRI_linked_list_t * , TRI_linked_list_entry_t * ) ; <nl> <nl> int TRI_AddLinkedArray ( TRI_linked_array_t * , void const * data ) ; <nl> <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + / / / @ brief inserts an entry at the beginning of a linked array <nl> + / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> + <nl> + int TRI_AddFrontLinkedArray ( TRI_linked_array_t * , void const * data ) ; <nl> + <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> / / / @ brief removes an entry from a linked array <nl> / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / / <nl> | added functions to add to front | arangodb/arangodb | 7f1cd2681c03e1fda02d76b265bd6c4978abdce5 | 2013-04-10T06:50:49Z |
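The linked-list patch above adds a push-front operation that reuses the append path for the empty-list case and otherwise only rewires the head pointers. A compact illustrative re-implementation with simplified structs (not the TRI_* types, and without the associative-array bookkeeping of the linked-array variant):

#include <cstdio>

struct Entry { const void *data; Entry *prev; Entry *next; };
struct List  { Entry *begin = nullptr; Entry *end = nullptr; };

void add_back(List *list, Entry *entry) {
    entry->next = nullptr;
    entry->prev = list->end;
    if (list->end) list->end->next = entry; else list->begin = entry;
    list->end = entry;
}

void add_front(List *list, Entry *entry) {
    if (list->begin == nullptr) {        // empty list: same as appending
        add_back(list, entry);
        return;
    }
    entry->prev = nullptr;
    entry->next = list->begin;
    list->begin->prev = entry;
    list->begin = entry;                 // the end pointer stays unchanged
}

int main() {
    List list;
    Entry a{"a", nullptr, nullptr}, b{"b", nullptr, nullptr};
    add_back(&list, &a);
    add_front(&list, &b);
    for (Entry *e = list.begin; e; e = e->next)
        std::printf("%s\n", static_cast<const char *>(e->data));  // prints b then a
    return 0;
}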
mmm a / src / mips / lithium - mips . cc <nl> ppp b / src / mips / lithium - mips . cc <nl> LInstruction * LChunkBuilder : : DoChange ( HChange * instr ) { <nl> <nl> LInstruction * LChunkBuilder : : DoCheckHeapObject ( HCheckHeapObject * instr ) { <nl> LOperand * value = UseRegisterAtStart ( instr - > value ( ) ) ; <nl> - return AssignEnvironment ( new ( zone ( ) ) LCheckNonSmi ( value ) ) ; <nl> + LInstruction * result = new ( zone ( ) ) LCheckNonSmi ( value ) ; <nl> + if ( ! instr - > value ( ) - > IsHeapObject ( ) ) result = AssignEnvironment ( result ) ; <nl> + return result ; <nl> } <nl> <nl> <nl> | MIPS : Reland " Fixed environment assignment for LCheckNonSmi . " | v8/v8 | 5b1636f23e59d4d8a5d9386aebe8875f4535b3ec | 2014-04-04T15:17:37Z |
mmm a / src / video_core / renderer_opengl / gl_rasterizer . cpp <nl> ppp b / src / video_core / renderer_opengl / gl_rasterizer . cpp <nl> std : : pair < Surface , Surface > RasterizerOpenGL : : ConfigureFramebuffers ( bool using_c <nl> / / TODO ( bunnei ) : Implement this <nl> const bool has_stencil = false ; <nl> <nl> - const MathUtil : : Rectangle < s32 > viewport_rect { regs . viewport_transform [ 0 ] . GetRect ( ) } ; <nl> - <nl> const bool write_color_fb = <nl> state . color_mask . red_enabled = = GL_TRUE | | state . color_mask . green_enabled = = GL_TRUE | | <nl> state . color_mask . blue_enabled = = GL_TRUE | | state . color_mask . alpha_enabled = = GL_TRUE ; <nl> std : : pair < Surface , Surface > RasterizerOpenGL : : ConfigureFramebuffers ( bool using_c <nl> Surface depth_surface ; <nl> MathUtil : : Rectangle < u32 > surfaces_rect ; <nl> std : : tie ( color_surface , depth_surface , surfaces_rect ) = <nl> - res_cache . GetFramebufferSurfaces ( using_color_fb , using_depth_fb , viewport_rect ) ; <nl> + res_cache . GetFramebufferSurfaces ( using_color_fb , using_depth_fb ) ; <nl> <nl> - MathUtil : : Rectangle < u32 > draw_rect { <nl> + const MathUtil : : Rectangle < s32 > viewport_rect { regs . viewport_transform [ 0 ] . GetRect ( ) } ; <nl> + const MathUtil : : Rectangle < u32 > draw_rect { <nl> static_cast < u32 > ( std : : clamp < s32 > ( static_cast < s32 > ( surfaces_rect . left ) + viewport_rect . left , <nl> surfaces_rect . left , surfaces_rect . right ) ) , / / Left <nl> static_cast < u32 > ( std : : clamp < s32 > ( static_cast < s32 > ( surfaces_rect . bottom ) + viewport_rect . top , <nl> mmm a / src / video_core / renderer_opengl / gl_rasterizer_cache . cpp <nl> ppp b / src / video_core / renderer_opengl / gl_rasterizer_cache . cpp <nl> Surface RasterizerCacheOpenGL : : GetTextureSurface ( const Tegra : : Texture : : FullTextu <nl> return GetSurface ( SurfaceParams : : CreateForTexture ( config ) ) ; <nl> } <nl> <nl> - SurfaceSurfaceRect_Tuple RasterizerCacheOpenGL : : GetFramebufferSurfaces ( <nl> - bool using_color_fb , bool using_depth_fb , const MathUtil : : Rectangle < s32 > & viewport ) { <nl> + SurfaceSurfaceRect_Tuple RasterizerCacheOpenGL : : GetFramebufferSurfaces ( bool using_color_fb , <nl> + bool using_depth_fb ) { <nl> const auto & regs = Core : : System : : GetInstance ( ) . GPU ( ) . Maxwell3D ( ) . regs ; <nl> <nl> / / TODO ( bunnei ) : This is hard corded to use just the first render buffer <nl> mmm a / src / video_core / renderer_opengl / gl_rasterizer_cache . h <nl> ppp b / src / video_core / renderer_opengl / gl_rasterizer_cache . h <nl> class RasterizerCacheOpenGL final : NonCopyable { <nl> Surface GetTextureSurface ( const Tegra : : Texture : : FullTextureInfo & config ) ; <nl> <nl> / / / Get the color and depth surfaces based on the framebuffer configuration <nl> - SurfaceSurfaceRect_Tuple GetFramebufferSurfaces ( bool using_color_fb , bool using_depth_fb , <nl> - const MathUtil : : Rectangle < s32 > & viewport ) ; <nl> + SurfaceSurfaceRect_Tuple GetFramebufferSurfaces ( bool using_color_fb , bool using_depth_fb ) ; <nl> <nl> / / / Flushes the surface to Switch memory <nl> void FlushSurface ( const Surface & surface ) ; <nl> | gl_rasterizer_cache : Remove unused viewport parameter of GetFramebufferSurfaces ( ) | yuzu-emu/yuzu | 0e1510ac2923eee590db38350ae7061c30516586 | 2018-08-10T00:55:41Z |
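Besides dropping the now-unused viewport parameter, the rasterizer code above derives draw_rect by clamping the signed viewport rectangle into the unsigned bounds of the cached surfaces. A simplified sketch of that clamping, using plain structs rather than yuzu's MathUtil::Rectangle and ignoring the bottom-origin flip of the real code:

#include <algorithm>
#include <cstdint>
#include <iostream>

struct RectU { uint32_t left, top, right, bottom; };
struct RectS { int32_t left, top, right, bottom; };

// Offset the viewport by the surface origin and clamp every edge back into the surface.
RectU clamp_viewport(const RectU &surface, const RectS &viewport) {
    const auto clamp_x = [&](int32_t v) {
        return static_cast<uint32_t>(std::clamp<int32_t>(static_cast<int32_t>(surface.left) + v,
                                                         static_cast<int32_t>(surface.left),
                                                         static_cast<int32_t>(surface.right)));
    };
    const auto clamp_y = [&](int32_t v) {
        return static_cast<uint32_t>(std::clamp<int32_t>(static_cast<int32_t>(surface.top) + v,
                                                         static_cast<int32_t>(surface.top),
                                                         static_cast<int32_t>(surface.bottom)));
    };
    return {clamp_x(viewport.left), clamp_y(viewport.top),
            clamp_x(viewport.right), clamp_y(viewport.bottom)};
}

int main() {
    const RectU surface{0, 0, 1280, 720};
    const RectS viewport{-10, 5, 2000, 600};   // partially outside the surface
    const RectU r = clamp_viewport(surface, viewport);
    std::cout << r.left << ' ' << r.top << ' ' << r.right << ' ' << r.bottom << "\n";  // 0 5 1280 600
    return 0;
}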
mmm a / taichi / transforms / demote_dense_struct_fors . cpp <nl> ppp b / taichi / transforms / demote_dense_struct_fors . cpp <nl> VecStatement convert_to_range_for ( StructForStmt * struct_for ) { <nl> auto loop_var = ret . push_back < AllocaStmt > ( DataType : : i32 ) ; <nl> auto lower = ret . push_back < ConstStmt > ( TypedConstant ( 0 ) ) ; <nl> std : : vector < SNode * > snodes ; <nl> - auto snode = struct_for - > snode - > parent ; <nl> + auto snode = struct_for - > snode ; <nl> int total_bits = 0 ; <nl> while ( snode - > type ! = SNodeType : : root ) { <nl> snodes . push_back ( snode ) ; <nl> void demote_dense_struct_fors ( IRNode * root ) { <nl> for ( int i = 0 ; i < ( int ) block_body . size ( ) ; i + + ) { <nl> auto s_ = block_body [ i ] ; <nl> if ( auto s = s_ - > cast < StructForStmt > ( ) ) { <nl> - auto snode = s - > snode - > parent ; <nl> - bool all_dense = snode - > type = = SNodeType : : place ; <nl> + auto snode = s - > snode ; <nl> + TI_P ( snode_type_name ( snode - > type ) ) ; <nl> + bool all_dense = true ; <nl> while ( all_dense & & snode - > type ! = SNodeType : : root ) { <nl> if ( snode - > type ! = SNodeType : : dense ) { <nl> all_dense = false ; <nl> | fixed demote_dense_struct_fors | taichi-dev/taichi | ca2c7b8c75f5add07866e8a6813dd320f6aabf1d | 2020-02-17T04:25:31Z |
mmm a / src / video_core / renderer_opengl / gl_resource_manager . cpp <nl> ppp b / src / video_core / renderer_opengl / gl_resource_manager . cpp <nl> <nl> # include " video_core / renderer_opengl / gl_shader_util . h " <nl> # include " video_core / renderer_opengl / gl_state . h " <nl> <nl> - MICROPROFILE_DEFINE ( OpenGL_ResourceCreation , " OpenGL " , " Resource Creation " , <nl> - MP_RGB ( 128 , 128 , 192 ) ) ; <nl> - MICROPROFILE_DEFINE ( OpenGL_ResourceDeletion , " OpenGL " , " Resource Deletion " , <nl> - MP_RGB ( 128 , 128 , 192 ) ) ; <nl> + MICROPROFILE_DEFINE ( OpenGL_ResourceCreation , " OpenGL " , " Resource Creation " , MP_RGB ( 128 , 128 , 192 ) ) ; <nl> + MICROPROFILE_DEFINE ( OpenGL_ResourceDeletion , " OpenGL " , " Resource Deletion " , MP_RGB ( 128 , 128 , 192 ) ) ; <nl> <nl> namespace OpenGL { <nl> <nl> | gl_resource_manager : Amend clang - format discrepancies | yuzu-emu/yuzu | 29f082775b400c56d7afe45b84ad8821e255f127 | 2018-11-08T05:23:45Z |
mmm a / src / mongo / db / s / collection_metadata . cpp <nl> ppp b / src / mongo / db / s / collection_metadata . cpp <nl> namespace mongo { <nl> CollectionMetadata : : CollectionMetadata ( std : : shared_ptr < ChunkManager > cm , const ShardId & thisShardId ) <nl> : _cm ( std : : move ( cm ) ) , <nl> _thisShardId ( thisShardId ) , <nl> - _shardVersion ( _cm - > getVersion ( _thisShardId ) ) , <nl> - _chunksMap ( SimpleBSONObjComparator : : kInstance . makeBSONObjIndexedMap < BSONObj > ( ) ) , <nl> - _rangesMap ( SimpleBSONObjComparator : : kInstance . makeBSONObjIndexedMap < BSONObj > ( ) ) { <nl> + _shardVersion ( _cm - > getVersion ( thisShardId ) ) , <nl> + _chunksMap ( SimpleBSONObjComparator : : kInstance . makeBSONObjIndexedMap < BSONObj > ( ) ) { <nl> <nl> invariant ( _cm - > getVersion ( ) . isSet ( ) ) ; <nl> invariant ( _cm - > getVersion ( ) > = _shardVersion ) ; <nl> CollectionMetadata : : CollectionMetadata ( std : : shared_ptr < ChunkManager > cm , const S <nl> return ; <nl> } <nl> invariant ( _shardVersion . isSet ( ) ) ; <nl> - <nl> - _buildRangesMap ( ) ; <nl> - } <nl> - <nl> - CollectionMetadata : : ~ CollectionMetadata ( ) = default ; <nl> - <nl> - void CollectionMetadata : : _buildRangesMap ( ) { <nl> - _rangesMap . clear ( ) ; <nl> - <nl> - / / Load the chunk information , coalescing their ranges . The version for this shard would be <nl> - / / the highest version for any of the chunks . <nl> - <nl> - BSONObj min , max ; <nl> - <nl> - for ( const auto & entry : _chunksMap ) { <nl> - BSONObj const & currMin = entry . first ; <nl> - BSONObj const & currMax = entry . second ; <nl> - <nl> - / / Coalesce the chunk ' s bounds in ranges if they are adjacent chunks <nl> - if ( min . isEmpty ( ) ) { <nl> - min = currMin ; <nl> - max = currMax ; <nl> - continue ; <nl> - } <nl> - <nl> - if ( SimpleBSONObjComparator : : kInstance . evaluate ( max = = currMin ) ) { <nl> - max = currMax ; <nl> - continue ; <nl> - } <nl> - <nl> - _rangesMap . emplace_hint ( _rangesMap . end ( ) , min , max ) ; <nl> - <nl> - min = currMin ; <nl> - max = currMax ; <nl> - } <nl> - <nl> - invariant ( ! min . isEmpty ( ) ) ; <nl> - invariant ( ! max . isEmpty ( ) ) ; <nl> - <nl> - _rangesMap . emplace ( min , max ) ; <nl> - } <nl> - <nl> - bool CollectionMetadata : : keyBelongsToMe ( const BSONObj & key ) const { <nl> - if ( _rangesMap . empty ( ) ) { <nl> - return false ; <nl> - } <nl> - <nl> - auto it = _rangesMap . upper_bound ( key ) ; <nl> - if ( it ! = _rangesMap . begin ( ) ) <nl> - it - - ; <nl> - <nl> - return rangeContains ( it - > first , it - > second , key ) ; <nl> } <nl> <nl> bool CollectionMetadata : : getNextChunk ( const BSONObj & lookupKey , ChunkType * chunk ) const { <nl> - RangeMap : : const_iterator upperChunkIt = _chunksMap . upper_bound ( lookupKey ) ; <nl> - RangeMap : : const_iterator lowerChunkIt = upperChunkIt ; <nl> - <nl> - if ( upperChunkIt ! = _chunksMap . begin ( ) ) { <nl> - - - lowerChunkIt ; <nl> - } else { <nl> - lowerChunkIt = _chunksMap . end ( ) ; <nl> - } <nl> - <nl> - if ( lowerChunkIt ! = _chunksMap . end ( ) & & lowerChunkIt - > second . woCompare ( lookupKey ) > 0 ) { <nl> - chunk - > setMin ( lowerChunkIt - > first ) ; <nl> - chunk - > setMax ( lowerChunkIt - > second ) ; <nl> - return true ; <nl> - } <nl> - <nl> - if ( upperChunkIt ! = _chunksMap . 
end ( ) ) { <nl> - chunk - > setMin ( upperChunkIt - > first ) ; <nl> - chunk - > setMax ( upperChunkIt - > second ) ; <nl> - return true ; <nl> - } <nl> + auto foundIt = _cm - > getNextChunkOnShard ( lookupKey , _thisShardId ) ; <nl> + if ( foundIt . begin ( ) = = foundIt . end ( ) ) <nl> + return false ; <nl> <nl> - return false ; <nl> + const auto & nextChunk = * foundIt . begin ( ) ; <nl> + chunk - > setMin ( nextChunk - > getMin ( ) ) ; <nl> + chunk - > setMax ( nextChunk - > getMax ( ) ) ; <nl> + return true ; <nl> } <nl> <nl> bool CollectionMetadata : : getDifferentChunk ( const BSONObj & chunkMinKey , <nl> ChunkType * differentChunk ) const { <nl> - RangeMap : : const_iterator upperChunkIt = _chunksMap . end ( ) ; <nl> - RangeMap : : const_iterator lowerChunkIt = _chunksMap . begin ( ) ; <nl> - <nl> - while ( lowerChunkIt ! = upperChunkIt ) { <nl> - if ( lowerChunkIt - > first . woCompare ( chunkMinKey ) ! = 0 ) { <nl> - differentChunk - > setMin ( lowerChunkIt - > first ) ; <nl> - differentChunk - > setMax ( lowerChunkIt - > second ) ; <nl> - return true ; <nl> + for ( const auto & found : _cm - > chunks ( ) ) { <nl> + if ( found - > getShardId ( ) = = _thisShardId ) { <nl> + if ( found - > getMin ( ) . woCompare ( chunkMinKey ) ! = 0 ) { <nl> + differentChunk - > setMin ( found - > getMin ( ) ) ; <nl> + differentChunk - > setMax ( found - > getMax ( ) ) ; <nl> + return true ; <nl> + } <nl> } <nl> - + + lowerChunkIt ; <nl> } <nl> - <nl> return false ; <nl> } <nl> <nl> - Status CollectionMetadata : : checkChunkIsValid ( const ChunkType & chunk ) { <nl> + Status CollectionMetadata : : checkChunkIsValid ( const ChunkType & chunk ) const { <nl> ChunkType existingChunk ; <nl> <nl> if ( ! getNextChunk ( chunk . getMin ( ) , & existingChunk ) ) { <nl> Status CollectionMetadata : : checkChunkIsValid ( const ChunkType & chunk ) { <nl> return Status : : OK ( ) ; <nl> } <nl> <nl> - bool CollectionMetadata : : rangeOverlapsChunk ( ChunkRange const & range ) { <nl> - return rangeMapOverlaps ( _rangesMap , range . getMin ( ) , range . getMax ( ) ) ; <nl> - } <nl> - <nl> void CollectionMetadata : : toBSONBasic ( BSONObjBuilder & bb ) const { <nl> _cm - > getVersion ( ) . addToBSON ( bb , " collVersion " ) ; <nl> _shardVersion . addToBSON ( bb , " shardVersion " ) ; <nl> void CollectionMetadata : : toBSONBasic ( BSONObjBuilder & bb ) const { <nl> } <nl> <nl> void CollectionMetadata : : toBSONChunks ( BSONArrayBuilder & bb ) const { <nl> - if ( _chunksMap . empty ( ) ) <nl> - return ; <nl> - <nl> - for ( RangeMap : : const_iterator it = _chunksMap . begin ( ) ; it ! = _chunksMap . end ( ) ; + + it ) { <nl> - BSONArrayBuilder chunkBB ( bb . subarrayStart ( ) ) ; <nl> - chunkBB . append ( it - > first ) ; <nl> - chunkBB . append ( it - > second ) ; <nl> - chunkBB . done ( ) ; <nl> + for ( const auto & chunk : _cm - > chunks ( ) ) { <nl> + if ( chunk - > getShardId ( ) = = _thisShardId ) { <nl> + BSONArrayBuilder chunkBB ( bb . subarrayStart ( ) ) ; <nl> + chunkBB . append ( chunk - > getMin ( ) ) ; <nl> + chunkBB . append ( chunk - > getMax ( ) ) ; <nl> + chunkBB . done ( ) ; <nl> + } <nl> } <nl> } <nl> <nl> mmm a / src / mongo / db / s / collection_metadata . h <nl> ppp b / src / mongo / db / s / collection_metadata . h <nl> class CollectionMetadata { <nl> * " does this key belong to this shard " ? 
<nl> * / <nl> CollectionMetadata ( std : : shared_ptr < ChunkManager > cm , const ShardId & thisShardId ) ; <nl> - ~ CollectionMetadata ( ) ; <nl> <nl> / * * <nl> * Returns true if ' key ' contains exactly the same fields as the shard key pattern . <nl> class CollectionMetadata { <nl> * Returns true if the document with the given key belongs to this chunkset . If the key is empty <nl> * returns false . If key is not a valid shard key , the behaviour is undefined . <nl> * / <nl> - bool keyBelongsToMe ( const BSONObj & key ) const ; <nl> + bool keyBelongsToMe ( const BSONObj & key ) const { <nl> + return _cm - > keyBelongsToShard ( key , _thisShardId ) ; <nl> + } <nl> <nl> / * * <nl> * Given a key ' lookupKey ' in the shard key range , get the next chunk which overlaps or is <nl> class CollectionMetadata { <nl> / * * <nl> * Validates that the passed - in chunk ' s bounds exactly match a chunk in the metadata cache . <nl> * / <nl> - Status checkChunkIsValid ( const ChunkType & chunk ) ; <nl> + Status checkChunkIsValid ( const ChunkType & chunk ) const ; <nl> <nl> / * * <nl> * Returns true if the argument range overlaps any chunk . <nl> * / <nl> - bool rangeOverlapsChunk ( ChunkRange const & range ) ; <nl> + bool rangeOverlapsChunk ( ChunkRange const & range ) const { <nl> + return _cm - > rangeOverlapsShard ( range , _thisShardId ) ; <nl> + } <nl> <nl> / * * <nl> * Given a key in the shard key range , get the next range which overlaps or is greater than <nl> class CollectionMetadata { <nl> } <nl> <nl> private : <nl> - / * * <nl> - * Builds _rangesMap from the contents of _chunksMap . <nl> - * / <nl> - void _buildRangesMap ( ) ; <nl> - <nl> / / The full routing table for the collection . <nl> std : : shared_ptr < ChunkManager > _cm ; <nl> <nl> class CollectionMetadata { <nl> <nl> / / Map of chunks tracked by this shard <nl> RangeMap _chunksMap ; <nl> - <nl> - / / A second map from a min key into a range of contiguous chunks . This map is redundant with <nl> - / / respect to the contents of _chunkMap but we expect high chunk contiguity , especially in small <nl> - / / clusters . <nl> - RangeMap _rangesMap ; <nl> } ; <nl> <nl> } / / namespace mongo <nl> mmm a / src / mongo / s / chunk_manager . cpp <nl> ppp b / src / mongo / s / chunk_manager . cpp <nl> <nl> <nl> # include " mongo / s / chunk_manager . h " <nl> <nl> - # include < vector > <nl> - <nl> # include " mongo / base / owned_pointer_vector . h " <nl> # include " mongo / bson / simple_bsonobj_comparator . h " <nl> # include " mongo / db / matcher / extensions_callback_noop . h " <nl> <nl> # include " mongo / db / query / index_bounds_builder . h " <nl> # include " mongo / db / query / query_planner . h " <nl> # include " mongo / db / query / query_planner_common . h " <nl> + # include " mongo / db / storage / key_string . h " <nl> # include " mongo / util / log . h " <nl> <nl> namespace mongo { <nl> void checkAllElementsAreOfType ( BSONType type , const BSONObj & o ) { <nl> } <nl> } <nl> <nl> + std : : string extractKeyStringInternal ( const BSONObj & shardKeyValue , Ordering ordering ) { <nl> + BSONObjBuilder strippedKeyValue ; <nl> + for ( const auto & elem : shardKeyValue ) { <nl> + strippedKeyValue . appendAs ( elem , " " _sd ) ; <nl> + } <nl> + <nl> + KeyString ks ( KeyString : : Version : : V1 , strippedKeyValue . done ( ) , ordering ) ; <nl> + return { ks . getBuffer ( ) , ks . 
getSize ( ) } ; <nl> + } <nl> + <nl> } / / namespace <nl> <nl> ChunkManager : : ChunkManager ( NamespaceString nss , <nl> ChunkManager : : ChunkManager ( NamespaceString nss , <nl> _nss ( std : : move ( nss ) ) , <nl> _uuid ( uuid ) , <nl> _shardKeyPattern ( shardKeyPattern ) , <nl> + _shardKeyOrdering ( Ordering : : make ( _shardKeyPattern . toBSON ( ) ) ) , <nl> _defaultCollator ( std : : move ( defaultCollator ) ) , <nl> _unique ( unique ) , <nl> _chunkMap ( std : : move ( chunkMap ) ) , <nl> - _chunkMapViews ( _constructChunkMapViews ( collectionVersion . epoch ( ) , _chunkMap ) ) , <nl> + _chunkMapViews ( <nl> + _constructChunkMapViews ( collectionVersion . epoch ( ) , _chunkMap , _shardKeyOrdering ) ) , <nl> _collectionVersion ( collectionVersion ) { } <nl> <nl> std : : shared_ptr < Chunk > ChunkManager : : findIntersectingChunk ( const BSONObj & shardKey , <nl> std : : shared_ptr < Chunk > ChunkManager : : findIntersectingChunk ( const BSONObj & shardK <nl> } <nl> } <nl> <nl> - const auto it = _chunkMap . upper_bound ( shardKey ) ; <nl> + const auto it = _chunkMap . upper_bound ( _extractKeyString ( shardKey ) ) ; <nl> uassert ( ErrorCodes : : ShardKeyNotFound , <nl> str : : stream ( ) < < " Cannot target single shard using key " < < shardKey , <nl> it ! = _chunkMap . end ( ) & & it - > second - > containsKey ( shardKey ) ) ; <nl> std : : shared_ptr < Chunk > ChunkManager : : findIntersectingChunkWithSimpleCollation ( <nl> return findIntersectingChunk ( shardKey , CollationSpec : : kSimpleSpec ) ; <nl> } <nl> <nl> + bool ChunkManager : : keyBelongsToShard ( const BSONObj & shardKey , const ShardId & shardId ) const { <nl> + if ( shardKey . isEmpty ( ) ) <nl> + return false ; <nl> + <nl> + const auto it = _rangeMapUpperBound ( shardKey ) ; <nl> + if ( it = = _chunkMapViews . chunkRangeMap . end ( ) ) <nl> + return false ; <nl> + <nl> + return it - > shardId = = shardId ; <nl> + } <nl> + <nl> void ChunkManager : : getShardIdsForQuery ( OperationContext * opCtx , <nl> const BSONObj & query , <nl> const BSONObj & collation , <nl> void ChunkManager : : getShardIdsForQuery ( OperationContext * opCtx , <nl> / / For now , we satisfy that assumption by adding a shard with no matches rather than returning <nl> / / an empty set of shards . <nl> if ( shardIds - > empty ( ) ) { <nl> - shardIds - > insert ( _chunkMapViews . chunkRangeMap . begin ( ) - > second . shardId ) ; <nl> + shardIds - > insert ( _chunkMapViews . chunkRangeMap . begin ( ) - > shardId ) ; <nl> } <nl> } <nl> <nl> void ChunkManager : : getShardIdsForRange ( const BSONObj & min , <nl> const BSONObj & max , <nl> std : : set < ShardId > * shardIds ) const { <nl> - auto it = _chunkMapViews . chunkRangeMap . upper_bound ( min ) ; <nl> - auto end = _chunkMapViews . chunkRangeMap . upper_bound ( max ) ; <nl> - <nl> - / / The chunk range map must always cover the entire key space <nl> - invariant ( it ! = _chunkMapViews . chunkRangeMap . end ( ) ) ; <nl> - <nl> - / / We need to include the last chunk <nl> - if ( end ! = _chunkMapViews . chunkRangeMap . end ( ) ) { <nl> - + + end ; <nl> - } <nl> - <nl> - for ( ; it ! = end ; + + it ) { <nl> - shardIds - > insert ( it - > second . shardId ) ; <nl> + const auto bounds = _overlappingRanges ( min , max , true ) ; <nl> + for ( auto it = bounds . first ; it ! = bounds . second ; + + it ) { <nl> + shardIds - > insert ( it - > shardId ) ; <nl> <nl> / / No need to iterate through the rest of the ranges , because we already know we need to use <nl> / / all shards . 
<nl> void ChunkManager : : getShardIdsForRange ( const BSONObj & min , <nl> } <nl> } <nl> <nl> + bool ChunkManager : : rangeOverlapsShard ( const ChunkRange & range , const ShardId & shardId ) const { <nl> + const auto bounds = _overlappingRanges ( range . getMin ( ) , range . getMax ( ) , false ) ; <nl> + const auto it = std : : find_if ( bounds . first , bounds . second , [ & shardId ] ( const auto & scr ) { <nl> + return scr . shardId = = shardId ; <nl> + } ) ; <nl> + return it ! = bounds . second ; <nl> + } <nl> + <nl> + ChunkManager : : ConstRangeOfChunks ChunkManager : : getNextChunkOnShard ( const BSONObj & shardKey , <nl> + const ShardId & shardId ) const { <nl> + for ( auto it = _chunkMap . upper_bound ( _extractKeyString ( shardKey ) ) ; it ! = _chunkMap . end ( ) ; <nl> + + + it ) { <nl> + const auto & chunk = it - > second ; <nl> + if ( chunk - > getShardId ( ) = = shardId ) { <nl> + const auto begin = it ; <nl> + const auto end = + + it ; <nl> + return { ConstChunkIterator ( begin ) , ConstChunkIterator ( end ) } ; <nl> + } <nl> + } <nl> + <nl> + return { ConstChunkIterator ( ) , ConstChunkIterator ( ) } ; <nl> + } <nl> + <nl> void ChunkManager : : getAllShardIds ( std : : set < ShardId > * all ) const { <nl> std : : transform ( _chunkMapViews . shardVersions . begin ( ) , <nl> _chunkMapViews . shardVersions . end ( ) , <nl> std : : string ChunkManager : : toString ( ) const { <nl> sb < < " ChunkManager : " < < _nss . ns ( ) < < " key : " < < _shardKeyPattern . toString ( ) < < ' \ n ' ; <nl> <nl> sb < < " Chunks : \ n " ; <nl> - for ( const auto & entry : _chunkMap ) { <nl> - sb < < " \ t " < < entry . first < < " : " < < entry . second - > toString ( ) < < ' \ n ' ; <nl> + for ( const auto & chunk : chunks ( ) ) { <nl> + sb < < " \ t " < < chunk - > toString ( ) < < ' \ n ' ; <nl> } <nl> <nl> sb < < " Ranges : \ n " ; <nl> for ( const auto & entry : _chunkMapViews . chunkRangeMap ) { <nl> - sb < < " \ t " < < entry . first < < " : " < < entry . second . range . toString ( ) < < " @ " <nl> - < < entry . second . shardId < < ' \ n ' ; <nl> + sb < < " \ t " < < entry . range . toString ( ) < < " @ " < < entry . shardId < < ' \ n ' ; <nl> } <nl> <nl> sb < < " Shard versions : \ n " ; <nl> std : : string ChunkManager : : toString ( ) const { <nl> } <nl> <nl> ChunkManager : : ChunkMapViews ChunkManager : : _constructChunkMapViews ( const OID & epoch , <nl> - const ChunkMap & chunkMap ) { <nl> - <nl> - ChunkRangeMap chunkRangeMap = <nl> - SimpleBSONObjComparator : : kInstance . makeBSONObjIndexedMap < ShardAndChunkRange > ( ) ; <nl> - <nl> + const ChunkMap & chunkMap , <nl> + Ordering shardKeyOrdering ) { <nl> + ChunkRangeMap chunkRangeMap ; <nl> ShardVersionMap shardVersions ; <nl> - <nl> ChunkMap : : const_iterator current = chunkMap . cbegin ( ) ; <nl> <nl> while ( current ! = chunkMap . cend ( ) ) { <nl> ChunkManager : : ChunkMapViews ChunkManager : : _constructChunkMapViews ( const OID & epo <nl> const BSONObj rangeMin = firstChunkInRange - > getMin ( ) ; <nl> const BSONObj rangeMax = rangeLast - > second - > getMax ( ) ; <nl> <nl> - const auto oldSize = chunkRangeMap . size ( ) ; <nl> - const auto insertIterator = chunkRangeMap . insert ( <nl> - chunkRangeMap . 
end ( ) , <nl> - std : : make_pair ( <nl> - rangeMax , <nl> - ShardAndChunkRange { { rangeMin , rangeMax } , firstChunkInRange - > getShardId ( ) } ) ) ; <nl> - uassert ( ErrorCodes : : ConflictingOperationInProgress , <nl> - str : : stream ( ) < < " Metadata contains two chunks with the same max value " <nl> - < < rangeMax , <nl> - oldSize + 1 = = chunkRangeMap . size ( ) ) ; <nl> - <nl> - <nl> - if ( insertIterator ! = chunkRangeMap . begin ( ) ) { <nl> + if ( ! chunkRangeMap . empty ( ) ) { <nl> + uassert ( <nl> + ErrorCodes : : ConflictingOperationInProgress , <nl> + str : : stream ( ) <nl> + < < " Metadata contains chunks with the same or out - of - order max value ; " <nl> + " expected " <nl> + < < chunkRangeMap . back ( ) . max ( ) <nl> + < < " < " <nl> + < < rangeMax , <nl> + SimpleBSONObjComparator : : kInstance . evaluate ( chunkRangeMap . back ( ) . max ( ) < rangeMax ) ) ; <nl> / / Make sure there are no gaps in the ranges <nl> uassert ( ErrorCodes : : ConflictingOperationInProgress , <nl> str : : stream ( ) < < " Gap or an overlap between ranges " <nl> - < < insertIterator - > second . range . toString ( ) <nl> + < < ChunkRange ( rangeMin , rangeMax ) . toString ( ) <nl> < < " and " <nl> - < < std : : prev ( insertIterator ) - > second . range . toString ( ) , <nl> - SimpleBSONObjComparator : : kInstance . evaluate ( std : : prev ( insertIterator ) - > first = = <nl> + < < chunkRangeMap . back ( ) . range . toString ( ) , <nl> + SimpleBSONObjComparator : : kInstance . evaluate ( chunkRangeMap . back ( ) . max ( ) = = <nl> rangeMin ) ) ; <nl> } <nl> <nl> + chunkRangeMap . emplace_back ( <nl> + ShardAndChunkRange { { rangeMin , rangeMax } , <nl> + firstChunkInRange - > getShardId ( ) , <nl> + extractKeyStringInternal ( rangeMax , shardKeyOrdering ) } ) ; <nl> + <nl> / / If a shard has chunks it must have a shard version , otherwise we have an invalid chunk <nl> / / somewhere , which should have been caught at chunk load time <nl> invariant ( maxShardVersion . isSet ( ) ) ; <nl> ChunkManager : : ChunkMapViews ChunkManager : : _constructChunkMapViews ( const OID & epo <nl> invariant ( ! chunkRangeMap . empty ( ) ) ; <nl> invariant ( ! shardVersions . empty ( ) ) ; <nl> <nl> - checkAllElementsAreOfType ( MinKey , chunkRangeMap . begin ( ) - > second . min ( ) ) ; <nl> - checkAllElementsAreOfType ( MaxKey , chunkRangeMap . rbegin ( ) - > first ) ; <nl> + checkAllElementsAreOfType ( MinKey , chunkRangeMap . front ( ) . min ( ) ) ; <nl> + checkAllElementsAreOfType ( MaxKey , chunkRangeMap . back ( ) . max ( ) ) ; <nl> + <nl> + DEV for ( size_t i = 0 ; i < chunkRangeMap . size ( ) - 1 ; + + i ) { <nl> + const auto & c1 = chunkRangeMap [ i ] ; <nl> + const auto & c2 = chunkRangeMap [ i + 1 ] ; <nl> + <nl> + invariant ( SimpleBSONObjComparator : : kInstance . evaluate ( c1 . max ( ) = = c2 . min ( ) ) , <nl> + str : : stream ( ) < < " Found gap between " < < c1 . range . toString ( ) < < " and " <nl> + < < c2 . range . 
toString ( ) ) ; <nl> + } <nl> } <nl> <nl> return { std : : move ( chunkRangeMap ) , std : : move ( shardVersions ) } ; <nl> } <nl> <nl> + std : : string ChunkManager : : _extractKeyString ( const BSONObj & shardKeyValue ) const { <nl> + return extractKeyStringInternal ( shardKeyValue , _shardKeyOrdering ) ; <nl> + } <nl> + <nl> + ChunkManager : : ChunkRangeMap : : const_iterator ChunkManager : : _rangeMapUpperBound ( <nl> + const BSONObj & key ) const { <nl> + <nl> + / / This class is necessary , because the last argument to std : : upper_bound is a functor which <nl> + / / implements the BinaryPredicate concept . A binary predicate pred must be able to evaluate both <nl> + / / pred ( * iter1 , * iter2 ) and pred ( * iter1 , value ) . The type of " value " in this case is <nl> + / / std : : string , while the type of * Iter is ShardAndChunkRange . <nl> + struct Key { <nl> + static const std : : string & extract ( const std : : string & k ) { <nl> + return k ; <nl> + } <nl> + static void extract ( std : : string & & k ) = delete ; <nl> + static const std : : string & extract ( const ShardAndChunkRange & scr ) { <nl> + return scr . ksMax ; <nl> + } <nl> + static const std : : string & extract ( ShardAndChunkRange & & ) = delete ; <nl> + } ; <nl> + <nl> + return std : : upper_bound ( _chunkMapViews . chunkRangeMap . cbegin ( ) , <nl> + _chunkMapViews . chunkRangeMap . cend ( ) , <nl> + _extractKeyString ( key ) , <nl> + [ ] ( const auto & lhs , const auto & rhs ) - > bool { <nl> + return Key : : extract ( lhs ) < Key : : extract ( rhs ) ; <nl> + } ) ; <nl> + } <nl> + <nl> + std : : pair < ChunkManager : : ChunkRangeMap : : const_iterator , ChunkManager : : ChunkRangeMap : : const_iterator > <nl> + ChunkManager : : _overlappingRanges ( const mongo : : BSONObj & min , <nl> + const mongo : : BSONObj & max , <nl> + bool isMaxInclusive ) const { <nl> + dassert ( SimpleBSONObjComparator : : kInstance . evaluate ( min < = max ) ) ; <nl> + const auto begin = _rangeMapUpperBound ( min ) ; <nl> + auto end = _rangeMapUpperBound ( max ) ; <nl> + <nl> + / / The chunk range map must always cover the entire key space <nl> + invariant ( begin ! = _chunkMapViews . chunkRangeMap . cend ( ) ) ; <nl> + <nl> + / / Bump the end chunk , because the second iterator in the returned pair is exclusive . There is <nl> + / / one caveat - if the exclusive max boundary of the range looked up is the same as the <nl> + / / inclusive min of the end chunk returned , it is still possible that the min is not in the end <nl> + / / chunk , in which case bumping the end will result in one extra chunk claimed to cover the <nl> + / / range . <nl> + if ( end ! = _chunkMapViews . chunkRangeMap . cend ( ) & & <nl> + ( isMaxInclusive | | SimpleBSONObjComparator : : kInstance . evaluate ( max > end - > min ( ) ) ) ) { <nl> + + + end ; <nl> + } <nl> + <nl> + return { begin , end } ; <nl> + } <nl> + <nl> std : : shared_ptr < ChunkManager > ChunkManager : : makeNew ( <nl> NamespaceString nss , <nl> boost : : optional < UUID > uuid , <nl> std : : shared_ptr < ChunkManager > ChunkManager : : makeNew ( <nl> bool unique , <nl> OID epoch , <nl> const std : : vector < ChunkType > & chunks ) { <nl> - <nl> - return ChunkManager ( <nl> - std : : move ( nss ) , <nl> - uuid , <nl> - std : : move ( shardKeyPattern ) , <nl> - std : : move ( defaultCollator ) , <nl> - std : : move ( unique ) , <nl> - SimpleBSONObjComparator : : kInstance . 
makeBSONObjIndexedMap < std : : shared_ptr < Chunk > > ( ) , <nl> - { 0 , 0 , epoch } ) <nl> + return ChunkManager ( std : : move ( nss ) , <nl> + std : : move ( uuid ) , <nl> + std : : move ( shardKeyPattern ) , <nl> + std : : move ( defaultCollator ) , <nl> + std : : move ( unique ) , <nl> + { } , <nl> + { 0 , 0 , epoch } ) <nl> . makeUpdated ( chunks ) ; <nl> } <nl> <nl> std : : shared_ptr < ChunkManager > ChunkManager : : makeUpdated ( <nl> const std : : vector < ChunkType > & changedChunks ) { <nl> + <nl> const auto startingCollectionVersion = getVersion ( ) ; <nl> auto chunkMap = _chunkMap ; <nl> <nl> std : : shared_ptr < ChunkManager > ChunkManager : : makeUpdated ( <nl> invariant ( chunkVersion > = collectionVersion ) ; <nl> collectionVersion = chunkVersion ; <nl> <nl> + const auto chunkMinKeyString = _extractKeyString ( chunk . getMin ( ) ) ; <nl> + const auto chunkMaxKeyString = _extractKeyString ( chunk . getMax ( ) ) ; <nl> + <nl> / / Returns the first chunk with a max key that is > min - implies that the chunk overlaps <nl> / / min <nl> - const auto low = chunkMap . upper_bound ( chunk . getMin ( ) ) ; <nl> + const auto low = chunkMap . upper_bound ( chunkMinKeyString ) ; <nl> <nl> / / Returns the first chunk with a max key that is > max - implies that the next chunk cannot <nl> / / not overlap max <nl> - const auto high = chunkMap . upper_bound ( chunk . getMax ( ) ) ; <nl> + const auto high = chunkMap . upper_bound ( chunkMaxKeyString ) ; <nl> <nl> / / Erase all chunks from the map , which overlap the chunk we got from the persistent store <nl> chunkMap . erase ( low , high ) ; <nl> <nl> / / Insert only the chunk itself <nl> - chunkMap . insert ( std : : make_pair ( chunk . getMax ( ) , std : : make_shared < Chunk > ( chunk ) ) ) ; <nl> + chunkMap . insert ( std : : make_pair ( chunkMaxKeyString , std : : make_shared < Chunk > ( chunk ) ) ) ; <nl> } <nl> <nl> / / If at least one diff was applied , the metadata is correct , but it might not have changed so <nl> std : : shared_ptr < ChunkManager > ChunkManager : : makeUpdated ( <nl> std : : move ( chunkMap ) , <nl> collectionVersion ) ) ; <nl> } <nl> + <nl> } / / namespace mongo <nl> mmm a / src / mongo / s / chunk_manager . h <nl> ppp b / src / mongo / s / chunk_manager . h <nl> <nl> # include < map > <nl> # include < set > <nl> # include < string > <nl> + # include < vector > <nl> <nl> # include " mongo / base / disallow_copying . h " <nl> # include " mongo / db / namespace_string . h " <nl> struct QuerySolutionNode ; <nl> class OperationContext ; <nl> <nl> / / Ordered map from the max for each chunk to an entry describing the chunk <nl> - using ChunkMap = BSONObjIndexedMap < std : : shared_ptr < Chunk > > ; <nl> + using ChunkMap = std : : map < std : : string , std : : shared_ptr < Chunk > > ; <nl> <nl> / / Map from a shard is to the max chunk version on that shard <nl> using ShardVersionMap = std : : map < ShardId , ChunkVersion > ; <nl> class ChunkManager : public std : : enable_shared_from_this < ChunkManager > { <nl> return _chunkMap . size ( ) ; <nl> } <nl> <nl> + / * * <nl> + * Returns true if a document with the given " shardKey " is owned by the shard with the given <nl> + * " shardId " in this routing table . If " shardKey " is empty returns false . If " shardKey " is not a <nl> + * valid shard key , the behaviour is undefined . 
<nl> + * / <nl> + bool keyBelongsToShard ( const BSONObj & shardKey , const ShardId & shardId ) const ; <nl> + <nl> + / * * <nl> + * Returns true if any chunk owned by the shard with the given " shardId " overlaps " range " . <nl> + * / <nl> + bool rangeOverlapsShard ( const ChunkRange & range , const ShardId & shardId ) const ; <nl> + <nl> + / * * <nl> + * Given a shardKey , returns the first chunk which is owned by shardId and overlaps or sorts <nl> + * after that shardKey . The returned iterator range always contains one or zero entries . If zero <nl> + * entries are returned , this means no such chunk exists . <nl> + * / <nl> + ConstRangeOfChunks getNextChunkOnShard ( const BSONObj & shardKey , const ShardId & shardId ) const ; <nl> + <nl> / * * <nl> * Given a shard key ( or a prefix ) that has been extracted from a document , returns the chunk <nl> * that contains that key . <nl> class ChunkManager : public std : : enable_shared_from_this < ChunkManager > { <nl> <nl> ChunkRange range ; <nl> ShardId shardId ; <nl> + std : : string ksMax ; <nl> } ; <nl> <nl> - using ChunkRangeMap = BSONObjIndexedMap < ShardAndChunkRange > ; <nl> + using ChunkRangeMap = std : : vector < ShardAndChunkRange > ; <nl> <nl> / * * <nl> * Contains different transformations of the chunk map for efficient querying <nl> class ChunkManager : public std : : enable_shared_from_this < ChunkManager > { <nl> / * * <nl> * Does a single pass over the chunkMap and constructs the ChunkMapViews object . <nl> * / <nl> - static ChunkMapViews _constructChunkMapViews ( const OID & epoch , const ChunkMap & chunkMap ) ; <nl> + static ChunkMapViews _constructChunkMapViews ( const OID & epoch , <nl> + const ChunkMap & chunkMap , <nl> + Ordering shardKeyOrdering ) ; <nl> <nl> ChunkManager ( NamespaceString nss , <nl> - boost : : optional < UUID > , <nl> + boost : : optional < UUID > uuid , <nl> KeyPattern shardKeyPattern , <nl> std : : unique_ptr < CollatorInterface > defaultCollator , <nl> bool unique , <nl> ChunkMap chunkMap , <nl> ChunkVersion collectionVersion ) ; <nl> <nl> + std : : string _extractKeyString ( const BSONObj & shardKeyValue ) const ; <nl> + <nl> + ChunkRangeMap : : const_iterator _rangeMapUpperBound ( const BSONObj & key ) const ; <nl> + <nl> + std : : pair < ChunkRangeMap : : const_iterator , ChunkRangeMap : : const_iterator > _overlappingRanges ( <nl> + const BSONObj & min , const BSONObj & max , bool isMaxInclusive ) const ; <nl> + <nl> / / The shard versioning mechanism hinges on keeping track of the number of times we reload <nl> / / ChunkManagers . <nl> const unsigned long long _sequenceNumber ; <nl> class ChunkManager : public std : : enable_shared_from_this < ChunkManager > { <nl> / / The key pattern used to shard the collection <nl> const ShardKeyPattern _shardKeyPattern ; <nl> <nl> + const Ordering _shardKeyOrdering ; <nl> + <nl> / / Default collation to use for routing data queries for this collection <nl> const std : : unique_ptr < CollatorInterface > _defaultCollator ; <nl> <nl> | SERVER - 32526 Use KeyString for ChunkMap key instead of BSONObj | mongodb/mongo | 9b3ff62c2776039725342f0f22a843e6b375f1be | 2018-01-13T14:02:11Z |
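The MongoDB diff above replaces the BSONObj-keyed ChunkMap with a std::map keyed by a KeyString-encoded std::string, so that findIntersectingChunk and the range lookups reduce to a plain string upper_bound over the chunks' max bounds. Below is a minimal sketch of that lookup pattern; the encode() helper, Chunk struct and shard names are illustrative stand-ins, not MongoDB's real KeyString API.

```cpp
#include <cstdio>
#include <iostream>
#include <map>
#include <string>

// Stand-in for KeyString encoding: any order-preserving string encoding of
// the shard key works for the map lookup (here: zero-padded decimal).
std::string encode(int shardKey) {
    char buf[16];
    std::snprintf(buf, sizeof(buf), "%010d", shardKey);
    return buf;
}

struct Chunk {
    int min, max;       // chunk owns keys in [min, max)
    std::string shard;
};

int main() {
    // Keyed by the encoded *max* bound of each chunk, mirroring
    // ChunkMap = std::map<std::string, std::shared_ptr<Chunk>> in the diff.
    std::map<std::string, Chunk> chunkMap{
        {encode(10), {0, 10, "shardA"}},
        {encode(20), {10, 20, "shardB"}},
        {encode(30), {20, 30, "shardA"}},
    };

    // findIntersectingChunk-style lookup: first chunk whose max is > key,
    // then verify that the chunk actually contains the key.
    const int key = 15;
    auto it = chunkMap.upper_bound(encode(key));
    if (it != chunkMap.end() && key >= it->second.min)
        std::cout << "key " << key << " is owned by " << it->second.shard << "\n";
}
```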
mmm a / main / main . cpp <nl> ppp b / main / main . cpp <nl> bool Main : : iteration ( ) { <nl> return exit ; <nl> } <nl> <nl> - if ( OS : : get_singleton ( ) - > is_in_low_processor_usage_mode ( ) | | ! DisplayServer : : get_singleton ( ) - > can_any_window_draw ( ) ) { <nl> - OS : : get_singleton ( ) - > delay_usec ( OS : : get_singleton ( ) - > get_low_processor_usage_mode_sleep_usec ( ) ) ; / / apply some delay to force idle time <nl> - } else { <nl> - uint32_t frame_delay = Engine : : get_singleton ( ) - > get_frame_delay ( ) ; <nl> - if ( frame_delay ) { <nl> - OS : : get_singleton ( ) - > delay_usec ( Engine : : get_singleton ( ) - > get_frame_delay ( ) * 1000 ) ; <nl> - } <nl> + const uint32_t frame_delay = Engine : : get_singleton ( ) - > get_frame_delay ( ) ; <nl> + if ( frame_delay ) { <nl> + / / Add fixed frame delay to decrease CPU / GPU usage . This doesn ' t take <nl> + / / the actual frame time into account . <nl> + / / Due to the high fluctuation of the actual sleep duration , it ' s not recommended <nl> + / / to use this as a FPS limiter . <nl> + OS : : get_singleton ( ) - > delay_usec ( frame_delay * 1000 ) ; <nl> + } <nl> + <nl> + / / Add a dynamic frame delay to decrease CPU / GPU usage . This takes the <nl> + / / previous frame time into account for a smoother result . <nl> + uint64_t dynamic_delay = 0 ; <nl> + if ( OS : : get_singleton ( ) - > is_in_low_processor_usage_mode ( ) | | ! DisplayServer : : get_singleton ( ) - > window_can_draw ( ) ) { <nl> + dynamic_delay = OS : : get_singleton ( ) - > get_low_processor_usage_mode_sleep_usec ( ) ; <nl> + } <nl> + const int target_fps = Engine : : get_singleton ( ) - > get_target_fps ( ) ; <nl> + if ( target_fps > 0 & & ! Engine : : get_singleton ( ) - > is_editor_hint ( ) ) { <nl> + / / Override the low processor usage mode sleep delay if the target FPS is lower . <nl> + dynamic_delay = MAX ( dynamic_delay , ( uint64_t ) ( 1000000 / target_fps ) ) ; <nl> } <nl> <nl> - int target_fps = Engine : : get_singleton ( ) - > get_target_fps ( ) ; <nl> - if ( target_fps > 0 & & ! Engine : : get_singleton ( ) - > is_editor_hint ( ) ) { <nl> - uint64_t time_step = 1000000L / target_fps ; <nl> - target_ticks + = time_step ; <nl> + if ( dynamic_delay > 0 ) { <nl> + target_ticks + = dynamic_delay ; <nl> uint64_t current_ticks = OS : : get_singleton ( ) - > get_ticks_usec ( ) ; <nl> + <nl> if ( current_ticks < target_ticks ) { <nl> OS : : get_singleton ( ) - > delay_usec ( target_ticks - current_ticks ) ; <nl> } <nl> + <nl> current_ticks = OS : : get_singleton ( ) - > get_ticks_usec ( ) ; <nl> - target_ticks = MIN ( MAX ( target_ticks , current_ticks - time_step ) , current_ticks + time_step ) ; <nl> + target_ticks = MIN ( MAX ( target_ticks , current_ticks - dynamic_delay ) , current_ticks + dynamic_delay ) ; <nl> } <nl> <nl> # ifdef TOOLS_ENABLED <nl> | Improve the low processor mode sleep precision | godotengine/godot | 1c28b269d8e691c9da2605c712120864ee1b467f | 2020-06-15T17:30:39Z |
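The Godot patch above folds the fixed frame delay and the low-processor/target-FPS delay into one pacing loop driven by target_ticks, so the sleep accounts for how long the frame actually took instead of sleeping a constant amount. A rough standalone sketch of that dynamic pacing step, using std::chrono and std::this_thread in place of Godot's OS singleton (the 60 FPS figure is just an example value):

```cpp
#include <algorithm>
#include <chrono>
#include <cstdint>
#include <thread>

static uint64_t ticks_usec() {
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

int main() {
    const uint64_t dynamic_delay = 1000000 / 60;  // e.g. target 60 FPS
    uint64_t target_ticks = ticks_usec();

    for (int frame = 0; frame < 10; ++frame) {
        // ... process and render the frame here ...

        // Same pacing as the patch: advance the target by the desired delay,
        // sleep only for whatever is left of it, then clamp target_ticks so
        // it never drifts more than one delay away from "now".
        target_ticks += dynamic_delay;
        uint64_t now = ticks_usec();
        if (now < target_ticks)
            std::this_thread::sleep_for(std::chrono::microseconds(target_ticks - now));
        now = ticks_usec();
        target_ticks = std::min(std::max(target_ticks, now - dynamic_delay),
                                now + dynamic_delay);
    }
}
```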
mmm a / cocos / renderer / CCPrimitiveCommand . cpp <nl> ppp b / cocos / renderer / CCPrimitiveCommand . cpp <nl> void PrimitiveCommand : : execute ( ) const <nl> _glProgramState - > apply ( _mv ) ; <nl> <nl> _primitive - > draw ( ) ; <nl> + CC_INCREMENT_GL_DRAWN_BATCHES_AND_VERTICES ( 1 , _primitive - > getCount ( ) ) ; <nl> } <nl> <nl> NS_CC_END <nl> | add draw calls and draw verts for PrimitiveCommand | cocos2d/cocos2d-x | f702b4e9b38d71f7823ef63172c0fc149bad5485 | 2014-09-16T04:04:35Z |
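The cocos2d-x change above only adds a stats hook, but the pattern is worth spelling out: each draw submission bumps per-frame counters for batches and vertices, which the engine later displays and resets every frame. A toy illustration of that counter pattern, not the actual CC_INCREMENT_GL_DRAWN_BATCHES_AND_VERTICES definition:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical per-frame renderer statistics.
struct RenderStats {
    uint32_t draw_calls = 0;
    uint32_t vertices = 0;
};

static RenderStats g_stats;

#define INCREMENT_DRAWN_BATCHES_AND_VERTICES(batches, verts) \
    do { g_stats.draw_calls += (batches); g_stats.vertices += (verts); } while (0)

void drawPrimitive(uint32_t vertexCount) {
    // ... issue the actual GL draw call here ...
    INCREMENT_DRAWN_BATCHES_AND_VERTICES(1, vertexCount);
}

int main() {
    for (int i = 0; i < 3; ++i)
        drawPrimitive(36);
    std::cout << g_stats.draw_calls << " draw calls, "
              << g_stats.vertices << " vertices this frame\n";
    g_stats = RenderStats{};  // reset at end of frame, as an engine would
}
```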
mmm a / docs / en / operations / settings / settings . md <nl> ppp b / docs / en / operations / settings / settings . md <nl> ClickHouse supports the following algorithms of choosing replicas : <nl> - [ Nearest hostname ] ( # load_balancing - nearest_hostname ) <nl> - [ In order ] ( # load_balancing - in_order ) <nl> - [ First or random ] ( # load_balancing - first_or_random ) <nl> + - [ Round robin ] ( # load_balancing - round_robin ) <nl> <nl> # # # Random ( by Default ) { # load_balancing - random } <nl> <nl> This algorithm chooses the first replica in the set or a random replica if the f <nl> <nl> The ` first_or_random ` algorithm solves the problem of the ` in_order ` algorithm . With ` in_order ` , if one replica goes down , the next one gets a double load while the remaining replicas handle the usual amount of traffic . When using the ` first_or_random ` algorithm , the load is evenly distributed among replicas that are still available . <nl> <nl> + # # # Round robin { # load_balancing - round_robin } <nl> + <nl> + ` ` ` sql <nl> + load_balancing = round_robin <nl> + ` ` ` <nl> + <nl> + This algorithm uses round robin policy across replicas with the same number of errors ( only the queries with ` round_robin ` policy is accounted ) . <nl> + <nl> # # prefer \ _localhost \ _replica { # settings - prefer - localhost - replica } <nl> <nl> Enables / disables preferable using the localhost replica when processing distributed queries . <nl> mmm a / src / Client / ConnectionPoolWithFailover . cpp <nl> ppp b / src / Client / ConnectionPoolWithFailover . cpp <nl> <nl> <nl> # include < IO / ConnectionTimeouts . h > <nl> <nl> - <nl> namespace ProfileEvents <nl> { <nl> extern const Event DistributedConnectionMissingTable ; <nl> IConnectionPool : : Entry ConnectionPoolWithFailover : : get ( const ConnectionTimeouts <nl> case LoadBalancing : : FIRST_OR_RANDOM : <nl> get_priority = [ ] ( size_t i ) - > size_t { return i > = 1 ; } ; <nl> break ; <nl> + case LoadBalancing : : ROUND_ROBIN : <nl> + if ( last_used > = nested_pools . size ( ) ) <nl> + last_used = 0 ; <nl> + + + last_used ; <nl> + / * Consider nested_pools . size ( ) equals to 5 <nl> + * last_used = 1 - > get_priority : 0 1 2 3 4 <nl> + * last_used = 2 - > get_priority : 5 0 1 2 3 <nl> + * last_used = 3 - > get_priority : 5 4 0 1 2 <nl> + * . . . <nl> + * * / <nl> + get_priority = [ & ] ( size_t i ) { + + i ; return i < last_used ? nested_pools . size ( ) - i : i - last_used ; } ; <nl> + break ; <nl> } <nl> <nl> return Base : : get ( try_get_entry , get_priority ) ; <nl> std : : vector < ConnectionPoolWithFailover : : TryResult > ConnectionPoolWithFailover : : g <nl> case LoadBalancing : : FIRST_OR_RANDOM : <nl> get_priority = [ ] ( size_t i ) - > size_t { return i > = 1 ; } ; <nl> break ; <nl> + case LoadBalancing : : ROUND_ROBIN : <nl> + if ( last_used > = nested_pools . size ( ) ) <nl> + last_used = 0 ; <nl> + + + last_used ; <nl> + / * Consider nested_pools . size ( ) equals to 5 <nl> + * last_used = 1 - > get_priority : 0 1 2 3 4 <nl> + * last_used = 2 - > get_priority : 5 0 1 2 3 <nl> + * last_used = 3 - > get_priority : 5 4 0 1 2 <nl> + * . . . <nl> + * * / <nl> + get_priority = [ & ] ( size_t i ) { + + i ; return i < last_used ? nested_pools . size ( ) - i : i - last_used ; } ; <nl> + break ; <nl> } <nl> <nl> bool fallback_to_stale_replicas = settings ? bool ( settings - > fallback_to_stale_replicas_for_distributed_queries ) : true ; <nl> mmm a / src / Client / ConnectionPoolWithFailover . 
h <nl> ppp b / src / Client / ConnectionPoolWithFailover . h <nl> class ConnectionPoolWithFailover : public IConnectionPool , private PoolWithFailo <nl> <nl> private : <nl> std : : vector < size_t > hostname_differences ; / / / Distances from name of this host to the names of hosts of pools . <nl> + size_t last_used = 0 ; / / / Last used for round_robin policy . <nl> LoadBalancing default_load_balancing ; <nl> } ; <nl> <nl> mmm a / src / Core / SettingsCollection . cpp <nl> ppp b / src / Core / SettingsCollection . cpp <nl> void SettingURI : : deserialize ( ReadBuffer & buf , SettingsBinaryFormat ) <nl> M ( RANDOM , " random " ) \ <nl> M ( NEAREST_HOSTNAME , " nearest_hostname " ) \ <nl> M ( IN_ORDER , " in_order " ) \ <nl> - M ( FIRST_OR_RANDOM , " first_or_random " ) <nl> + M ( FIRST_OR_RANDOM , " first_or_random " ) \ <nl> + M ( ROUND_ROBIN , " round_robin " ) <nl> IMPLEMENT_SETTING_ENUM ( LoadBalancing , LOAD_BALANCING_LIST_OF_NAMES , ErrorCodes : : UNKNOWN_LOAD_BALANCING ) <nl> <nl> <nl> mmm a / src / Core / SettingsCollection . h <nl> ppp b / src / Core / SettingsCollection . h <nl> enum class LoadBalancing <nl> / / / a replica is selected among the replicas with the minimum number of errors <nl> / / / with the minimum number of distinguished characters in the replica name and local hostname <nl> NEAREST_HOSTNAME , <nl> - / / / replicas are walked through strictly in order ; the number of errors does not matter <nl> + / / replicas with the same number of errors are accessed in the same order <nl> + / / as they are specified in the configuration . <nl> IN_ORDER , <nl> / / / if first replica one has higher number of errors , <nl> / / / pick a random one from replicas with minimum number of errors <nl> FIRST_OR_RANDOM , <nl> + / / round robin across replicas with the same number of errors . <nl> + ROUND_ROBIN , <nl> } ; <nl> using SettingLoadBalancing = SettingEnum < LoadBalancing > ; <nl> <nl> mmm a / src / Storages / Distributed / DirectoryMonitor . cpp <nl> ppp b / src / Storages / Distributed / DirectoryMonitor . cpp <nl> <nl> # include < common / StringRef . h > <nl> # include < Common / ActionBlocker . h > <nl> # include < Interpreters / Context . h > <nl> + # include < Interpreters / Cluster . h > <nl> # include < Storages / Distributed / DirectoryMonitor . h > <nl> # include < Storages / StorageDistributed . h > <nl> # include < IO / ReadBufferFromFile . h > <nl> mmm a / src / Storages / StorageDistributed . cpp <nl> ppp b / src / Storages / StorageDistributed . cpp <nl> <nl> <nl> # include < Interpreters / ClusterProxy / SelectStreamFactory . h > <nl> # include < Interpreters / ClusterProxy / executeQuery . h > <nl> + # include < Interpreters / Cluster . h > <nl> # include < Interpreters / ExpressionAnalyzer . h > <nl> # include < Interpreters / InterpreterAlterQuery . h > <nl> # include < Interpreters / InterpreterDescribeQuery . h > <nl> <nl> # include < Interpreters / getTableExpressions . h > <nl> <nl> # include < Core / Field . h > <nl> + # include < Core / Settings . h > <nl> <nl> # include < IO / ReadHelpers . h > <nl> <nl> mmm a / src / Storages / StorageDistributed . h <nl> ppp b / src / Storages / StorageDistributed . h <nl> <nl> # include < Common / SimpleIncrement . h > <nl> # include < Client / ConnectionPool . h > <nl> # include < Client / ConnectionPoolWithFailover . h > <nl> - # include < Core / Settings . h > <nl> - # include < Interpreters / Cluster . h > <nl> # include < Parsers / ASTFunction . h > <nl> # include < common / logger_useful . 
h > <nl> # include < Common / ActionBlocker . h > <nl> <nl> namespace DB <nl> { <nl> <nl> + struct Settings ; <nl> class Context ; <nl> <nl> class VolumeJBOD ; <nl> using VolumeJBODPtr = std : : shared_ptr < VolumeJBOD > ; <nl> class ExpressionActions ; <nl> using ExpressionActionsPtr = std : : shared_ptr < ExpressionActions > ; <nl> <nl> + class Cluster ; <nl> + using ClusterPtr = std : : shared_ptr < Cluster > ; <nl> + <nl> / * * A distributed table that resides on multiple servers . <nl> * Uses data from the specified database and tables on each server . <nl> * <nl> new file mode 100644 <nl> index 00000000000 . . e69de29bb2d <nl> new file mode 100644 <nl> index 00000000000 . . 9efd681e74e <nl> mmm / dev / null <nl> ppp b / tests / integration / test_distributed_load_balancing / configs / remote_servers . xml <nl> <nl> + < yandex > <nl> + < remote_servers > <nl> + < replicas_cluster > <nl> + < shard > <nl> + < replica > <nl> + < host > n1 < / host > <nl> + < port > 9000 < / port > <nl> + < / replica > <nl> + < replica > <nl> + < host > n2 < / host > <nl> + < port > 9000 < / port > <nl> + < / replica > <nl> + < replica > <nl> + < host > n3 < / host > <nl> + < port > 9000 < / port > <nl> + < / replica > <nl> + < / shard > <nl> + < / replicas_cluster > <nl> + <nl> + < shards_cluster > <nl> + < node > <nl> + < host > n1 < / host > <nl> + < port > 9000 < / port > <nl> + < / node > <nl> + < node > <nl> + < host > n2 < / host > <nl> + < port > 9000 < / port > <nl> + < / node > <nl> + < node > <nl> + < host > n3 < / host > <nl> + < port > 9000 < / port > <nl> + < / node > <nl> + < / shards_cluster > <nl> + < / remote_servers > <nl> + < / yandex > <nl> + <nl> new file mode 100644 <nl> index 00000000000 . . fa6dfb20a88 <nl> mmm / dev / null <nl> ppp b / tests / integration / test_distributed_load_balancing / test . py <nl> <nl> + # pylint : disable = unused - argument <nl> + # pylint : disable = redefined - outer - name <nl> + # pylint : disable = line - too - long <nl> + <nl> + import uuid <nl> + import pytest <nl> + <nl> + from helpers . cluster import ClickHouseCluster <nl> + <nl> + cluster = ClickHouseCluster ( __file__ ) <nl> + <nl> + n1 = cluster . add_instance ( ' n1 ' , main_configs = [ ' configs / remote_servers . xml ' ] ) <nl> + n2 = cluster . add_instance ( ' n2 ' , main_configs = [ ' configs / remote_servers . xml ' ] ) <nl> + n3 = cluster . add_instance ( ' n3 ' , main_configs = [ ' configs / remote_servers . xml ' ] ) <nl> + <nl> + nodes = len ( cluster . instances ) <nl> + queries = nodes * 5 <nl> + <nl> + def create_tables ( ) : <nl> + for n in cluster . instances . values ( ) : <nl> + n . query ( ' DROP TABLE IF EXISTS data ' ) <nl> + n . query ( ' DROP TABLE IF EXISTS dist ' ) <nl> + n . query ( ' CREATE TABLE data ( key Int ) Engine = Memory ( ) ' ) <nl> + n . query ( " " " <nl> + CREATE TABLE dist AS data <nl> + Engine = Distributed ( <nl> + replicas_cluster , <nl> + currentDatabase ( ) , <nl> + data ) <nl> + " " " . format ( ) ) <nl> + <nl> + def make_uuid ( ) : <nl> + return uuid . uuid4 ( ) . hex <nl> + <nl> + @ pytest . fixture ( scope = ' module ' , autouse = True ) <nl> + def start_cluster ( ) : <nl> + try : <nl> + cluster . start ( ) <nl> + create_tables ( ) <nl> + yield cluster <nl> + finally : <nl> + cluster . 
shutdown ( ) <nl> + <nl> + def get_node ( query_node , * args , * * kwargs ) : <nl> + query_id = make_uuid ( ) <nl> + <nl> + settings = { <nl> + ' query_id ' : query_id , <nl> + ' log_queries ' : 1 , <nl> + ' log_queries_min_type ' : ' QUERY_START ' , <nl> + ' prefer_localhost_replica ' : 0 , <nl> + } <nl> + if ' settings ' not in kwargs : <nl> + kwargs [ ' settings ' ] = settings <nl> + else : <nl> + kwargs [ ' settings ' ] . update ( settings ) <nl> + <nl> + query_node . query ( ' SELECT * FROM dist ' , * args , * * kwargs ) <nl> + <nl> + for n in cluster . instances . values ( ) : <nl> + n . query ( ' SYSTEM FLUSH LOGS ' ) <nl> + <nl> + rows = query_node . query ( " " " <nl> + SELECT c . host_name <nl> + FROM ( <nl> + SELECT _shard_num <nl> + FROM cluster ( shards_cluster , system . query_log ) <nl> + WHERE <nl> + initial_query_id = ' { query_id } ' AND <nl> + is_initial_query = 0 AND <nl> + type = ' QueryFinish ' <nl> + ORDER BY event_date DESC , event_time DESC <nl> + LIMIT 1 <nl> + ) a <nl> + JOIN system . clusters c <nl> + ON a . _shard_num = c . shard_num AND cluster = ' shards_cluster ' <nl> + " " " . format ( query_id = query_id ) ) <nl> + return rows . strip ( ) <nl> + <nl> + # TODO : right now random distribution looks bad , but works <nl> + def test_load_balancing_default ( ) : <nl> + unique_nodes = set ( ) <nl> + for _ in range ( 0 , queries ) : <nl> + unique_nodes . add ( get_node ( n1 , settings = { ' load_balancing ' : ' random ' } ) ) <nl> + assert len ( unique_nodes ) = = nodes , unique_nodes <nl> + <nl> + def test_load_balancing_nearest_hostname ( ) : <nl> + unique_nodes = set ( ) <nl> + for _ in range ( 0 , queries ) : <nl> + unique_nodes . add ( get_node ( n1 , settings = { ' load_balancing ' : ' nearest_hostname ' } ) ) <nl> + assert len ( unique_nodes ) = = 1 , unique_nodes <nl> + assert unique_nodes = = set ( [ ' n1 ' ] ) <nl> + <nl> + def test_load_balancing_in_order ( ) : <nl> + unique_nodes = set ( ) <nl> + for _ in range ( 0 , queries ) : <nl> + unique_nodes . add ( get_node ( n1 , settings = { ' load_balancing ' : ' in_order ' } ) ) <nl> + assert len ( unique_nodes ) = = 1 , unique_nodes <nl> + assert unique_nodes = = set ( [ ' n1 ' ] ) <nl> + <nl> + def test_load_balancing_first_or_random ( ) : <nl> + unique_nodes = set ( ) <nl> + for _ in range ( 0 , queries ) : <nl> + unique_nodes . add ( get_node ( n1 , settings = { ' load_balancing ' : ' first_or_random ' } ) ) <nl> + assert len ( unique_nodes ) = = 1 , unique_nodes <nl> + assert unique_nodes = = set ( [ ' n1 ' ] ) <nl> + <nl> + # TODO : last_used will be reset on config reload , hence may fail <nl> + def test_load_balancing_round_robin ( ) : <nl> + unique_nodes = set ( ) <nl> + for _ in range ( 0 , nodes ) : <nl> + unique_nodes . add ( get_node ( n1 , settings = { ' load_balancing ' : ' round_robin ' } ) ) <nl> + assert len ( unique_nodes ) = = nodes , unique_nodes <nl> + assert unique_nodes = = set ( [ ' n1 ' , ' n2 ' , ' n3 ' ] ) <nl> | Merge pull request from azat / load - balancing - round - robin | ClickHouse/ClickHouse | ff0262626ab08919909d967b2859ee107656e659 | 2020-06-17T10:34:59Z |
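The round_robin policy in the ClickHouse diff above is carried entirely by the get_priority lambda: the replica at index last_used - 1 gets priority 0, later replicas get increasing priorities, and already-used replicas get the largest values, so each query rotates the preferred replica. The sketch below runs the exact formula from ConnectionPoolWithFailover.cpp for a pool of 5; note the wrapped-around replicas print one lower than the ASCII table in the code comment, but the relative ordering, which is all the pool uses, is the same.

```cpp
#include <cstddef>
#include <iostream>

int main() {
    const size_t pool_size = 5;
    size_t last_used = 0;  // persists across queries in the real pool

    for (int query = 0; query < 3; ++query) {
        if (last_used >= pool_size)
            last_used = 0;
        ++last_used;

        // Priority formula copied from the patch.
        auto get_priority = [&](size_t i) {
            ++i;
            return i < last_used ? pool_size - i : i - last_used;
        };

        std::cout << "last_used=" << last_used << " priorities:";
        for (size_t i = 0; i < pool_size; ++i)
            std::cout << ' ' << get_priority(i);
        std::cout << '\n';
    }
}
```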
mmm a / python / mpm . py <nl> ppp b / python / mpm . py <nl> def clear_grid ( ) : <nl> <nl> @ ti . kernel <nl> def p2g ( ) : <nl> + ti . parallelize ( 4 ) <nl> for p in x : <nl> base = ti . cast ( x [ p ] * inv_dx - 0 . 5 , ti . i32 ) <nl> fx = x [ p ] * inv_dx - ti . cast ( base , ti . f32 ) <nl> mmm a / python / taichi_lang / __init__ . py <nl> ppp b / python / taichi_lang / __init__ . py <nl> <nl> profiler_print = lambda : core . get_current_program ( ) . profiler_print ( ) <nl> profiler_clear = lambda : core . get_current_program ( ) . profiler_clear ( ) <nl> <nl> + parallelize = core . parallelize <nl> + vectorize = core . vectorize <nl> + block_dim = core . block_dim <nl> + cache = core . cache <nl> + <nl> + schedules = [ parallelize , vectorize , block_dim , cache ] <nl> + <nl> __all__ = [ kernel , layout , var , global_var , f64 , float64 , f32 , float32 , i32 , <nl> int32 , print , core , index , make_expr_group , i , j , k , ij , ijk , <nl> inside_kernel , Matrix , Vector , cfg , current_cfg , outer_product , <nl> - profiler_print , profiler_clear ] <nl> + profiler_print , profiler_clear ] + schedules <nl> mmm a / src / python_bindings . cpp <nl> ppp b / src / python_bindings . cpp <nl> PYBIND11_MODULE ( taichi_lang_core , m ) { <nl> } ) ; <nl> <nl> m . def ( " print_ " , Print_ ) ; <nl> + <nl> + / / Schedules <nl> + m . def ( " parallelize " , Parallelize ) ; <nl> + m . def ( " vectorize " , Vectorize ) ; <nl> + m . def ( " block_dim " , BlockDim ) ; <nl> + m . def ( " cache " , Cache ) ; <nl> } <nl> <nl> TLANG_NAMESPACE_END <nl> | schedules | taichi-dev/taichi | 540475ac4964289ef2e5793c9018feb723d1c177 | 2019-06-09T18:03:27Z |
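The Taichi commit above exposes the C++ schedule directives (Parallelize, Vectorize, BlockDim, Cache) to Python through pybind11 and re-exports them from the package, which is what lets the mpm.py hunk call ti.parallelize(4) inside a kernel. A stripped-down sketch of that binding pattern; the module name, globals and function bodies here are hypothetical, only the m.def wiring mirrors the diff.

```cpp
#include <pybind11/pybind11.h>

namespace py = pybind11;

// Hypothetical schedule state that the exported directives would mutate.
static int g_parallelize_threads = 1;
static int g_vectorize_width = 1;

void Parallelize(int n) { g_parallelize_threads = n; }
void Vectorize(int w) { g_vectorize_width = w; }

PYBIND11_MODULE(toy_lang_core, m) {
    // Same pattern as the taichi_lang_core bindings: plain free functions
    // exported so Python-side code can set schedules before a kernel runs.
    m.def("parallelize", &Parallelize);
    m.def("vectorize", &Vectorize);
}
```

Built as a Python extension, this would be driven the same way the package drives the real bindings, e.g. `import toy_lang_core; toy_lang_core.parallelize(4)`.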
mmm a / api / envoy / api / v2 / route / route_components . proto <nl> ppp b / api / envoy / api / v2 / route / route_components . proto <nl> message CorsPolicy { <nl> core . RuntimeFractionalPercent shadow_enabled = 10 ; <nl> } <nl> <nl> - / / [ # next - free - field : 31 ] <nl> + / / [ # next - free - field : 32 ] <nl> message RouteAction { <nl> enum ClusterNotFoundResponseCode { <nl> / / HTTP status code - 503 Service Unavailable . <nl> message RouteAction { <nl> <nl> InternalRedirectAction internal_redirect_action = 26 ; <nl> <nl> + / / An internal redirect is handled , iff the number of previous internal redirects that a <nl> + / / downstream request has encountered is lower than this value , and <nl> + / / : ref : ` internal_redirect_action < envoy_api_field_route . RouteAction . internal_redirect_action > ` <nl> + / / is set to : ref : ` HANDLE_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_route . RouteAction . InternalRedirectAction . HANDLE_INTERNAL_REDIRECT > ` <nl> + / / In the case where a downstream request is bounced among multiple routes by internal redirect , <nl> + / / the first route that hits this threshold , or has <nl> + / / : ref : ` internal_redirect_action < envoy_api_field_route . RouteAction . internal_redirect_action > ` <nl> + / / set to <nl> + / / : ref : ` PASS_THROUGH_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_route . RouteAction . InternalRedirectAction . PASS_THROUGH_INTERNAL_REDIRECT > ` <nl> + / / will pass the redirect back to downstream . <nl> + / / <nl> + / / If not specified , at most one redirect will be followed . <nl> + google . protobuf . UInt32Value max_internal_redirects = 31 ; <nl> + <nl> / / Indicates that the route has a hedge policy . Note that if this is set , <nl> / / it ' ll take precedence over the virtual host level hedge policy entirely <nl> / / ( e . g . : policies are not merged , most internal one becomes the enforced policy ) . <nl> mmm a / api / envoy / config / route / v3alpha / route_components . proto <nl> ppp b / api / envoy / config / route / v3alpha / route_components . proto <nl> message CorsPolicy { <nl> core . v3alpha . RuntimeFractionalPercent shadow_enabled = 10 ; <nl> } <nl> <nl> - / / [ # next - free - field : 31 ] <nl> + / / [ # next - free - field : 32 ] <nl> message RouteAction { <nl> option ( udpa . annotations . versioning ) . previous_message_type = " envoy . api . v2 . route . RouteAction " ; <nl> <nl> message RouteAction { <nl> <nl> InternalRedirectAction internal_redirect_action = 26 ; <nl> <nl> + / / An internal redirect is handled , iff the number of previous internal redirects that a <nl> + / / downstream request has encountered is lower than this value , and <nl> + / / : ref : ` internal_redirect_action <nl> + / / < envoy_api_field_config . route . v3alpha . RouteAction . internal_redirect_action > ` is set to <nl> + / / : ref : ` HANDLE_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_config . route . v3alpha . RouteAction . InternalRedirectAction . HANDLE_INTERNAL_REDIRECT > ` <nl> + / / In the case where a downstream request is bounced among multiple routes by internal redirect , <nl> + / / the first route that hits this threshold , or has <nl> + / / : ref : ` internal_redirect_action <nl> + / / < envoy_api_field_config . route . v3alpha . RouteAction . internal_redirect_action > ` set to <nl> + / / : ref : ` PASS_THROUGH_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_config . route . v3alpha . RouteAction . InternalRedirectAction . 
PASS_THROUGH_INTERNAL_REDIRECT > ` <nl> + / / will pass the redirect back to downstream . <nl> + / / <nl> + / / If not specified , at most one redirect will be followed . <nl> + google . protobuf . UInt32Value max_internal_redirects = 31 ; <nl> + <nl> / / Indicates that the route has a hedge policy . Note that if this is set , <nl> / / it ' ll take precedence over the virtual host level hedge policy entirely <nl> / / ( e . g . : policies are not merged , most internal one becomes the enforced policy ) . <nl> mmm a / docs / root / intro / arch_overview / http / http_connection_management . rst <nl> ppp b / docs / root / intro / arch_overview / http / http_connection_management . rst <nl> Envoy supports handling 302 redirects internally , that is capturing a 302 redire <nl> synthesizing a new request , sending it to the upstream specified by the new route match , and <nl> returning the redirected response as the response to the original request . <nl> <nl> - Internal redirects are configured via the ref : ` redirect action <nl> - < envoy_api_field_route . RouteAction . redirect_action > ` field in <nl> + Internal redirects are configured via the ref : ` internal redirect action <nl> + < envoy_api_field_route . RouteAction . internal_redirect_action > ` field and <nl> + ` max internal redirects < envoy_api_field_route . RouteAction . max_internal_redirects > ` field in <nl> route configuration . When redirect handling is on , any 302 response from upstream is <nl> subject to the redirect being handled by Envoy . <nl> <nl> For a redirect to be handled successfully it must pass the following checks : <nl> 2 . Have a * location * header with a valid , fully qualified URL matching the scheme of the original request . <nl> 3 . The request must have been fully processed by Envoy . <nl> 4 . The request must not have a body . <nl> - 5 . The request must have not been previously redirected , as determined by the presence of an x - envoy - original - url header . <nl> + 5 . The number of previously handled internal redirect within a given downstream request does not exceed <nl> + ` max internal redirects < envoy_api_field_route . RouteAction . max_internal_redirects > ` of the route <nl> + that the request or redirected request is hitting . <nl> <nl> Any failure will result in redirect being passed downstream instead . <nl> <nl> + Since a redirected request may be bounced between different routes , any route in the chain of redirects that <nl> + <nl> + 1 . does not have internal redirect enabled <nl> + 2 . or has a ` max internal redirects <nl> + < envoy_api_field_route . RouteAction . max_internal_redirects > ` <nl> + smaller or equal to the redirect chain length when the redirect chain hits it <nl> + <nl> + will cause the redirect to be passed downstream . <nl> + <nl> Once the redirect has passed these checks , the request headers which were shipped to the original <nl> upstream will be modified by : <nl> <nl> mmm a / docs / root / intro / version_history . rst <nl> ppp b / docs / root / intro / version_history . rst <nl> Version history <nl> * router : added support for percentage - based : ref : ` retry budgets < envoy_api_field_cluster . CircuitBreakers . Thresholds . retry_budget > ` <nl> * router : allow using a : ref : ` query parameter < envoy_api_field_route . RouteAction . HashPolicy . query_parameter > ` for HTTP consistent hashing . <nl> * router : exposed DOWNSTREAM_REMOTE_ADDRESS as custom HTTP request / response headers . 
<nl> + * router : added support for : ref : ` max_internal_redirects < envoy_api_field_route . RouteAction . max_internal_redirects > ` for configurable maximum internal redirect hops . <nl> * router : skip the Location header when the response code is not a 201 or a 3xx . <nl> * router : added : ref : ` auto_sni < envoy_api_field_core . UpstreamHttpProtocolOptions . auto_sni > ` to support setting SNI to transport socket for new upstream connections based on the downstream HTTP host / authority header . <nl> * server : added the : option : ` - - disable - extensions ` CLI option , to disable extensions at startup . <nl> mmm a / generated_api_shadow / envoy / api / v2 / route / route_components . proto <nl> ppp b / generated_api_shadow / envoy / api / v2 / route / route_components . proto <nl> message CorsPolicy { <nl> core . RuntimeFractionalPercent shadow_enabled = 10 ; <nl> } <nl> <nl> - / / [ # next - free - field : 31 ] <nl> + / / [ # next - free - field : 32 ] <nl> message RouteAction { <nl> enum ClusterNotFoundResponseCode { <nl> / / HTTP status code - 503 Service Unavailable . <nl> message RouteAction { <nl> <nl> InternalRedirectAction internal_redirect_action = 26 ; <nl> <nl> + / / An internal redirect is handled , iff the number of previous internal redirects that a <nl> + / / downstream request has encountered is lower than this value , and <nl> + / / : ref : ` internal_redirect_action < envoy_api_field_route . RouteAction . internal_redirect_action > ` <nl> + / / is set to : ref : ` HANDLE_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_route . RouteAction . InternalRedirectAction . HANDLE_INTERNAL_REDIRECT > ` <nl> + / / In the case where a downstream request is bounced among multiple routes by internal redirect , <nl> + / / the first route that hits this threshold , or has <nl> + / / : ref : ` internal_redirect_action < envoy_api_field_route . RouteAction . internal_redirect_action > ` <nl> + / / set to <nl> + / / : ref : ` PASS_THROUGH_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_route . RouteAction . InternalRedirectAction . PASS_THROUGH_INTERNAL_REDIRECT > ` <nl> + / / will pass the redirect back to downstream . <nl> + / / <nl> + / / If not specified , at most one redirect will be followed . <nl> + google . protobuf . UInt32Value max_internal_redirects = 31 ; <nl> + <nl> / / Indicates that the route has a hedge policy . Note that if this is set , <nl> / / it ' ll take precedence over the virtual host level hedge policy entirely <nl> / / ( e . g . : policies are not merged , most internal one becomes the enforced policy ) . <nl> mmm a / generated_api_shadow / envoy / config / route / v3alpha / route_components . proto <nl> ppp b / generated_api_shadow / envoy / config / route / v3alpha / route_components . proto <nl> message CorsPolicy { <nl> core . v3alpha . RuntimeFractionalPercent shadow_enabled = 10 ; <nl> } <nl> <nl> - / / [ # next - free - field : 31 ] <nl> + / / [ # next - free - field : 32 ] <nl> message RouteAction { <nl> option ( udpa . annotations . versioning ) . previous_message_type = " envoy . api . v2 . route . RouteAction " ; <nl> <nl> message RouteAction { <nl> <nl> InternalRedirectAction internal_redirect_action = 26 ; <nl> <nl> + / / An internal redirect is handled , iff the number of previous internal redirects that a <nl> + / / downstream request has encountered is lower than this value , and <nl> + / / : ref : ` internal_redirect_action <nl> + / / < envoy_api_field_config . route . v3alpha . RouteAction . 
internal_redirect_action > ` is set to <nl> + / / : ref : ` HANDLE_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_config . route . v3alpha . RouteAction . InternalRedirectAction . HANDLE_INTERNAL_REDIRECT > ` <nl> + / / In the case where a downstream request is bounced among multiple routes by internal redirect , <nl> + / / the first route that hits this threshold , or has <nl> + / / : ref : ` internal_redirect_action <nl> + / / < envoy_api_field_config . route . v3alpha . RouteAction . internal_redirect_action > ` set to <nl> + / / : ref : ` PASS_THROUGH_INTERNAL_REDIRECT <nl> + / / < envoy_api_enum_value_config . route . v3alpha . RouteAction . InternalRedirectAction . PASS_THROUGH_INTERNAL_REDIRECT > ` <nl> + / / will pass the redirect back to downstream . <nl> + / / <nl> + / / If not specified , at most one redirect will be followed . <nl> + google . protobuf . UInt32Value max_internal_redirects = 31 ; <nl> + <nl> / / Indicates that the route has a hedge policy . Note that if this is set , <nl> / / it ' ll take precedence over the virtual host level hedge policy entirely <nl> / / ( e . g . : policies are not merged , most internal one becomes the enforced policy ) . <nl> mmm a / include / envoy / router / router . h <nl> ppp b / include / envoy / router / router . h <nl> class RouteEntry : public ResponseEntry { <nl> * / <nl> virtual InternalRedirectAction internalRedirectAction ( ) const PURE ; <nl> <nl> + / * * <nl> + * @ returns the threshold of number of previously handled internal redirects , for this route to <nl> + * stop handle internal redirects . <nl> + * / <nl> + virtual uint32_t maxInternalRedirects ( ) const PURE ; <nl> + <nl> / * * <nl> * @ return std : : string & the name of the route . <nl> * / <nl> mmm a / include / envoy / stream_info / BUILD <nl> ppp b / include / envoy / stream_info / BUILD <nl> envoy_cc_library ( <nl> external_deps = [ " abseil_optional " ] , <nl> deps = [ " / / source / common / protobuf " ] , <nl> ) <nl> + <nl> + envoy_cc_library ( <nl> + name = " uint32_accessor_interface " , <nl> + hdrs = [ " uint32_accessor . h " ] , <nl> + deps = [ <nl> + " : filter_state_interface " , <nl> + ] , <nl> + ) <nl> new file mode 100644 <nl> index 00000000000 . . 122658ec87e <nl> mmm / dev / null <nl> ppp b / include / envoy / stream_info / uint32_accessor . h <nl> <nl> + # pragma once <nl> + <nl> + # include " envoy / common / pure . h " <nl> + # include " envoy / stream_info / filter_state . h " <nl> + <nl> + namespace Envoy { <nl> + namespace StreamInfo { <nl> + <nl> + / * * <nl> + * A FilterState object that tracks a single uint32_t value . <nl> + * / <nl> + class UInt32Accessor : public FilterState : : Object { <nl> + public : <nl> + / * * <nl> + * Increments the tracked value by 1 . <nl> + * / <nl> + virtual void increment ( ) PURE ; <nl> + <nl> + / * * <nl> + * @ return the tracked value . <nl> + * / <nl> + virtual uint32_t value ( ) const PURE ; <nl> + } ; <nl> + <nl> + } / / namespace StreamInfo <nl> + } / / namespace Envoy <nl> mmm a / source / common / http / async_client_impl . h <nl> ppp b / source / common / http / async_client_impl . 
h <nl> class AsyncStreamImpl : public AsyncClient : : Stream , <nl> Router : : InternalRedirectAction internalRedirectAction ( ) const override { <nl> return Router : : InternalRedirectAction : : PassThrough ; <nl> } <nl> + uint32_t maxInternalRedirects ( ) const override { return 1 ; } <nl> const std : : string & routeName ( ) const override { return route_name_ ; } <nl> std : : unique_ptr < const HashPolicyImpl > hash_policy_ ; <nl> static const NullHedgePolicy hedge_policy_ ; <nl> mmm a / source / common / router / BUILD <nl> ppp b / source / common / router / BUILD <nl> envoy_cc_library ( <nl> " / / source / common / network : application_protocol_lib " , <nl> " / / source / common / network : transport_socket_options_lib " , <nl> " / / source / common / stream_info : stream_info_lib " , <nl> + " / / source / common / stream_info : uint32_accessor_lib " , <nl> " / / source / common / tracing : http_tracer_lib " , <nl> " / / source / common / upstream : load_balancer_lib " , <nl> " @ envoy_api / / envoy / extensions / filters / http / router / v3alpha : pkg_cc_proto " , <nl> mmm a / source / common / router / config_impl . cc <nl> ppp b / source / common / router / config_impl . cc <nl> RouteEntryImplBase : : RouteEntryImplBase ( const VirtualHostImpl & vhost , <nl> route . hidden_envoy_deprecated_per_filter_config ( ) , factory_context , <nl> validator ) , <nl> route_name_ ( route . name ( ) ) , time_source_ ( factory_context . dispatcher ( ) . timeSource ( ) ) , <nl> - internal_redirect_action_ ( convertInternalRedirectAction ( route . route ( ) ) ) { <nl> + internal_redirect_action_ ( convertInternalRedirectAction ( route . route ( ) ) ) , <nl> + max_internal_redirects_ ( <nl> + PROTOBUF_GET_WRAPPED_OR_DEFAULT ( route . route ( ) , max_internal_redirects , 1 ) ) { <nl> if ( route . route ( ) . has_metadata_match ( ) ) { <nl> const auto filter_it = route . route ( ) . metadata_match ( ) . filter_metadata ( ) . find ( <nl> Envoy : : Config : : MetadataFilters : : get ( ) . ENVOY_LB ) ; <nl> mmm a / source / common / router / config_impl . h <nl> ppp b / source / common / router / config_impl . h <nl> class RouteEntryImplBase : public RouteEntry , <nl> InternalRedirectAction internalRedirectAction ( ) const override { <nl> return internal_redirect_action_ ; <nl> } <nl> + uint32_t maxInternalRedirects ( ) const override { return max_internal_redirects_ ; } <nl> <nl> / / Router : : DirectResponseEntry <nl> std : : string newPath ( const Http : : HeaderMap & headers ) const override ; <nl> class RouteEntryImplBase : public RouteEntry , <nl> InternalRedirectAction internalRedirectAction ( ) const override { <nl> return parent_ - > internalRedirectAction ( ) ; <nl> } <nl> + uint32_t maxInternalRedirects ( ) const override { return parent_ - > maxInternalRedirects ( ) ; } <nl> <nl> / / Router : : Route <nl> const DirectResponseEntry * directResponseEntry ( ) const override { return nullptr ; } <nl> class RouteEntryImplBase : public RouteEntry , <nl> const std : : string route_name_ ; <nl> TimeSource & time_source_ ; <nl> InternalRedirectAction internal_redirect_action_ ; <nl> + uint32_t max_internal_redirects_ { 1 } ; <nl> } ; <nl> <nl> / * * <nl> mmm a / source / common / router / router . cc <nl> ppp b / source / common / router / router . cc <nl> <nl> # include " common / router / debug_config . h " <nl> # include " common / router / retry_state_impl . h " <nl> # include " common / runtime / runtime_impl . h " <nl> + # include " common / stream_info / uint32_accessor_impl . 
h " <nl> # include " common / tracing / http_tracer_impl . h " <nl> <nl> # include " extensions / filters / http / well_known_names . h " <nl> <nl> namespace Envoy { <nl> namespace Router { <nl> namespace { <nl> + constexpr char NumInternalRedirectsFilterStateName [ ] = " num_internal_redirects " ; <nl> + <nl> uint32_t getLength ( const Buffer : : Instance * instance ) { return instance ? instance - > length ( ) : 0 ; } <nl> <nl> bool schemeIsHttp ( const Http : : HeaderMap & downstream_headers , <nl> bool schemeIsHttp ( const Http : : HeaderMap & downstream_headers , <nl> } <nl> <nl> bool convertRequestHeadersForInternalRedirect ( Http : : HeaderMap & downstream_headers , <nl> + StreamInfo : : FilterState & filter_state , <nl> + uint32_t max_internal_redirects , <nl> const Http : : HeaderEntry & internal_redirect , <nl> const Network : : Connection & connection ) { <nl> - / / Envoy does not currently support multiple rounds of redirects . <nl> - if ( downstream_headers . EnvoyOriginalUrl ( ) ) { <nl> - return false ; <nl> - } <nl> / / Make sure the redirect response contains a URL to redirect to . <nl> if ( internal_redirect . value ( ) . getStringView ( ) . length ( ) = = 0 ) { <nl> return false ; <nl> bool convertRequestHeadersForInternalRedirect ( Http : : HeaderMap & downstream_header <nl> return false ; <nl> } <nl> <nl> + / / Don ' t allow serving TLS responses over plaintext . <nl> bool scheme_is_http = schemeIsHttp ( downstream_headers , connection ) ; <nl> if ( scheme_is_http & & absolute_url . scheme ( ) = = Http : : Headers : : get ( ) . SchemeValues . Https ) { <nl> - / / Don ' t allow serving TLS responses over plaintext . <nl> return false ; <nl> } <nl> <nl> + / / Make sure that performing the redirect won ' t result in exceeding the configured number of <nl> + / / redirects allowed for this route . <nl> + if ( ! filter_state . hasData < StreamInfo : : UInt32Accessor > ( NumInternalRedirectsFilterStateName ) ) { <nl> + filter_state . setData ( NumInternalRedirectsFilterStateName , <nl> + std : : make_shared < StreamInfo : : UInt32AccessorImpl > ( 0 ) , <nl> + StreamInfo : : FilterState : : StateType : : Mutable , <nl> + StreamInfo : : FilterState : : LifeSpan : : DownstreamRequest ) ; <nl> + } <nl> + StreamInfo : : UInt32Accessor & num_internal_redirect = <nl> + filter_state . getDataMutable < StreamInfo : : UInt32Accessor > ( NumInternalRedirectsFilterStateName ) ; <nl> + <nl> + if ( num_internal_redirect . value ( ) > = max_internal_redirects ) { <nl> + return false ; <nl> + } <nl> + num_internal_redirect . increment ( ) ; <nl> + <nl> / / Preserve the original request URL for the second pass . <nl> downstream_headers . setEnvoyOriginalUrl ( <nl> absl : : StrCat ( scheme_is_http ? Http : : Headers : : get ( ) . SchemeValues . Http <nl> bool Filter : : setupRedirect ( const Http : : HeaderMap & headers , UpstreamRequest & upst <nl> attempting_internal_redirect_with_complete_stream_ = <nl> upstream_request . upstream_timing_ . last_upstream_rx_byte_received_ & & downstream_end_stream_ ; <nl> <nl> + StreamInfo : : FilterState & filter_state = callbacks_ - > streamInfo ( ) . filterState ( ) ; <nl> + <nl> / / As with setupRetry , redirects are not supported for streaming requests yet . <nl> if ( downstream_end_stream_ & & <nl> ! callbacks_ - > decodingBuffer ( ) & & / / Redirects with body not yet supported . <nl> location ! 
= nullptr & & <nl> - convertRequestHeadersForInternalRedirect ( * downstream_headers_ , * location , <nl> + convertRequestHeadersForInternalRedirect ( * downstream_headers_ , filter_state , <nl> + route_entry_ - > maxInternalRedirects ( ) , * location , <nl> * callbacks_ - > connection ( ) ) & & <nl> callbacks_ - > recreateStream ( ) ) { <nl> cluster_ - > stats ( ) . upstream_internal_redirect_succeeded_total_ . inc ( ) ; <nl> mmm a / source / common / stream_info / BUILD <nl> ppp b / source / common / stream_info / BUILD <nl> envoy_cc_library ( <nl> " / / include / envoy / stream_info : stream_info_interface " , <nl> ] , <nl> ) <nl> + <nl> + envoy_cc_library ( <nl> + name = " uint32_accessor_lib " , <nl> + hdrs = [ " uint32_accessor_impl . h " ] , <nl> + deps = [ <nl> + " / / include / envoy / stream_info : uint32_accessor_interface " , <nl> + ] , <nl> + ) <nl> new file mode 100644 <nl> index 00000000000 . . 8254ece897a <nl> mmm / dev / null <nl> ppp b / source / common / stream_info / uint32_accessor_impl . h <nl> <nl> + # pragma once <nl> + <nl> + # include " envoy / stream_info / uint32_accessor . h " <nl> + <nl> + namespace Envoy { <nl> + namespace StreamInfo { <nl> + <nl> + / * <nl> + * A FilterState object that tracks a single uint32_t value . <nl> + * / <nl> + class UInt32AccessorImpl : public UInt32Accessor { <nl> + public : <nl> + UInt32AccessorImpl ( uint32_t value ) : value_ ( value ) { } <nl> + <nl> + / / From FilterState : : Object <nl> + ProtobufTypes : : MessagePtr serializeAsProto ( ) const override { <nl> + auto message = std : : make_unique < ProtobufWkt : : UInt32Value > ( ) ; <nl> + message - > set_value ( value_ ) ; <nl> + return message ; <nl> + } <nl> + <nl> + / / From UInt32Accessor . <nl> + void increment ( ) override { value_ + + ; } <nl> + uint32_t value ( ) const override { return value_ ; } <nl> + <nl> + private : <nl> + uint32_t value_ ; <nl> + } ; <nl> + <nl> + } / / namespace StreamInfo <nl> + } / / namespace Envoy <nl> mmm a / test / common / router / BUILD <nl> ppp b / test / common / router / BUILD <nl> envoy_cc_test ( <nl> " / / source / common / network : application_protocol_lib " , <nl> " / / source / common / network : utility_lib " , <nl> " / / source / common / router : router_lib " , <nl> + " / / source / common / stream_info : uint32_accessor_lib " , <nl> " / / source / common / upstream : upstream_includes " , <nl> " / / source / common / upstream : upstream_lib " , <nl> " / / test / common / http : common_lib " , <nl> mmm a / test / common / router / config_impl_test . cc <nl> ppp b / test / common / router / config_impl_test . cc <nl> TEST_F ( RouteMatcherTest , ClusterHeader ) { <nl> route - > routeEntry ( ) - > grpcTimeoutOffset ( ) ; <nl> route - > routeEntry ( ) - > upgradeMap ( ) ; <nl> route - > routeEntry ( ) - > internalRedirectAction ( ) ; <nl> + route - > routeEntry ( ) - > maxInternalRedirects ( ) ; <nl> } <nl> } <nl> <nl> mmm a / test / common / router / router_test . cc <nl> ppp b / test / common / router / router_test . cc <nl> <nl> # include " common / router / config_impl . h " <nl> # include " common / router / debug_config . h " <nl> # include " common / router / router . h " <nl> + # include " common / stream_info / uint32_accessor_impl . h " <nl> # include " common / tracing / http_tracer_impl . h " <nl> # include " common / upstream / upstream_impl . h " <nl> <nl> class RouterTestBase : public testing : : Test { <nl> ON_CALL ( callbacks_ . route_ - > route_entry_ , internalRedirectAction ( ) ) <nl> . 
WillByDefault ( Return ( InternalRedirectAction : : Handle ) ) ; <nl> ON_CALL ( callbacks_ , connection ( ) ) . WillByDefault ( Return ( & connection_ ) ) ; <nl> + setMaxInternalRedirects ( 1 ) ; <nl> + } <nl> + <nl> + void setMaxInternalRedirects ( uint32_t max_internal_redirects ) { <nl> + ON_CALL ( callbacks_ . route_ - > route_entry_ , maxInternalRedirects ( ) ) <nl> + . WillByDefault ( Return ( max_internal_redirects ) ) ; <nl> + } <nl> + <nl> + void setNumPreviousRedirect ( uint32_t num_previous_redirects ) { <nl> + callbacks_ . streamInfo ( ) . filterState ( ) . setData ( <nl> + " num_internal_redirects " , <nl> + std : : make_shared < StreamInfo : : UInt32AccessorImpl > ( num_previous_redirects ) , <nl> + StreamInfo : : FilterState : : StateType : : Mutable , <nl> + StreamInfo : : FilterState : : LifeSpan : : DownstreamRequest ) ; <nl> } <nl> <nl> void enableHedgeOnPerTryTimeout ( ) { <nl> TEST_F ( RouterTest , RetryRespectsRetryHostPredicate ) { <nl> EXPECT_TRUE ( verifyHostUpstreamStats ( 1 , 1 ) ) ; <nl> } <nl> <nl> - TEST_F ( RouterTest , InternalRedirectRejectedOnSecondPass ) { <nl> + TEST_F ( RouterTest , InternalRedirectRejectedWhenReachingMaxInternalRedirect ) { <nl> enableRedirects ( ) ; <nl> - default_request_headers_ . setEnvoyOriginalUrl ( " http : / / www . foo . com " ) ; <nl> + setMaxInternalRedirects ( 3 ) ; <nl> + setNumPreviousRedirect ( 3 ) ; <nl> sendRequest ( ) ; <nl> <nl> response_decoder_ - > decodeHeaders ( std : : move ( redirect_headers_ ) , false ) ; <nl> TEST_F ( RouterTest , InternalRedirectRejectedWithCrossSchemeRedirect ) { <nl> <nl> TEST_F ( RouterTest , HttpInternalRedirectSucceeded ) { <nl> enableRedirects ( ) ; <nl> + setMaxInternalRedirects ( 3 ) ; <nl> + setNumPreviousRedirect ( 2 ) ; <nl> default_request_headers_ . setForwardedProto ( " http " ) ; <nl> sendRequest ( ) ; <nl> <nl> TEST_F ( RouterTest , HttpInternalRedirectSucceeded ) { <nl> <nl> / / In production , the HCM recreateStream would have called this . <nl> router_ . onDestroy ( ) ; <nl> + EXPECT_EQ ( 3 , callbacks_ . streamInfo ( ) <nl> + . filterState ( ) <nl> + . getDataMutable < StreamInfo : : UInt32Accessor > ( " num_internal_redirects " ) <nl> + . value ( ) ) ; <nl> } <nl> <nl> TEST_F ( RouterTest , HttpsInternalRedirectSucceeded ) { <nl> auto ssl_connection = std : : make_shared < Ssl : : MockConnectionInfo > ( ) ; <nl> enableRedirects ( ) ; <nl> + setMaxInternalRedirects ( 3 ) ; <nl> + setNumPreviousRedirect ( 1 ) ; <nl> <nl> sendRequest ( ) ; <nl> <nl> mmm a / test / common / stream_info / BUILD <nl> ppp b / test / common / stream_info / BUILD <nl> envoy_cc_test ( <nl> " / / test / mocks / stream_info : stream_info_mocks " , <nl> ] , <nl> ) <nl> + <nl> + envoy_cc_test ( <nl> + name = " uint32_accessor_impl_test " , <nl> + srcs = [ " uint32_accessor_impl_test . cc " ] , <nl> + deps = [ <nl> + " / / source / common / stream_info : uint32_accessor_lib " , <nl> + ] , <nl> + ) <nl> new file mode 100644 <nl> index 00000000000 . . f9979892dbd <nl> mmm / dev / null <nl> ppp b / test / common / stream_info / uint32_accessor_impl_test . cc <nl> <nl> + # include " common / stream_info / uint32_accessor_impl . h " <nl> + <nl> + # include " gtest / gtest . h " <nl> + <nl> + namespace Envoy { <nl> + namespace StreamInfo { <nl> + namespace { <nl> + <nl> + TEST ( UInt32AccessorImplTest , ConstructorInitsValue ) { <nl> + uint32_t init_value = 0xdeadbeef ; <nl> + UInt32AccessorImpl accessor ( init_value ) ; <nl> + EXPECT_EQ ( init_value , accessor . 
value ( ) ) ; <nl> + } <nl> + <nl> + TEST ( UInt32AccessorImplTest , IncrementValue ) { <nl> + uint32_t init_value = 0xdeadbeef ; <nl> + UInt32AccessorImpl accessor ( init_value ) ; <nl> + accessor . increment ( ) ; <nl> + EXPECT_EQ ( 0xdeadbef0 , accessor . value ( ) ) ; <nl> + } <nl> + <nl> + } / / namespace <nl> + } / / namespace StreamInfo <nl> + } / / namespace Envoy <nl> mmm a / test / integration / redirect_integration_test . cc <nl> ppp b / test / integration / redirect_integration_test . cc <nl> <nl> <nl> namespace Envoy { <nl> <nl> + namespace { <nl> + constexpr char HandleThreeHopLocationFormat [ ] = <nl> + " http : / / handle . internal . redirect . max . three . hop / path { } " ; <nl> + } <nl> + <nl> class RedirectIntegrationTest : public HttpProtocolIntegrationTest { <nl> public : <nl> void initialize ( ) override { <nl> class RedirectIntegrationTest : public HttpProtocolIntegrationTest { <nl> envoy : : config : : route : : v3alpha : : RouteAction : : HANDLE_INTERNAL_REDIRECT ) ; <nl> config_helper_ . addVirtualHost ( handle ) ; <nl> <nl> + auto handle_max_3_hop = <nl> + config_helper_ . createVirtualHost ( " handle . internal . redirect . max . three . hop " ) ; <nl> + handle_max_3_hop . mutable_routes ( 0 ) - > mutable_route ( ) - > set_internal_redirect_action ( <nl> + envoy : : config : : route : : v3alpha : : RouteAction : : HANDLE_INTERNAL_REDIRECT ) ; <nl> + handle_max_3_hop . mutable_routes ( 0 ) <nl> + - > mutable_route ( ) <nl> + - > mutable_max_internal_redirects ( ) <nl> + - > set_value ( 3 ) ; <nl> + config_helper_ . addVirtualHost ( handle_max_3_hop ) ; <nl> + <nl> HttpProtocolIntegrationTest : : initialize ( ) ; <nl> } <nl> <nl> protected : <nl> + / / Returns the next stream that the fake upstream receives . <nl> + FakeStreamPtr waitForNextStream ( ) { <nl> + FakeStreamPtr new_stream = nullptr ; <nl> + auto wait_new_stream_fn = [ this , <nl> + & new_stream ] ( FakeHttpConnectionPtr & connection ) - > AssertionResult { <nl> + AssertionResult result = connection - > waitForNewStream ( * dispatcher_ , new_stream , false , <nl> + std : : chrono : : milliseconds ( 50 ) ) ; <nl> + if ( result ) { <nl> + ASSERT ( new_stream ) ; <nl> + } <nl> + return result ; <nl> + } ; <nl> + <nl> + / / Using a while loop to poll for new connections and new streams on all <nl> + / / connections because connection reuse may or may not be triggered . <nl> + while ( new_stream = = nullptr ) { <nl> + FakeHttpConnectionPtr new_connection = nullptr ; <nl> + <nl> + AssertionResult result = fake_upstreams_ [ 0 ] - > waitForHttpConnection ( <nl> + * dispatcher_ , new_connection , std : : chrono : : milliseconds ( 50 ) , 60 , 100 ) ; <nl> + if ( result ) { <nl> + ASSERT ( new_connection ) ; <nl> + upstream_connections_ . push_back ( std : : move ( new_connection ) ) ; <nl> + } <nl> + <nl> + for ( auto & connection : upstream_connections_ ) { <nl> + result = wait_new_stream_fn ( connection ) ; <nl> + if ( result ) { <nl> + break ; <nl> + } <nl> + } <nl> + } <nl> + <nl> + AssertionResult result = new_stream - > waitForEndStream ( * dispatcher_ ) ; <nl> + ASSERT ( result ) ; <nl> + return new_stream ; <nl> + } <nl> + <nl> Http : : TestHeaderMapImpl redirect_response_ { <nl> { " : status " , " 302 " } , { " content - length " , " 0 " } , { " location " , " http : / / authority2 / new / url " } } ; <nl> + <nl> + std : : vector < FakeHttpConnectionPtr > upstream_connections_ ; <nl> } ; <nl> <nl> / / By default if internal redirects are not configured , redirects are proxied . 
<nl> TEST_P ( RedirectIntegrationTest , BasicInternalRedirect ) { <nl> - > value ( ) ) ; <nl> } <nl> <nl> + TEST_P ( RedirectIntegrationTest , InternalRedirectWithThreeHopLimit ) { <nl> + / / Validate that header sanitization is only called once . <nl> + config_helper_ . addConfigModifier ( <nl> + [ ] ( envoy : : extensions : : filters : : network : : http_connection_manager : : v3alpha : : <nl> + HttpConnectionManager & hcm ) { hcm . set_via ( " via_value " ) ; } ) ; <nl> + initialize ( ) ; <nl> + fake_upstreams_ [ 0 ] - > set_allow_unexpected_disconnects ( true ) ; <nl> + <nl> + codec_client_ = makeHttpConnection ( lookupPort ( " http " ) ) ; <nl> + <nl> + default_request_headers_ . setHost ( " handle . internal . redirect . max . three . hop " ) ; <nl> + default_request_headers_ . setPath ( " / path0 " ) ; <nl> + IntegrationStreamDecoderPtr response = <nl> + codec_client_ - > makeHeaderOnlyRequest ( default_request_headers_ ) ; <nl> + <nl> + std : : vector < FakeStreamPtr > upstream_requests ; <nl> + / / Four requests to upstream : 1 original request + 3 following redirect <nl> + for ( int i = 0 ; i < 4 ; i + + ) { <nl> + upstream_requests . push_back ( waitForNextStream ( ) ) ; <nl> + <nl> + EXPECT_EQ ( fmt : : format ( " / path { } " , i ) , <nl> + upstream_requests . back ( ) - > headers ( ) . Path ( ) - > value ( ) . getStringView ( ) ) ; <nl> + EXPECT_EQ ( " handle . internal . redirect . max . three . hop " , <nl> + upstream_requests . back ( ) - > headers ( ) . Host ( ) - > value ( ) . getStringView ( ) ) ; <nl> + EXPECT_EQ ( " via_value " , upstream_requests . back ( ) - > headers ( ) . Via ( ) - > value ( ) . getStringView ( ) ) ; <nl> + <nl> + auto next_location = fmt : : format ( HandleThreeHopLocationFormat , i + 1 ) ; <nl> + redirect_response_ . setLocation ( next_location ) ; <nl> + upstream_requests . back ( ) - > encodeHeaders ( redirect_response_ , true ) ; <nl> + } <nl> + <nl> + response - > waitForEndStream ( ) ; <nl> + ASSERT_TRUE ( response - > complete ( ) ) ; <nl> + EXPECT_EQ ( " 302 " , response - > headers ( ) . Status ( ) - > value ( ) . getStringView ( ) ) ; <nl> + EXPECT_EQ ( <nl> + 1 , <nl> + test_server_ - > counter ( " cluster . cluster_0 . upstream_internal_redirect_failed_total " ) - > value ( ) ) ; <nl> + } <nl> + <nl> TEST_P ( RedirectIntegrationTest , InternalRedirectToDestinationWithBody ) { <nl> / / Validate that header sanitization is only called once . <nl> config_helper_ . addConfigModifier ( <nl> mmm a / test / mocks / router / mocks . h <nl> ppp b / test / mocks / router / mocks . h <nl> class MockRouteEntry : public RouteEntry { <nl> MOCK_CONST_METHOD0 ( includeAttemptCount , bool ( ) ) ; <nl> MOCK_CONST_METHOD0 ( upgradeMap , const UpgradeMap & ( ) ) ; <nl> MOCK_CONST_METHOD0 ( internalRedirectAction , InternalRedirectAction ( ) ) ; <nl> + MOCK_CONST_METHOD0 ( maxInternalRedirects , uint32_t ( ) ) ; <nl> MOCK_CONST_METHOD0 ( routeName , const std : : string & ( ) ) ; <nl> <nl> std : : string cluster_name_ { " fake_cluster " } ; <nl> | router : Add support for per - route configurable maximum number of internal redirect hops ( ) | envoyproxy/envoy | 1f208984f1eb6253c3f2ee49139831f6766b673e | 2020-01-13T15:32:35Z |
mmm a / tensorflow / core / api_def / base_api / api_def_Exp . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_Exp . pbtxt <nl> <nl> op { <nl> graph_op_name : " Exp " <nl> - summary : " Computes exponential of x element - wise . \ \ \ \ ( y = e ^ x \ \ \ \ ) . " <nl> + summary : " Computes exponential of x element - wise . \ \ ( y = e ^ x \ \ ) . " <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_GatherNd . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_GatherNd . pbtxt <nl> END <nl> ( K - 1 ) - dimensional tensor of indices into ` params ` , where each element defines a <nl> slice of ` params ` : <nl> <nl> - output [ i_0 , . . . , i_ { K - 2 } ] = params [ indices [ i0 , . . . , i_ { K - 2 } ] ] <nl> + output [ \ \ ( i_0 , . . . , i_ { K - 2 } \ \ ) ] = params [ indices [ \ \ ( i_0 , . . . , i_ { K - 2 } \ \ ) ] ] <nl> <nl> Whereas in @ { tf . gather } ` indices ` defines slices into the first <nl> dimension of ` params ` , in ` tf . gather_nd ` , ` indices ` defines slices into the <nl> mmm a / tensorflow / core / api_def / base_api / api_def_MatrixExponential . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_MatrixExponential . pbtxt <nl> END <nl> } <nl> summary : " Computes the matrix exponential of one or more square matrices : " <nl> description : < < END <nl> - exp ( A ) = \ sum_ { n = 0 } ^ \ infty A ^ n / n ! <nl> + \ \ ( exp ( A ) = \ sum_ { n = 0 } ^ \ infty A ^ n / n ! \ \ ) <nl> <nl> The exponential is computed using a combination of the scaling and squaring <nl> method and the Pade approximation . Details can be founds in : <nl> mmm a / tensorflow / core / api_def / base_api / api_def_MatrixLogarithm . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_MatrixLogarithm . pbtxt <nl> END <nl> summary : " Computes the matrix logarithm of one or more square matrices : " <nl> description : < < END <nl> <nl> - log ( exp ( A ) ) = A <nl> + \ \ ( log ( exp ( A ) ) = A \ \ ) <nl> <nl> This op is only defined for complex matrices . If A is positive - definite and <nl> real , then casting to a complex matrix , taking the logarithm and casting back <nl> mmm a / tensorflow / core / api_def / base_api / api_def_Polygamma . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_Polygamma . pbtxt <nl> <nl> op { <nl> graph_op_name : " Polygamma " <nl> - summary : " Compute the polygamma function \ \ \ \ ( \ \ psi ^ { ( n ) } ( x ) \ \ \ \ ) . " <nl> + summary : " Compute the polygamma function \ \ ( \ psi ^ { ( n ) } ( x ) \ \ ) . " <nl> description : < < END <nl> The polygamma function is defined as : <nl> <nl> mmm a / tensorflow / core / api_def / base_api / api_def_ReduceJoin . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_ReduceJoin . pbtxt <nl> END <nl> summary : " Joins a string Tensor across the given dimensions . " <nl> description : < < END <nl> Computes the string join across dimensions in the given string Tensor of shape <nl> - ` [ d_0 , d_1 , . . . , d_n - 1 ] ` . Returns a new Tensor created by joining the input <nl> + ` [ \ \ ( d_0 , d_1 , . . . , d_ { n - 1 } \ \ ) ] ` . Returns a new Tensor created by joining the input <nl> strings with the given separator ( default : empty string ) . Negative indices are <nl> counted backwards from the end , with ` - 1 ` being equivalent to ` n - 1 ` . <nl> <nl> mmm a / tensorflow / core / api_def / base_api / api_def_ScatterNdAdd . 
pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_ScatterNdAdd . pbtxt <nl> within a given variable according to ` indices ` . <nl> ` ref ` is a ` Tensor ` with rank ` P ` and ` indices ` is a ` Tensor ` of rank ` Q ` . <nl> <nl> ` indices ` must be integer tensor , containing indices into ` ref ` . <nl> - It must be shape ` [ d_0 , . . . , d_ { Q - 2 } , K ] ` where ` 0 < K < = P ` . <nl> + It must be shape ` \ \ ( [ d_0 , . . . , d_ { Q - 2 } , K ] \ \ ) ` where ` 0 < K < = P ` . <nl> <nl> The innermost dimension of ` indices ` ( with length ` K ` ) corresponds to <nl> indices into elements ( if ` K = P ` ) or slices ( if ` K < P ` ) along the ` K ` th <nl> dimension of ` ref ` . <nl> ` updates ` is ` Tensor ` of rank ` Q - 1 + P - K ` with shape : <nl> <nl> ` ` ` <nl> - [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . <nl> + $ $ [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . $ $ <nl> ` ` ` <nl> <nl> For example , say we want to add 4 scattered elements to a rank - 1 tensor to 8 <nl> mmm a / tensorflow / core / api_def / base_api / api_def_ScatterNdNonAliasingAdd . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_ScatterNdNonAliasingAdd . pbtxt <nl> respect to both ` input ` and ` updates ` . <nl> ` input ` is a ` Tensor ` with rank ` P ` and ` indices ` is a ` Tensor ` of rank ` Q ` . <nl> <nl> ` indices ` must be integer tensor , containing indices into ` input ` . <nl> - It must be shape ` [ d_0 , . . . , d_ { Q - 2 } , K ] ` where ` 0 < K < = P ` . <nl> + It must be shape ` \ \ ( [ d_0 , . . . , d_ { Q - 2 } , K ] \ \ ) ` where ` 0 < K < = P ` . <nl> <nl> The innermost dimension of ` indices ` ( with length ` K ` ) corresponds to <nl> indices into elements ( if ` K = P ` ) or ` ( P - K ) ` - dimensional slices <nl> indices into elements ( if ` K = P ` ) or ` ( P - K ) ` - dimensional slices <nl> ` updates ` is ` Tensor ` of rank ` Q - 1 + P - K ` with shape : <nl> <nl> ` ` ` <nl> - [ d_0 , . . . , d_ { Q - 2 } , input . shape [ K ] , . . . , input . shape [ P - 1 ] ] . <nl> + $ $ [ d_0 , . . . , d_ { Q - 2 } , input . shape [ K ] , . . . , input . shape [ P - 1 ] ] . $ $ <nl> ` ` ` <nl> <nl> For example , say we want to add 4 scattered elements to a rank - 1 tensor to 8 <nl> mmm a / tensorflow / core / api_def / base_api / api_def_ScatterNdSub . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_ScatterNdSub . pbtxt <nl> within a given variable according to ` indices ` . <nl> ` ref ` is a ` Tensor ` with rank ` P ` and ` indices ` is a ` Tensor ` of rank ` Q ` . <nl> <nl> ` indices ` must be integer tensor , containing indices into ` ref ` . <nl> - It must be shape ` [ d_0 , . . . , d_ { Q - 2 } , K ] ` where ` 0 < K < = P ` . <nl> + It must be shape ` \ \ ( [ d_0 , . . . , d_ { Q - 2 } , K ] \ \ ) ` where ` 0 < K < = P ` . <nl> <nl> The innermost dimension of ` indices ` ( with length ` K ` ) corresponds to <nl> indices into elements ( if ` K = P ` ) or slices ( if ` K < P ` ) along the ` K ` th <nl> dimension of ` ref ` . <nl> ` updates ` is ` Tensor ` of rank ` Q - 1 + P - K ` with shape : <nl> <nl> ` ` ` <nl> - [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . <nl> + $ $ [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . 
$ $ <nl> ` ` ` <nl> <nl> For example , say we want to subtract 4 scattered elements from a rank - 1 tensor <nl> mmm a / tensorflow / core / api_def / base_api / api_def_ScatterNdUpdate . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_ScatterNdUpdate . pbtxt <nl> variable according to ` indices ` . <nl> ` ref ` is a ` Tensor ` with rank ` P ` and ` indices ` is a ` Tensor ` of rank ` Q ` . <nl> <nl> ` indices ` must be integer tensor , containing indices into ` ref ` . <nl> - It must be shape ` [ d_0 , . . . , d_ { Q - 2 } , K ] ` where ` 0 < K < = P ` . <nl> + It must be shape ` \ \ ( [ d_0 , . . . , d_ { Q - 2 } , K ] \ \ ) ` where ` 0 < K < = P ` . <nl> <nl> The innermost dimension of ` indices ` ( with length ` K ` ) corresponds to <nl> indices into elements ( if ` K = P ` ) or slices ( if ` K < P ` ) along the ` K ` th <nl> dimension of ` ref ` . <nl> ` updates ` is ` Tensor ` of rank ` Q - 1 + P - K ` with shape : <nl> <nl> ` ` ` <nl> - [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . <nl> + $ $ [ d_0 , . . . , d_ { Q - 2 } , ref . shape [ K ] , . . . , ref . shape [ P - 1 ] ] . $ $ <nl> ` ` ` <nl> <nl> For example , say we want to update 4 scattered elements to a rank - 1 tensor to <nl> mmm a / tensorflow / core / api_def / base_api / api_def_Softmax . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_Softmax . pbtxt <nl> END <nl> description : < < END <nl> For each batch ` i ` and class ` j ` we have <nl> <nl> - softmax [ i , j ] = exp ( logits [ i , j ] ) / sum_j ( exp ( logits [ i , j ] ) ) <nl> + $ $ softmax [ i , j ] = exp ( logits [ i , j ] ) / sum_j ( exp ( logits [ i , j ] ) ) $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyAdagrad . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyAdagrad . pbtxt <nl> END <nl> summary : " Update relevant entries in \ ' * var \ ' and \ ' * accum \ ' according to the adagrad scheme . " <nl> description : < < END <nl> That is for rows we have grad for , we update var and accum as follows : <nl> - accum + = grad * grad <nl> - var - = lr * grad * ( 1 / sqrt ( accum ) ) <nl> + $ $ accum + = grad * grad $ $ <nl> + $ $ var - = lr * grad * ( 1 / sqrt ( accum ) ) $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyCenteredRMSProp . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyCenteredRMSProp . pbtxt <nl> mean_square = decay * mean_square + ( 1 - decay ) * gradient * * 2 <nl> mean_grad = decay * mean_grad + ( 1 - decay ) * gradient <nl> Delta = learning_rate * gradient / sqrt ( mean_square + epsilon - mean_grad * * 2 ) <nl> <nl> - ms < - rho * ms_ { t - 1 } + ( 1 - rho ) * grad * grad <nl> - mom < - momentum * mom_ { t - 1 } + lr * grad / sqrt ( ms + epsilon ) <nl> - var < - var - mom <nl> + $ $ ms < - rho * ms_ { t - 1 } + ( 1 - rho ) * grad * grad $ $ <nl> + $ $ mom < - momentum * mom_ { t - 1 } + lr * grad / sqrt ( ms + epsilon ) $ $ <nl> + $ $ var < - var - mom $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyFtrl . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyFtrl . pbtxt <nl> END <nl> summary : " Update relevant entries in \ ' * var \ ' according to the Ftrl - proximal scheme . 
" <nl> description : < < END <nl> That is for rows we have grad for , we update var , accum and linear as follows : <nl> - accum_new = accum + grad * grad <nl> - linear + = grad + ( accum_new ^ ( - lr_power ) - accum ^ ( - lr_power ) ) / lr * var <nl> - quadratic = 1 . 0 / ( accum_new ^ ( lr_power ) * lr ) + 2 * l2 <nl> - var = ( sign ( linear ) * l1 - linear ) / quadratic if | linear | > l1 else 0 . 0 <nl> - accum = accum_new <nl> + $ $ accum_new = accum + grad * grad $ $ <nl> + $ $ linear + = grad + ( accum_ { new } ^ { - lr_ { power } } - accum ^ { - lr_ { power } } / lr * var $ $ <nl> + $ $ quadratic = 1 . 0 / ( accum_ { new } ^ { lr_ { power } } * lr ) + 2 * l2 $ $ <nl> + $ $ var = ( sign ( linear ) * l1 - linear ) / quadratic \ if \ | linear | > l1 \ else \ 0 . 0 $ $ <nl> + $ $ accum = accum_ { new } $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyMomentum . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyMomentum . pbtxt <nl> Set use_nesterov = True if you want to use Nesterov momentum . <nl> <nl> That is for rows we have grad for , we update var and accum as follows : <nl> <nl> - accum = accum * momentum + grad <nl> - var - = lr * accum <nl> + $ $ accum = accum * momentum + grad $ $ <nl> + $ $ var - = lr * accum $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyProximalAdagrad . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyProximalAdagrad . pbtxt <nl> END <nl> summary : " Sparse update entries in \ ' * var \ ' and \ ' * accum \ ' according to FOBOS algorithm . " <nl> description : < < END <nl> That is for rows we have grad for , we update var and accum as follows : <nl> - accum + = grad * grad <nl> - prox_v = var <nl> - prox_v - = lr * grad * ( 1 / sqrt ( accum ) ) <nl> - var = sign ( prox_v ) / ( 1 + lr * l2 ) * max { | prox_v | - lr * l1 , 0 } <nl> + $ $ accum + = grad * grad $ $ <nl> + $ $ prox_v = var $ $ <nl> + $ $ prox_v - = lr * grad * ( 1 / sqrt ( accum ) ) $ $ <nl> + $ $ var = sign ( prox_v ) / ( 1 + lr * l2 ) * max { | prox_v | - lr * l1 , 0 } $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyProximalGradientDescent . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyProximalGradientDescent . pbtxt <nl> END <nl> summary : " Sparse update \ ' * var \ ' as FOBOS algorithm with fixed learning rate . " <nl> description : < < END <nl> That is for rows we have grad for , we update var as follows : <nl> - prox_v = var - alpha * grad <nl> - var = sign ( prox_v ) / ( 1 + alpha * l2 ) * max { | prox_v | - alpha * l1 , 0 } <nl> + $ $ prox_v = var - alpha * grad $ $ <nl> + $ $ var = sign ( prox_v ) / ( 1 + alpha * l2 ) * max { | prox_v | - alpha * l1 , 0 } $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_SparseApplyRMSProp . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_SparseApplyRMSProp . pbtxt <nl> and mom will not update in iterations during which the grad is zero . 
<nl> mean_square = decay * mean_square + ( 1 - decay ) * gradient * * 2 <nl> Delta = learning_rate * gradient / sqrt ( mean_square + epsilon ) <nl> <nl> - ms < - rho * ms_ { t - 1 } + ( 1 - rho ) * grad * grad <nl> - mom < - momentum * mom_ { t - 1 } + lr * grad / sqrt ( ms + epsilon ) <nl> - var < - var - mom <nl> + $ $ ms < - rho * ms_ { t - 1 } + ( 1 - rho ) * grad * grad $ $ <nl> + $ $ mom < - momentum * mom_ { t - 1 } + lr * grad / sqrt ( ms + epsilon ) $ $ <nl> + $ $ var < - var - mom $ $ <nl> END <nl> } <nl> mmm a / tensorflow / core / api_def / base_api / api_def_UnsortedSegmentSum . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_UnsortedSegmentSum . pbtxt <nl> Read @ { $ math_ops # Segmentation $ the section on segmentation } for an explanation of <nl> segments . <nl> <nl> Computes a tensor such that <nl> - ` ( output [ i ] = sum_ { j . . . } data [ j . . . ] ` where the sum is over tuples ` j . . . ` such <nl> + ` \ \ ( output [ i ] = sum_ { j . . . } data [ j . . . ] \ \ ) ` where the sum is over tuples ` j . . . ` such <nl> that ` segment_ids [ j . . . ] = = i ` . Unlike ` SegmentSum ` , ` segment_ids ` <nl> need not be sorted and need not cover all values in the full <nl> range of valid values . <nl> mmm a / tensorflow / core / api_def / base_api / api_def_Zeta . pbtxt <nl> ppp b / tensorflow / core / api_def / base_api / api_def_Zeta . pbtxt <nl> <nl> op { <nl> graph_op_name : " Zeta " <nl> - summary : " Compute the Hurwitz zeta function \ \ \ \ ( \ \ zeta ( x , q ) \ \ \ \ ) . " <nl> + summary : " Compute the Hurwitz zeta function \ \ ( \ zeta ( x , q ) \ \ ) . " <nl> description : < < END <nl> The Hurwitz zeta function is defined as : <nl> <nl> | Fix math equation rendering format in api definitions | tensorflow/tensorflow | 67dee0adc09534483ce2627ffee629feb5133ae7 | 2018-04-05T19:26:26Z |
mmm a / hphp / hack / src / parser / core / expression_parser . rs <nl> ppp b / hphp / hack / src / parser / core / expression_parser . rs <nl> where <nl> str_maybe , StringLiteralKind : : LiteralDoubleQuoted ) ; <nl> S ! ( make_prefixed_string_expression , self , qualified_name , str_ ) <nl> } <nl> + | TokenKind : : Backtick = > { <nl> + let prefix = S ! ( make_simple_type_specifier , self , qualified_name ) ; <nl> + let left_backtick = self . require_token ( TokenKind : : Backtick , Errors : : error1065 ) ; <nl> + let expr = self . parse_expression_with_reset_precedence ( ) ; <nl> + let right_backtick = self . require_token ( TokenKind : : Backtick , Errors : : error1065 ) ; <nl> + S ! ( make_prefixed_code_expression , self , prefix , left_backtick , expr , right_backtick ) <nl> + } <nl> | _ = > { <nl> / / Not a prefixed string or an attempt at one <nl> self . parse_name_or_collection_literal_expression ( qualified_name ) <nl> mmm a / hphp / hack / src / parser / core / lexer . rs <nl> ppp b / hphp / hack / src / parser / core / lexer . rs <nl> impl < ' a , Token : LexableToken < ' a > > Lexer < ' a , Token > { <nl> ch if ' 1 ' < = ch & & ch < = ' 9 ' = > self . scan_decimal_or_float ( ) , <nl> ' \ ' ' = > self . scan_single_quote_string_literal ( ) , <nl> ' " ' = > self . scan_double_quote_like_string_literal_from_start ( ) , <nl> + ' ` ' = > { <nl> + self . advance ( 1 ) ; <nl> + TokenKind : : Backtick <nl> + } <nl> ' \ \ ' = > { <nl> self . advance ( 1 ) ; <nl> TokenKind : : Backslash <nl> mmm a / hphp / hack / src / parser / rust_parser_errors . rs <nl> ppp b / hphp / hack / src / parser / rust_parser_errors . rs <nl> enum BinopAllowsAwaitInPositions { <nl> enum UnstableFeatures { <nl> UnionIntersectionTypeHints , <nl> ClassLevelWhere , <nl> + ExpressionTrees , <nl> } <nl> <nl> use BinopAllowsAwaitInPositions : : * ; <nl> where <nl> parser_options . tco_union_intersection_type_hints <nl> } <nl> UnstableFeatures : : ClassLevelWhere = > parser_options . po_enable_class_level_where_clauses , <nl> + _ = > false , <nl> } | | self . env . context . active_unstable_features . contains ( feature ) ; <nl> if ! enabled { <nl> self . errors . push ( Self : : make_error_from_node ( <nl> where <nl> ) , <nl> _ = > ( ) , <nl> } , <nl> + PrefixedCodeExpression ( _ ) = > { <nl> + self . check_can_use_feature ( node , & UnstableFeatures : : ExpressionTrees ) <nl> + } <nl> _ = > ( ) , <nl> } <nl> <nl> mmm a / hphp / hack / src / parser / syntax_error . rs <nl> ppp b / hphp / hack / src / parser / syntax_error . rs <nl> pub fn error1060 ( extension : & str ) - > Error { <nl> pub const error1061 : Error = Cow : : Borrowed ( " A Pocket Universes operator ( ' : @ ' ) is expected here . " ) ; <nl> pub const error1063 : Error = Cow : : Borrowed ( " Expected matching separator here . " ) ; <nl> pub const error1064 : Error = Cow : : Borrowed ( " XHP children declarations are no longer supported . " ) ; <nl> + pub const error1065 : Error = Cow : : Borrowed ( " A backtick ` ` ` is expected here . " ) ; <nl> pub const error2001 : Error = Cow : : Borrowed ( " A type annotation is required in strict mode . " ) ; <nl> pub const error2003 : Error = <nl> Cow : : Borrowed ( " A case statement may only appear directly inside a switch . " ) ; <nl> deleted file mode 100644 <nl> index 7b234c56d07 . . 00000000000 <nl> mmm a / hphp / hack / test / full_fidelity / cases / backtick . php <nl> ppp / dev / null <nl> <nl> - < ? 
hh <nl> - <nl> - function test ( ) : void { <nl> - ` Hello World ` ; <nl> - } <nl> deleted file mode 100644 <nl> index f90349976a9 . . 00000000000 <nl> mmm a / hphp / hack / test / full_fidelity / cases / backtick . php . errors . exp <nl> ppp / dev / null <nl> <nl> - ( 4 , 3 ) - ( 4 , 3 ) This character is invalid . <nl> - ( 4 , 4 ) - ( 4 , 4 ) An expression is expected here . <nl> - ( 4 , 10 ) - ( 4 , 10 ) A semicolon ( ' ; ' ) is expected here . <nl> - ( 4 , 15 ) - ( 4 , 15 ) This character is invalid . <nl> - ( 4 , 15 ) - ( 4 , 16 ) Encountered unexpected token ' ` ' . <nl> new file mode 100644 <nl> index 00000000000 . . 98ab880bd46 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / full_fidelity / cases / test_expression_trees . php <nl> <nl> + < ? hh <nl> + function test ( ) : void { code ` 4 + 10 + $ x ` ; } <nl> new file mode 100644 <nl> index 00000000000 . . dd9cfdd8682 <nl> mmm / dev / null <nl> ppp b / hphp / hack / test / full_fidelity / cases / test_expression_trees . php . errors . exp <nl> @ @ - 0 , 0 + 1 @ @ <nl> + ( 2 , 24 ) - ( 2 , 41 ) Cannot use unstable feature : ' expression_trees ' <nl> similarity index 50 % <nl> rename from hphp / hack / test / full_fidelity / cases / backtick . php . json . exp <nl> rename to hphp / hack / test / full_fidelity / cases / test_expression_trees . php . json . exp <nl> mmm a / hphp / hack / test / full_fidelity / cases / backtick . php . json . exp <nl> ppp b / hphp / hack / test / full_fidelity / cases / test_expression_trees . php . json . exp <nl> <nl> " kind " : " function " , <nl> " text " : " function " , <nl> " offset " : 5 , <nl> - " leading_width " : 1 , <nl> + " leading_width " : 0 , <nl> " width " : 8 , <nl> " trailing_width " : 1 , <nl> - " leading " : [ { " kind " : " end_of_line " , " text " : " \ n " , " offset " : 5 , " width " : 1 } ] , <nl> - " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 14 , " width " : 1 } ] , <nl> - " line_number " : 3 <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 13 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " function_name " : { <nl> <nl> " token " : { <nl> " kind " : " name " , <nl> " text " : " test " , <nl> - " offset " : 15 , <nl> + " offset " : 14 , <nl> " leading_width " : 0 , <nl> " width " : 4 , <nl> " trailing_width " : 0 , <nl> " leading " : [ ] , <nl> " trailing " : [ ] , <nl> - " line_number " : 3 <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " function_type_parameter_list " : { " kind " : " missing " } , <nl> <nl> " token " : { <nl> " kind " : " ( " , <nl> " text " : " ( " , <nl> - " offset " : 19 , <nl> + " offset " : 18 , <nl> " leading_width " : 0 , <nl> " width " : 1 , <nl> " trailing_width " : 0 , <nl> " leading " : [ ] , <nl> " trailing " : [ ] , <nl> - " line_number " : 3 <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " function_parameter_list " : { " kind " : " missing " } , <nl> <nl> " token " : { <nl> " kind " : " ) " , <nl> " text " : " ) " , <nl> - " offset " : 20 , <nl> + " offset " : 19 , <nl> " leading_width " : 0 , <nl> " width " : 1 , <nl> " trailing_width " : 0 , <nl> " leading " : [ ] , <nl> " trailing " : [ ] , <nl> - " line_number " : 3 <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " function_colon " : { <nl> <nl> " token " : { <nl> " kind " : " : " , <nl> " text " : " : " , <nl> - " offset " : 21 , <nl> + " offset " : 20 , <nl> " leading_width " : 0 , <nl> " width " : 1 , <nl> - " trailing_width " : 1 , <nl> + " trailing_width " : 
0 , <nl> " leading " : [ ] , <nl> - " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 22 , " width " : 1 } ] , <nl> - " line_number " : 3 <nl> + " trailing " : [ ] , <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " function_type " : { <nl> <nl> " token " : { <nl> " kind " : " void " , <nl> " text " : " void " , <nl> - " offset " : 23 , <nl> + " offset " : 21 , <nl> " leading_width " : 0 , <nl> " width " : 4 , <nl> " trailing_width " : 1 , <nl> " leading " : [ ] , <nl> - " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 27 , " width " : 1 } ] , <nl> - " line_number " : 3 <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 25 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> } <nl> } <nl> } , <nl> <nl> " token " : { <nl> " kind " : " { " , <nl> " text " : " { " , <nl> - " offset " : 28 , <nl> + " offset " : 26 , <nl> " leading_width " : 0 , <nl> " width " : 1 , <nl> " trailing_width " : 1 , <nl> " leading " : [ ] , <nl> - " trailing " : [ { " kind " : " end_of_line " , " text " : " \ n " , " offset " : 29 , " width " : 1 } ] , <nl> - " line_number " : 3 <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 27 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> } <nl> } , <nl> " compound_statements " : { <nl> <nl> { <nl> " kind " : " expression_statement " , <nl> " expression_statement_expression " : { <nl> - " kind " : " token " , <nl> - " token " : { <nl> - " kind " : " error_token " , <nl> - " text " : " ` " , <nl> - " offset " : 30 , <nl> - " leading_width " : 2 , <nl> - " width " : 1 , <nl> - " trailing_width " : 0 , <nl> - " leading " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 30 , " width " : 2 } ] , <nl> - " trailing " : [ ] , <nl> - " line_number " : 4 <nl> - } <nl> - } , <nl> - " expression_statement_semicolon " : { " kind " : " missing " } <nl> - } , <nl> - { <nl> - " kind " : " expression_statement " , <nl> - " expression_statement_expression " : { <nl> - " kind " : " token " , <nl> - " token " : { <nl> - " kind " : " name " , <nl> - " text " : " Hello " , <nl> - " offset " : 33 , <nl> - " leading_width " : 0 , <nl> - " width " : 5 , <nl> - " trailing_width " : 1 , <nl> - " leading " : [ ] , <nl> - " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 38 , " width " : 1 } ] , <nl> - " line_number " : 4 <nl> - } <nl> - } , <nl> - " expression_statement_semicolon " : { " kind " : " missing " } <nl> - } , <nl> - { <nl> - " kind " : " expression_statement " , <nl> - " expression_statement_expression " : { <nl> - " kind " : " token " , <nl> - " token " : { <nl> - " kind " : " name " , <nl> - " text " : " World " , <nl> - " offset " : 39 , <nl> - " leading_width " : 0 , <nl> - " width " : 5 , <nl> - " trailing_width " : 0 , <nl> - " leading " : [ ] , <nl> - " trailing " : [ ] , <nl> - " line_number " : 4 <nl> + " kind " : " prefixed_code " , <nl> + " prefixed_code_prefix " : { <nl> + " kind " : " simple_type_specifier " , <nl> + " simple_type_specifier " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " name " , <nl> + " text " : " code " , <nl> + " offset " : 28 , <nl> + " leading_width " : 0 , <nl> + " width " : 4 , <nl> + " trailing_width " : 0 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ ] , <nl> + " line_number " : 2 <nl> + } <nl> + } <nl> + } , <nl> + " prefixed_code_left_backtick " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " ` " , <nl> + " text " : " ` 
" , <nl> + " offset " : 32 , <nl> + " leading_width " : 0 , <nl> + " width " : 1 , <nl> + " trailing_width " : 0 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ ] , <nl> + " line_number " : 2 <nl> + } <nl> + } , <nl> + " prefixed_code_expression " : { <nl> + " kind " : " binary_expression " , <nl> + " binary_left_operand " : { <nl> + " kind " : " binary_expression " , <nl> + " binary_left_operand " : { <nl> + " kind " : " literal " , <nl> + " literal_expression " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " decimal_literal " , <nl> + " text " : " 4 " , <nl> + " offset " : 33 , <nl> + " leading_width " : 0 , <nl> + " width " : 1 , <nl> + " trailing_width " : 1 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 34 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> + } <nl> + } <nl> + } , <nl> + " binary_operator " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " + " , <nl> + " text " : " + " , <nl> + " offset " : 35 , <nl> + " leading_width " : 0 , <nl> + " width " : 1 , <nl> + " trailing_width " : 1 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 36 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> + } <nl> + } , <nl> + " binary_right_operand " : { <nl> + " kind " : " literal " , <nl> + " literal_expression " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " decimal_literal " , <nl> + " text " : " 10 " , <nl> + " offset " : 37 , <nl> + " leading_width " : 0 , <nl> + " width " : 2 , <nl> + " trailing_width " : 1 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 39 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> + } <nl> + } <nl> + } <nl> + } , <nl> + " binary_operator " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " + " , <nl> + " text " : " + " , <nl> + " offset " : 40 , <nl> + " leading_width " : 0 , <nl> + " width " : 1 , <nl> + " trailing_width " : 1 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " whitespace " , " text " : " " , " offset " : 41 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> + } <nl> + } , <nl> + " binary_right_operand " : { <nl> + " kind " : " variable " , <nl> + " variable_expression " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " variable " , <nl> + " text " : " $ x " , <nl> + " offset " : 42 , <nl> + " leading_width " : 0 , <nl> + " width " : 2 , <nl> + " trailing_width " : 0 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ ] , <nl> + " line_number " : 2 <nl> + } <nl> + } <nl> + } <nl> + } , <nl> + " prefixed_code_right_backtick " : { <nl> + " kind " : " token " , <nl> + " token " : { <nl> + " kind " : " ` " , <nl> + " text " : " ` " , <nl> + " offset " : 44 , <nl> + " leading_width " : 0 , <nl> + " width " : 1 , <nl> + " trailing_width " : 0 , <nl> + " leading " : [ ] , <nl> + " trailing " : [ ] , <nl> + " line_number " : 2 <nl> + } <nl> } <nl> } , <nl> " expression_statement_semicolon " : { <nl> <nl> " leading_width " : 0 , <nl> " width " : 1 , <nl> " trailing_width " : 1 , <nl> - " leading " : [ { " kind " : " extra_token_error " , " text " : " \ n " , " offset " : 46 , " width " : 1 } ] , <nl> - " trailing " : [ { " kind " : " end_of_line " , " text " : " \ n " , " offset " : 46 , " width " : 1 } ] , <nl> - " line_number " : 4 <nl> + " leading " : [ ] , <nl> + " trailing " : [ { " kind " : " 
whitespace " , " text " : " " , " offset " : 46 , " width " : 1 } ] , <nl> + " line_number " : 2 <nl> } <nl> } <nl> } <nl> <nl> " trailing_width " : 1 , <nl> " leading " : [ ] , <nl> " trailing " : [ { " kind " : " end_of_line " , " text " : " \ n " , " offset " : 48 , " width " : 1 } ] , <nl> - " line_number " : 5 <nl> + " line_number " : 2 <nl> } <nl> } <nl> } <nl> <nl> " trailing_width " : 0 , <nl> " leading " : [ ] , <nl> " trailing " : [ ] , <nl> - " line_number " : 6 <nl> + " line_number " : 3 <nl> } <nl> } <nl> } <nl> ] <nl> } <nl> } , <nl> - " program_text " : " < ? hh \ n \ nfunction test ( ) : void { \ n ` Hello World ` ; \ n } \ n " , <nl> + " program_text " : " < ? hh \ nfunction test ( ) : void { code ` 4 + 10 + $ x ` ; } \ n " , <nl> " version " : " 2020 - 06 - 30 - 0000 " <nl> } <nl> | Parse ExpressionTrees | facebook/hhvm | a6577bffd6ce053374e2b7065fff53bf8f6519ad | 2020-08-19T05:28:51Z |
mmm a / runtime / Stubs . cpp <nl> ppp b / runtime / Stubs . cpp <nl> <nl> # include < mach / mach_time . h > <nl> # include < sys / resource . h > <nl> # include < sys / errno . h > <nl> + # include < pthread . h > <nl> + # include < unistd . h > <nl> # include < cstring > <nl> # include < cstdint > <nl> # include < cstdio > <nl> _swift_initBenchmark ( ) { <nl> r = setvbuf ( stdout , 0 , _IOFBF , 0 ) ; <nl> assert ( r = = 0 ) ; <nl> <nl> + / / XXX - - There doesn ' t seem to be an API to figure out the max value <nl> + struct sched_param pthr_sched_param ; <nl> + pthr_sched_param . sched_priority = 79 ; <nl> + r = pthread_setschedparam ( pthread_self ( ) , SCHED_FIFO , & pthr_sched_param ) ; <nl> + assert ( r = = 0 ) ; <nl> + <nl> eax = 0x80000002 ; <nl> asm ( " cpuid " : " + a " ( eax ) , " = b " ( ebx ) , " = c " ( ecx ) , " = d " ( edx ) ) ; <nl> u . reg [ 0 ] = eax ; <nl> _swift_initBenchmark ( ) { <nl> fprintf ( stderr , " Consider : sudo / usr / local / bin / pstates - D \ n \ n " ) ; <nl> } <nl> <nl> - / / Sigh . . . getpriority ( ) can legitimately return - 1 <nl> - errno = 0 ; <nl> - int pri = getpriority ( PRIO_PROCESS , 0 ) ; <nl> - assert ( errno = = 0 ) ; <nl> - if ( pri > = 0 ) { <nl> + if ( geteuid ( ) ) { <nl> fprintf ( stderr , " WARNING : Non - elevated priority . Results will be less reliable . \ n " ) ; <nl> - fprintf ( stderr , " Consider : sudo nice - n - 15 . / myBench \ n \ n " ) ; <nl> + fprintf ( stderr , " Consider : sudo . / myBench \ n \ n " ) ; <nl> } <nl> <nl> return _swift_startBenchmark ; <nl> | Elevate priority automatically during benchmarking | apple/swift | a570a4956bdd663b16e02cf2602898d4524845e6 | 2012-06-25T14:55:14Z |
mmm a / src / PreferencesDialog . ui <nl> ppp b / src / PreferencesDialog . ui <nl> <nl> < / property > <nl> < widget class = " QWidget " name = " tab_4 " > <nl> < attribute name = " title " > <nl> - < string > General < / string > <nl> + < string > & amp ; General < / string > <nl> < / attribute > <nl> < layout class = " QFormLayout " name = " formLayout_3 " > <nl> < item row = " 1 " column = " 1 " > <nl> <nl> < / layout > <nl> < / widget > <nl> < tabstops > <nl> + < tabstop > locationEdit < / tabstop > <nl> + < tabstop > setLocationButton < / tabstop > <nl> + < tabstop > languageComboBox < / tabstop > <nl> + < tabstop > encodingComboBox < / tabstop > <nl> + < tabstop > foreignKeysCheckBox < / tabstop > <nl> + < tabstop > checkHideSchemaLinebreaks < / tabstop > <nl> + < tabstop > spinPrefetchSize < / tabstop > <nl> < tabstop > treeSyntaxHighlighting < / tabstop > <nl> < tabstop > spinEditorFontSize < / tabstop > <nl> < tabstop > spinLogFontSize < / tabstop > <nl> <nl> < slot > addExtension ( ) < / slot > <nl> < hints > <nl> < hint type = " sourcelabel " > <nl> - < x > 567 < / x > <nl> - < y > 88 < / y > <nl> + < x > 552 < / x > <nl> + < y > 95 < / y > <nl> < / hint > <nl> < hint type = " destinationlabel " > <nl> < x > 245 < / x > <nl> <nl> < slot > removeExtension ( ) < / slot > <nl> < hints > <nl> < hint type = " sourcelabel " > <nl> - < x > 567 < / x > <nl> - < y > 117 < / y > <nl> + < x > 552 < / x > <nl> + < y > 129 < / y > <nl> < / hint > <nl> < hint type = " destinationlabel " > <nl> < x > 245 < / x > <nl> <nl> < / hint > <nl> < / hints > <nl> < / connection > <nl> + < connection > <nl> + < sender > setLocationButton < / sender > <nl> + < signal > clicked ( ) < / signal > <nl> + < receiver > PreferencesDialog < / receiver > <nl> + < slot > chooseLocation ( ) < / slot > <nl> + < hints > <nl> + < hint type = " sourcelabel " > <nl> + < x > 457 < / x > <nl> + < y > 65 < / y > <nl> + < / hint > <nl> + < hint type = " destinationlabel " > <nl> + < x > 294 < / x > <nl> + < y > 202 < / y > <nl> + < / hint > <nl> + < / hints > <nl> + < / connection > <nl> < / connections > <nl> < slots > <nl> < slot > saveSettings ( ) < / slot > <nl> | Merge pull request from samir - aguiar / ui - fix | sqlitebrowser/sqlitebrowser | 103a9a27c96d09223c41b40b3282d293b9806071 | 2015-02-06T20:34:31Z |
mmm a / tools / run_tests / run_microbenchmark . py <nl> ppp b / tools / run_tests / run_microbenchmark . py <nl> def collect_summary ( bm_name , args ) : <nl> ' bm_error ' , <nl> ' bm_chttp2_hpack ' , <nl> ' bm_chttp2_transport ' , <nl> + ' bm_pollset ' , <nl> ' bm_metadata ' , <nl> ' bm_fullstack_trickle ' , <nl> ] , <nl> | Add bm | grpc/grpc | 407101b2c41c111e549935703b19dcdb5c5d0f09 | 2017-03-22T16:17:32Z |
mmm a / CHANGELOG . md <nl> ppp b / CHANGELOG . md <nl> Note : This is in reverse chronological order , so newer entries are added to the <nl> Swift 3 . 0 <nl> mmmmmmmmm <nl> <nl> + * [ SE - 0111 ] ( https : / / github . com / apple / swift - evolution / blob / master / proposals / 0111 - remove - arg - label - type - significance . md ) : <nl> + <nl> + Argument labels have been removed from Swift function types . Instead , they are <nl> + part of the name of a function , subscript , or initializer . Calls to a function <nl> + or initializer , or uses of a subscript , still require argument labels , as they <nl> + always have : <nl> + <nl> + ` ` ` swift <nl> + func doSomething ( x : Int , y : Int ) { } <nl> + doSomething ( x : 0 , y : 0 ) / / argument labels are required <nl> + ` ` ` <nl> + <nl> + However , unapplied references to functions or initializers no longer carry <nl> + argument labels . For example : <nl> + <nl> + ` ` ` swift <nl> + let f = doSomething ( x : y : ) / / inferred type is now ( Int , Int ) - > Void <nl> + ` ` ` <nl> + <nl> + Additionally , explicitly - written function types can no longer carry argument <nl> + labels , although one can still provide parameter name for documentation <nl> + purposes using the ' _ ' in the argument label position : <nl> + <nl> + ` ` ` swift <nl> + typealias CompletionHandler = <nl> + ( token : Token , error : Error ? ) - > Void / / error : function types cannot have argument labels <nl> + <nl> + typealias CompletionHandler = <nl> + ( _ token : Token , _ error : Error ? ) - > Void / / error : okay : names are for documentation purposes <nl> + ` ` ` <nl> + <nl> + * [ SE - 0025 ] ( https : / / github . com / apple / swift - evolution / blob / master / proposals / 0025 - scoped - access - level . md ) : A declaration marked as ` private ` can now only be accessed within the lexical scope it is declared in ( essentially the enclosing curly braces ` { } ` ) . A ` private ` declaration at the top level of a file can be accessed anywhere in that file , as in Swift 2 . The access level formerly known as ` private ` is now called ` fileprivate ` . <nl> + <nl> * [ SE - 0131 ] ( https : / / github . com / apple / swift - evolution / blob / master / proposals / 0131 - anyhashable . md ) : <nl> The standard library provides a new type ` AnyHashable ` for use in heterogenous <nl> hashed collections . Untyped ` NSDictionary ` and ` NSSet ` APIs from Objective - C <nl> mmm a / CODE_OWNERS . TXT <nl> ppp b / CODE_OWNERS . TXT <nl> N : Jordan Rose <nl> E : jordan_rose @ apple . com <nl> D : ClangImporter , Serialization , ( Objective - ) C printer , Driver <nl> <nl> - N : Nadav Rotem <nl> - E : nrotem @ apple . com <nl> + N : Erik Eckstein <nl> + E : eeckstein @ apple . com <nl> D : SILOptimizer <nl> <nl> N : Anna Zaks <nl> mmm a / apinotes / CMakeLists . txt <nl> ppp b / apinotes / CMakeLists . txt <nl> set ( SWIFT_API_NOTES_INPUTS <nl> CoreLocation <nl> CoreSpotlight <nl> CoreText <nl> + CryptoTokenKit <nl> Dispatch <nl> EventKit <nl> ExternalAccessory <nl> new file mode 100644 <nl> index 000000000000 . . 1fca179fa85f <nl> mmm / dev / null <nl> ppp b / apinotes / CryptoTokenKit . 
apinotes <nl> <nl> + mmm <nl> + Name : CryptoTokenKit <nl> + Classes : <nl> + - Name : TKSmartCardSlotManager <nl> + Methods : <nl> + - Selector : ' getSlotWithName : reply : ' <nl> + MethodKind : Instance <nl> + SwiftName : getSlot ( withName : reply : ) <nl> + - Name : TKSmartCard <nl> + Properties : <nl> + - Name : valid <nl> + SwiftName : isValid <nl> + - Name : sensitive <nl> + SwiftName : isSensitive <nl> + Methods : <nl> + - Selector : ' transmitRequest : reply : ' <nl> + MethodKind : Instance <nl> + SwiftName : transmit ( _ : reply : ) <nl> + - Selector : ' userInteractionForSecurePINVerificationWithPINFormat : APDU : PINByteOffset : ' <nl> + MethodKind : Instance <nl> + SwiftName : userInteractionForSecurePINVerification ( _ : apdu : pinByteOffset : ) <nl> + - Selector : ' userInteractionForSecurePINChangeWithPINFormat : APDU : currentPINByteOffset : newPINByteOffset : ' <nl> + MethodKind : Instance <nl> + SwiftName : userInteractionForSecurePINChange ( _ : apdu : currentPINByteOffset : newPINByteOffset : ) <nl> + - Selector : ' sendIns : p1 : p2 : data : le : reply : ' <nl> + MethodKind : Instance <nl> + SwiftPrivate : true <nl> + - Selector : ' sendIns : p1 : p2 : data : le : sw : error : ' <nl> + MethodKind : Instance <nl> + SwiftPrivate : true <nl> + - Selector : ' inSessionWithError : executeBlock : ' <nl> + MethodKind : Instance <nl> + SwiftPrivate : true <nl> + - Name : TKSmartCardATRInterfaceGroup <nl> + SwiftName : TKSmartCardATR . InterfaceGroup <nl> + - Name : TKTokenKeyAlgorithm <nl> + SwiftName : TKTokenSessionDelegate . KeyAlgorithm <nl> + - Name : TKTokenKeyExchangeParameters <nl> + SwiftName : TKTokenSessionDelegate . KeyExchangeParameters <nl> + Protocols : <nl> + - Name : TKTokenSessionDelegate <nl> + Methods : <nl> + - Selector : ' tokenSession : supportsOperation : usingKey : algorithm : ' <nl> + MethodKind : Instance <nl> + SwiftName : tokenSession ( _ : supports : keyObjectID : algorithm : ) <nl> + - Selector : ' tokenSession : signData : usingKey : algorithm : error : ' <nl> + MethodKind : Instance <nl> + SwiftName : tokenSession ( _ : sign : keyObjectID : algorithm : ) <nl> + - Selector : ' tokenSession : decryptData : usingKey : algorithm : error : ' <nl> + MethodKind : Instance <nl> + SwiftName : tokenSession ( _ : decrypt : keyObjectID : algorithm : ) <nl> + - Selector : ' tokenSession : performKeyExchangeWithPublicKey : usingKey : algorithm : parameters : error : ' <nl> + MethodKind : Instance <nl> + SwiftName : tokenSession ( _ : performKeyExchange : keyObjectID : algorithm : parameters : ) <nl> + - Name : TKTokenDelegate <nl> + Methods : <nl> + - Selector : ' token : createSessionWithError : ' <nl> + MethodKind : Instance <nl> + SwiftName : createSession ( _ : ) <nl> + Enumerators : <nl> + - Name : TKSmartCardPINEncodingASCII <nl> + SwiftName : ascii <nl> + - Name : TKSmartCardPINEncodingBCD <nl> + SwiftName : bcd <nl> + - Name : TKSmartCardProtocolT0 <nl> + SwiftName : t0 <nl> + - Name : TKSmartCardProtocolT1 <nl> + SwiftName : t1 <nl> + - Name : TKSmartCardProtocolT15 <nl> + SwiftName : t15 <nl> + Tags : <nl> + - Name : TKSmartCardSlotState <nl> + SwiftName : TKSmartCardSlot . State <nl> + - Name : TKSmartCardPINCharset <nl> + SwiftName : TKSmartCardPINFormat . Charset <nl> + - Name : TKSmartCardPINEncoding <nl> + SwiftName : TKSmartCardPINFormat . Encoding <nl> + - Name : TKSmartCardPINJustification <nl> + SwiftName : TKSmartCardPINFormat . 
Justification <nl> + - Name : TKSmartCardPINCompletion <nl> + SwiftName : TKSmartCardUserInteractionForPINOperation . Completion <nl> + - Name : TKSmartCardPINConfirmation <nl> + SwiftName : TKSmartCardUserInteractionForSecurePINChange . Confirmation <nl> + - Name : TKErrorCode <nl> + NSErrorDomain : TKErrorDomain <nl> mmm a / benchmark / CMakeLists . txt <nl> ppp b / benchmark / CMakeLists . txt <nl> set ( SWIFT_BENCH_MODULES <nl> single - source / CaptureProp <nl> single - source / Chars <nl> single - source / ClassArrayGetter <nl> - single - source / Data <nl> single - source / DeadArray <nl> single - source / DictionaryBridge <nl> single - source / DictionaryLiteral <nl> set ( SWIFT_BENCH_MODULES <nl> single - source / Hash <nl> single - source / Histogram <nl> single - source / Integrate <nl> + single - source / IterateData <nl> single - source / Join <nl> single - source / LinkedList <nl> single - source / MapReduce <nl> mmm a / benchmark / single - source / BitCount . swift <nl> ppp b / benchmark / single - source / BitCount . swift <nl> import Foundation <nl> import TestsUtils <nl> <nl> func countBitSet ( _ num : Int ) - > Int { <nl> - let bits = sizeof ( Int . self ) * 8 <nl> + let bits = MemoryLayout < Int > . size * 8 <nl> var cnt : Int = 0 <nl> var mask : Int = 1 <nl> for _ in 0 . . . bits { <nl> mmm a / benchmark / single - source / DictTest2 . swift <nl> ppp b / benchmark / single - source / DictTest2 . swift <nl> public func run_Dictionary2 ( _ N : Int ) { <nl> CheckResults ( res = = ref_result , " Incorrect results in Dictionary2 : \ ( res ) ! = \ ( ref_result ) " ) <nl> } <nl> <nl> - class Box < T : Hashable > : Hashable { <nl> + class Box < T : Hashable > : Hashable where T : Equatable { <nl> var value : T <nl> <nl> init ( _ v : T ) { <nl> mmm a / benchmark / single - source / Histogram . swift <nl> ppp b / benchmark / single - source / Histogram . swift <nl> import TestsUtils <nl> <nl> typealias rrggbb_t = UInt32 <nl> <nl> - func output_sorted_sparse_rgb_histogram < S : Sequence where S . Iterator . Element = = rrggbb_t > ( _ samples : S , _ N : Int ) { <nl> + func output_sorted_sparse_rgb_histogram < S : Sequence > ( _ samples : S , _ N : Int ) <nl> + where S . Iterator . Element = = rrggbb_t { <nl> var histogram = Dictionary < rrggbb_t , Int > ( ) <nl> for _ in 1 . . . 50 * N { <nl> for sample in samples { / / This part is really awful , I agree <nl> mmm a / benchmark / single - source / Integrate . swift <nl> ppp b / benchmark / single - source / Integrate . swift <nl> class Integrate { <nl> <nl> let fun : ( Double ) - > Double <nl> <nl> - init ( f : ( Double ) - > Double ) { <nl> + init ( f : @ escaping ( Double ) - > Double ) { <nl> fun = f <nl> } <nl> <nl> similarity index 93 % <nl> rename from benchmark / single - source / Data . swift <nl> rename to benchmark / single - source / IterateData . swift <nl> mmm a / benchmark / single - source / Data . swift <nl> ppp b / benchmark / single - source / IterateData . swift <nl> <nl> - / / = = = mmm Data . swift mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / = = = mmm IterateData . swift mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm = = = / / <nl> / / <nl> / / This source file is part of the Swift . org open source project <nl> / / <nl> mmm a / benchmark / single - source / PopFrontGeneric . swift <nl> ppp b / benchmark / single - source / PopFrontGeneric . 
swift <nl> extension Array : MyArrayBufferProtocol { <nl> } <nl> } <nl> <nl> - func myArrayReplace < B : MyArrayBufferProtocol , C : Collection <nl> - where C . Iterator . Element = = B . Element , B . Index = = Int <nl> - > ( <nl> - _ target : inout B , _ subRange : Range < Int > , _ newValues : C <nl> - ) { <nl> + func myArrayReplace < <nl> + B : MyArrayBufferProtocol , <nl> + C : Collection <nl> + > ( _ target : inout B , _ subRange : Range < Int > , _ newValues : C ) <nl> + where C . Iterator . Element = = B . Element , B . Index = = Int { <nl> target . myReplace ( subRange , with : newValues ) <nl> } <nl> <nl> mmm a / benchmark / utils / DriverUtils . swift <nl> ppp b / benchmark / utils / DriverUtils . swift <nl> struct Test { <nl> let index : Int <nl> let f : ( Int ) - > ( ) <nl> var run : Bool <nl> - init ( name : String , n : Int , f : ( Int ) - > ( ) ) { <nl> + init ( name : String , n : Int , f : @ escaping ( Int ) - > ( ) ) { <nl> self . name = name <nl> self . index = n <nl> self . f = f <nl> mmm a / benchmark / utils / main . swift <nl> ppp b / benchmark / utils / main . swift <nl> import Calculator <nl> import CaptureProp <nl> import Chars <nl> import ClassArrayGetter <nl> - import Data <nl> import DeadArray <nl> import DictTest <nl> import DictTest2 <nl> import Hanoi <nl> import Hash <nl> import Histogram <nl> import Integrate <nl> + import IterateData <nl> import Join <nl> import LinkedList <nl> import MapReduce <nl> mmm a / docs / AccessControl . rst <nl> ppp b / docs / AccessControl . rst <nl> The general guiding principle of Swift access control : <nl> * * No entity can be defined in terms of another entity that has a lower <nl> access level . * * <nl> <nl> - There are three levels of access : " private " , " internal " , and " public " . <nl> - Private entities can only be accessed from within the source file where they <nl> - are defined . Internal entities can be accessed anywhere within the module they <nl> - are defined . Public entities can be accessed from anywhere within the module <nl> - and from any other context that imports the current module . <nl> + There are four levels of access : " private " , " fileprivate " , " internal " , and <nl> + " public " . Private entities can only be accessed from within the lexical scope <nl> + where they are defined . File - private entities can only be accessed from within <nl> + the source file where they are defined . Internal entities can be accessed <nl> + anywhere within the module they are defined . Public entities can be accessed <nl> + from anywhere within the module and from any other context that imports the <nl> + current module . <nl> <nl> The names ` ` public ` ` and ` ` private ` ` have precedent in many languages ; <nl> - ` ` internal ` ` comes from C # . In the future , ` ` public ` ` may be used for both API <nl> - and SPI , at which point we may design additional annotations to distinguish the <nl> - two . <nl> + ` ` internal ` ` comes from C # and ` ` fileprivate ` ` from the Swift community . In the <nl> + future , ` ` public ` ` may be used for both API and SPI , at which point we may <nl> + design additional annotations to distinguish the two . <nl> <nl> By default , most entities in a source file have ` ` internal ` ` access . <nl> This optimizes for the most common case — a single - target application <nl> project — while not accidentally revealing entities to clients of a framework <nl> module . <nl> <nl> + . . 
warning : : This document has not yet been updated for SE - 0117 , which adds the <nl> + " open " level of access . <nl> + <nl> + <nl> . . contents : : : local : <nl> <nl> Rules <nl> = = = = = = <nl> <nl> Access to a particular entity is considered relative to the current <nl> - * access context . * The access context of an entity is the current <nl> - file ( if ` ` private ` ` ) , the current module ( if ` ` internal ` ` ) , or the current <nl> - program ( if ` ` public ` ` ) . A reference to an entity may only be written within <nl> - the entity ' s access context . <nl> + * access scope . * The access scope of an entity is its immediate lexical scope <nl> + ( if ` ` private ` ` ) , the current file ( if ` ` fileprivate ` ` ) , the current module ( if <nl> + ` ` internal ` ` ) , or the current program ( if ` ` public ` ` ) . A reference to an entity <nl> + may only be written within the entity ' s access scope . <nl> <nl> If a particular entity is not accessible , it does not appear in name lookup , <nl> unlike in C + + . However , access control does not restrict access to members via <nl> visibility of symbols in a linked binary . <nl> Globals and Members <nl> mmmmmmmmmmmmmmmmmm - <nl> <nl> - A global function , constant , or variable may have any access level less than <nl> - or equal to the access level of its type . That is , a ` ` private ` ` constant can <nl> - have ` ` public ` ` type , but not the other way around . <nl> + All globals and members have a default access level of ` ` internal ` ` , except <nl> + within extensions ( as described below ) . <nl> + <nl> + A declaration may have any access level less than or equal to the access level <nl> + of its type . That is , a ` ` private ` ` constant can have ` ` public ` ` type , but not <nl> + the other way around . It is legal for a member to have greater access than its <nl> + enclosing type , but this has no effect . <nl> <nl> Accessors for variables have the same access level as their associated variable . <nl> The setter may be explicitly annotated with an access level less than or equal <nl> An initializer , method , subscript , or property may have any access level less <nl> than or equal to the access level of its type ( including the implicit ' Self ' <nl> type ) , with a few additional rules : <nl> <nl> - - If the type ' s access level is ` ` private ` ` , the access level of members <nl> - defaults to ` ` private ` ` . If the type ' s access level is ` ` internal ` ` or <nl> - ` ` public ` ` , the access level of members defaults to ` ` internal ` ` . <nl> - <nl> - If a member is used to satisfy a protocol requirement , its access level must <nl> be at least as high as the protocol conformance ' s ; see : ref : ` Protocols ` below . <nl> <nl> struct , enum , or class may be extended whenever it is accessible . <nl> A class may be subclassed whenever it is accessible . A class may have any <nl> access level less than or equal to the access level of its superclass . <nl> <nl> - Members in an extension have the same default access level as members declared <nl> - within the extended type . However , an extension may be marked with an explicit <nl> - access modifier ( e . g . ` ` private extension ` ` ) , in which case the default <nl> - access level of members within the extension is changed to match . <nl> + Members within constrained extensions must have access less than or equal to <nl> + the access level of the types used in the constraints . 
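As a rough, editorial illustration of the constrained-extension rule stated just above (not part of the patch), the following sketch assumes Swift 3 / SE-0025 access levels; the names ``Secret`` and ``Bag`` are made up:

    // Illustrative only.
    fileprivate protocol Secret {}      // visible anywhere in this file

    struct Bag<Element> {}              // internal by default

    extension Bag where Element: Secret {
        // Members here may not be more accessible than 'Secret',
        // the type used in the constraint:
        fileprivate func dump() {}      // fine under the rule above
        // public func dumpAll() {}     // not allowed under the rule above
    }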
<nl> + <nl> + An extension may be marked with an explicit access modifier ( e . g . ` ` private <nl> + extension ` ` ) , in which case the default access level of members within the <nl> + extension is changed to match . No member within such an extension may have <nl> + broader access than the new default . <nl> <nl> Extensions with explicit access modifiers may not add new protocol <nl> conformances , since Swift does not support private protocol conformances <nl> or other extensions from outside the module . Therefore , members of a subclass <nl> or extension will not conflict with or inadvertently be considered to override <nl> non - accessible members of the superclass . <nl> <nl> - Both ` ` private ` ` and ` ` internal ` ` increase opportunities for devirtualization , <nl> + Access levels lower than ` ` public ` ` increase opportunities for devirtualization , <nl> though it is still possible to put a subclass of a ` ` private ` ` class within the <nl> - same file . <nl> + same scope . <nl> <nl> Most information about a non - ` ` public ` ` entity still has to be put into a <nl> module file for now , since we don ' t have resilience implemented . This can be <nl> selector for members , everything can be inspected at runtime , and even a <nl> private member can cause selector conflicts . In this case , access control is <nl> only useful for discipline purposes . <nl> <nl> - Members explicitly marked ` ` private ` ` are * not * exposed to Objective - C unless <nl> - they are also marked ` ` @ objc ` ` ( or ` ` @ IBAction ` ` or similar ) , even if declared <nl> - within a class implicitly or explicitly marked ` ` @ objc ` ` . <nl> + Members explicitly marked ` ` private ` ` or ` ` fileprivate ` ` are * not * exposed to <nl> + Objective - C unless they are also marked ` ` @ objc ` ` ( or ` ` @ IBAction ` ` or <nl> + similar ) , even if declared within a class implicitly or explicitly marked <nl> + ` ` @ objc ` ` . <nl> <nl> Any ` ` public ` ` entities will be included in the generated header . In an <nl> application or unit test target , ` ` internal ` ` entities will be exposed as well . <nl> This proposal omits two forms of access control commonly found in other <nl> languages , a " class - implementation - only " access ( often called " private " ) , and a <nl> " class and any subclasses " access ( often called " protected " ) . We chose not to <nl> include these levels of access control because they do not add useful <nl> - functionality beyond ` ` private ` ` , ` ` internal ` ` , and ` ` public ` ` . <nl> + functionality beyond ` ` private ` ` , ` ` fileprivate ` ` , ` ` internal ` ` , and ` ` public ` ` . <nl> <nl> " class - only " <nl> If " class - only " includes extensions of the class , it is clear that it <nl> functionality beyond ` ` private ` ` , ` ` internal ` ` , and ` ` public ` ` . <nl> limit forces code to be declared within the class that might otherwise <nl> naturally be a top - level helper or an extension method on another type . <nl> <nl> - ` ` private ` ` serves the proper use case of limiting access to the <nl> + ` ` private ` ` and ` ` fileprivate ` ` serve the use case of limiting access to the <nl> implementation details of a class ( even from the rest of the module ! ) while <nl> - not requiring that all of those implementation details be written lexically <nl> - inside the class . <nl> + not tying access to the notion of type . 
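To make that distinction concrete, here is a minimal, editorial sketch assuming SE-0025 semantics (Swift 3), where ``private`` is limited to the enclosing lexical scope and ``fileprivate`` to the file; the names are illustrative and the snippet is not part of the patch:

    // Both declarations live in the same source file.
    class Counter {
        private var storage = 0          // visible only inside this class body
        fileprivate var generation = 0   // visible anywhere in this file
        func bump() { storage += 1; generation += 1 }
    }

    extension Counter {                  // same file as the class
        func reset() {
            generation = 0               // OK: 'fileprivate'
            // storage = 0               // rejected under SE-0025: 'private'
            //                           // does not reach the extension
        }
    }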
<nl> <nl> " protected " <nl> " protected " access provides no guarantees of information hiding , since any <nl> Potential Future Directions <nl> = = = = = = = = = = = = = = = = = = = = = = = = = = = <nl> <nl> - Allowing ` ` private ` ` or ` ` internal ` ` protocol conformances , which are only <nl> - accessible at compile - time from a particular access context . <nl> + accessible at compile - time from a particular access scope . <nl> <nl> - Limiting particular capabilities , such as marking something ` ` final ( public ) ` ` <nl> to restrict subclassing or overriding outside of the current module . <nl> mmm a / docs / OptimizationTips . rst <nl> ppp b / docs / OptimizationTips . rst <nl> in the following ` ` C . array1 ` ` and ` ` D . array1 ` ` will be accessed directly <nl> d . array2 [ i ] = . . . / / Will access D . array2 through dynamic dispatch . <nl> } <nl> <nl> - Advice : Use ' private ' when declaration does not need to be accessed outside of file <nl> mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm <nl> - <nl> - Applying the ` ` private ` ` keyword to a declaration restricts the visibility of <nl> - the declaration to the file in which it is declared . This allows the compiler to <nl> - be able to ascertain all other potentially overriding declarations . Thus the <nl> - absence of any such declarations enables the compiler to infer the ` ` final ` ` <nl> - keyword automatically and remove indirect calls for methods and field accesses <nl> - accordingly . For instance in the following , ` ` e . doSomething ( ) ` ` and <nl> - ` ` f . myPrivateVar ` ` , will be able to be accessed directly assuming ` ` E ` ` , ` ` F ` ` <nl> - do not have any overriding declarations in the same file : <nl> + Advice : Use ' private ' and ' fileprivate ' when declaration does not need to be accessed outside of file <nl> + mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - - <nl> + <nl> + Applying the ` ` private ` ` or ` ` fileprivate ` ` keywords to a declaration restricts <nl> + the visibility of the declaration to the file in which it is declared . This <nl> + allows the compiler to be able to ascertain all other potentially overriding <nl> + declarations . Thus the absence of any such declarations enables the compiler to <nl> + infer the ` ` final ` ` keyword automatically and remove indirect calls for methods <nl> + and field accesses accordingly . For instance in the following , <nl> + ` ` e . doSomething ( ) ` ` and ` ` f . myPrivateVar ` ` , will be able to be accessed directly <nl> + assuming ` ` E ` ` , ` ` F ` ` do not have any overriding declarations in the same file : <nl> <nl> : : <nl> <nl> do not have any overriding declarations in the same file : <nl> } <nl> <nl> class F { <nl> - private var myPrivateVar : Int <nl> + fileprivate var myPrivateVar : Int <nl> } <nl> <nl> func usingE ( _ e : E ) { <nl> mmm a / include / swift / AST / DiagnosticsSema . def <nl> ppp b / include / swift / AST / DiagnosticsSema . 
def <nl> ERROR ( autoclosure_function_type , none , <nl> ( ) ) <nl> ERROR ( autoclosure_function_input_nonunit , none , <nl> " autoclosure argument type must be ' ( ) ' " , ( ) ) <nl> + <nl> + / / FIXME : drop these when we drop @ noescape <nl> ERROR ( noescape_function_type , none , <nl> " @ noescape may only be applied to parameters of function type " , <nl> ( ) ) <nl> ERROR ( noescape_implied_by_autoclosure , none , <nl> " redundantly specified " , ( ) ) <nl> ERROR ( noescape_conflicts_escaping_autoclosure , none , <nl> " @ noescape conflicts with @ autoclosure ( escaping ) " , ( ) ) <nl> + <nl> ERROR ( escaping_function_type , none , <nl> " @ escaping may only be applied to parameters of function type " , ( ) ) <nl> <nl> WARNING ( optional_pattern_match_promotion , none , <nl> ERROR ( type_of_metatype , none , <nl> " ' . dynamicType ' is not allowed after a type name " , ( ) ) <nl> ERROR ( invalid_noescape_use , none , <nl> - " @ noescape % select { value | parameter } 1 % 0 may only be called " , <nl> + " non - escaping % select { value | parameter } 1 % 0 may only be called " , <nl> ( Identifier , bool ) ) <nl> NOTE ( noescape_autoclosure , none , <nl> - " parameter % 0 is implicitly @ noescape because it was declared @ autoclosure " , <nl> + " parameter % 0 is implicitly non - escaping because it was declared @ autoclosure " , <nl> + ( Identifier ) ) <nl> + NOTE ( noescape_parameter , none , <nl> + " parameter % 0 is implicitly non - escaping " , <nl> ( Identifier ) ) <nl> <nl> ERROR ( closure_noescape_use , none , <nl> - " closure use of @ noescape parameter % 0 may allow it to escape " , <nl> + " closure use of non - escaping parameter % 0 may allow it to escape " , <nl> ( Identifier ) ) <nl> ERROR ( decl_closure_noescape_use , none , <nl> - " declaration closing over @ noescape parameter % 0 may allow it to escape " , <nl> + " declaration closing over non - escaping parameter % 0 may allow it to escape " , <nl> ( Identifier ) ) <nl> <nl> ERROR ( capture_across_type_decl , none , <nl> mmm a / include / swift / AST / Module . h <nl> ppp b / include / swift / AST / Module . h <nl> class ModuleDecl : public TypeDecl , public DeclContext { <nl> DebugClient = R ; <nl> } <nl> <nl> - / / / Retrieve the magic __dso_handle variable . <nl> - VarDecl * getDSOHandle ( ) ; <nl> - <nl> / / / Returns true if this module was or is being compiled for testing . <nl> bool isTestingEnabled ( ) const { <nl> return Flags . TestingEnabled ; <nl> mmm a / include / swift / AST / PrintOptions . h <nl> ppp b / include / swift / AST / PrintOptions . h <nl> struct PrintOptions { <nl> / / / Whether we are printing part of SIL body . <nl> bool PrintInSILBody = false ; <nl> <nl> + / / / Whether to print the types as if they appear as function parameters . This <nl> + / / / governs whether we print a function type with an explicit @ escaping . This <nl> + / / / is also set and restored internally when visiting a type in a parameter <nl> + / / / position . <nl> + bool PrintAsInParamType = false ; <nl> + <nl> / / / Whether to use an empty line to separate two members in a single decl . <nl> bool EmptyLineBetweenMembers = false ; <nl> <nl> mmm a / include / swift / AST / Types . h <nl> ppp b / include / swift / AST / Types . 
h <nl> class AnyFunctionType : public TypeBase { <nl> enum : uint16_t { AutoClosureMask = 0x010 } ; <nl> enum : uint16_t { NoEscapeMask = 0x020 } ; <nl> enum : uint16_t { ThrowsMask = 0x040 } ; <nl> - enum : uint16_t { ExplicitlyEscapingMask = 0x080 } ; <nl> <nl> uint16_t Bits ; <nl> <nl> class AnyFunctionType : public TypeBase { <nl> <nl> / / Constructor with no defaults . <nl> ExtInfo ( Representation Rep , <nl> - bool IsAutoClosure , bool IsNoEscape , bool IsExplicitlyEscaping , <nl> + bool IsAutoClosure , bool IsNoEscape , <nl> bool Throws ) <nl> : ExtInfo ( Rep , Throws ) { <nl> Bits | = ( IsAutoClosure ? AutoClosureMask : 0 ) ; <nl> Bits | = ( IsNoEscape ? NoEscapeMask : 0 ) ; <nl> - Bits | = ( IsExplicitlyEscaping ? ExplicitlyEscapingMask : 0 ) ; <nl> } <nl> <nl> bool isAutoClosure ( ) const { return Bits & AutoClosureMask ; } <nl> bool isNoEscape ( ) const { return Bits & NoEscapeMask ; } <nl> - bool isExplicitlyEscaping ( ) const { return Bits & ExplicitlyEscapingMask ; } <nl> bool throws ( ) const { return Bits & ThrowsMask ; } <nl> Representation getRepresentation ( ) const { <nl> unsigned rawRep = Bits & RepresentationMask ; <nl> class AnyFunctionType : public TypeBase { <nl> return getExtInfo ( ) . isNoEscape ( ) ; <nl> } <nl> <nl> - / / / \ brief True if the parameter declaration it is attached to has explicitly <nl> - / / / been marked with the @ escaping attribute . This is a temporary measure . <nl> - bool isExplicitlyEscaping ( ) const { <nl> - return getExtInfo ( ) . isExplicitlyEscaping ( ) ; <nl> - } <nl> - <nl> bool throws ( ) const { <nl> return getExtInfo ( ) . throws ( ) ; <nl> } <nl> mmm a / include / swift / Serialization / ModuleFormat . h <nl> ppp b / include / swift / Serialization / ModuleFormat . h <nl> const uint16_t VERSION_MAJOR = 0 ; <nl> / / / in source control , you should also update the comment to briefly <nl> / / / describe what change you made . The content of this comment isn ' t important ; <nl> / / / it just ensures a conflict if two people change the module format . <nl> - const uint16_t VERSION_MINOR = 258 ; / / Last change : precedencegroup <nl> + const uint16_t VERSION_MINOR = 259 ; / / Last change : drop explicitlyEscaping <nl> <nl> using DeclID = PointerEmbeddedInt < unsigned , 31 > ; <nl> using DeclIDField = BCFixed < 31 > ; <nl> namespace decls_block { <nl> FunctionTypeRepresentationField , / / representation <nl> BCFixed < 1 > , / / auto - closure ? <nl> BCFixed < 1 > , / / noescape ? <nl> - BCFixed < 1 > , / / explicitlyEscaping ? <nl> BCFixed < 1 > / / throws ? <nl> > ; <nl> <nl> mmm a / lib / AST / ASTDumper . cpp <nl> ppp b / lib / AST / ASTDumper . cpp <nl> namespace { <nl> } <nl> <nl> printFlag ( T - > isAutoClosure ( ) , " autoclosure " ) ; <nl> - printFlag ( T - > isNoEscape ( ) , " noescape " ) ; <nl> - printFlag ( T - > isExplicitlyEscaping ( ) , " escaping " ) ; <nl> + <nl> + / / Dump out either @ noescape or @ escaping <nl> + printFlag ( T - > isNoEscape ( ) , " @ noescape " ) ; <nl> + printFlag ( ! T - > isNoEscape ( ) , " @ escaping " ) ; <nl> + <nl> printFlag ( T - > throws ( ) , " throws " ) ; <nl> <nl> printRec ( " input " , T - > getInput ( ) ) ; <nl> mmm a / lib / AST / ASTPrinter . cpp <nl> ppp b / lib / AST / ASTPrinter . cpp <nl> void PrintAST : : visitVarDecl ( VarDecl * decl ) { <nl> } <nl> <nl> void PrintAST : : visitParamDecl ( ParamDecl * decl ) { <nl> - return visitVarDecl ( decl ) ; <nl> + / / Set and restore in - parameter - position printing of types <nl> + auto prior = Options . 
PrintAsInParamType ; <nl> + Options . PrintAsInParamType = true ; <nl> + visitVarDecl ( decl ) ; <nl> + Options . PrintAsInParamType = prior ; <nl> } <nl> <nl> void PrintAST : : printOneParameter ( const ParamDecl * param , bool Curried , <nl> void PrintAST : : printOneParameter ( const ParamDecl * param , bool Curried , <nl> TheTypeLoc . setType ( BGT - > getGenericArgs ( ) [ 0 ] ) ; <nl> } <nl> <nl> + / / Set and restore in - parameter - position printing of types <nl> + auto prior = Options . PrintAsInParamType ; <nl> + Options . PrintAsInParamType = true ; <nl> printTypeLoc ( TheTypeLoc ) ; <nl> + Options . PrintAsInParamType = prior ; <nl> <nl> if ( param - > isVariadic ( ) ) <nl> Printer < < " . . . " ; <nl> class TypePrinter : public TypeVisitor < TypePrinter > { <nl> const PrintOptions & Options ; <nl> Optional < std : : vector < GenericParamList * > > UnwrappedGenericParams ; <nl> <nl> + / / / Whether we are printing something in a function parameter position , and <nl> + / / / thus want to print @ escaping if it escapes . <nl> + bool inParameterPrinting ; <nl> + <nl> void printDeclContext ( DeclContext * DC ) { <nl> switch ( DC - > getContextKind ( ) ) { <nl> case DeclContextKind : : Module : { <nl> class TypePrinter : public TypeVisitor < TypePrinter > { <nl> <nl> public : <nl> TypePrinter ( ASTPrinter & Printer , const PrintOptions & PO ) <nl> - : Printer ( Printer ) , Options ( PO ) { } <nl> + : Printer ( Printer ) , Options ( PO ) , <nl> + inParameterPrinting ( Options . PrintAsInParamType ) { } <nl> <nl> void visit ( Type T ) { <nl> Printer . printTypePre ( TypeLoc : : withoutLoc ( T ) ) ; <nl> class TypePrinter : public TypeVisitor < TypePrinter > { <nl> Printer < < " @ autoclosure " ; <nl> else <nl> Printer < < " @ autoclosure ( escaping ) " ; <nl> - } else if ( info . isNoEscape ( ) ) { <nl> - / / autoclosure implies noescape . <nl> - Printer < < " @ noescape " ; <nl> - } else if ( info . isExplicitlyEscaping ( ) ) { <nl> - Printer < < " @ escaping " ; <nl> + } else if ( inParameterPrinting ) { <nl> + if ( ! info . isNoEscape ( ) ) { <nl> + Printer < < " @ escaping " ; <nl> + } <nl> } <nl> <nl> if ( Options . PrintFunctionRepresentationAttrs ) { <nl> class TypePrinter : public TypeVisitor < TypePrinter > { <nl> <nl> printFunctionExtInfo ( T - > getExtInfo ( ) ) ; <nl> <nl> + / / If we ' re stripping argument labels from types , do it when printing . <nl> + Type inputType = T - > getInput ( ) ; <nl> + if ( auto tupleTy = dyn_cast < TupleType > ( inputType . getPointer ( ) ) ) { <nl> + SmallVector < TupleTypeElt , 4 > elements ; <nl> + elements . reserve ( tupleTy - > getNumElements ( ) ) ; <nl> + for ( const auto & elt : tupleTy - > getElements ( ) ) { <nl> + elements . push_back ( TupleTypeElt ( elt . getType ( ) , Identifier ( ) , <nl> + elt . isVararg ( ) ) ) ; <nl> + } <nl> + inputType = TupleType : : get ( elements , inputType - > getASTContext ( ) ) ; <nl> + } <nl> + <nl> bool needsParens = <nl> - ! isa < ParenType > ( T - > getInput ( ) . getPointer ( ) ) & & <nl> - ! T - > getInput ( ) - > is < TupleType > ( ) ; <nl> + ! isa < ParenType > ( inputType . getPointer ( ) ) & & <nl> + ! inputType - > is < TupleType > ( ) ; <nl> <nl> if ( needsParens ) <nl> Printer < < " ( " ; <nl> - <nl> - visit ( T - > getInput ( ) ) ; <nl> + <nl> + / / Set in - parameter - position printing to print our parameters , then unset it <nl> + / / for the return type ( in case it is also a function ) , and restore at the <nl> + / / end . 
<nl> + auto prior = inParameterPrinting ; <nl> + inParameterPrinting = true ; <nl> + visit ( inputType ) ; <nl> + inParameterPrinting = false ; <nl> + SWIFT_DEFER { <nl> + inParameterPrinting = prior ; <nl> + } ; <nl> <nl> if ( needsParens ) <nl> Printer < < " ) " ; <nl> mmm a / lib / AST / DocComment . cpp <nl> ppp b / lib / AST / DocComment . cpp <nl> getAnyBaseClassDocComment ( swift : : markup : : MarkupContext & MC , <nl> return None ; <nl> } <nl> <nl> + static Optional < DocComment * > <nl> + getProtocolRequirementDocComment ( swift : : markup : : MarkupContext & MC , <nl> + const ProtocolDecl * ProtoExt , <nl> + const Decl * D ) { <nl> + <nl> + auto getSingleRequirementWithNonemptyDoc = [ ] ( const ProtocolDecl * P , <nl> + const ValueDecl * VD ) <nl> + - > const ValueDecl * { <nl> + SmallVector < ValueDecl * , 2 > Members ; <nl> + P - > lookupQualified ( P - > getType ( ) , VD - > getFullName ( ) , <nl> + NLOptions : : NL_ProtocolMembers , <nl> + / * resolver = * / nullptr , Members ) ; <nl> + SmallVector < const ValueDecl * , 1 > ProtocolRequirements ; <nl> + for ( auto Member : Members ) <nl> + if ( ! Member - > isDefinition ( ) ) <nl> + ProtocolRequirements . push_back ( Member ) ; <nl> + <nl> + if ( ProtocolRequirements . size ( ) = = 1 ) { <nl> + auto Requirement = ProtocolRequirements . front ( ) ; <nl> + if ( ! Requirement - > getRawComment ( ) . isEmpty ( ) ) <nl> + return Requirement ; <nl> + } <nl> + <nl> + return nullptr ; <nl> + } ; <nl> + <nl> + if ( const auto * VD = dyn_cast < ValueDecl > ( D ) ) { <nl> + SmallVector < const ValueDecl * , 4 > RequirementsWithDocs ; <nl> + if ( auto Requirement = getSingleRequirementWithNonemptyDoc ( ProtoExt , VD ) ) <nl> + RequirementsWithDocs . push_back ( Requirement ) ; <nl> + <nl> + for ( auto Proto : ProtoExt - > getInheritedProtocols ( / * resolver = * / nullptr ) ) <nl> + if ( auto Requirement = getSingleRequirementWithNonemptyDoc ( Proto , VD ) ) <nl> + RequirementsWithDocs . push_back ( Requirement ) ; <nl> + <nl> + if ( RequirementsWithDocs . size ( ) = = 1 ) <nl> + return getSingleDocComment ( MC , RequirementsWithDocs . front ( ) ) ; <nl> + } <nl> + return None ; <nl> + } <nl> + <nl> Optional < DocComment * > <nl> swift : : getCascadingDocComment ( swift : : markup : : MarkupContext & MC , const Decl * D ) { <nl> auto Doc = getSingleDocComment ( MC , D ) ; <nl> swift : : getCascadingDocComment ( swift : : markup : : MarkupContext & MC , const Decl * D ) { <nl> if ( auto BaseClassDoc = getAnyBaseClassDocComment ( MC , CD , D ) ) <nl> return BaseClassDoc ; <nl> <nl> - / / FIXME : Look at protocol requirement declarations if a protocol <nl> - / / extension implementation doesn ' t have a doc comment . <nl> + if ( const auto * PE = D - > getDeclContext ( ) - > getAsProtocolExtensionContext ( ) ) <nl> + if ( auto ReqDoc = getProtocolRequirementDocComment ( MC , PE , D ) ) <nl> + return ReqDoc ; <nl> <nl> return None ; <nl> } <nl> mmm a / lib / AST / Module . cpp <nl> ppp b / lib / AST / Module . cpp <nl> void Module : : removeFile ( FileUnit & existingFile ) { <nl> Files . erase ( I . base ( ) ) ; <nl> } <nl> <nl> - VarDecl * Module : : getDSOHandle ( ) { <nl> - if ( DSOHandle ) <nl> - return DSOHandle ; <nl> - <nl> - auto unsafeMutableRawPtr = getASTContext ( ) . getUnsafeMutableRawPointerDecl ( ) ; <nl> - if ( ! 
unsafeMutableRawPtr ) <nl> - return nullptr ; <nl> - <nl> - auto & ctx = getASTContext ( ) ; <nl> - auto handleVar = new ( ctx ) VarDecl ( / * IsStatic = * / false , / * IsLet = * / false , <nl> - SourceLoc ( ) , <nl> - ctx . getIdentifier ( " __dso_handle " ) , <nl> - unsafeMutableRawPtr - > getDeclaredType ( ) , <nl> - Files [ 0 ] ) ; <nl> - handleVar - > setImplicit ( true ) ; <nl> - handleVar - > getAttrs ( ) . add ( <nl> - new ( ctx ) SILGenNameAttr ( " __dso_handle " , / * Implicit = * / true ) ) ; <nl> - handleVar - > setAccessibility ( Accessibility : : Internal ) ; <nl> - DSOHandle = handleVar ; <nl> - return handleVar ; <nl> - } <nl> - <nl> # define FORWARD ( name , args ) \ <nl> for ( const FileUnit * file : getFiles ( ) ) \ <nl> file - > name args ; <nl> lookupOperatorDeclForName ( const FileUnit & File , SourceLoc Loc , Identifier Name , <nl> / / Record whether they come from re - exported modules . <nl> / / FIXME : We ought to prefer operators elsewhere in this module before we <nl> / / check imports . <nl> + auto ownModule = SF . getParentModule ( ) ; <nl> ImportedOperatorsMap < OP_DECL > importedOperators ; <nl> for ( auto & imported : SourceFile : : Impl : : getImportsForSourceFile ( SF ) ) { <nl> + / / Protect against source files that contrive to import their own modules . <nl> + if ( imported . first . second = = ownModule ) <nl> + continue ; <nl> + <nl> bool isExported = <nl> imported . second . contains ( SourceFile : : ImportFlags : : Exported ) ; <nl> if ( ! includePrivate & & ! isExported ) <nl> mmm a / lib / AST / USRGeneration . cpp <nl> ppp b / lib / AST / USRGeneration . cpp <nl> <nl> # include " llvm / ADT / StringRef . h " <nl> # include " llvm / Support / raw_ostream . h " <nl> # include " clang / AST / ASTContext . h " <nl> + # include " clang / AST / Attr . h " <nl> # include " clang / Index / USRGeneration . h " <nl> # include " clang / Lex / PreprocessingRecord . h " <nl> # include " clang / Lex / Preprocessor . h " <nl> bool ide : : printDeclUSR ( const ValueDecl * D , raw_ostream & OS ) { <nl> <nl> ValueDecl * VD = const_cast < ValueDecl * > ( D ) ; <nl> <nl> - if ( ClangNode ClangN = VD - > getClangNode ( ) ) { <nl> + auto interpretAsClangNode = [ ] ( const ValueDecl * D ) - > ClangNode { <nl> + ClangNode ClangN = D - > getClangNode ( ) ; <nl> + if ( auto ClangD = ClangN . getAsDecl ( ) ) { <nl> + / / NSErrorDomain causes the clang enum to be imported like this : <nl> + / / <nl> + / / struct MyError { <nl> + / / enum Code : Int32 { <nl> + / / case errFirst <nl> + / / case errSecond <nl> + / / } <nl> + / / static var errFirst : MyError . Code { get } <nl> + / / static var errSecond : MyError . Code { get } <nl> + / / } <nl> + / / <nl> + / / The clang enum and enum constants are associated with both the <nl> + / / struct / nested enum , and the static vars / enum cases . <nl> + / / But we want unique USRs for the above symbols , so use the clang USR <nl> + / / for the enum and enum cases , and the Swift USR for the struct and vars . 
<nl> + / / <nl> + if ( isa < clang : : EnumDecl > ( ClangD ) ) { <nl> + if ( ClangD - > hasAttr < clang : : NSErrorDomainAttr > ( ) & & isa < StructDecl > ( D ) ) <nl> + return ClangNode ( ) ; <nl> + } else if ( auto * ClangEnumConst = dyn_cast < clang : : EnumConstantDecl > ( ClangD ) ) { <nl> + if ( auto * ClangEnum = dyn_cast < clang : : EnumDecl > ( ClangEnumConst - > getDeclContext ( ) ) ) { <nl> + if ( ClangEnum - > hasAttr < clang : : NSErrorDomainAttr > ( ) & & isa < VarDecl > ( D ) ) <nl> + return ClangNode ( ) ; <nl> + } <nl> + } <nl> + } <nl> + return ClangN ; <nl> + } ; <nl> + <nl> + if ( ClangNode ClangN = interpretAsClangNode ( D ) ) { <nl> llvm : : SmallString < 128 > Buf ; <nl> if ( auto ClangD = ClangN . getAsDecl ( ) ) { <nl> bool Ignore = clang : : index : : generateUSRForDecl ( ClangD , Buf ) ; <nl> mmm a / lib / ClangImporter / ClangImporter . cpp <nl> ppp b / lib / ClangImporter / ClangImporter . cpp <nl> void ClangModuleUnit : : collectLinkLibraries ( <nl> } <nl> <nl> StringRef ClangModuleUnit : : getFilename ( ) const { <nl> - if ( ! clangModule ) { <nl> + if ( ! clangModule ) <nl> return " < imports > " ; <nl> - } <nl> - return clangModule - > getASTFile ( ) <nl> - ? clangModule - > getASTFile ( ) - > getName ( ) : StringRef ( ) ; <nl> + if ( const clang : : FileEntry * F = clangModule - > getASTFile ( ) ) <nl> + if ( F - > getName ( ) ) <nl> + return F - > getName ( ) ; <nl> + return StringRef ( ) ; <nl> } <nl> <nl> clang : : TargetInfo & ClangImporter : : getTargetInfo ( ) const { <nl> mmm a / lib / ClangImporter / MappedTypes . def <nl> ppp b / lib / ClangImporter / MappedTypes . def <nl> MAP_TYPE ( " dispatch_block_t " , Block , 0 , " Dispatch " , " dispatch_block_t " , <nl> true , DoNothing ) <nl> MAP_TYPE ( " __swift_shims_dispatch_block_t " , Block , 0 , " Dispatch " , " _DispatchBlock " , <nl> true , DoNothing ) <nl> + MAP_TYPE ( " __swift_shims_dispatch_data_t " , ObjCId , 0 , " Dispatch " , " dispatch_data_t " , <nl> + true , DoNothing ) <nl> <nl> / / Objective - C types . <nl> MAP_TYPE ( " BOOL " , ObjCBool , 8 , " ObjectiveC " , " ObjCBool " , false , DoNothing ) <nl> mmm a / lib / IDE / CodeCompletion . cpp <nl> ppp b / lib / IDE / CodeCompletion . 
cpp <nl> class CompletionLookup final : public swift : : VisibleDeclConsumer { <nl> DeducedAssociatedTypeCache ; <nl> <nl> Optional < SemanticContextKind > ForcedSemanticContext = None ; <nl> + bool IsUnresolvedMember = false ; <nl> <nl> std : : unique_ptr < ArchetypeTransformer > TransformerPt = nullptr ; <nl> <nl> class CompletionLookup final : public swift : : VisibleDeclConsumer { <nl> if ( ForcedSemanticContext ) <nl> return * ForcedSemanticContext ; <nl> <nl> + if ( IsUnresolvedMember ) { <nl> + if ( isa < EnumElementDecl > ( D ) ) { <nl> + return SemanticContextKind : : ExpressionSpecific ; <nl> + } <nl> + } <nl> + <nl> switch ( Reason ) { <nl> case DeclVisibilityKind : : LocalVariable : <nl> case DeclVisibilityKind : : FunctionParameter : <nl> class CompletionLookup final : public swift : : VisibleDeclConsumer { <nl> return false ; <nl> } <nl> <nl> - void handleOptionSet ( Decl * D , DeclVisibilityKind Reason ) { <nl> - if ( auto * NTD = dyn_cast < NominalTypeDecl > ( D ) ) { <nl> - if ( isOptionSetDecl ( NTD ) ) { <nl> - for ( auto M : NTD - > getMembers ( ) ) { <nl> - if ( auto * VD = dyn_cast < VarDecl > ( M ) ) { <nl> - if ( isOptionSet ( VD - > getType ( ) ) & & VD - > isStatic ( ) ) { <nl> - addVarDeclRef ( VD , Reason ) ; <nl> - } <nl> - } <nl> - } <nl> - } <nl> - } <nl> - } <nl> - <nl> bool isOptionSetDecl ( NominalTypeDecl * D ) { <nl> auto optionSetType = dyn_cast < ProtocolDecl > ( Ctx . getOptionSetDecl ( ) ) ; <nl> if ( ! optionSetType ) <nl> class CompletionLookup final : public swift : : VisibleDeclConsumer { <nl> unboxType ( FT - > getInput ( ) ) ; <nl> unboxType ( FT - > getResult ( ) ) ; <nl> } else if ( auto NTD = T - > getNominalOrBoundGenericNominal ( ) ) { <nl> - if ( HandledDecls . count ( NTD ) = = 0 ) { <nl> - auto Reason = DeclVisibilityKind : : MemberOfCurrentNominal ; <nl> - if ( ! Lookup . handleEnumElement ( NTD , Reason ) ) { <nl> - Lookup . handleOptionSet ( NTD , Reason ) ; <nl> - } <nl> - HandledDecls . insert ( NTD ) ; <nl> - } <nl> + if ( HandledDecls . insert ( NTD ) . second ) <nl> + Lookup . getUnresolvedMemberCompletions ( T ) ; <nl> } <nl> } <nl> <nl> class CompletionLookup final : public swift : : VisibleDeclConsumer { <nl> } <nl> } ; <nl> <nl> - void getUnresolvedMemberCompletions ( SourceLoc Loc , SmallVectorImpl < Type > & Types ) { <nl> + void getUnresolvedMemberCompletions ( ArrayRef < Type > Types ) { <nl> NeedLeadingDot = ! HaveDot ; <nl> for ( auto T : Types ) { <nl> if ( T & & T - > getNominalOrBoundGenericNominal ( ) ) { <nl> - auto Reason = DeclVisibilityKind : : MemberOfCurrentNominal ; <nl> - if ( ! handleEnumElement ( T - > getNominalOrBoundGenericNominal ( ) , Reason ) ) { <nl> - handleOptionSet ( T - > getNominalOrBoundGenericNominal ( ) , Reason ) ; <nl> - } <nl> + / / We can only say . foo where foo is a static member of the contextual <nl> + / / type and has the same type ( or if the member is a function , then the <nl> + / / same result type ) as the contextual type . <nl> + auto contextCanT = T - > getCanonicalType ( ) ; <nl> + FilteredDeclConsumer consumer ( * this , [ = ] ( ValueDecl * VD , DeclVisibilityKind reason ) { <nl> + if ( ! VD - > hasType ( ) ) { <nl> + TypeResolver - > resolveDeclSignature ( VD ) ; <nl> + if ( ! 
VD - > hasType ( ) ) <nl> + return false ; <nl> + } <nl> + <nl> + auto T = VD - > getType ( ) ; <nl> + while ( auto FT = T - > getAs < AnyFunctionType > ( ) ) <nl> + T = FT - > getResult ( ) ; <nl> + return T - > getCanonicalType ( ) = = contextCanT ; <nl> + } ) ; <nl> + <nl> + auto baseType = MetatypeType : : get ( T ) ; <nl> + llvm : : SaveAndRestore < LookupKind > SaveLook ( Kind , LookupKind : : ValueExpr ) ; <nl> + llvm : : SaveAndRestore < Type > SaveType ( ExprType , baseType ) ; <nl> + llvm : : SaveAndRestore < bool > SaveUnresolved ( IsUnresolvedMember , true ) ; <nl> + lookupVisibleMemberDecls ( consumer , baseType , CurrDeclContext , <nl> + TypeResolver . get ( ) , <nl> + / * includeInstanceMembers = * / false ) ; <nl> } <nl> } <nl> } <nl> <nl> - void getUnresolvedMemberCompletions ( SourceLoc Loc , <nl> - std : : vector < std : : string > & FuncNames , <nl> + void getUnresolvedMemberCompletions ( std : : vector < std : : string > & FuncNames , <nl> bool HasReturn ) { <nl> NeedLeadingDot = ! HaveDot ; <nl> LookupByName Lookup ( * this , FuncNames ) ; <nl> void CodeCompletionCallbacksImpl : : doneParsing ( ) { <nl> eraseErrorTypes ( PE ) ; <nl> Success = typeCheckUnresolvedExpr ( * CurDeclContext , UnresolvedExpr , PE , <nl> PossibleTypes ) ; <nl> - Lookup . getUnresolvedMemberCompletions ( <nl> - P . Context . SourceMgr . getCodeCompletionLoc ( ) , PossibleTypes ) ; <nl> + Lookup . getUnresolvedMemberCompletions ( PossibleTypes ) ; <nl> } <nl> if ( ! Success ) { <nl> Lookup . getUnresolvedMemberCompletions ( <nl> - P . Context . SourceMgr . getCodeCompletionLoc ( ) , <nl> TokensBeforeUnresolvedExpr , <nl> UnresolvedExprInReturn ) ; <nl> } <nl> mmm a / lib / Parse / ParseType . cpp <nl> ppp b / lib / Parse / ParseType . cpp <nl> ParserResult < TupleTypeRepr > Parser : : parseTypeTupleBody ( ) { <nl> SourceLoc secondNameLoc = std : : get < 3 > ( currentLabel ) ; <nl> <nl> / / True tuples have labels . <nl> - if ( ! isFunctionType | | ! Context . LangOpts . SuppressArgumentLabelsInTypes ) { <nl> + if ( ! isFunctionType ) { <nl> / / If there were two names , complain . <nl> if ( firstNameLoc . isValid ( ) & & secondNameLoc . isValid ( ) ) { <nl> auto diag = diagnose ( firstNameLoc , diag : : tuple_type_multiple_labels ) ; <nl> mmm a / lib / SIL / SILModule . cpp <nl> ppp b / lib / SIL / SILModule . cpp <nl> SILFunction * SILModule : : getOrCreateFunction ( SILLocation loc , <nl> <nl> if ( auto fn = lookUpFunction ( name ) ) { <nl> assert ( fn - > getLoweredFunctionType ( ) = = constantType ) ; <nl> - assert ( fn - > getLinkage ( ) = = constant . getLinkage ( ForDefinition ) <nl> - | | fn - > getLinkage ( ) = = constant . getLinkage ( NotForDefinition ) ) ; <nl> + assert ( fn - > getLinkage ( ) = = linkage | | <nl> + ( forDefinition = = ForDefinition_t : : NotForDefinition & & <nl> + fn - > getLinkage ( ) = = <nl> + constant . getLinkage ( ForDefinition_t : : ForDefinition ) ) ) ; <nl> if ( forDefinition ) { <nl> / / In all the cases where getConstantLinkage returns something <nl> / / different for ForDefinition , it returns an available - externally <nl> mmm a / lib / SILGen / RValue . cpp <nl> ppp b / lib / SILGen / RValue . cpp <nl> getElementRange ( CanTupleType tupleType , unsigned eltIndex ) { <nl> RValue RValue : : extractElement ( unsigned n ) & & { <nl> assert ( isComplete ( ) & & " rvalue is not complete " ) ; <nl> <nl> - auto tupleTy = cast < TupleType > ( type ) ; <nl> + CanTupleType tupleTy = dyn_cast < TupleType > ( type ) ; <nl> + if ( ! 
tupleTy ) { <nl> + assert ( n = = 0 ) ; <nl> + unsigned to = getRValueSize ( type ) ; <nl> + assert ( to = = values . size ( ) ) ; <nl> + RValue element ( { llvm : : makeArrayRef ( values ) . slice ( 0 , to ) , type } ) ; <nl> + makeUsed ( ) ; <nl> + return element ; <nl> + } <nl> + <nl> + <nl> auto range = getElementRange ( tupleTy , n ) ; <nl> unsigned from = range . first , to = range . second ; <nl> <nl> RValue RValue : : extractElement ( unsigned n ) & & { <nl> void RValue : : extractElements ( SmallVectorImpl < RValue > & elements ) & & { <nl> assert ( isComplete ( ) & & " rvalue is not complete " ) ; <nl> <nl> + CanTupleType tupleTy = dyn_cast < TupleType > ( type ) ; <nl> + if ( ! tupleTy ) { <nl> + unsigned to = getRValueSize ( type ) ; <nl> + assert ( to = = values . size ( ) ) ; <nl> + elements . push_back ( { llvm : : makeArrayRef ( values ) . slice ( 0 , to ) , type } ) ; <nl> + makeUsed ( ) ; <nl> + return ; <nl> + } <nl> + <nl> unsigned from = 0 ; <nl> - for ( auto eltType : cast < TupleType > ( type ) . getElementTypes ( ) ) { <nl> + for ( auto eltType : tupleTy . getElementTypes ( ) ) { <nl> unsigned to = from + getRValueSize ( eltType ) ; <nl> elements . push_back ( { llvm : : makeArrayRef ( values ) . slice ( from , to - from ) , <nl> eltType } ) ; <nl> mmm a / lib / SILGen / SILGenApply . cpp <nl> ppp b / lib / SILGen / SILGenApply . cpp <nl> namespace { <nl> / / If we ' re working with an r - value , just expand it out and emit <nl> / / all the elements individually . <nl> if ( arg . isRValue ( ) ) { <nl> - CanTupleType substArgType = cast < TupleType > ( arg . getSubstType ( ) ) ; <nl> - <nl> - / / The original type isn ' t necessarily a tuple . <nl> - assert ( origParamType . matchesTuple ( substArgType ) ) ; <nl> - <nl> - auto loc = arg . getKnownRValueLocation ( ) ; <nl> - SmallVector < RValue , 4 > elts ; <nl> - std : : move ( arg ) . asKnownRValue ( ) . extractElements ( elts ) ; <nl> - for ( auto i : indices ( substArgType . getElementTypes ( ) ) ) { <nl> - emit ( { loc , std : : move ( elts [ i ] ) } , <nl> - origParamType . getTupleElementType ( i ) ) ; <nl> - } <nl> - return ; <nl> + if ( CanTupleType substArgType = <nl> + dyn_cast < TupleType > ( arg . getSubstType ( ) ) ) { <nl> + / / The original type isn ' t necessarily a tuple . <nl> + assert ( origParamType . matchesTuple ( substArgType ) ) ; <nl> + <nl> + auto loc = arg . getKnownRValueLocation ( ) ; <nl> + SmallVector < RValue , 4 > elts ; <nl> + std : : move ( arg ) . asKnownRValue ( ) . extractElements ( elts ) ; <nl> + for ( auto i : indices ( substArgType . getElementTypes ( ) ) ) { <nl> + emit ( { loc , std : : move ( elts [ i ] ) } , <nl> + origParamType . getTupleElementType ( i ) ) ; <nl> + } <nl> + return ; <nl> + } <nl> + <nl> + auto loc = arg . getKnownRValueLocation ( ) ; <nl> + SmallVector < RValue , 1 > elts ; <nl> + std : : move ( arg ) . asKnownRValue ( ) . extractElements ( elts ) ; <nl> + emit ( { loc , std : : move ( elts [ 0 ] ) } , <nl> + origParamType . getTupleElementType ( 0 ) ) ; <nl> + return ; <nl> } <nl> <nl> / / Otherwise , we ' re working with an expression . <nl> mmm a / lib / SILGen / SILGenExpr . cpp <nl> ppp b / lib / SILGen / SILGenExpr . cpp <nl> visitMagicIdentifierLiteralExpr ( MagicIdentifierLiteralExpr * E , SGFContext C ) { <nl> } <nl> <nl> case MagicIdentifierLiteralExpr : : DSOHandle : { <nl> - return SGF . emitRValueForDecl ( E , SGF . SGM . 
SwiftModule - > getDSOHandle ( ) , <nl> - E - > getType ( ) , AccessSemantics : : Ordinary , C ) ; <nl> + auto SILLoc = SILLocation ( E ) ; <nl> + auto UnsafeRawPointer = SGF . getASTContext ( ) . getUnsafeRawPointerDecl ( ) ; <nl> + auto UnsafeRawPtrTy = <nl> + SGF . getLoweredType ( UnsafeRawPointer - > getDeclaredInterfaceType ( ) ) ; <nl> + SILType BulitinRawPtrTy = SILType : : getRawPointerType ( SGF . getASTContext ( ) ) ; <nl> + <nl> + <nl> + auto DSOGlobal = SGF . SGM . M . lookUpGlobalVariable ( " __dso_handle " ) ; <nl> + if ( ! DSOGlobal ) <nl> + DSOGlobal = SILGlobalVariable : : create ( SGF . SGM . M , <nl> + SILLinkage : : HiddenExternal , <nl> + IsNotFragile , " __dso_handle " , <nl> + BulitinRawPtrTy ) ; <nl> + auto DSOAddr = SGF . B . createGlobalAddr ( SILLoc , DSOGlobal ) ; <nl> + <nl> + auto DSOPointer = SGF . B . createAddressToPointer ( SILLoc , DSOAddr , <nl> + BulitinRawPtrTy ) ; <nl> + <nl> + auto UnsafeRawPtrStruct = SGF . B . createStruct ( SILLoc , UnsafeRawPtrTy , <nl> + { DSOPointer } ) ; <nl> + return RValue ( SGF , E , ManagedValue : : forUnmanaged ( UnsafeRawPtrStruct ) ) ; <nl> } <nl> } <nl> } <nl> mmm a / lib / SILGen / SILGenPoly . cpp <nl> ppp b / lib / SILGen / SILGenPoly . cpp <nl> static void emitForceInto ( SILGenFunction & SGF , SILLocation loc , <nl> temp . finishInitialization ( SGF ) ; <nl> } <nl> <nl> + / / / If the type is a single - element tuple , return the element type . <nl> + static CanType getSingleTupleElement ( CanType type ) { <nl> + if ( auto tupleType = dyn_cast < TupleType > ( type ) ) { <nl> + if ( tupleType - > getNumElements ( ) = = 1 ) <nl> + return tupleType . getElementType ( 0 ) ; <nl> + } <nl> + <nl> + return type ; <nl> + } <nl> + <nl> namespace { <nl> class TranslateArguments { <nl> SILGenFunction & SGF ; <nl> namespace { <nl> if ( inputOrigType . isTuple ( ) & & <nl> inputOrigType . getNumTupleElements ( ) = = 1 ) { <nl> inputOrigType = inputOrigType . getTupleElementType ( 0 ) ; <nl> - inputSubstType = cast < TupleType > ( inputSubstType ) . getElementType ( 0 ) ; <nl> + inputSubstType = getSingleTupleElement ( inputSubstType ) ; <nl> return translate ( inputOrigType , inputSubstType , <nl> outputOrigType , outputSubstType ) ; <nl> } <nl> namespace { <nl> if ( outputOrigType . isTuple ( ) & & <nl> outputOrigType . getNumTupleElements ( ) = = 1 ) { <nl> outputOrigType = outputOrigType . getTupleElementType ( 0 ) ; <nl> - if ( auto outputSubstTuple = dyn_cast < TupleType > ( outputSubstType ) ) { <nl> - if ( outputSubstTuple - > getNumElements ( ) > 0 ) <nl> - outputSubstType = outputSubstTuple . getElementType ( 0 ) ; <nl> - } <nl> + outputSubstType = getSingleTupleElement ( outputSubstType ) ; <nl> return translate ( inputOrigType , inputSubstType , <nl> outputOrigType , outputSubstType ) ; <nl> } <nl> mmm a / lib / SILOptimizer / Utils / Local . cpp <nl> ppp b / lib / SILOptimizer / Utils / Local . cpp <nl> optimizeBridgedSwiftToObjCCast ( SILInstruction * Inst , <nl> assert ( Conf & & " _ObjectiveCBridgeable conformance should exist " ) ; <nl> ( void ) Conf ; <nl> <nl> - bool isCurrentModuleBridgeToObjectiveC = false ; <nl> - <nl> / / Generate code to invoke _bridgeToObjectiveC <nl> SILBuilderWithScope Builder ( Inst ) ; <nl> <nl> optimizeBridgedSwiftToObjCCast ( SILInstruction * Inst , <nl> M . getSwiftModule ( ) - > lookupMember ( Results , Source . getNominalOrBoundGenericNominal ( ) , <nl> M . getASTContext ( ) . 
Id_bridgeToObjectiveC , Identifier ( ) ) ; <nl> ResultsRef = Results ; <nl> - isCurrentModuleBridgeToObjectiveC = true ; <nl> } <nl> if ( ResultsRef . size ( ) ! = 1 ) <nl> return nullptr ; <nl> <nl> auto MemberDeclRef = SILDeclRef ( Results . front ( ) ) ; <nl> - auto Linkage = ( isCurrentModuleBridgeToObjectiveC ) <nl> - ? ForDefinition_t : : ForDefinition <nl> - : ForDefinition_t : : NotForDefinition ; <nl> - auto * BridgedFunc = M . getOrCreateFunction ( Loc , MemberDeclRef , Linkage ) ; <nl> + auto * BridgedFunc = M . getOrCreateFunction ( Loc , MemberDeclRef , <nl> + ForDefinition_t : : NotForDefinition ) ; <nl> assert ( BridgedFunc & & <nl> " Implementation of _bridgeToObjectiveC could not be found " ) ; <nl> <nl> mmm a / lib / Sema / CSApply . cpp <nl> ppp b / lib / Sema / CSApply . cpp <nl> Expr * ExprRewriter : : coerceToType ( Expr * expr , Type toType , <nl> swift : : AnyFunctionType : : ExtInfo newEI ( fromEI . getRepresentation ( ) , <nl> toEI . isAutoClosure ( ) , <nl> toEI . isNoEscape ( ) | fromEI . isNoEscape ( ) , <nl> - toEI . isExplicitlyEscaping ( ) | fromEI . isExplicitlyEscaping ( ) , <nl> toEI . throws ( ) & fromEI . throws ( ) ) ; <nl> auto newToType = FunctionType : : get ( fromFunc - > getInput ( ) , <nl> fromFunc - > getResult ( ) , newEI ) ; <nl> mmm a / lib / Sema / CSDiag . cpp <nl> ppp b / lib / Sema / CSDiag . cpp <nl> bool FailureDiagnosis : : diagnoseGeneralMemberFailure ( Constraint * constraint ) { <nl> } <nl> } <nl> <nl> - if ( baseObjTy - > is < TupleType > ( ) ) { <nl> + / / If this is a tuple , then the index needs to be valid . <nl> + if ( auto tuple = baseObjTy - > getAs < TupleType > ( ) ) { <nl> + StringRef nameStr = memberName . getBaseName ( ) . str ( ) ; <nl> + int fieldIdx = - 1 ; <nl> + / / Resolve a number reference into the tuple type . <nl> + unsigned Value = 0 ; <nl> + if ( ! nameStr . getAsInteger ( 10 , Value ) & & Value < tuple - > getNumElements ( ) ) { <nl> + fieldIdx = Value ; <nl> + } else { <nl> + fieldIdx = tuple - > getNamedElementId ( memberName . getBaseName ( ) ) ; <nl> + } <nl> + <nl> + if ( fieldIdx ! = - 1 ) <nl> + return false ; / / Lookup is valid . <nl> + <nl> diagnose ( anchor - > getLoc ( ) , diag : : could_not_find_tuple_member , <nl> baseObjTy , memberName ) <nl> . highlight ( anchor - > getSourceRange ( ) ) . highlight ( memberRange ) ; <nl> static void noteArchetypeSource ( const TypeLoc & loc , ArchetypeType * archetype , <nl> } <nl> <nl> <nl> + / / / Check the specified closure to see if it is a multi - statement closure with <nl> + / / / an uninferred type . If so , diagnose the problem with an error and return <nl> + / / / true . <nl> + static bool checkMultistatementClosureForAmbiguity ( ClosureExpr * closure , <nl> + TypeChecker & tc ) { <nl> + if ( closure - > hasSingleExpressionBody ( ) | | <nl> + closure - > hasExplicitResultType ( ) ) <nl> + return false ; <nl> + <nl> + auto closureType = closure - > getType ( ) - > getAs < AnyFunctionType > ( ) ; <nl> + if ( ! closureType | | ! isUnresolvedOrTypeVarType ( closureType - > getResult ( ) ) ) <nl> + return false ; <nl> + <nl> + tc . diagnose ( closure - > getLoc ( ) , diag : : cannot_infer_closure_result_type ) ; <nl> + return true ; <nl> + } <nl> + <nl> + <nl> / / / Emit an error message about an unbound generic parameter existing , and <nl> / / / emit notes referring to the target of a diagnostic , e . g . , the function <nl> / / / or parameter being used . 
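For context on the multi-statement-closure check added above, here is a hedged Swift-level example of the situation it targets; the exact diagnostic wording depends on the compiler version, and the names are illustrative only:

    // A multi-statement closure with no explicit result type and no
    // contextual type gives the constraint solver nothing to infer from:
    //
    //     let f = { x in
    //         let y = x + 1
    //         return y
    //     }
    //
    // Spelling out the parameter and result types resolves it:
    let f = { (x: Int) -> Int in
        let y = x + 1
        return y
    }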
<nl> static void diagnoseUnboundArchetype ( Expr * overallExpr , <nl> ND - > getDeclaredType ( ) ) ; <nl> return ; <nl> } <nl> + <nl> + / / A very common cause of this diagnostic is a situation where a closure expr <nl> + / / has no inferred type , due to being a multiline closure . Check to see if <nl> + / / this is the case and ( if so ) , speculatively diagnose that as the problem . <nl> + bool didDiagnose = false ; <nl> + overallExpr - > forEachChildExpr ( [ & ] ( Expr * subExpr ) - > Expr * { <nl> + auto closure = dyn_cast < ClosureExpr > ( subExpr ) ; <nl> + if ( ! didDiagnose & & closure ) <nl> + didDiagnose = checkMultistatementClosureForAmbiguity ( closure , tc ) ; <nl> + <nl> + return subExpr ; <nl> + } ) ; <nl> + <nl> + if ( didDiagnose ) return ; <nl> + <nl> <nl> / / Otherwise , emit an error message on the expr we have , and emit a note <nl> / / about where the archetype came from . <nl> void FailureDiagnosis : : diagnoseAmbiguity ( Expr * E ) { <nl> / / Unresolved / Anonymous ClosureExprs are common enough that we should give <nl> / / them tailored diagnostics . <nl> if ( auto CE = dyn_cast < ClosureExpr > ( E - > getValueProvidingExpr ( ) ) ) { <nl> - auto CFTy = CE - > getType ( ) - > getAs < AnyFunctionType > ( ) ; <nl> - <nl> / / If this is a multi - statement closure with no explicit result type , emit <nl> / / a note to clue the developer in . <nl> - if ( ! CE - > hasExplicitResultType ( ) & & CFTy & & <nl> - isUnresolvedOrTypeVarType ( CFTy - > getResult ( ) ) ) { <nl> - diagnose ( CE - > getLoc ( ) , diag : : cannot_infer_closure_result_type ) ; <nl> + if ( checkMultistatementClosureForAmbiguity ( CE , CS - > getTypeChecker ( ) ) ) <nl> return ; <nl> - } <nl> - <nl> + <nl> diagnose ( E - > getLoc ( ) , diag : : cannot_infer_closure_type ) <nl> . highlight ( E - > getSourceRange ( ) ) ; <nl> return ; <nl> mmm a / lib / Sema / CSGen . cpp <nl> ppp b / lib / Sema / CSGen . cpp <nl> <nl> # include " swift / AST / ASTWalker . h " <nl> # include " swift / AST / Expr . h " <nl> # include " swift / AST / ParameterList . h " <nl> + # include " swift / AST / PrettyStackTrace . h " <nl> # include " swift / Sema / IDETypeChecking . h " <nl> - # include " llvm / ADT / StringExtras . h " <nl> # include " llvm / ADT / APInt . h " <nl> + # include " llvm / ADT / StringExtras . h " <nl> <nl> using namespace swift ; <nl> using namespace swift : : constraints ; <nl> namespace { <nl> if ( tc . requirePointerArgumentIntrinsics ( expr - > getLoc ( ) ) ) <nl> return nullptr ; <nl> <nl> - return CS . DC - > getParentModule ( ) - > getDSOHandle ( ) - > getInterfaceType ( ) ; <nl> + auto unsafeRawPointer = <nl> + CS . getASTContext ( ) . getUnsafeRawPointerDecl ( ) ; <nl> + return unsafeRawPointer - > getDeclaredType ( ) ; <nl> } <nl> } <nl> } <nl> class InferUnresolvedMemberConstraintGenerator : public ConstraintGenerator { <nl> return Expr - > getType ( ) ; <nl> } <nl> <nl> - void collectResolvedType ( Solution & S , SmallVectorImpl < Type > & PossibleTypes ) { <nl> + bool collectResolvedType ( Solution & S , SmallVectorImpl < Type > & PossibleTypes ) { <nl> if ( auto Bind = S . typeBindings [ VT ] ) { <nl> / / We allow type variables in the overall solution , but must skip any <nl> / / type variables in the binding for VT ; these types must outlive the <nl> / / constraint solver memory arena . <nl> - if ( ! Bind - > hasTypeVariable ( ) ) <nl> + if ( ! Bind - > hasTypeVariable ( ) ) { <nl> PossibleTypes . 
push_back ( Bind ) ; <nl> + return true ; <nl> + } <nl> } <nl> + return false ; <nl> } <nl> } ; <nl> <nl> bool swift : : typeCheckUnresolvedExpr ( DeclContext & DC , <nl> Expr * E , Expr * Parent , <nl> SmallVectorImpl < Type > & PossibleTypes ) { <nl> + PrettyStackTraceExpr stackTrace ( DC . getASTContext ( ) , <nl> + " type - checking unresolved member " , Parent ) ; <nl> ConstraintSystemOptions Options = ConstraintSystemFlags : : AllowFixes ; <nl> auto * TC = static_cast < TypeChecker * > ( DC . getASTContext ( ) . getLazyResolver ( ) ) ; <nl> ConstraintSystem CS ( * TC , & DC , Options ) ; <nl> bool swift : : typeCheckUnresolvedExpr ( DeclContext & DC , <nl> ConstraintWalker cw ( MCG ) ; <nl> Parent - > walk ( cw ) ; <nl> <nl> + if ( TC - > getLangOpts ( ) . DebugConstraintSolver ) { <nl> + auto & log = DC . getASTContext ( ) . TypeCheckerDebug - > getStream ( ) ; <nl> + log < < " mmmInitial constraints for the given expressionmmm \ n " ; <nl> + Parent - > print ( log ) ; <nl> + log < < " \ n " ; <nl> + CS . print ( log ) ; <nl> + } <nl> + <nl> SmallVector < Solution , 3 > solutions ; <nl> if ( CS . solve ( solutions , FreeTypeVariableBinding : : Allow ) ) { <nl> return false ; <nl> } <nl> + <nl> for ( auto & S : solutions ) { <nl> - MCG . collectResolvedType ( S , PossibleTypes ) ; <nl> + bool resolved = MCG . collectResolvedType ( S , PossibleTypes ) ; <nl> + <nl> + if ( TC - > getLangOpts ( ) . DebugConstraintSolver ) { <nl> + auto & log = DC . getASTContext ( ) . TypeCheckerDebug - > getStream ( ) ; <nl> + log < < " mmm Solution mmm \ n " ; <nl> + S . dump ( log ) ; <nl> + if ( resolved ) <nl> + log < < " mmm Resolved target type mmm \ n " < < PossibleTypes . back ( ) < < " \ n " ; <nl> + } <nl> } <nl> return ! PossibleTypes . empty ( ) ; <nl> } <nl> mmm a / lib / Sema / CSSimplify . cpp <nl> ppp b / lib / Sema / CSSimplify . cpp <nl> ConstraintSystem : : matchTypes ( Type type1 , Type type2 , TypeMatchKind kind , <nl> / / we hit commit_to_conversions below , but we have to add a token restriction <nl> / / to ensure we wrap the metatype value in a metatype erasure . <nl> if ( concrete & & type2 - > isExistentialType ( ) ) { <nl> - <nl> - / / If we ' re binding to an empty existential , we need to make sure that the <nl> - / / conversion is valid . <nl> - if ( kind = = TypeMatchKind : : BindType & & <nl> - type2 - > isEmptyExistentialComposition ( ) ) { <nl> - <nl> - conversionsOrFixes . push_back ( ConversionRestrictionKind : : Existential ) ; <nl> - addConstraint ( ConstraintKind : : SelfObjectOfProtocol , <nl> - type1 , type2 , getConstraintLocator ( locator ) ) ; <nl> - <nl> - return SolutionKind : : Solved ; <nl> - } <nl> - <nl> if ( kind = = TypeMatchKind : : ConformsTo ) { <nl> conversionsOrFixes . push_back ( ConversionRestrictionKind : : <nl> MetatypeToExistentialMetatype ) ; <nl> mmm a / lib / Sema / ConstraintSystem . cpp <nl> ppp b / lib / Sema / ConstraintSystem . cpp <nl> void ConstraintSystem : : recordOpenedTypes ( <nl> static unsigned getNumRemovedArgumentLabels ( ASTContext & ctx , ValueDecl * decl , <nl> bool isCurriedInstanceReference , <nl> FunctionRefKind functionRefKind ) { <nl> - / / Is this functionality enabled at all ? <nl> - if ( ! ctx . LangOpts . SuppressArgumentLabelsInTypes ) return 0 ; <nl> - <nl> / / Only applicable to functions . Nothing else should have argument labels in <nl> / / the type . <nl> auto func = dyn_cast < AbstractFunctionDecl > ( decl ) ; <nl> mmm a / lib / Sema / MiscDiagnostics . cpp <nl> ppp b / lib / Sema / MiscDiagnostics . 
cpp <nl> static void diagSyntacticUseRestrictions ( TypeChecker & TC , const Expr * E , <nl> <nl> TC . diagnose ( DRE - > getStartLoc ( ) , diag : : invalid_noescape_use , <nl> DRE - > getDecl ( ) - > getName ( ) , isa < ParamDecl > ( DRE - > getDecl ( ) ) ) ; <nl> - if ( AFT - > isAutoClosure ( ) ) <nl> + <nl> + / / If we ' re a parameter , emit a helpful fixit to add @ escaping <nl> + auto paramDecl = dyn_cast < ParamDecl > ( DRE - > getDecl ( ) ) ; <nl> + auto isAutoClosure = AFT - > isAutoClosure ( ) ; <nl> + if ( paramDecl & & ! isAutoClosure ) { <nl> + TC . diagnose ( paramDecl - > getStartLoc ( ) , diag : : noescape_parameter , <nl> + paramDecl - > getName ( ) ) <nl> + . fixItInsert ( paramDecl - > getTypeLoc ( ) . getLoc ( ) , " @ escaping " ) ; <nl> + } else if ( isAutoClosure ) <nl> + / / TODO : add in a fixit for autoclosure <nl> TC . diagnose ( DRE - > getDecl ( ) - > getLoc ( ) , diag : : noescape_autoclosure , <nl> DRE - > getDecl ( ) - > getName ( ) ) ; <nl> } <nl> mmm a / lib / Sema / TypeCheckAttr . cpp <nl> ppp b / lib / Sema / TypeCheckAttr . cpp <nl> void TypeChecker : : checkNoEscapeAttr ( ParamDecl * PD , NoEscapeAttr * attr ) { <nl> return ; <nl> } <nl> <nl> - / / Just stop if we ' ve already applied this attribute . <nl> - if ( FTy - > isNoEscape ( ) ) <nl> - return ; <nl> - <nl> / / This range can be implicit e . g . if we ' re in the middle of diagnosing <nl> / / @ autoclosure . <nl> auto attrRemovalRange = attr - > getRangeWithAt ( ) ; <nl> void TypeChecker : : checkNoEscapeAttr ( ParamDecl * PD , NoEscapeAttr * attr ) { <nl> . fixItRemove ( attrRemovalRange ) <nl> . fixItInsert ( PD - > getTypeLoc ( ) . getSourceRange ( ) . Start , " @ noescape " ) ; <nl> <nl> + / / Stop if we ' ve already applied this attribute . <nl> + if ( FTy - > isNoEscape ( ) ) <nl> + return ; <nl> + <nl> / / Change the type to include the noescape bit . <nl> PD - > overwriteType ( FunctionType : : get ( FTy - > getInput ( ) , FTy - > getResult ( ) , <nl> FTy - > getExtInfo ( ) . withNoEscape ( true ) ) ) ; <nl> mmm a / lib / Sema / TypeCheckCaptures . cpp <nl> ppp b / lib / Sema / TypeCheckCaptures . cpp <nl> class FindCapturedVars : public ASTWalker { <nl> / / Otherwise , diagnose this as an invalid capture . <nl> bool isDecl = AFR . getAbstractFunctionDecl ( ) ! = nullptr ; <nl> <nl> - TC . diagnose ( Loc , isDecl ? diag : : decl_closure_noescape_use : <nl> - diag : : closure_noescape_use , VD - > getName ( ) ) ; <nl> - <nl> - if ( VD - > getType ( ) - > castTo < AnyFunctionType > ( ) - > isAutoClosure ( ) ) <nl> - TC . diagnose ( VD - > getLoc ( ) , diag : : noescape_autoclosure , <nl> - VD - > getName ( ) ) ; <nl> + TC . diagnose ( Loc , isDecl ? diag : : decl_closure_noescape_use <nl> + : diag : : closure_noescape_use , <nl> + VD - > getName ( ) ) ; <nl> + <nl> + / / If we ' re a parameter , emit a helpful fixit to add @ escaping <nl> + auto paramDecl = dyn_cast < ParamDecl > ( VD ) ; <nl> + bool isAutoClosure = <nl> + VD - > getType ( ) - > castTo < AnyFunctionType > ( ) - > isAutoClosure ( ) ; <nl> + if ( paramDecl & & ! isAutoClosure ) { <nl> + TC . diagnose ( paramDecl - > getStartLoc ( ) , diag : : noescape_parameter , <nl> + paramDecl - > getName ( ) ) <nl> + . fixItInsert ( paramDecl - > getTypeLoc ( ) . getLoc ( ) , " @ escaping " ) ; <nl> + } else if ( isAutoClosure ) { <nl> + / / TODO : add in a fixit for autoclosure <nl> + TC . 
diagnose ( VD - > getLoc ( ) , diag : : noescape_autoclosure , VD - > getName ( ) ) ; <nl> + } <nl> } <nl> } <nl> <nl> mmm a / lib / Sema / TypeCheckConstraints . cpp <nl> ppp b / lib / Sema / TypeCheckConstraints . cpp <nl> namespace { <nl> assert ( ExprStack . back ( ) = = expr ) ; <nl> ExprStack . pop_back ( ) ; <nl> <nl> - / / When we ' re suppressing argument labels in types , mark the direct callee <nl> - / / as being a callee . <nl> - if ( TC . Context . LangOpts . SuppressArgumentLabelsInTypes ) { <nl> - if ( auto call = dyn_cast < CallExpr > ( expr ) ) <nl> - markDirectCallee ( call - > getFn ( ) ) ; <nl> - } <nl> + / / Mark the direct callee as being a callee . <nl> + if ( auto call = dyn_cast < CallExpr > ( expr ) ) <nl> + markDirectCallee ( call - > getFn ( ) ) ; <nl> <nl> / / Fold sequence expressions . <nl> if ( auto seqExpr = dyn_cast < SequenceExpr > ( expr ) ) { <nl> mmm a / lib / Sema / TypeCheckProtocol . cpp <nl> ppp b / lib / Sema / TypeCheckProtocol . cpp <nl> checkWitnessAccessibility ( Accessibility * requiredAccess , <nl> bool * isSetter ) { <nl> * isSetter = false ; <nl> <nl> - / / FIXME : Handle " private ( set ) " requirements . <nl> * requiredAccess = std : : min ( Proto - > getFormalAccess ( ) , * requiredAccess ) ; <nl> - <nl> - if ( * requiredAccess = = Accessibility : : Private ) <nl> - return false ; <nl> + if ( TC . getLangOpts ( ) . EnableSwift3Private ) <nl> + * requiredAccess = std : : max ( * requiredAccess , Accessibility : : FilePrivate ) ; <nl> <nl> Accessibility witnessAccess = witness - > getFormalAccess ( DC ) ; <nl> <nl> checkWitnessAccessibility ( Accessibility * requiredAccess , <nl> * isSetter = true ; <nl> <nl> auto ASD = cast < AbstractStorageDecl > ( witness ) ; <nl> - const DeclContext * accessDC = nullptr ; <nl> - if ( * requiredAccess = = Accessibility : : Internal ) <nl> + const DeclContext * accessDC ; <nl> + switch ( * requiredAccess ) { <nl> + case Accessibility : : Public : <nl> + accessDC = nullptr ; <nl> + break ; <nl> + case Accessibility : : Internal : <nl> accessDC = DC - > getParentModule ( ) ; <nl> + break ; <nl> + case Accessibility : : FilePrivate : <nl> + case Accessibility : : Private : <nl> + accessDC = DC - > getModuleScopeContext ( ) ; <nl> + break ; <nl> + } <nl> + <nl> if ( ! ASD - > isSetterAccessibleFrom ( accessDC ) ) <nl> return true ; <nl> } <nl> mmm a / lib / Sema / TypeCheckType . cpp <nl> ppp b / lib / Sema / TypeCheckType . cpp <nl> Type TypeChecker : : resolveType ( TypeRepr * TyR , DeclContext * DC , <nl> return result ; <nl> } <nl> <nl> + / / / Whether the given DC is a noescape - by - default context , i . e . not a property <nl> + / / / setter <nl> + static bool isDefaultNoEscapeContext ( const DeclContext * DC ) { <nl> + auto funcDecl = dyn_cast < FuncDecl > ( DC ) ; <nl> + return ! funcDecl | | ! funcDecl - > isSetter ( ) ; <nl> + } <nl> + <nl> Type TypeResolver : : resolveType ( TypeRepr * repr , TypeResolutionOptions options ) { <nl> assert ( repr & & " Cannot validate null TypeReprs ! " ) ; <nl> <nl> Type TypeResolver : : resolveType ( TypeRepr * repr , TypeResolutionOptions options ) { <nl> / / error type . <nl> if ( repr - > isInvalid ( ) ) return ErrorType : : get ( TC . Context ) ; <nl> <nl> + / / Remember whether this is a function parameter . <nl> + bool isFunctionParam = <nl> + options . contains ( TR_FunctionInput ) | | <nl> + options . contains ( TR_ImmediateFunctionInput ) ; <nl> + <nl> / / Strip the " is function input " bits unless this is a type that knows about <nl> / / them . 
<nl> if ( ! isa < InOutTypeRepr > ( repr ) & & ! isa < TupleTypeRepr > ( repr ) & & <nl> Type TypeResolver : : resolveType ( TypeRepr * repr , TypeResolutionOptions options ) { <nl> UnsatisfiedDependency ) ; <nl> <nl> case TypeReprKind : : Function : <nl> - if ( ! ( options & TR_SILType ) ) <nl> - return resolveASTFunctionType ( cast < FunctionTypeRepr > ( repr ) , options ) ; <nl> + if ( ! ( options & TR_SILType ) ) { <nl> + / / Default non - escaping for closure parameters <nl> + auto info = AnyFunctionType : : ExtInfo ( ) . withNoEscape ( <nl> + isFunctionParam & & <nl> + isDefaultNoEscapeContext ( DC ) ) ; <nl> + return resolveASTFunctionType ( cast < FunctionTypeRepr > ( repr ) , options , <nl> + info ) ; <nl> + } <nl> return resolveSILFunctionType ( cast < FunctionTypeRepr > ( repr ) , options ) ; <nl> <nl> case TypeReprKind : : Array : <nl> Type TypeResolver : : resolveAttributedType ( TypeAttributes & attrs , <nl> . fixItReplace ( resultRange , " Never " ) ; <nl> } <nl> <nl> + bool defaultNoEscape = false ; <nl> + / / TODO : Get rid of the need for checking autoclosure , by refactoring <nl> + / / special autoclosure knowledge to just as " isEscaping " or similar . <nl> + if ( isFunctionParam & & ! attrs . has ( TAK_autoclosure ) ) { <nl> + / / Closure params default to non - escaping <nl> + if ( attrs . has ( TAK_noescape ) ) { <nl> + / / FIXME : diagnostic to tell user this is redundant and drop it <nl> + } else if ( ! attrs . has ( TAK_escaping ) ) { <nl> + defaultNoEscape = isDefaultNoEscapeContext ( DC ) ; <nl> + } <nl> + } <nl> + <nl> / / Resolve the function type directly with these attributes . <nl> FunctionType : : ExtInfo extInfo ( rep , <nl> attrs . has ( TAK_autoclosure ) , <nl> - attrs . has ( TAK_noescape ) , <nl> - attrs . has ( TAK_escaping ) , <nl> + defaultNoEscape | attrs . has ( TAK_noescape ) , <nl> fnRepr - > throws ( ) ) ; <nl> <nl> ty = resolveASTFunctionType ( fnRepr , options , extInfo ) ; <nl> mmm a / lib / Serialization / Deserialization . cpp <nl> ppp b / lib / Serialization / Deserialization . cpp <nl> Type ModuleFile : : getType ( TypeID TID ) { <nl> TypeID inputID ; <nl> TypeID resultID ; <nl> uint8_t rawRepresentation ; <nl> - bool autoClosure , noescape , explicitlyEscaping , throws ; <nl> + bool autoClosure , noescape , throws ; <nl> <nl> decls_block : : FunctionTypeLayout : : readRecord ( scratch , inputID , resultID , <nl> rawRepresentation , <nl> autoClosure , <nl> noescape , <nl> - explicitlyEscaping , <nl> throws ) ; <nl> auto representation = getActualFunctionTypeRepresentation ( rawRepresentation ) ; <nl> if ( ! representation . hasValue ( ) ) { <nl> Type ModuleFile : : getType ( TypeID TID ) { <nl> <nl> auto Info = FunctionType : : ExtInfo ( * representation , <nl> autoClosure , noescape , <nl> - explicitlyEscaping , throws ) ; <nl> + throws ) ; <nl> <nl> typeOrOffset = FunctionType : : get ( getType ( inputID ) , getType ( resultID ) , <nl> Info ) ; <nl> mmm a / lib / Serialization / Serialization . cpp <nl> ppp b / lib / Serialization / Serialization . cpp <nl> void Serializer : : writeType ( Type ty ) { <nl> getRawStableFunctionTypeRepresentation ( fnTy - > getRepresentation ( ) ) , <nl> fnTy - > isAutoClosure ( ) , <nl> fnTy - > isNoEscape ( ) , <nl> - fnTy - > isExplicitlyEscaping ( ) , <nl> fnTy - > throws ( ) ) ; <nl> break ; <nl> } <nl> mmm a / stdlib / internal / SwiftExperimental / SwiftExperimental . swift <nl> ppp b / stdlib / internal / SwiftExperimental / SwiftExperimental . 
swift <nl> precedencegroup CompositionPrecedence { <nl> / / / <nl> / / / - Returns : a function that applies ` ` g ` ` to the result of applying ` ` f ` ` <nl> / / / to the argument of the new function . <nl> - public func ∘ < T , U , V > ( g : ( U ) - > V , f : ( T ) - > U ) - > ( ( T ) - > V ) { <nl> + public func ∘ < T , U , V > ( g : @ escaping ( U ) - > V , f : @ escaping ( T ) - > U ) - > ( ( T ) - > V ) { <nl> return { g ( f ( $ 0 ) ) } <nl> } <nl> <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckCollectionInstance . swift . gyb <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckCollectionInstance . swift . gyb <nl> public func checkSliceableWithBidirectionalIndex < <nl> <nl> <nl> public func checkRangeReplaceable < C , N > ( <nl> - _ makeCollection : ( ) - > C , <nl> + _ makeCollection : @ escaping ( ) - > C , <nl> _ makeNewValues : ( Int ) - > N <nl> ) where <nl> C : RangeReplaceableCollection , <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckCollectionType . swift . gyb <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckCollectionType . swift . gyb <nl> internal func _product < C1 : Collection , C2 : Collection > ( <nl> <nl> testParams = ' ' ' <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( <nl> + makeCollectionOfEquatable : @ escaping ( <nl> [ CollectionWithEquatableElement . Iterator . Element ] <nl> ) - > CollectionWithEquatableElement , <nl> <nl> - wrapValueIntoEquatable : ( <nl> + wrapValueIntoEquatable : @ escaping ( <nl> MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> <nl> extractValueFromEquatable : <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckMutableCollectionType . swift . gyb <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckMutableCollectionType . swift . gyb <nl> public let partitionExhaustiveTests = [ <nl> PartitionExhaustiveTest ( [ 10 , 20 , 30 , 40 , 50 , 60 ] ) , <nl> ] <nl> <nl> - public func withInvalidOrderings ( _ body : ( ( Int , Int ) - > Bool ) - > Void ) { <nl> + public func withInvalidOrderings ( _ body : ( @ escaping ( Int , Int ) - > Bool ) - > Void ) { <nl> / / Test some ordering predicates that don ' t create strict weak orderings <nl> body { ( _ , _ ) in true } <nl> body { ( _ , _ ) in false } <nl> extension TestSuite { <nl> CollectionWithComparableElement : MutableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . 
Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> - makeCollectionOfComparable : ( [ CollectionWithComparableElement . Iterator . Element ] ) - > CollectionWithComparableElement , <nl> - wrapValueIntoComparable : ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> + makeCollectionOfComparable : @ escaping ( [ CollectionWithComparableElement . Iterator . Element ] ) - > CollectionWithComparableElement , <nl> + wrapValueIntoComparable : @ escaping ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> extractValueFromComparable : ( ( CollectionWithComparableElement . Iterator . Element ) - > MinimalComparableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> self . test ( " \ ( testNamePrefix ) . sorted / $ { ' Predicate ' if predicate else ' WhereElemen <nl> } <nl> <nl> self . test ( " \ ( testNamePrefix ) . sorted / $ { ' Predicate ' if predicate else ' WhereElementIsComparable ' } / InvalidOrderings " ) { <nl> - withInvalidOrderings { ( comparisonPredicate ) in <nl> + withInvalidOrderings { ( comparisonPredicate : @ escaping ( Int , Int ) - > Bool ) in <nl> for i in 0 . . < 7 { <nl> forAllPermutations ( i ) { ( sequence ) in <nl> checkSort_ $ { ' Predicate ' if predicate else ' WhereElementIsComparable ' } ( <nl> self . test ( " \ ( testNamePrefix ) . partition / InvalidOrderings " ) { <nl> CollectionWithComparableElement : BidirectionalCollection & MutableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> - makeCollectionOfComparable : ( [ CollectionWithComparableElement . Iterator . Element ] ) - > CollectionWithComparableElement , <nl> - wrapValueIntoComparable : ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> + makeCollectionOfComparable : @ escaping ( [ CollectionWithComparableElement . Iterator . 
Element ] ) - > CollectionWithComparableElement , <nl> + wrapValueIntoComparable : @ escaping ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> extractValueFromComparable : ( ( CollectionWithComparableElement . Iterator . Element ) - > MinimalComparableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> self . test ( " \ ( testNamePrefix ) . partition / DispatchesThrough_withUnsafeMutableBuffer <nl> CollectionWithComparableElement : RandomAccessCollection & MutableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> - makeCollectionOfComparable : ( [ CollectionWithComparableElement . Iterator . Element ] ) - > CollectionWithComparableElement , <nl> - wrapValueIntoComparable : ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> + makeCollectionOfComparable : @ escaping ( [ CollectionWithComparableElement . Iterator . Element ] ) - > CollectionWithComparableElement , <nl> + wrapValueIntoComparable : @ escaping ( MinimalComparableValue ) - > CollectionWithComparableElement . Iterator . Element , <nl> extractValueFromComparable : ( ( CollectionWithComparableElement . Iterator . Element ) - > MinimalComparableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> self . test ( " \ ( testNamePrefix ) . sort / $ { ' Predicate ' if predicate else ' WhereElementI <nl> } <nl> <nl> self . test ( " \ ( testNamePrefix ) . sort / $ { ' Predicate ' if predicate else ' WhereElementIsEquatable ' } / InvalidOrderings " ) { <nl> - withInvalidOrderings { ( comparisonPredicate ) in <nl> + withInvalidOrderings { ( comparisonPredicate : @ escaping ( Int , Int ) - > Bool ) in <nl> for i in 0 . . < 7 { <nl> forAllPermutations ( i ) { ( sequence ) in <nl> checkSortInPlace_ $ { ' Predicate ' if predicate else ' WhereElementIsComparable ' } ( <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckRangeReplaceableCollectionType . swift <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckRangeReplaceableCollectionType . swift <nl> extension TestSuite { <nl> CollectionWithEquatableElement : RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . 
Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> self . test ( " \ ( testNamePrefix ) . OperatorPlus " ) { <nl> CollectionWithEquatableElement : BidirectionalCollection & RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> self . test ( " \ ( testNamePrefix ) . removeLast ( n : Int ) / whereIndexIsBidirectional / remove <nl> CollectionWithEquatableElement : RandomAccessCollection & RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . 
Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckRangeReplaceableSliceType . swift <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckRangeReplaceableSliceType . swift <nl> extension TestSuite { <nl> CollectionWithEquatableElement : RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> extension TestSuite { <nl> CollectionWithEquatableElement : BidirectionalCollection & RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> extension TestSuite { <nl> CollectionWithEquatableElement : RandomAccessCollection & RangeReplaceableCollection <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeCollection : ( [ C . Iterator . Element ] ) - > C , <nl> - wrapValue : ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> - extractValue : ( C . Iterator . 
Element ) - > OpaqueValue < Int > , <nl> + makeCollection : @ escaping ( [ C . Iterator . Element ] ) - > C , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > C . Iterator . Element , <nl> + extractValue : @ escaping ( C . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeCollectionOfEquatable : ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> + makeCollectionOfEquatable : @ escaping ( [ CollectionWithEquatableElement . Iterator . Element ] ) - > CollectionWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > CollectionWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( CollectionWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all , <nl> mmm a / stdlib / private / StdlibCollectionUnittest / CheckSequenceType . swift <nl> ppp b / stdlib / private / StdlibCollectionUnittest / CheckSequenceType . swift <nl> public struct EnumerateTest { <nl> public struct FilterTest { <nl> public let expected : [ Int ] <nl> public let sequence : [ Int ] <nl> - public let includeElement : ( Int ) - > Bool <nl> + public let includeElement : @ escaping ( Int ) - > Bool <nl> public let loc : SourceLoc <nl> <nl> public init ( <nl> _ expected : [ Int ] , <nl> _ sequence : [ Int ] , <nl> - _ includeElement : ( Int ) - > Bool , <nl> + _ includeElement : @ escaping ( Int ) - > Bool , <nl> file : String = # file , line : UInt = # line <nl> ) { <nl> self . expected = expected <nl> public struct FindTest { <nl> public struct FlatMapTest { <nl> public let expected : [ Int32 ] <nl> public let sequence : [ Int ] <nl> - public let transform : ( Int ) - > [ Int32 ] <nl> + public let transform : @ escaping ( Int ) - > [ Int32 ] <nl> public let loc : SourceLoc <nl> <nl> public init ( <nl> expected : [ Int32 ] , <nl> sequence : [ Int ] , <nl> - transform : ( Int ) - > [ Int32 ] , <nl> + transform : @ escaping ( Int ) - > [ Int32 ] , <nl> file : String = # file , line : UInt = # line <nl> ) { <nl> self . expected = expected <nl> public struct FlatMapTest { <nl> public struct FlatMapToOptionalTest { <nl> public let expected : [ Int32 ] <nl> public let sequence : [ Int ] <nl> - public let transform : ( Int ) - > Int32 ? <nl> + public let transform : @ escaping ( Int ) - > Int32 ? <nl> public let loc : SourceLoc <nl> <nl> public init ( <nl> _ expected : [ Int32 ] , <nl> _ sequence : [ Int ] , <nl> - _ transform : ( Int ) - > Int32 ? , <nl> + _ transform : @ escaping ( Int ) - > Int32 ? , <nl> file : String = # file , line : UInt = # line <nl> ) { <nl> self . expected = expected <nl> public struct LexicographicallyPrecedesTest { <nl> public struct MapTest { <nl> public let expected : [ Int32 ] <nl> public let sequence : [ Int ] <nl> - public let transform : ( Int ) - > Int32 <nl> + public let transform : @ escaping ( Int ) - > Int32 <nl> public let loc : SourceLoc <nl> <nl> public init ( <nl> _ expected : [ Int32 ] , <nl> _ sequence : [ Int ] , <nl> - _ transform : ( Int ) - > Int32 , <nl> + _ transform : @ escaping ( Int ) - > Int32 , <nl> file : String = # file , line : UInt = # line <nl> ) { <nl> self . expected = expected <nl> extension TestSuite { <nl> SequenceWithEquatableElement : Sequence <nl> > ( <nl> _ testNamePrefix : String = " " , <nl> - makeSequence : ( [ S . Iterator . 
Element ] ) - > S , <nl> - wrapValue : ( OpaqueValue < Int > ) - > S . Iterator . Element , <nl> - extractValue : ( S . Iterator . Element ) - > OpaqueValue < Int > , <nl> + makeSequence : @ escaping ( [ S . Iterator . Element ] ) - > S , <nl> + wrapValue : @ escaping ( OpaqueValue < Int > ) - > S . Iterator . Element , <nl> + extractValue : @ escaping ( S . Iterator . Element ) - > OpaqueValue < Int > , <nl> <nl> - makeSequenceOfEquatable : ( [ SequenceWithEquatableElement . Iterator . Element ] ) - > SequenceWithEquatableElement , <nl> - wrapValueIntoEquatable : ( MinimalEquatableValue ) - > SequenceWithEquatableElement . Iterator . Element , <nl> + makeSequenceOfEquatable : @ escaping ( [ SequenceWithEquatableElement . Iterator . Element ] ) - > SequenceWithEquatableElement , <nl> + wrapValueIntoEquatable : @ escaping ( MinimalEquatableValue ) - > SequenceWithEquatableElement . Iterator . Element , <nl> extractValueFromEquatable : ( ( SequenceWithEquatableElement . Iterator . Element ) - > MinimalEquatableValue ) , <nl> <nl> resiliencyChecks : CollectionMisuseResiliencyChecks = . all <nl> mmm a / stdlib / private / StdlibUnittest / RaceTest . swift <nl> ppp b / stdlib / private / StdlibUnittest / RaceTest . swift <nl> internal struct ClosureBasedRaceTest : RaceTestWithPerTrialData { <nl> } <nl> <nl> public func runRaceTest ( <nl> - trials : Int , threads : Int ? = nil , invoking body : ( ) - > ( ) <nl> + trials : Int , threads : Int ? = nil , invoking body : @ escaping ( ) - > ( ) <nl> ) { <nl> ClosureBasedRaceTest . thread = body <nl> runRaceTest ( ClosureBasedRaceTest . self , trials : trials , threads : threads ) <nl> mmm a / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> ppp b / stdlib / private / StdlibUnittest / StdlibUnittest . swift . gyb <nl> func _defaultTestSuiteFailedCallback ( ) { <nl> <nl> var _testSuiteFailedCallback : ( ) - > Void = _defaultTestSuiteFailedCallback <nl> <nl> - public func _setTestSuiteFailedCallback ( _ callback : ( ) - > Void ) { <nl> + public func _setTestSuiteFailedCallback ( _ callback : @ escaping ( ) - > Void ) { <nl> _testSuiteFailedCallback = callback <nl> } <nl> <nl> func _defaultTrappingExpectationFailedCallback ( ) { <nl> var _trappingExpectationFailedCallback : ( ) - > Void <nl> = _defaultTrappingExpectationFailedCallback <nl> <nl> - public func _setTrappingExpectationFailedCallback ( callback : ( ) - > Void ) { <nl> + public func _setTrappingExpectationFailedCallback ( callback : @ escaping ( ) - > Void ) { <nl> _trappingExpectationFailedCallback = callback <nl> } <nl> <nl> public final class TestSuite { <nl> public func test ( <nl> _ name : String , <nl> file : String = # file , line : UInt = # line , <nl> - _ testFunction : ( ) - > Void <nl> + _ testFunction : @ escaping ( ) - > Void <nl> ) { <nl> _TestBuilder ( testSuite : self , name : name , loc : SourceLoc ( file , line ) ) <nl> . 
code ( testFunction ) <nl> public final class TestSuite { <nl> return _TestBuilder ( testSuite : self , name : name , loc : SourceLoc ( file , line ) ) <nl> } <nl> <nl> - public func setUp ( _ code : ( ) - > Void ) { <nl> + public func setUp ( _ code : @ escaping ( ) - > Void ) { <nl> _precondition ( _testSetUpCode = = nil , " set - up code already set " ) <nl> _testSetUpCode = code <nl> } <nl> <nl> - public func tearDown ( _ code : ( ) - > Void ) { <nl> + public func tearDown ( _ code : @ escaping ( ) - > Void ) { <nl> _precondition ( _testTearDownCode = = nil , " tear - down code already set " ) <nl> _testTearDownCode = code <nl> } <nl> public final class TestSuite { <nl> _testSuite . _testNameToIndex [ _name ] = _testSuite . _tests . count - 1 <nl> } <nl> <nl> - public func code ( _ testFunction : ( ) - > Void ) { <nl> + public func code ( _ testFunction : @ escaping ( ) - > Void ) { <nl> _build ( . single ( code : testFunction ) ) <nl> } <nl> <nl> public func forEach < Data > ( <nl> in parameterSets : [ Data ] , <nl> - testFunction : ( Data ) - > Void <nl> + testFunction : @ escaping ( Data ) - > Void <nl> ) { <nl> _build ( . parameterized ( <nl> code : { ( i : Int ) in testFunction ( parameterSets [ i ] ) } , <nl> public func expectEqualsUnordered < <nl> Actual : Sequence <nl> > ( <nl> _ expected : Expected , _ actual : Actual , $ { TRACE } , <nl> - compare : ( Expected . Iterator . Element , Expected . Iterator . Element ) <nl> + compare : @ escaping ( Expected . Iterator . Element , Expected . Iterator . Element ) <nl> - > ExpectedComparisonResult <nl> ) where <nl> Expected . Iterator . Element = = Actual . Iterator . Element { <nl> public func expectEqualUnicodeScalars ( <nl> } <nl> } <nl> <nl> - func compose < A , B , C > ( _ f : ( A ) - > B , _ g : ( B ) - > C ) - > ( A ) - > C { <nl> + func compose < A , B , C > ( _ f : @ escaping ( A ) - > B , _ g : @ escaping ( B ) - > C ) - > ( A ) - > C { <nl> return { a in <nl> return g ( f ( a ) ) <nl> } <nl> mmm a / stdlib / private / StdlibUnittest / StringConvertible . swift . gyb <nl> ppp b / stdlib / private / StdlibUnittest / StringConvertible . swift . gyb <nl> public struct CustomPrintableValue <nl> public static var timesDebugDescriptionWasCalled = ResettableValue ( 0 ) <nl> <nl> public static var descriptionImpl = <nl> - ResettableValue < ( value : Int , identity : Int ) - > String > ( { <nl> + ResettableValue < ( _ value : Int , _ identity : Int ) - > String > ( { <nl> ( value : Int , identity : Int ) - > String in <nl> if identity = = 0 { <nl> return " ( value : \ ( value ) ) . description " <nl> public struct CustomPrintableValue <nl> } ) <nl> <nl> public static var debugDescriptionImpl = <nl> - ResettableValue < ( value : Int , identity : Int ) - > String > ( { <nl> + ResettableValue < ( _ value : Int , _ identity : Int ) - > String > ( { <nl> ( value : Int , identity : Int ) - > String in <nl> CustomPrintableValue . timesDescriptionWasCalled . value + = 1 <nl> if identity = = 0 { <nl> extension CustomPrintableValue : CustomStringConvertible { <nl> public var description : String { <nl> CustomPrintableValue . timesDescriptionWasCalled . value + = 1 <nl> return CustomPrintableValue . descriptionImpl . value ( <nl> - value : value , identity : identity ) <nl> + value , identity ) <nl> } <nl> } <nl> <nl> extension CustomPrintableValue : CustomDebugStringConvertible { <nl> public var debugDescription : String { <nl> CustomPrintableValue . timesDebugDescriptionWasCalled . value + = 1 <nl> return CustomPrintableValue . 
debugDescriptionImpl . value ( <nl> - value : value , identity : identity ) <nl> + value , identity ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / private / SwiftPrivateLibcExtras / Subprocess . swift <nl> ppp b / stdlib / private / SwiftPrivateLibcExtras / Subprocess . swift <nl> public func spawnChild ( _ args : [ String ] ) <nl> <nl> / / If execve ( ) encountered an error , we write the errno encountered to the <nl> / / parent write pipe . <nl> - let errnoSize = sizeof ( errno . dynamicType ) <nl> + let errnoSize = MemoryLayout . _ofInstance ( errno ) . size <nl> var execveErrno = errno <nl> let writtenBytes = withUnsafePointer ( to : & execveErrno ) { <nl> write ( childToParentPipe . writeFD , UnsafePointer ( $ 0 ) , errnoSize ) <nl> mmm a / stdlib / private / SwiftPrivateLibcExtras / SwiftPrivateLibcExtras . swift <nl> ppp b / stdlib / private / SwiftPrivateLibcExtras / SwiftPrivateLibcExtras . swift <nl> public var _stdlib_FD_SETSIZE : CInt { <nl> public struct _stdlib_fd_set { <nl> var _data : [ UInt ] <nl> static var _wordBits : Int { <nl> - return sizeof ( UInt . self ) * 8 <nl> + return MemoryLayout < UInt > . size * 8 <nl> } <nl> <nl> public init ( ) { <nl> mmm a / stdlib / private / SwiftPrivatePthreadExtras / SwiftPrivatePthreadExtras . swift <nl> ppp b / stdlib / private / SwiftPrivatePthreadExtras / SwiftPrivatePthreadExtras . swift <nl> internal class PthreadBlockContextImpl < Argument , Result > : PthreadBlockContext { <nl> let block : ( Argument ) - > Result <nl> let arg : Argument <nl> <nl> - init ( block : ( Argument ) - > Result , arg : Argument ) { <nl> + init ( block : @ escaping ( Argument ) - > Result , arg : Argument ) { <nl> self . block = block <nl> self . arg = arg <nl> super . init ( ) <nl> internal func invokeBlockContext ( <nl> / / / Block - based wrapper for ` pthread_create ` . <nl> public func _stdlib_pthread_create_block < Argument , Result > ( <nl> _ attr : UnsafePointer < pthread_attr_t > ? , <nl> - _ start_routine : ( Argument ) - > Result , <nl> + _ start_routine : @ escaping ( Argument ) - > Result , <nl> _ arg : Argument <nl> ) - > ( CInt , pthread_t ? ) { <nl> let context = PthreadBlockContextImpl ( block : start_routine , arg : arg ) <nl> mmm a / stdlib / private / SwiftReflectionTest / SwiftReflectionTest . swift <nl> ppp b / stdlib / private / SwiftReflectionTest / SwiftReflectionTest . swift <nl> internal func sendAddress ( of instance : AnyObject ) { <nl> debugLog ( " BEGIN \ ( # function ) " ) <nl> defer { debugLog ( " END \ ( # function ) " ) } <nl> var address = Unmanaged . passUnretained ( instance ) . toOpaque ( ) <nl> - sendBytes ( from : & address , count : sizeof ( UInt . self ) ) <nl> + sendBytes ( from : & address , count : MemoryLayout < UInt > . size ) <nl> } <nl> <nl> / / / Send the ` value ` ' s bits to the parent . <nl> internal func sendValue < T > ( _ value : T ) { <nl> debugLog ( " BEGIN \ ( # function ) " ) ; defer { debugLog ( " END \ ( # function ) " ) } <nl> var value = value <nl> - sendBytes ( from : & value , count : sizeof ( T . self ) ) <nl> + sendBytes ( from : & value , count : MemoryLayout < T > . size ) <nl> } <nl> <nl> / / / Read a word - sized unsigned integer from the parent . <nl> internal func readUInt ( ) - > UInt { <nl> debugLog ( " BEGIN \ ( # function ) " ) ; defer { debugLog ( " END \ ( # function ) " ) } <nl> var value : UInt = 0 <nl> - fread ( & value , sizeof ( UInt . self ) , 1 , stdin ) <nl> + fread ( & value , MemoryLayout < UInt > . 
size , 1 , stdin ) <nl> return value <nl> } <nl> <nl> internal func sendReflectionInfos ( ) { <nl> var numInfos = infos . count <nl> debugLog ( " \ ( numInfos ) reflection info bundles . " ) <nl> precondition ( numInfos > = 1 ) <nl> - sendBytes ( from : & numInfos , count : sizeof ( UInt . self ) ) <nl> + sendBytes ( from : & numInfos , count : MemoryLayout < UInt > . size ) <nl> for info in infos { <nl> debugLog ( " Sending info for \ ( info . imageName ) " ) <nl> for section in info { <nl> internal func sendStringLength ( ) { <nl> / / / Send the size of this architecture ' s pointer type . <nl> internal func sendPointerSize ( ) { <nl> debugLog ( " BEGIN \ ( # function ) " ) ; defer { debugLog ( " END \ ( # function ) " ) } <nl> - let pointerSize = UInt8 ( sizeof ( UnsafeRawPointer . self ) ) <nl> + let pointerSize = UInt8 ( MemoryLayout < UnsafeRawPointer > . size ) <nl> sendValue ( pointerSize ) <nl> } <nl> <nl> public func reflect ( object : AnyObject ) { <nl> / / / an Any existential . <nl> public func reflect < T > ( any : T ) { <nl> let any : Any = any <nl> - let anyPointer = UnsafeMutablePointer < Any > . allocate ( capacity : sizeof ( Any . self ) ) <nl> + let anyPointer = UnsafeMutablePointer < Any > . allocate ( capacity : MemoryLayout < Any > . size ) <nl> anyPointer . initialize ( to : any ) <nl> let anyPointerValue = unsafeBitCast ( anyPointer , to : UInt . self ) <nl> reflect ( instanceAddress : anyPointerValue , kind : . Existential ) <nl> - anyPointer . deallocate ( capacity : sizeof ( Any . self ) ) <nl> + anyPointer . deallocate ( capacity : MemoryLayout < Any > . size ) <nl> } <nl> <nl> / / Reflect an ` Error ` , a . k . a . an " error existential " . <nl> struct ThickFunctionParts { <nl> <nl> / / / Reflect a closure context . The given function must be a Swift - native <nl> / / / @ convention ( thick ) function value . <nl> - public func reflect ( function : ( ) - > ( ) ) { <nl> + public func reflect ( function : @ escaping ( ) - > ( ) ) { <nl> let fn = UnsafeMutablePointer < ThickFunction0 > . allocate ( <nl> - capacity : sizeof ( ThickFunction0 . self ) ) <nl> + capacity : MemoryLayout < ThickFunction0 > . size ) <nl> fn . initialize ( to : ThickFunction0 ( function : function ) ) <nl> <nl> let parts = unsafeBitCast ( fn , to : UnsafePointer < ThickFunctionParts > . self ) <nl> public func reflect ( function : ( ) - > ( ) ) { <nl> <nl> reflect ( instanceAddress : contextPointer , kind : . Object ) <nl> <nl> - fn . deallocate ( capacity : sizeof ( ThickFunction0 . self ) ) <nl> + fn . deallocate ( capacity : MemoryLayout < ThickFunction0 > . size ) <nl> } <nl> <nl> / / / Reflect a closure context . The given function must be a Swift - native <nl> / / / @ convention ( thick ) function value . <nl> - public func reflect ( function : ( Int ) - > ( ) ) { <nl> + public func reflect ( function : @ escaping ( Int ) - > ( ) ) { <nl> let fn = <nl> UnsafeMutablePointer < ThickFunction1 > . allocate ( <nl> - capacity : sizeof ( ThickFunction1 . self ) ) <nl> + capacity : MemoryLayout < ThickFunction1 > . size ) <nl> fn . initialize ( to : ThickFunction1 ( function : function ) ) <nl> <nl> let parts = unsafeBitCast ( fn , to : UnsafePointer < ThickFunctionParts > . self ) <nl> public func reflect ( function : ( Int ) - > ( ) ) { <nl> <nl> reflect ( instanceAddress : contextPointer , kind : . Object ) <nl> <nl> - fn . deallocate ( capacity : sizeof ( ThickFunction1 . self ) ) <nl> + fn . deallocate ( capacity : MemoryLayout < ThickFunction1 > . 
size ) <nl> } <nl> <nl> / / / Reflect a closure context . The given function must be a Swift - native <nl> / / / @ convention ( thick ) function value . <nl> - public func reflect ( function : ( Int , String ) - > ( ) ) { <nl> + public func reflect ( function : @ escaping ( Int , String ) - > ( ) ) { <nl> let fn = UnsafeMutablePointer < ThickFunction2 > . allocate ( <nl> - capacity : sizeof ( ThickFunction2 . self ) ) <nl> + capacity : MemoryLayout < ThickFunction2 > . size ) <nl> fn . initialize ( to : ThickFunction2 ( function : function ) ) <nl> <nl> let parts = unsafeBitCast ( fn , to : UnsafePointer < ThickFunctionParts > . self ) <nl> public func reflect ( function : ( Int , String ) - > ( ) ) { <nl> <nl> reflect ( instanceAddress : contextPointer , kind : . Object ) <nl> <nl> - fn . deallocate ( capacity : sizeof ( ThickFunction2 . self ) ) <nl> + fn . deallocate ( capacity : MemoryLayout < ThickFunction2 > . size ) <nl> } <nl> <nl> / / / Reflect a closure context . The given function must be a Swift - native <nl> / / / @ convention ( thick ) function value . <nl> - public func reflect ( function : ( Int , String , AnyObject ? ) - > ( ) ) { <nl> + public func reflect ( function : @ escaping ( Int , String , AnyObject ? ) - > ( ) ) { <nl> let fn = UnsafeMutablePointer < ThickFunction3 > . allocate ( <nl> - capacity : sizeof ( ThickFunction3 . self ) ) <nl> + capacity : MemoryLayout < ThickFunction3 > . size ) <nl> fn . initialize ( to : ThickFunction3 ( function : function ) ) <nl> <nl> let parts = unsafeBitCast ( fn , to : UnsafePointer < ThickFunctionParts > . self ) <nl> public func reflect ( function : ( Int , String , AnyObject ? ) - > ( ) ) { <nl> <nl> reflect ( instanceAddress : contextPointer , kind : . Object ) <nl> <nl> - fn . deallocate ( capacity : sizeof ( ThickFunction3 . self ) ) <nl> + fn . deallocate ( capacity : MemoryLayout < ThickFunction3 > . size ) <nl> } <nl> <nl> / / / Call this function to indicate to the parent that there are <nl> mmm a / stdlib / public / SDK / CMakeLists . txt <nl> ppp b / stdlib / public / SDK / CMakeLists . txt <nl> add_subdirectory ( CoreGraphics ) <nl> add_subdirectory ( CoreImage ) <nl> add_subdirectory ( CoreLocation ) <nl> add_subdirectory ( CoreMedia ) <nl> + add_subdirectory ( CryptoTokenKit ) <nl> add_subdirectory ( Dispatch ) <nl> add_subdirectory ( Foundation ) <nl> add_subdirectory ( GameplayKit ) <nl> mmm a / stdlib / public / SDK / CoreAudio / CoreAudio . swift <nl> ppp b / stdlib / public / SDK / CoreAudio / CoreAudio . swift <nl> extension UnsafeBufferPointer { <nl> / / / Initialize an ` UnsafeBufferPointer < Element > ` from an ` AudioBuffer ` . <nl> / / / Binds the the buffer ' s memory type to ` Element ` . <nl> public init ( _ audioBuffer : AudioBuffer ) { <nl> - let count = Int ( audioBuffer . mDataByteSize ) / strideof ( Element . self ) <nl> + let count = Int ( audioBuffer . mDataByteSize ) / MemoryLayout < Element > . stride <nl> let elementPtr = audioBuffer . mData ? . bindMemory ( <nl> to : Element . self , capacity : count ) <nl> self . init ( start : elementPtr , count : count ) <nl> extension UnsafeMutableBufferPointer { <nl> / / / Initialize an ` UnsafeMutableBufferPointer < Element > ` from an <nl> / / / ` AudioBuffer ` . <nl> public init ( _ audioBuffer : AudioBuffer ) { <nl> - let count = Int ( audioBuffer . mDataByteSize ) / strideof ( Element . self ) <nl> + let count = Int ( audioBuffer . mDataByteSize ) / MemoryLayout < Element > . stride <nl> let elementPtr = audioBuffer . mData ? . 
bindMemory ( <nl> to : Element . self , capacity : count ) <nl> self . init ( start : elementPtr , count : count ) <nl> extension AudioBuffer { <nl> ) { <nl> self . mNumberChannels = UInt32 ( numberOfChannels ) <nl> self . mData = UnsafeMutableRawPointer ( typedBuffer . baseAddress ) <nl> - self . mDataByteSize = UInt32 ( typedBuffer . count * strideof ( Element . self ) ) <nl> + self . mDataByteSize = UInt32 ( typedBuffer . count * MemoryLayout < Element > . stride ) <nl> } <nl> } <nl> <nl> extension AudioBufferList { <nl> public static func sizeInBytes ( maximumBuffers : Int ) - > Int { <nl> _precondition ( maximumBuffers > = 1 , <nl> " AudioBufferList should contain at least one AudioBuffer " ) <nl> - return sizeof ( AudioBufferList . self ) + <nl> - ( maximumBuffers - 1 ) * strideof ( AudioBuffer . self ) <nl> + return MemoryLayout < AudioBufferList > . size + <nl> + ( maximumBuffers - 1 ) * MemoryLayout < AudioBuffer > . stride <nl> } <nl> <nl> / / / Allocate an ` AudioBufferList ` with a capacity for the specified number of <nl> extension AudioBufferList { <nl> " failed to allocate memory for an AudioBufferList " ) <nl> <nl> let listPtr = ablMemory ! . bindMemory ( to : AudioBufferList . self , capacity : 1 ) <nl> - ( ablMemory ! + strideof ( AudioBufferList . self ) ) . bindMemory ( <nl> + ( ablMemory ! + MemoryLayout < AudioBufferList > . stride ) . bindMemory ( <nl> to : AudioBuffer . self , capacity : maximumBuffers ) <nl> let abl = UnsafeMutableAudioBufferListPointer ( listPtr ) <nl> abl . count = maximumBuffers <nl> new file mode 100644 <nl> index 000000000000 . . c02ce91f6b54 <nl> mmm / dev / null <nl> ppp b / stdlib / public / SDK / CryptoTokenKit / CMakeLists . txt <nl> <nl> + add_swift_library ( swiftCryptoTokenKit $ { SWIFT_SDK_OVERLAY_LIBRARY_BUILD_TYPES } IS_SDK_OVERLAY <nl> + CryptoTokenKit . swift <nl> + <nl> + TARGET_SDKS OSX <nl> + SWIFT_MODULE_DEPENDS Foundation <nl> + FRAMEWORK_DEPENDS CryptoTokenKit ) <nl> new file mode 100644 <nl> index 000000000000 . . 34e2f425af17 <nl> mmm / dev / null <nl> ppp b / stdlib / public / SDK / CryptoTokenKit / CryptoTokenKit . swift <nl> <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2016 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See http : / / swift . org / LICENSE . txt for license information <nl> + / / See http : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + @ _exported import CryptoTokenKit <nl> + <nl> + import Foundation <nl> + <nl> + @ available ( OSX 10 . 10 , * ) <nl> + extension TKSmartCard { <nl> + public func send ( ins : UInt8 , p1 : UInt8 , p2 : UInt8 , data : Data ? = nil , <nl> + le : Int ? = nil , reply : @ escaping ( Data ? , UInt16 , Error ? ) - > Void ) { <nl> + <nl> + self . __sendIns ( ins , p1 : p1 , p2 : p2 , data : data , <nl> + le : le . map { NSNumber ( value : $ 0 ) } , reply : reply ) <nl> + } <nl> + <nl> + @ available ( OSX 10 . 12 , * ) <nl> + public func send ( ins : UInt8 , p1 : UInt8 , p2 : UInt8 , data : Data ? = nil , <nl> + le : Int ? = nil ) throws - > ( sw : UInt16 , response : Data ) { <nl> + <nl> + var sw : UInt16 = 0 <nl> + let response = try self . 
__sendIns ( ins , p1 : p1 , p2 : p2 , data : data , <nl> + le : le . map { NSNumber ( value : $ 0 ) } , sw : & sw ) <nl> + return ( sw : sw , response : response ) <nl> + } <nl> + <nl> + @ available ( OSX 10 . 12 , * ) <nl> + public func withSession < T > ( _ body : @ escaping ( ) throws - > T ) throws - > T { <nl> + var result : T ? = nil <nl> + try self . __inSession ( executeBlock : { <nl> + ( errorPointer : NSErrorPointer ) - > Bool in <nl> + do { <nl> + result = try body ( ) <nl> + return true <nl> + } catch let error as NSError { <nl> + errorPointer ? . pointee = error <nl> + return false <nl> + } <nl> + } ) <nl> + <nl> + / / it is safe to force unwrap the result here , as the self . __inSession <nl> + / / function rethrows the errors which happened inside the block <nl> + return result ! <nl> + } <nl> + } <nl> mmm a / stdlib / public / SDK / Dispatch / Block . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Block . swift <nl> public struct DispatchWorkItemFlags : OptionSet , RawRepresentable { <nl> public class DispatchWorkItem { <nl> internal var _block : _DispatchBlock <nl> <nl> - public init ( qos : DispatchQoS = . unspecified , flags : DispatchWorkItemFlags = [ ] , block : @ convention ( block ) ( ) - > ( ) ) { <nl> + public init ( qos : DispatchQoS = . unspecified , flags : DispatchWorkItemFlags = [ ] , block : @ escaping @ convention ( block ) ( ) - > ( ) ) { <nl> _block = _swift_dispatch_block_create_with_qos_class ( flags . rawValue , <nl> qos . qosClass . rawValue . rawValue , Int32 ( qos . relativePriority ) , block ) <nl> } <nl> public class DispatchWorkItem { <nl> qos : DispatchQoS = . unspecified , <nl> flags : DispatchWorkItemFlags = [ ] , <nl> queue : DispatchQueue , <nl> - execute : @ convention ( block ) ( ) - > Void ) <nl> + execute : @ escaping @ convention ( block ) ( ) - > Void ) <nl> { <nl> if qos ! = . unspecified | | ! flags . isEmpty { <nl> let item = DispatchWorkItem ( qos : qos , flags : flags , block : execute ) <nl> mmm a / stdlib / public / SDK / Dispatch / Data . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Data . swift <nl> public struct DispatchData : RandomAccessCollection , _ObjectiveCBridgeable { <nl> var size = 0 <nl> let data = __dispatch_data_create_map ( __wrapped , & ptr , & size ) <nl> let contentPtr = ptr ! . bindMemory ( <nl> - to : ContentType . self , capacity : size / strideof ( ContentType . self ) ) <nl> + to : ContentType . self , capacity : size / MemoryLayout < ContentType > . stride ) <nl> defer { _fixLifetime ( data ) } <nl> return try body ( contentPtr ) <nl> } <nl> <nl> public func enumerateBytes ( <nl> - block : @ noescape ( buffer : UnsafeBufferPointer < UInt8 > , byteIndex : Int , stop : inout Bool ) - > Void ) <nl> + block : @ noescape ( _ buffer : UnsafeBufferPointer < UInt8 > , _ byteIndex : Int , _ stop : inout Bool ) - > Void ) <nl> { <nl> - _swift_dispatch_data_apply ( __wrapped ) { ( data : __DispatchData , offset : Int , ptr : UnsafeRawPointer , size : Int ) in <nl> + _swift_dispatch_data_apply ( __wrapped ) { ( _ , offset : Int , ptr : UnsafeRawPointer , size : Int ) in <nl> let bytePtr = ptr . bindMemory ( to : UInt8 . self , capacity : size ) <nl> let bp = UnsafeBufferPointer ( start : bytePtr , count : size ) <nl> var stop = false <nl> - block ( buffer : bp , byteIndex : offset , stop : & stop ) <nl> - return ! stop <nl> + block ( bp , offset , & stop ) <nl> + return stop ? 
0 : 1 <nl> } <nl> } <nl> <nl> public struct DispatchData : RandomAccessCollection , _ObjectiveCBridgeable { <nl> / / / <nl> / / / - parameter buffer : The buffer of bytes to append . The size is calculated from ` SourceType ` and ` buffer . count ` . <nl> public mutating func append < SourceType > ( _ buffer : UnsafeBufferPointer < SourceType > ) { <nl> - buffer . baseAddress ! . withMemoryRebound ( to : UInt8 . self , capacity : buffer . count * strideof ( SourceType . self ) ) { <nl> - self . append ( $ 0 , count : buffer . count * sizeof ( SourceType . self ) ) <nl> + buffer . baseAddress ! . withMemoryRebound ( to : UInt8 . self , capacity : buffer . count * MemoryLayout < SourceType > . stride ) { <nl> + self . append ( $ 0 , count : buffer . count * MemoryLayout < SourceType > . stride ) <nl> } <nl> } <nl> <nl> public struct DispatchData : RandomAccessCollection , _ObjectiveCBridgeable { <nl> <nl> / / / Copy the contents of the data into a buffer . <nl> / / / <nl> - / / / This function copies the bytes in ` range ` from the data into the buffer . If the count of the ` range ` is greater than ` sizeof ( DestinationType ) * buffer . count ` then the first N bytes will be copied into the buffer . <nl> + / / / This function copies the bytes in ` range ` from the data into the buffer . If the count of the ` range ` is greater than ` MemoryLayout < DestinationType > . size * buffer . count ` then the first N bytes will be copied into the buffer . <nl> / / / - precondition : The range must be within the bounds of the data . Otherwise ` fatalError ` is called . <nl> / / / - parameter buffer : A buffer to copy the data into . <nl> / / / - parameter range : A range in the data to copy into the buffer . If the range is empty , this function will return 0 without copying anything . If the range is nil , as much data as will fit into ` buffer ` is copied . <nl> public struct DispatchData : RandomAccessCollection , _ObjectiveCBridgeable { <nl> precondition ( r . endIndex > = 0 ) <nl> precondition ( r . endIndex < = cnt , " The range is outside the bounds of the data " ) <nl> <nl> - copyRange = r . startIndex . . < ( r . startIndex + Swift . min ( buffer . count * sizeof ( DestinationType . self ) , r . count ) ) <nl> + copyRange = r . startIndex . . < ( r . startIndex + Swift . min ( buffer . count * MemoryLayout < DestinationType > . stride , r . count ) ) <nl> } else { <nl> - copyRange = 0 . . < Swift . min ( buffer . count * sizeof ( DestinationType . self ) , cnt ) <nl> + copyRange = 0 . . < Swift . min ( buffer . count * MemoryLayout < DestinationType > . stride , cnt ) <nl> } <nl> <nl> guard ! copyRange . isEmpty else { return 0 } <nl> extension DispatchData { <nl> } <nl> } <nl> <nl> - typealias _swift_data_applier = @ convention ( block ) @ noescape ( __DispatchData , Int , UnsafeRawPointer , Int ) - > Bool <nl> - <nl> - @ _silgen_name ( " _swift_dispatch_data_apply " ) <nl> - internal func _swift_dispatch_data_apply ( _ data : __DispatchData , _ block : _swift_data_applier ) <nl> - <nl> @ _silgen_name ( " _swift_dispatch_data_empty " ) <nl> internal func _swift_dispatch_data_empty ( ) - > __DispatchData <nl> <nl> mmm a / stdlib / public / SDK / Dispatch / Dispatch . mm <nl> ppp b / stdlib / public / SDK / Dispatch / Dispatch . 
mm <nl> static void _dispatch_overlay_constructor ( ) { <nl> return _dispatch_data_destructor_munmap ; <nl> } <nl> <nl> - SWIFT_CC ( swift ) DISPATCH_RUNTIME_STDLIB_INTERFACE <nl> - extern " C " bool <nl> - _swift_dispatch_data_apply ( dispatch_data_t data , bool ( ^ applier ) ( dispatch_data_t , size_t , const void * , size_t ) ) { <nl> - return dispatch_data_apply ( data , applier ) ; <nl> - } <nl> - <nl> - / / DISPATCH_RUNTIME_STDLIB_INTERFACE <nl> - / / extern " C " dispatch_queue_t <nl> - / / _swift_apply_current_root_queue ( ) { <nl> - / / return DISPATCH_APPLY_CURRENT_ROOT_QUEUE ; <nl> - / / } <nl> - <nl> # define SOURCE ( t ) \ <nl> SWIFT_CC ( swift ) \ <nl> DISPATCH_RUNTIME_STDLIB_INTERFACE extern " C " dispatch_source_type_t \ <nl> mmm a / stdlib / public / SDK / Dispatch / Dispatch . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Dispatch . swift <nl> public enum DispatchTimeoutResult { <nl> / / / dispatch_group <nl> <nl> public extension DispatchGroup { <nl> - public func notify ( qos : DispatchQoS = . unspecified , flags : DispatchWorkItemFlags = [ ] , queue : DispatchQueue , execute work : @ convention ( block ) ( ) - > ( ) ) { <nl> + public func notify ( qos : DispatchQoS = . unspecified , flags : DispatchWorkItemFlags = [ ] , queue : DispatchQueue , execute work : @ escaping @ convention ( block ) ( ) - > ( ) ) { <nl> if # available ( OSX 10 . 10 , iOS 8 . 0 , * ) , qos ! = . unspecified | | ! flags . isEmpty { <nl> let item = DispatchWorkItem ( qos : qos , flags : flags , block : work ) <nl> __dispatch_group_notify ( self , queue , item . _block ) <nl> mmm a / stdlib / public / SDK / Dispatch / IO . swift <nl> ppp b / stdlib / public / SDK / Dispatch / IO . swift <nl> public extension DispatchIO { <nl> public static let strictInterval = IntervalFlags ( rawValue : 1 ) <nl> } <nl> <nl> - public class func read ( fromFileDescriptor : Int32 , maxLength : Int , runningHandlerOn queue : DispatchQueue , handler : ( data : DispatchData , error : Int32 ) - > Void ) { <nl> + public class func read ( fromFileDescriptor : Int32 , maxLength : Int , runningHandlerOn queue : DispatchQueue , handler : @ escaping ( _ data : DispatchData , _ error : Int32 ) - > Void ) { <nl> __dispatch_read ( fromFileDescriptor , maxLength , queue ) { ( data : __DispatchData , error : Int32 ) in <nl> - handler ( data : DispatchData ( data : data ) , error : error ) <nl> + handler ( DispatchData ( data : data ) , error ) <nl> } <nl> } <nl> <nl> - public class func write ( toFileDescriptor : Int32 , data : DispatchData , runningHandlerOn queue : DispatchQueue , handler : ( data : DispatchData ? , error : Int32 ) - > Void ) { <nl> + public class func write ( toFileDescriptor : Int32 , data : DispatchData , runningHandlerOn queue : DispatchQueue , handler : @ escaping ( _ data : DispatchData ? , _ error : Int32 ) - > Void ) { <nl> __dispatch_write ( toFileDescriptor , data as __DispatchData , queue ) { ( data : __DispatchData ? , error : Int32 ) in <nl> - handler ( data : data . flatMap { DispatchData ( data : $ 0 ) } , error : error ) <nl> + handler ( data . flatMap { DispatchData ( data : $ 0 ) } , error ) <nl> } <nl> } <nl> <nl> public extension DispatchIO { <nl> type : StreamType , <nl> fileDescriptor : Int32 , <nl> queue : DispatchQueue , <nl> - cleanupHandler : ( error : Int32 ) - > Void ) <nl> + cleanupHandler : @ escaping ( _ error : Int32 ) - > Void ) <nl> { <nl> self . init ( __type : type . 
rawValue , fd : fileDescriptor , queue : queue , handler : cleanupHandler ) <nl> } <nl> public extension DispatchIO { <nl> oflag : Int32 , <nl> mode : mode_t , <nl> queue : DispatchQueue , <nl> - cleanupHandler : ( error : Int32 ) - > Void ) <nl> + cleanupHandler : @ escaping ( _ error : Int32 ) - > Void ) <nl> { <nl> self . init ( __type : type . rawValue , path : path , oflag : oflag , mode : mode , queue : queue , handler : cleanupHandler ) <nl> } <nl> public extension DispatchIO { <nl> type : StreamType , <nl> io : DispatchIO , <nl> queue : DispatchQueue , <nl> - cleanupHandler : ( error : Int32 ) - > Void ) <nl> + cleanupHandler : @ escaping ( _ error : Int32 ) - > Void ) <nl> { <nl> self . init ( __type : type . rawValue , io : io , queue : queue , handler : cleanupHandler ) <nl> } <nl> <nl> - public func read ( offset : off_t , length : Int , queue : DispatchQueue , ioHandler : ( done : Bool , data : DispatchData ? , error : Int32 ) - > Void ) { <nl> + public func read ( offset : off_t , length : Int , queue : DispatchQueue , ioHandler : @ escaping ( _ done : Bool , _ data : DispatchData ? , _ error : Int32 ) - > Void ) { <nl> __dispatch_io_read ( self , offset , length , queue ) { ( done : Bool , data : __DispatchData ? , error : Int32 ) in <nl> - ioHandler ( done : done , data : data . flatMap { DispatchData ( data : $ 0 ) } , error : error ) <nl> + ioHandler ( done , data . flatMap { DispatchData ( data : $ 0 ) } , error ) <nl> } <nl> } <nl> <nl> - public func write ( offset : off_t , data : DispatchData , queue : DispatchQueue , ioHandler : ( done : Bool , data : DispatchData ? , error : Int32 ) - > Void ) { <nl> + public func write ( offset : off_t , data : DispatchData , queue : DispatchQueue , ioHandler : @ escaping ( _ done : Bool , _ data : DispatchData ? , _ error : Int32 ) - > Void ) { <nl> __dispatch_io_write ( self , offset , data as __DispatchData , queue ) { ( done : Bool , data : __DispatchData ? , error : Int32 ) in <nl> - ioHandler ( done : done , data : data . flatMap { DispatchData ( data : $ 0 ) } , error : error ) <nl> + ioHandler ( done , data . flatMap { DispatchData ( data : $ 0 ) } , error ) <nl> } <nl> } <nl> <nl> public func setInterval ( interval : DispatchTimeInterval , flags : IntervalFlags = [ ] ) { <nl> - __dispatch_io_set_interval ( self , interval . rawValue , flags . rawValue ) <nl> + __dispatch_io_set_interval ( self , UInt64 ( interval . rawValue ) , flags . rawValue ) <nl> } <nl> <nl> public func close ( flags : CloseFlags = [ ] ) { <nl> mmm a / stdlib / public / SDK / Dispatch / Queue . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Queue . swift <nl> public extension DispatchQueue { <nl> <nl> public enum GlobalQueuePriority { <nl> @ available ( OSX , deprecated : 10 . 10 , message : " Use qos attributes instead " ) <nl> - @ available ( * , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( iOS , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( tvOS , deprecated , message : " Use qos attributes instead " ) <nl> + @ available ( watchOS , deprecated , message : " Use qos attributes instead " ) <nl> case high <nl> <nl> @ available ( OSX , deprecated : 10 . 10 , message : " Use qos attributes instead " ) <nl> - @ available ( * , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( iOS , deprecated : 8 . 
0 , message : " Use qos attributes instead " ) <nl> + @ available ( tvOS , deprecated , message : " Use qos attributes instead " ) <nl> + @ available ( watchOS , deprecated , message : " Use qos attributes instead " ) <nl> case ` default ` <nl> <nl> @ available ( OSX , deprecated : 10 . 10 , message : " Use qos attributes instead " ) <nl> - @ available ( * , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( iOS , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( tvOS , deprecated , message : " Use qos attributes instead " ) <nl> + @ available ( watchOS , deprecated , message : " Use qos attributes instead " ) <nl> case low <nl> <nl> @ available ( OSX , deprecated : 10 . 10 , message : " Use qos attributes instead " ) <nl> - @ available ( * , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( iOS , deprecated : 8 . 0 , message : " Use qos attributes instead " ) <nl> + @ available ( tvOS , deprecated , message : " Use qos attributes instead " ) <nl> + @ available ( watchOS , deprecated , message : " Use qos attributes instead " ) <nl> case background <nl> <nl> internal var _translatedValue : Int { <nl> public extension DispatchQueue { <nl> return _swift_dispatch_get_main_queue ( ) <nl> } <nl> <nl> - @ available ( OSX , deprecated : 10 . 10 , message : " " ) <nl> - @ available ( * , deprecated : 8 . 0 , message : " " ) <nl> + @ available ( OSX , deprecated : 10 . 10 ) <nl> + @ available ( iOS , deprecated : 8 . 0 ) <nl> + @ available ( tvOS , deprecated ) <nl> + @ available ( watchOS , deprecated ) <nl> public class func global ( priority : GlobalQueuePriority ) - > DispatchQueue { <nl> return __dispatch_get_global_queue ( priority . _translatedValue , 0 ) <nl> } <nl> public extension DispatchQueue { <nl> group : DispatchGroup ? = nil , <nl> qos : DispatchQoS = . unspecified , <nl> flags : DispatchWorkItemFlags = [ ] , <nl> - execute work : @ convention ( block ) ( ) - > Void ) <nl> + execute work : @ escaping @ convention ( block ) ( ) - > Void ) <nl> { <nl> if group = = nil & & qos = = . unspecified & & flags . isEmpty { <nl> / / Fast - path route for the most common API usage <nl> public extension DispatchQueue { <nl> deadline : DispatchTime , <nl> qos : DispatchQoS = . unspecified , <nl> flags : DispatchWorkItemFlags = [ ] , <nl> - execute work : @ convention ( block ) ( ) - > Void ) <nl> + execute work : @ escaping @ convention ( block ) ( ) - > Void ) <nl> { <nl> if # available ( OSX 10 . 10 , iOS 8 . 0 , * ) , qos ! = . unspecified | | ! flags . isEmpty { <nl> let item = DispatchWorkItem ( qos : qos , flags : flags , block : work ) <nl> public extension DispatchQueue { <nl> wallDeadline : DispatchWallTime , <nl> qos : DispatchQoS = . unspecified , <nl> flags : DispatchWorkItemFlags = [ ] , <nl> - execute work : @ convention ( block ) ( ) - > Void ) <nl> + execute work : @ escaping @ convention ( block ) ( ) - > Void ) <nl> { <nl> if # available ( OSX 10 . 10 , iOS 8 . 0 , * ) , qos ! = . unspecified | | ! flags . 
isEmpty { <nl> let item = DispatchWorkItem ( qos : qos , flags : flags , block : work ) <nl> internal func _swift_dispatch_queue_concurrent ( ) - > __OS_dispatch_queue_attr <nl> <nl> @ _silgen_name ( " _swift_dispatch_get_main_queue " ) <nl> internal func _swift_dispatch_get_main_queue ( ) - > DispatchQueue <nl> - <nl> - @ _silgen_name ( " _swift_dispatch_apply_current_root_queue " ) <nl> - internal func _swift_dispatch_apply_current_root_queue ( ) - > DispatchQueue <nl> - <nl> mmm a / stdlib / public / SDK / Dispatch / Source . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Source . swift <nl> public extension DispatchSourceProcess { <nl> <nl> public extension DispatchSourceTimer { <nl> public func scheduleOneshot ( deadline : DispatchTime , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> - __dispatch_source_set_timer ( self as ! DispatchSource , deadline . rawValue , ~ 0 , UInt64 ( leeway . rawValue ) ) <nl> + __dispatch_source_set_timer ( self as ! DispatchSource , UInt64 ( deadline . rawValue ) , ~ 0 , UInt64 ( leeway . rawValue ) ) <nl> } <nl> <nl> public func scheduleOneshot ( wallDeadline : DispatchWallTime , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> - __dispatch_source_set_timer ( self as ! DispatchSource , wallDeadline . rawValue , ~ 0 , UInt64 ( leeway . rawValue ) ) <nl> + __dispatch_source_set_timer ( self as ! DispatchSource , UInt64 ( wallDeadline . rawValue ) , ~ 0 , UInt64 ( leeway . rawValue ) ) <nl> } <nl> <nl> public func scheduleRepeating ( deadline : DispatchTime , interval : DispatchTimeInterval , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> - __dispatch_source_set_timer ( self as ! DispatchSource , deadline . rawValue , interval . rawValue , UInt64 ( leeway . rawValue ) ) <nl> + __dispatch_source_set_timer ( self as ! DispatchSource , deadline . rawValue , UInt64 ( interval . rawValue ) , UInt64 ( leeway . rawValue ) ) <nl> } <nl> <nl> public func scheduleRepeating ( deadline : DispatchTime , interval : Double , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> public extension DispatchSourceTimer { <nl> } <nl> <nl> public func scheduleRepeating ( wallDeadline : DispatchWallTime , interval : DispatchTimeInterval , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> - __dispatch_source_set_timer ( self as ! DispatchSource , wallDeadline . rawValue , interval . rawValue , UInt64 ( leeway . rawValue ) ) <nl> + __dispatch_source_set_timer ( self as ! DispatchSource , wallDeadline . rawValue , UInt64 ( interval . rawValue ) , UInt64 ( leeway . rawValue ) ) <nl> } <nl> <nl> public func scheduleRepeating ( wallDeadline : DispatchWallTime , interval : Double , leeway : DispatchTimeInterval = . nanoseconds ( 0 ) ) { <nl> mmm a / stdlib / public / SDK / Dispatch / Time . swift <nl> ppp b / stdlib / public / SDK / Dispatch / Time . swift <nl> public enum DispatchTimeInterval { <nl> case microseconds ( Int ) <nl> case nanoseconds ( Int ) <nl> <nl> - internal var rawValue : UInt64 { <nl> + internal var rawValue : Int64 { <nl> switch self { <nl> - case . seconds ( let s ) : return UInt64 ( s ) * NSEC_PER_SEC <nl> - case . milliseconds ( let ms ) : return UInt64 ( ms ) * NSEC_PER_MSEC <nl> - case . microseconds ( let us ) : return UInt64 ( us ) * NSEC_PER_USEC <nl> - case . nanoseconds ( let ns ) : return UInt64 ( ns ) <nl> + case . seconds ( let s ) : return Int64 ( s ) * Int64 ( NSEC_PER_SEC ) <nl> + case . milliseconds ( let ms ) : return Int64 ( ms ) * Int64 ( NSEC_PER_MSEC ) <nl> + case . 
microseconds ( let us ) : return Int64 ( us ) * Int64 ( NSEC_PER_USEC ) <nl> + case . nanoseconds ( let ns ) : return Int64 ( ns ) <nl> } <nl> } <nl> } <nl> <nl> public func + ( time : DispatchTime , interval : DispatchTimeInterval ) - > DispatchTime { <nl> - let t = __dispatch_time ( time . rawValue , Int64 ( interval . rawValue ) ) <nl> + let t = __dispatch_time ( time . rawValue , interval . rawValue ) <nl> return DispatchTime ( rawValue : t ) <nl> } <nl> <nl> public func - ( time : DispatchTime , interval : DispatchTimeInterval ) - > DispatchTime { <nl> - let t = __dispatch_time ( time . rawValue , - Int64 ( interval . rawValue ) ) <nl> + let t = __dispatch_time ( time . rawValue , - interval . rawValue ) <nl> return DispatchTime ( rawValue : t ) <nl> } <nl> <nl> public func - ( time : DispatchTime , seconds : Double ) - > DispatchTime { <nl> } <nl> <nl> public func + ( time : DispatchWallTime , interval : DispatchTimeInterval ) - > DispatchWallTime { <nl> - let t = __dispatch_time ( time . rawValue , Int64 ( interval . rawValue ) ) <nl> + let t = __dispatch_time ( time . rawValue , interval . rawValue ) <nl> return DispatchWallTime ( rawValue : t ) <nl> } <nl> <nl> public func - ( time : DispatchWallTime , interval : DispatchTimeInterval ) - > DispatchWallTime { <nl> - let t = __dispatch_time ( time . rawValue , - Int64 ( interval . rawValue ) ) <nl> + let t = __dispatch_time ( time . rawValue , - interval . rawValue ) <nl> return DispatchWallTime ( rawValue : t ) <nl> } <nl> <nl> mmm a / stdlib / public / SDK / Foundation / Calendar . swift <nl> ppp b / stdlib / public / SDK / Foundation / Calendar . swift <nl> public struct Calendar : Hashable , Equatable , ReferenceConvertible , _MutableBoxi <nl> / / / - parameter direction : Which direction in time to search . The default value is ` . forward ` , which means later in time . <nl> / / / - parameter block : A closure that is called with search results . <nl> @ available ( iOS 8 . 0 , * ) <nl> - public func enumerateDates ( startingAfter start : Date , matching components : DateComponents , matchingPolicy : MatchingPolicy , repeatedTimePolicy : RepeatedTimePolicy = . first , direction : SearchDirection = . forward , using block : @ noescape ( result : Date ? , exactMatch : Bool , stop : inout Bool ) - > Void ) { <nl> + public func enumerateDates ( startingAfter start : Date , matching components : DateComponents , matchingPolicy : MatchingPolicy , repeatedTimePolicy : RepeatedTimePolicy = . first , direction : SearchDirection = . forward , using block : @ noescape ( _ result : Date ? , _ exactMatch : Bool , _ stop : inout Bool ) - > Void ) { <nl> _handle . map { <nl> $ 0 . enumerateDates ( startingAfter : start , matching : components , options : Calendar . _toCalendarOptions ( matchingPolicy : matchingPolicy , repeatedTimePolicy : repeatedTimePolicy , direction : direction ) ) { ( result , exactMatch , stop ) in <nl> var stopv = false <nl> - block ( result : result , exactMatch : exactMatch , stop : & stopv ) <nl> + block ( result , exactMatch , & stopv ) <nl> if stopv { <nl> stop . pointee = true <nl> } <nl> mmm a / stdlib / public / SDK / Foundation / Data . swift <nl> ppp b / stdlib / public / SDK / Foundation / Data . swift <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> / / / <nl> / / / - parameter buffer : A buffer pointer to copy . The size is calculated from ` SourceType ` and ` buffer . count ` . 
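The Time.swift hunk above only changes how the interval's nanosecond count is represented (a signed Int64 instead of UInt64), not how the operators are used. A minimal usage sketch, not taken from the patch:

```swift
import Dispatch

// DispatchTimeInterval's rawValue is now Int64, so the +/- overloads above can
// pass it to __dispatch_time directly instead of re-wrapping it in Int64().
let start = DispatchTime.now()
let deadline = start + .milliseconds(250)     // DispatchTime + DispatchTimeInterval
let earlier = deadline - .microseconds(500)   // the "-" overload negates the interval

print(deadline.uptimeNanoseconds - start.uptimeNanoseconds)   // ~250_000_000 (250ms in ns)
print(earlier.uptimeNanoseconds < deadline.uptimeNanoseconds) // true
```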
<nl> public init < SourceType > ( buffer : UnsafeBufferPointer < SourceType > ) { <nl> - _wrapped = _SwiftNSData ( immutableObject : NSData ( bytes : buffer . baseAddress , length : strideof ( SourceType . self ) * buffer . count ) ) <nl> + _wrapped = _SwiftNSData ( immutableObject : NSData ( bytes : buffer . baseAddress , length : MemoryLayout < SourceType > . stride * buffer . count ) ) <nl> } <nl> <nl> / / / Initialize a ` Data ` with copied memory content . <nl> / / / <nl> / / / - parameter buffer : A buffer pointer to copy . The size is calculated from ` SourceType ` and ` buffer . count ` . <nl> public init < SourceType > ( buffer : UnsafeMutableBufferPointer < SourceType > ) { <nl> - _wrapped = _SwiftNSData ( immutableObject : NSData ( bytes : UnsafePointer ( buffer . baseAddress ) , length : strideof ( SourceType . self ) * buffer . count ) ) <nl> + _wrapped = _SwiftNSData ( immutableObject : NSData ( bytes : UnsafePointer ( buffer . baseAddress ) , length : MemoryLayout < SourceType > . stride * buffer . count ) ) <nl> } <nl> <nl> / / / Initialize a ` Data ` with the contents of an Array . <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> public func withUnsafeBytes < ResultType , ContentType > ( _ body : @ noescape ( UnsafePointer < ContentType > ) throws - > ResultType ) rethrows - > ResultType { <nl> let bytes = _getUnsafeBytesPointer ( ) <nl> defer { _fixLifetime ( self ) } <nl> - let contentPtr = bytes . bindMemory ( to : ContentType . self , capacity : count / strideof ( ContentType . self ) ) <nl> + let contentPtr = bytes . bindMemory ( to : ContentType . self , capacity : count / MemoryLayout < ContentType > . stride ) <nl> return try body ( contentPtr ) <nl> } <nl> <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> public mutating func withUnsafeMutableBytes < ResultType , ContentType > ( _ body : @ noescape ( UnsafeMutablePointer < ContentType > ) throws - > ResultType ) rethrows - > ResultType { <nl> let mutableBytes = _getUnsafeMutableBytesPointer ( ) <nl> defer { _fixLifetime ( self ) } <nl> - let contentPtr = mutableBytes . bindMemory ( to : ContentType . self , capacity : count / strideof ( ContentType . self ) ) <nl> + let contentPtr = mutableBytes . bindMemory ( to : ContentType . self , capacity : count / MemoryLayout < ContentType > . stride ) <nl> return try body ( UnsafeMutablePointer ( contentPtr ) ) <nl> } <nl> <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> <nl> / / / Copy the contents of the data into a buffer . <nl> / / / <nl> - / / / This function copies the bytes in ` range ` from the data into the buffer . If the count of the ` range ` is greater than ` strideof ( DestinationType ) * buffer . count ` then the first N bytes will be copied into the buffer . <nl> + / / / This function copies the bytes in ` range ` from the data into the buffer . If the count of the ` range ` is greater than ` MemoryLayout < DestinationType > . stride * buffer . count ` then the first N bytes will be copied into the buffer . <nl> / / / - precondition : The range must be within the bounds of the data . Otherwise ` fatalError ` is called . <nl> / / / - parameter buffer : A buffer to copy the data into . <nl> / / / - parameter range : A range in the data to copy into the buffer . If the range is empty , this function will return 0 without copying anything . If the range is nil , as much data as will fit into ` buffer ` is copied . 
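The copyBytes documentation above is easier to read next to a concrete call. An illustrative use, assuming the Swift 3-era Foundation overlay shown in this patch (the example itself is not code from the patch):

```swift
import Foundation

// Three little-endian UInt16 values packed into a Data.
let data = Data(bytes: [0x01, 0x00, 0x02, 0x00, 0x03, 0x00])

var values = [UInt16](repeating: 0, count: 3)
let copied = values.withUnsafeMutableBufferPointer { buffer -> Int in
    // At most buffer.count * MemoryLayout<UInt16>.stride (= 6) bytes are copied.
    return data.copyBytes(to: buffer, from: nil)
}
print(copied)  // 6
print(values)  // [1, 2, 3] on a little-endian platform
```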
<nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> precondition ( r . upperBound > = 0 ) <nl> precondition ( r . upperBound < = cnt , " The range is outside the bounds of the data " ) <nl> <nl> - copyRange = r . lowerBound . . < ( r . lowerBound + Swift . min ( buffer . count * strideof ( DestinationType . self ) , r . count ) ) <nl> + copyRange = r . lowerBound . . < ( r . lowerBound + Swift . min ( buffer . count * MemoryLayout < DestinationType > . stride , r . count ) ) <nl> } else { <nl> - copyRange = 0 . . < Swift . min ( buffer . count * strideof ( DestinationType . self ) , cnt ) <nl> + copyRange = 0 . . < Swift . min ( buffer . count * MemoryLayout < DestinationType > . stride , cnt ) <nl> } <nl> <nl> guard ! copyRange . isEmpty else { return 0 } <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> / / / <nl> / / / In some cases , ( for example , a ` Data ` backed by a ` dispatch_data_t ` , the bytes may be stored discontiguously . In those cases , this function invokes the closure for each contiguous region of bytes . <nl> / / / - parameter block : The closure to invoke for each region of data . You may stop the enumeration by setting the ` stop ` parameter to ` true ` . <nl> - public func enumerateBytes ( _ block : @ noescape ( buffer : UnsafeBufferPointer < UInt8 > , byteIndex : Index , stop : inout Bool ) - > Void ) { <nl> + public func enumerateBytes ( _ block : @ noescape ( _ buffer : UnsafeBufferPointer < UInt8 > , _ byteIndex : Index , _ stop : inout Bool ) - > Void ) { <nl> _mapUnmanaged { <nl> $ 0 . enumerateBytes { ( ptr , range , stop ) in <nl> var stopv = false <nl> let bytePtr = ptr . bindMemory ( to : UInt8 . self , capacity : range . length ) <nl> - block ( buffer : UnsafeBufferPointer ( start : bytePtr , count : range . length ) , byteIndex : range . length , stop : & stopv ) <nl> + block ( UnsafeBufferPointer ( start : bytePtr , count : range . length ) , range . length , & stopv ) <nl> if stopv { <nl> stop . pointee = true <nl> } <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> / / / - parameter buffer : The buffer of bytes to append . The size is calculated from ` SourceType ` and ` buffer . count ` . <nl> public mutating func append < SourceType > ( _ buffer : UnsafeBufferPointer < SourceType > ) { <nl> _applyUnmanagedMutation { <nl> - $ 0 . append ( buffer . baseAddress ! , length : buffer . count * strideof ( SourceType . self ) ) <nl> + $ 0 . append ( buffer . baseAddress ! , length : buffer . count * MemoryLayout < SourceType > . stride ) <nl> } <nl> } <nl> <nl> public struct Data : ReferenceConvertible , Equatable , Hashable , RandomAccessColl <nl> / / / - parameter buffer : The replacement bytes . <nl> public mutating func replaceSubrange < SourceType > ( _ subrange : Range < Index > , with buffer : UnsafeBufferPointer < SourceType > ) { <nl> let nsRange = NSMakeRange ( subrange . lowerBound , subrange . upperBound - subrange . lowerBound ) <nl> - let bufferCount = buffer . count * strideof ( SourceType . self ) <nl> + let bufferCount = buffer . count * MemoryLayout < SourceType > . stride <nl> <nl> _applyUnmanagedMutation { <nl> $ 0 . replaceBytes ( in : nsRange , withBytes : buffer . baseAddress , length : bufferCount ) <nl> mmm a / stdlib / public / SDK / Foundation / DateInterval . swift <nl> ppp b / stdlib / public / SDK / Foundation / DateInterval . 
swift <nl> public struct DateInterval : ReferenceConvertible , Comparable , Hashable { <nl> public var hashValue : Int { <nl> var buf : ( UInt , UInt ) = ( UInt ( start . timeIntervalSinceReferenceDate ) , UInt ( end . timeIntervalSinceReferenceDate ) ) <nl> return withUnsafeMutablePointer ( to : & buf ) { <nl> - return Int ( bitPattern : CFHashBytes ( unsafeBitCast ( $ 0 , to : UnsafeMutablePointer < UInt8 > . self ) , CFIndex ( sizeof ( UInt . self ) * 2 ) ) ) <nl> + return Int ( bitPattern : CFHashBytes ( unsafeBitCast ( $ 0 , to : UnsafeMutablePointer < UInt8 > . self ) , CFIndex ( MemoryLayout < UInt > . size * 2 ) ) ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / public / SDK / Foundation / Foundation . swift <nl> ppp b / stdlib / public / SDK / Foundation / Foundation . swift <nl> extension NSDictionary { <nl> internal func NS_Swift_NSUndoManager_registerUndoWithTargetHandler ( <nl> _ self_ : AnyObject , <nl> _ target : AnyObject , <nl> - _ handler : @ convention ( block ) ( AnyObject ) - > Void ) <nl> + _ handler : @ escaping @ convention ( block ) ( AnyObject ) - > Void ) <nl> <nl> extension UndoManager { <nl> @ available ( * , unavailable , renamed : " registerUndo ( withTarget : handler : ) " ) <nl> extension UndoManager { <nl> } <nl> <nl> @ available ( OSX 10 . 11 , iOS 9 . 0 , * ) <nl> - public func registerUndo < TargetType : AnyObject > ( withTarget target : TargetType , handler : ( TargetType ) - > Void ) { <nl> + public func registerUndo < TargetType : AnyObject > ( withTarget target : TargetType , handler : @ escaping ( TargetType ) - > Void ) { <nl> / / The generic blocks use a different ABI , so we need to wrap the provided <nl> / / handler in something ObjC compatible . <nl> let objcCompatibleHandler : ( AnyObject ) - > Void = { internalTarget in <nl> mmm a / stdlib / public / SDK / Foundation / IndexPath . swift <nl> ppp b / stdlib / public / SDK / Foundation / IndexPath . swift <nl> public struct IndexPath : ReferenceConvertible , Equatable , Hashable , MutableColl <nl> if count = = 0 { <nl> _indexes = [ ] <nl> } else { <nl> - var ptr = malloc ( count * sizeof ( Element . self ) ) <nl> + var ptr = malloc ( count * MemoryLayout < Element > . size ) <nl> defer { free ( ptr ) } <nl> <nl> let elementPtr = ptr ! . bindMemory ( to : Element . self , capacity : count ) <nl> mmm a / stdlib / public / SDK / Foundation / IndexSet . swift <nl> ppp b / stdlib / public / SDK / Foundation / IndexSet . swift <nl> extension IndexSet : CustomStringConvertible , CustomDebugStringConvertible , Cust <nl> <nl> / / / Iterate two index sets on the boundaries of their ranges . This is where all of the interesting stuff happens for exclusive or , intersect , etc . <nl> private struct IndexSetBoundaryIterator : IteratorProtocol { <nl> - private typealias Element = IndexSet . Element <nl> + typealias Element = IndexSet . Element <nl> <nl> private var i1 : IndexSet . RangeView . Iterator <nl> private var i2 : IndexSet . RangeView . Iterator <nl> mmm a / stdlib / public / SDK / Foundation / NSError . swift <nl> ppp b / stdlib / public / SDK / Foundation / NSError . swift <nl> public protocol RecoverableError : Error { <nl> / / / " document " granularity , that do not affect the entire <nl> / / / application . 
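The handler parameters above now carry @escaping because the undo manager stores them and invokes them long after registerUndo(withTarget:handler:) returns; under SE-0103 closure parameters default to non-escaping. A minimal standalone sketch of that rule, with hypothetical names not taken from the patch:

```swift
// The registry keeps the closure alive beyond the call, so the parameter
// must be marked @escaping or the compiler rejects the append.
final class CompletionRegistry {
    private var handlers: [(Int) -> Void] = []

    func register(_ handler: @escaping (Int) -> Void) {
        handlers.append(handler)
    }

    func fire(with value: Int) {
        handlers.forEach { $0(value) }
    }
}

let registry = CompletionRegistry()
registry.register { print("got \($0)") }
registry.fire(with: 42)  // prints "got 42"
```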
<nl> func attemptRecovery ( optionIndex recoveryOptionIndex : Int , <nl> - resultHandler handler : ( recovered : Bool ) - > Void ) <nl> + resultHandler handler : ( _ recovered : Bool ) - > Void ) <nl> <nl> / / / Attempt to recover from this error when the user selected the <nl> / / / option at the given index . Returns true to indicate <nl> public extension RecoverableError { <nl> / / / mechanism ( ` ` attemptRecovery ( optionIndex : ) ` ` ) to implement <nl> / / / document - modal recovery . <nl> func attemptRecovery ( optionIndex recoveryOptionIndex : Int , <nl> - resultHandler handler : ( recovered : Bool ) - > Void ) { <nl> - handler ( recovered : attemptRecovery ( optionIndex : recoveryOptionIndex ) ) <nl> + resultHandler handler : ( _ recovered : Bool ) - > Void ) { <nl> + handler ( attemptRecovery ( optionIndex : recoveryOptionIndex ) ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / public / SDK / Foundation / NSStringAPI . swift <nl> ppp b / stdlib / public / SDK / Foundation / NSStringAPI . swift <nl> extension String { <nl> <nl> / / / Enumerates all the lines in a string . <nl> public func enumerateLines ( <nl> - invoking body : ( line : String , stop : inout Bool ) - > ( ) <nl> + invoking body : @ escaping ( _ line : String , _ stop : inout Bool ) - > ( ) <nl> ) { <nl> _ns . enumerateLines { <nl> ( line : String , stop : UnsafeMutablePointer < ObjCBool > ) <nl> in <nl> var stop_ = false <nl> - body ( line : line , stop : & stop_ ) <nl> + body ( line , & stop_ ) <nl> if stop_ { <nl> stop . pointee = true <nl> } <nl> extension String { <nl> public func enumerateSubstrings ( <nl> in range : Range < Index > , <nl> options opts : EnumerationOptions = [ ] , <nl> - _ body : ( <nl> - substring : String ? , substringRange : Range < Index > , <nl> - enclosingRange : Range < Index > , inout Bool <nl> + _ body : @ escaping ( <nl> + _ substring : String ? , _ substringRange : Range < Index > , <nl> + _ enclosingRange : Range < Index > , inout Bool <nl> ) - > ( ) <nl> ) { <nl> _ns . enumerateSubstrings ( in : _toNSRange ( range ) , options : opts ) { <nl> var stop_ = false <nl> <nl> - body ( substring : $ 0 , <nl> - substringRange : self . _range ( $ 1 ) , <nl> - enclosingRange : self . _range ( $ 2 ) , <nl> + body ( $ 0 , <nl> + self . _range ( $ 1 ) , <nl> + self . _range ( $ 2 ) , <nl> & stop_ ) <nl> <nl> if stop_ { <nl> extension String { <nl> _ range : Range < Index > , <nl> options opts : EnumerationOptions = [ ] , <nl> _ body : ( <nl> - substring : String ? , substringRange : Range < Index > , <nl> - enclosingRange : Range < Index > , inout Bool <nl> + _ substring : String ? , _ substringRange : Range < Index > , <nl> + _ enclosingRange : Range < Index > , inout Bool <nl> ) - > ( ) <nl> ) { <nl> fatalError ( " unavailable function can ' t be called " ) <nl> mmm a / stdlib / public / SDK / Foundation / UUID . swift <nl> ppp b / stdlib / public / SDK / Foundation / UUID . swift <nl> public struct UUID : ReferenceConvertible , Hashable , Equatable , CustomStringConv <nl> public var hashValue : Int { <nl> var localValue = uuid <nl> return withUnsafeMutablePointer ( to : & localValue ) { <nl> - return Int ( bitPattern : CFHashBytes ( unsafeBitCast ( $ 0 , to : UnsafeMutablePointer < UInt8 > . self ) , CFIndex ( sizeof ( uuid_t . self ) ) ) ) <nl> + return Int ( bitPattern : CFHashBytes ( unsafeBitCast ( $ 0 , to : UnsafeMutablePointer < UInt8 > . self ) , CFIndex ( MemoryLayout < uuid_t > . size ) ) ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / public / SDK / GLKit / GLKit . swift . 
gyb <nl> ppp b / stdlib / public / SDK / GLKit / GLKit . swift . gyb <nl> vectorElementNames = [ <nl> public func _indexHomogeneousValue < TTT , T > ( _ aggregate : UnsafePointer < TTT > , <nl> _ index : Int ) - > T { <nl> return UnsafeRawPointer ( aggregate ) . load ( <nl> - fromByteOffset : index * strideof ( T . self ) , as : T . self ) <nl> + fromByteOffset : index * MemoryLayout < T > . stride , as : T . self ) <nl> } <nl> <nl> % { <nl> mmm a / stdlib / public / SDK / SceneKit / SceneKit . swift <nl> ppp b / stdlib / public / SDK / SceneKit / SceneKit . swift <nl> extension SCNGeometryElement { <nl> fatalError ( " Expected constant number of indices per primitive " ) <nl> } <nl> self . init ( <nl> - data : Data ( bytes : indices , count : indexCount * sizeof ( IndexType . self ) ) , <nl> + data : Data ( bytes : indices , count : indexCount * MemoryLayout < IndexType > . stride ) , <nl> primitiveType : primitiveType , <nl> primitiveCount : primitiveCount , <nl> - bytesPerIndex : sizeof ( IndexType . self ) ) <nl> + bytesPerIndex : MemoryLayout < IndexType > . stride ) <nl> _fixLifetime ( indices ) <nl> } <nl> } <nl> mmm a / stdlib / public / SDK / XCTest / XCTest . swift <nl> ppp b / stdlib / public / SDK / XCTest / XCTest . swift <nl> public func XCTAssertLessThanOrEqual < T : Comparable > ( _ expression1 : @ autoclosure <nl> } <nl> } <nl> <nl> - public func XCTAssertThrowsError < T > ( _ expression : @ autoclosure ( ) throws - > T , _ message : @ autoclosure ( ) - > String = " " , file : StaticString = # file , line : UInt = # line , _ errorHandler : ( error : Error ) - > Void = { _ in } ) - > Void { <nl> + public func XCTAssertThrowsError < T > ( _ expression : @ autoclosure ( ) throws - > T , _ message : @ autoclosure ( ) - > String = " " , file : StaticString = # file , line : UInt = # line , _ errorHandler : ( _ error : Error ) - > Void = { _ in } ) - > Void { <nl> / / evaluate expression exactly once <nl> var caughtErrorOptional : Error ? <nl> <nl> public func XCTAssertThrowsError < T > ( _ expression : @ autoclosure ( ) throws - > T , _ <nl> switch result { <nl> case . success : <nl> if let caughtError = caughtErrorOptional { <nl> - errorHandler ( error : caughtError ) <nl> + errorHandler ( caughtError ) <nl> } else { <nl> _XCTRegisterFailure ( true , " XCTAssertThrowsError failed : did not throw an error " , message , file , line ) <nl> } <nl> mmm a / stdlib / public / SwiftShims / DispatchShims . h <nl> ppp b / stdlib / public / SwiftShims / DispatchShims . h <nl> <nl> # include " SwiftStddef . h " <nl> # include " Visibility . 
h " <nl> <nl> - # define SWIFT_DISPATCH_RETURNS_RETAINED_BLOCK __attribute__ ( ( __ns_returns_retained__ ) ) <nl> + # define SWIFT_DISPATCH_RETURNS_RETAINED __attribute__ ( ( __ns_returns_retained__ ) ) <nl> # define SWIFT_DISPATCH_NOESCAPE __attribute__ ( ( __noescape__ ) ) <nl> + # define SWIFT_DISPATCH_NONNULL _Nonnull <nl> + # define SWIFT_DISPATCH_NULLABLE _Nullable <nl> + # define SWIFT_DISPATCH_ASSUME_NONNULL_BEGIN _Pragma ( " clang assume_nonnull begin " ) <nl> + # define SWIFT_DISPATCH_ASSUME_NONNULL_END _Pragma ( " clang assume_nonnull end " ) <nl> + <nl> + SWIFT_DISPATCH_ASSUME_NONNULL_BEGIN <nl> <nl> # ifdef __cplusplus <nl> namespace swift { extern " C " { <nl> typedef id __swift_shims_dispatch_group_t ; <nl> typedef id __swift_shims_dispatch_data_t ; <nl> <nl> SWIFT_RUNTIME_STDLIB_INTERFACE <nl> - SWIFT_DISPATCH_RETURNS_RETAINED_BLOCK <nl> + SWIFT_DISPATCH_RETURNS_RETAINED <nl> __swift_shims_dispatch_block_t <nl> _swift_dispatch_block_create_with_qos_class ( <nl> __swift_shims_dispatch_block_flags_t flags , <nl> __swift_shims_qos_class_t qos , <nl> int relative_priority , <nl> - __swift_shims_dispatch_block_t block ) ; <nl> + __swift_shims_dispatch_block_t SWIFT_DISPATCH_NONNULL block ) ; <nl> <nl> SWIFT_RUNTIME_STDLIB_INTERFACE <nl> - SWIFT_DISPATCH_RETURNS_RETAINED_BLOCK <nl> + SWIFT_DISPATCH_RETURNS_RETAINED <nl> __swift_shims_dispatch_block_t <nl> _swift_dispatch_block_create_noescape ( <nl> __swift_shims_dispatch_block_flags_t flags , <nl> void _swift_dispatch_apply_current ( <nl> void SWIFT_DISPATCH_NOESCAPE ( ^ block ) ( long ) ) ; <nl> <nl> SWIFT_RUNTIME_STDLIB_INTERFACE <nl> + SWIFT_DISPATCH_RETURNS_RETAINED <nl> __swift_shims_dispatch_data_t <nl> _swift_dispatch_data_create ( <nl> const void * buffer , <nl> __swift_size_t size , <nl> - __swift_shims_dispatch_queue_t queue , <nl> - __swift_shims_dispatch_block_t destructor ) ; <nl> + __swift_shims_dispatch_queue_t SWIFT_DISPATCH_NULLABLE queue , <nl> + __swift_shims_dispatch_block_t SWIFT_DISPATCH_NULLABLE destructor ) ; <nl> + <nl> + typedef unsigned int ( ^ __swift_shims_dispatch_data_applier ) ( __swift_shims_dispatch_data_t , __swift_size_t , const void * , __swift_size_t ) ; <nl> + <nl> + SWIFT_RUNTIME_STDLIB_INTERFACE <nl> + unsigned int <nl> + _swift_dispatch_data_apply ( <nl> + __swift_shims_dispatch_data_t data , <nl> + __swift_shims_dispatch_data_applier SWIFT_DISPATCH_NOESCAPE applier ) ; <nl> <nl> # ifdef __cplusplus <nl> } } / / extern " C " , namespace swift <nl> # endif <nl> <nl> + SWIFT_DISPATCH_ASSUME_NONNULL_END <nl> + <nl> # endif / / __OBJC2__ <nl> <nl> # endif / / SWIFT_STDLIB_SHIMS_DISPATCHSHIMS_H <nl> mmm a / stdlib / public / core / BridgeObjectiveC . swift <nl> ppp b / stdlib / public / core / BridgeObjectiveC . swift <nl> internal struct _CocoaFastEnumerationStackBuf { <nl> _item14 = _item0 <nl> _item15 = _item0 <nl> <nl> - _sanityCheck ( sizeofValue ( self ) > = <nl> - sizeof ( Optional < UnsafeRawPointer > . self ) * count ) <nl> + _sanityCheck ( MemoryLayout . _ofInstance ( self ) . size > = <nl> + MemoryLayout < Optional < UnsafeRawPointer > > . size * count ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / public / core / Builtin . swift <nl> ppp b / stdlib / public / core / Builtin . swift <nl> import SwiftShims <nl> / / / Does not include any dynamically - allocated or " remote " storage . <nl> / / / In particular , ` sizeof ( X . self ) ` , when ` X ` is a class type , is the <nl> / / / same regardless of how many stored properties ` X ` has . 
<nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . size instead . " ) <nl> @ _transparent <nl> public func sizeof < T > ( _ : T . Type ) - > Int { <nl> return Int ( Builtin . sizeof ( T . self ) ) <nl> public func sizeof < T > ( _ : T . Type ) - > Int { <nl> / / / Does not include any dynamically - allocated or " remote " storage . <nl> / / / In particular , ` sizeof ( a ) ` , when ` a ` is a class instance , is the <nl> / / / same regardless of how many stored properties ` a ` has . <nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . size instead . " ) <nl> @ _transparent <nl> public func sizeofValue < T > ( _ : T ) - > Int { <nl> return sizeof ( T . self ) <nl> } <nl> <nl> / / / Returns the minimum memory alignment of ` T ` . <nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . alignment instead . " ) <nl> @ _transparent <nl> public func alignof < T > ( _ : T . Type ) - > Int { <nl> return Int ( Builtin . alignof ( T . self ) ) <nl> } <nl> <nl> / / / Returns the minimum memory alignment of ` T ` . <nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . alignment instead . " ) <nl> @ _transparent <nl> public func alignofValue < T > ( _ : T ) - > Int { <nl> return alignof ( T . self ) <nl> public func alignofValue < T > ( _ : T ) - > Int { <nl> <nl> / / / Returns the least possible interval between distinct instances of <nl> / / / ` T ` in memory . The result is always positive . <nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . stride instead . " ) <nl> @ _transparent <nl> public func strideof < T > ( _ : T . Type ) - > Int { <nl> return Int ( Builtin . strideof_nonzero ( T . self ) ) <nl> public func strideof < T > ( _ : T . Type ) - > Int { <nl> <nl> / / / Returns the least possible interval between distinct instances of <nl> / / / ` T ` in memory . The result is always positive . <nl> + @ available ( * , deprecated , message : " use MemoryLayout < T > . stride instead . " ) <nl> @ _transparent <nl> public func strideofValue < T > ( _ : T ) - > Int { <nl> return strideof ( T . self ) <nl> internal func _roundUp < DestinationType > ( <nl> return UnsafeMutablePointer < DestinationType > ( <nl> bitPattern : _roundUpImpl ( <nl> UInt ( bitPattern : pointer ) , <nl> - toAlignment : alignof ( DestinationType . self ) ) <nl> + toAlignment : MemoryLayout < DestinationType > . alignment ) <nl> ) . unsafelyUnwrapped <nl> } <nl> <nl> func _canBeClass < T > ( _ : T . Type ) - > Int8 { <nl> / / / <nl> @ _transparent <nl> public func unsafeBitCast < T , U > ( _ x : T , to : U . Type ) - > U { <nl> - _precondition ( sizeof ( T . self ) = = sizeof ( U . self ) , <nl> + _precondition ( MemoryLayout < T > . size = = MemoryLayout < U > . size , <nl> " can ' t unsafeBitCast between types of different sizes " ) <nl> return Builtin . reinterpretCast ( x ) <nl> } <nl> public func unsafeDowncast < T : AnyObject > ( _ x : AnyObject , to : T . Type ) - > T { <nl> public func _getUnsafePointerToStoredProperties ( _ x : AnyObject ) <nl> - > UnsafeMutableRawPointer { <nl> let storedPropertyOffset = _roundUp ( <nl> - sizeof ( _HeapObject . self ) , <nl> - toAlignment : alignof ( Optional < AnyObject > . self ) ) <nl> + MemoryLayout < _HeapObject > . size , <nl> + toAlignment : MemoryLayout < Optional < AnyObject > > . alignment ) <nl> return UnsafeMutableRawPointer ( Builtin . bridgeToRawPointer ( x ) ) + <nl> storedPropertyOffset <nl> } <nl> mmm a / stdlib / public / core / CMakeLists . 
txt <nl> ppp b / stdlib / public / core / CMakeLists . txt <nl> set ( SWIFTLIB_ESSENTIAL <nl> LifetimeManager . swift <nl> ManagedBuffer . swift <nl> Map . swift . gyb <nl> + MemoryLayout . swift <nl> Mirrors . swift . gyb <nl> Misc . swift <nl> MutableCollection . swift <nl> mmm a / stdlib / public / core / Character . swift <nl> ppp b / stdlib / public / core / Character . swift <nl> public struct Character : <nl> let ( count , initialUTF8 ) = s . _core . _encodeSomeUTF8 ( from : 0 ) <nl> / / Notice that the result of sizeof ( ) is a small non - zero number and can ' t <nl> / / overflow when multiplied by 8 . <nl> - let bits = sizeofValue ( initialUTF8 ) & * 8 & - 1 <nl> + let bits = MemoryLayout . _ofInstance ( initialUTF8 ) . size & * 8 & - 1 <nl> if _fastPath ( <nl> count = = s . _core . count & & ( initialUTF8 & ( 1 < < numericCast ( bits ) ) ) ! = 0 ) { <nl> _representation = . small ( Builtin . trunc_Int64_Int63 ( initialUTF8 . _value ) ) <nl> mmm a / stdlib / public / core / Existential . swift <nl> ppp b / stdlib / public / core / Existential . swift <nl> internal struct _CollectionOf < <nl> <nl> internal init ( <nl> _startIndex : IndexType , endIndex : IndexType , <nl> - _ subscriptImpl : ( IndexType ) - > Element <nl> + _ subscriptImpl : @ escaping ( IndexType ) - > Element <nl> ) { <nl> self . startIndex = _startIndex <nl> self . endIndex = endIndex <nl> mmm a / stdlib / public / core / ExistentialCollection . swift . gyb <nl> ppp b / stdlib / public / core / ExistentialCollection . swift . gyb <nl> public struct AnyIterator < Element > : IteratorProtocol { <nl> / / / var x = 7 <nl> / / / let iterator = AnyIterator { x < 15 ? x + + : nil } <nl> / / / let a = Array ( iterator ) / / [ 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 ] <nl> - public init ( _ body : ( ) - > Element ? ) { <nl> + public init ( _ body : @ escaping ( ) - > Element ? ) { <nl> self . _box = _IteratorBox ( _ClosureBasedIterator ( body ) ) <nl> } <nl> <nl> public struct AnyIterator < Element > : IteratorProtocol { <nl> extension AnyIterator : Sequence { } <nl> <nl> internal struct _ClosureBasedIterator < Element > : IteratorProtocol { <nl> - internal init ( _ body : ( ) - > Element ? ) { <nl> + internal init ( _ body : @ escaping ( ) - > Element ? ) { <nl> self . _body = body <nl> } <nl> internal func next ( ) - > Element ? { return _body ( ) } <nl> internal final class _ $ { Kind } Box < S : $ { Kind } > : _Any $ { Kind } Box < S . Iterator . Elemen <nl> internal struct _ClosureBasedSequence < Iterator : IteratorProtocol > <nl> : Sequence { <nl> <nl> - internal init ( _ makeUnderlyingIterator : ( ) - > Iterator ) { <nl> + internal init ( _ makeUnderlyingIterator : @ escaping ( ) - > Iterator ) { <nl> self . _makeUnderlyingIterator = makeUnderlyingIterator <nl> } <nl> <nl> internal struct _ClosureBasedSequence < Iterator : IteratorProtocol > <nl> return _makeUnderlyingIterator ( ) <nl> } <nl> <nl> - internal var _makeUnderlyingIterator : ( ) - > Iterator <nl> + internal var _makeUnderlyingIterator : @ escaping ( ) - > Iterator <nl> } <nl> <nl> / / / A type - erased sequence . <nl> public struct AnySequence < Element > : Sequence { <nl> / / / Creates a sequence whose ` makeIterator ( ) ` method forwards to <nl> / / / ` makeUnderlyingIterator ` . <nl> public init < I : IteratorProtocol > ( <nl> - _ makeUnderlyingIterator : ( ) - > I <nl> + _ makeUnderlyingIterator : @ escaping ( ) - > I <nl> ) where I . Element = = Element { <nl> self . 
init ( _ClosureBasedSequence ( makeUnderlyingIterator ) ) <nl> } <nl> mmm a / stdlib / public / core / Filter . swift . gyb <nl> ppp b / stdlib / public / core / Filter . swift . gyb <nl> public struct LazyFilterIterator < <nl> / / / for which ` isIncluded ( x ) = = true ` . <nl> internal init ( <nl> _base : Base , <nl> - _ isIncluded : ( Base . Element ) - > Bool <nl> + _ isIncluded : @ escaping ( Base . Element ) - > Bool <nl> ) { <nl> self . _base = _base <nl> self . _predicate = isIncluded <nl> public struct LazyFilterSequence < Base : Sequence > <nl> public / / @ testable <nl> init ( <nl> _base base : Base , <nl> - _ isIncluded : ( Base . Iterator . Element ) - > Bool <nl> + _ isIncluded : @ escaping ( Base . Iterator . Element ) - > Bool <nl> ) { <nl> self . base = base <nl> self . _include = isIncluded <nl> public struct $ { Self } < <nl> public / / @ testable <nl> init ( <nl> _base : Base , <nl> - _ isIncluded : ( Base . Iterator . Element ) - > Bool <nl> + _ isIncluded : @ escaping ( Base . Iterator . Element ) - > Bool <nl> ) { <nl> self . _base = _base <nl> self . _predicate = isIncluded <nl> extension LazySequenceProtocol { <nl> / / / traversal step invokes ` predicate ` on one or more underlying <nl> / / / elements . <nl> public func filter ( <nl> - _ isIncluded : ( Elements . Iterator . Element ) - > Bool <nl> + _ isIncluded : @ escaping ( Elements . Iterator . Element ) - > Bool <nl> ) - > LazyFilterSequence < Self . Elements > { <nl> return LazyFilterSequence ( <nl> _base : self . elements , isIncluded ) <nl> extension LazyCollectionProtocol <nl> / / / traversal step invokes ` predicate ` on one or more underlying <nl> / / / elements . <nl> public func filter ( <nl> - _ isIncluded : ( Elements . Iterator . Element ) - > Bool <nl> + _ isIncluded : @ escaping ( Elements . Iterator . Element ) - > Bool <nl> ) - > LazyFilter $ { collectionForTraversal ( Traversal ) } < Self . Elements > { <nl> return LazyFilter $ { collectionForTraversal ( Traversal ) } ( <nl> _base : self . elements , isIncluded ) <nl> mmm a / stdlib / public / core / FlatMap . swift <nl> ppp b / stdlib / public / core / FlatMap . swift <nl> extension LazySequenceProtocol { <nl> / / / <nl> / / / - Complexity : O ( 1 ) <nl> public func flatMap < SegmentOfResult : Sequence > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > SegmentOfResult <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > SegmentOfResult <nl> ) - > LazySequence < <nl> FlattenSequence < LazyMapSequence < Elements , SegmentOfResult > > > { <nl> return self . map ( transform ) . joined ( ) <nl> extension LazySequenceProtocol { <nl> / / / - Parameter transform : A closure that accepts an element of this <nl> / / / sequence as its argument and returns an optional value . <nl> public func flatMap < ElementOfResult > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> ) - > LazyMapSequence < <nl> LazyFilterSequence < <nl> LazyMapSequence < Elements , ElementOfResult ? > > , <nl> extension LazyCollectionProtocol { <nl> / / / <nl> / / / - Complexity : O ( 1 ) <nl> public func flatMap < SegmentOfResult : Collection > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > SegmentOfResult <nl> + _ transform : @ escaping ( Elements . Iterator . 
Element ) - > SegmentOfResult <nl> ) - > LazyCollection < <nl> FlattenCollection < <nl> LazyMapCollection < Elements , SegmentOfResult > > <nl> extension LazyCollectionProtocol { <nl> / / / - Parameter transform : A closure that accepts an element of this <nl> / / / collection as its argument and returns an optional value . <nl> public func flatMap < ElementOfResult > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> ) - > LazyMapCollection < <nl> LazyFilterCollection < <nl> LazyMapCollection < Elements , ElementOfResult ? > > , <nl> extension LazyCollectionProtocol <nl> / / / <nl> / / / - Complexity : O ( 1 ) <nl> public func flatMap < SegmentOfResult : Collection > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > SegmentOfResult <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > SegmentOfResult <nl> ) - > LazyCollection < <nl> FlattenBidirectionalCollection < <nl> LazyMapBidirectionalCollection < Elements , SegmentOfResult > > > <nl> extension LazyCollectionProtocol <nl> / / / - Parameter transform : A closure that accepts an element of this <nl> / / / collection as its argument and returns an optional value . <nl> public func flatMap < ElementOfResult > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > ElementOfResult ? <nl> ) - > LazyMapBidirectionalCollection < <nl> LazyFilterBidirectionalCollection < <nl> LazyMapBidirectionalCollection < Elements , ElementOfResult ? > > , <nl> mmm a / stdlib / public / core / GroupInfo . json <nl> ppp b / stdlib / public / core / GroupInfo . json <nl> <nl> " BridgeStorage . swift " , <nl> " Builtin . swift " , <nl> " VarArgs . swift " , <nl> - " CTypes . swift " <nl> + " CTypes . swift " , <nl> + " MemoryLayout . swift " <nl> ] , <nl> " Reflection " : [ <nl> " Mirrors . swift " , <nl> mmm a / stdlib / public / core / HashedCollections . swift . gyb <nl> ppp b / stdlib / public / core / HashedCollections . swift . gyb <nl> internal struct _UnmanagedAnyObjectArray { <nl> internal subscript ( i : Int ) - > AnyObject { <nl> get { <nl> let unmanaged = value . load ( <nl> - fromByteOffset : i * strideof ( AnyObject . self ) , <nl> + fromByteOffset : i * MemoryLayout < AnyObject > . stride , <nl> as : Unmanaged < AnyObject > . self ) <nl> return unmanaged . takeUnretainedValue ( ) <nl> } <nl> nonmutating set ( newValue ) { <nl> let unmanaged = Unmanaged . passUnretained ( newValue ) <nl> value . storeBytes ( of : unmanaged , <nl> - toByteOffset : i * strideof ( AnyObject . self ) , <nl> + toByteOffset : i * MemoryLayout < AnyObject > . stride , <nl> as : Unmanaged < AnyObject > . self ) <nl> } <nl> } <nl> final internal class _Native $ { Self } StorageImpl < $ { TypeParameters } > : <nl> / / / padding to align the start to word alignment . <nl> internal static func bytesForBitMap ( capacity : Int ) - > Int { <nl> let numWords = _UnsafeBitMap . sizeInWords ( forSizeInBits : capacity ) <nl> - return numWords * strideof ( UInt . self ) + alignof ( UInt . self ) <nl> + return numWords * MemoryLayout < UInt > . stride + MemoryLayout < UInt > . alignment <nl> } <nl> <nl> / / / Returns the bytes necessary to store ' capacity ' keys and padding to align <nl> / / / the start to the alignment of the ' Key ' type assuming a word aligned base <nl> / / / address . 
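The comment above describes a small piece of manual layout arithmetic. A standalone function that mirrors the arithmetic added in this hunk (an illustrative sketch, not the stdlib's internal code) makes the padding rule easier to see:

```swift
// Bytes needed for `capacity` keys in a region whose start is only guaranteed
// to be word aligned: pad only when Key is more strictly aligned than a word.
func bytesForKeys<Key>(_ type: Key.Type, capacity: Int) -> Int {
    let padding = max(0, MemoryLayout<Key>.alignment - MemoryLayout<UInt>.alignment)
    return MemoryLayout<Key>.stride * capacity + padding
}

print(bytesForKeys(Int.self, capacity: 8))   // 64 on a 64-bit platform, no padding
print(bytesForKeys(Int8.self, capacity: 8))  // 8; max(0, ...) keeps the padding non-negative
```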
<nl> internal static func bytesForKeys ( capacity : Int ) - > Int { <nl> - let padding = max ( 0 , alignof ( Key . self ) - alignof ( UInt . self ) ) <nl> - return strideof ( Key . self ) * capacity + padding <nl> + let padding = max ( 0 , MemoryLayout < Key > . alignment - MemoryLayout < UInt > . alignment ) <nl> + return MemoryLayout < Key > . stride * capacity + padding <nl> } <nl> <nl> % if Self = = ' Dictionary ' : <nl> final internal class _Native $ { Self } StorageImpl < $ { TypeParameters } > : <nl> / / / address aligned to the maximum of the alignment of the ' Key ' type and the <nl> / / / alignment of a word . <nl> internal static func bytesForValues ( capacity : Int ) - > Int { <nl> - let maxPrevAlignment = max ( alignof ( Key . self ) , alignof ( UInt . self ) ) <nl> - let padding = max ( 0 , alignof ( Value . self ) - maxPrevAlignment ) <nl> - return strideof ( Value . self ) * capacity + padding <nl> + let maxPrevAlignment = max ( MemoryLayout < Key > . alignment , MemoryLayout < UInt > . alignment ) <nl> + let padding = max ( 0 , MemoryLayout < Value > . alignment - maxPrevAlignment ) <nl> + return MemoryLayout < Value > . stride * capacity + padding <nl> } <nl> % end <nl> <nl> final internal class _Native $ { Self } StorageImpl < $ { TypeParameters } > : <nl> let bitMapSizeInBytes = <nl> _unsafeMultiply ( <nl> _UnsafeBitMap . sizeInWords ( forSizeInBits : _body . capacity ) , <nl> - strideof ( UInt . self ) ) <nl> + MemoryLayout < UInt > . stride ) <nl> let start = <nl> UnsafeMutableRawPointer ( _initializedHashtableEntriesBitMapStorage ) <nl> + bitMapSizeInBytes <nl> final internal class _Native $ { Self } StorageImpl < $ { TypeParameters } > : <nl> % if Self = = ' Dictionary ' : <nl> / / This API is unsafe and needs a ` _fixLifetime ` in the caller . <nl> internal var _values : UnsafeMutablePointer < Value > { <nl> - let keysSizeInBytes = _unsafeMultiply ( _body . capacity , strideof ( Key . self ) ) <nl> + let keysSizeInBytes = _unsafeMultiply ( _body . capacity , MemoryLayout < Key > . stride ) <nl> let start = UnsafeMutableRawPointer ( _keys ) + keysSizeInBytes <nl> return _roundUp ( start , toAlignmentOf : Value . self ) <nl> } <nl> mmm a / stdlib / public / core / HeapBuffer . swift <nl> ppp b / stdlib / public / core / HeapBuffer . swift <nl> struct _HeapBuffer < Value , Element > : Equatable { <nl> <nl> internal static func _valueOffset ( ) - > Int { <nl> return _roundUp ( <nl> - sizeof ( _HeapObject . self ) , <nl> - toAlignment : alignof ( Value . self ) ) <nl> + MemoryLayout < _HeapObject > . size , <nl> + toAlignment : MemoryLayout < Value > . alignment ) <nl> } <nl> <nl> internal static func _elementOffset ( ) - > Int { <nl> return _roundUp ( <nl> - _valueOffset ( ) + sizeof ( Value . self ) , <nl> - toAlignment : alignof ( Element . self ) ) <nl> + _valueOffset ( ) + MemoryLayout < Value > . size , <nl> + toAlignment : MemoryLayout < Element > . alignment ) <nl> } <nl> <nl> internal static func _requiredAlignMask ( ) - > Int { <nl> / / We can ' t use max here because it can allocate an array . <nl> - let heapAlign = alignof ( _HeapObject . self ) & - 1 <nl> - let valueAlign = alignof ( Value . self ) & - 1 <nl> - let elementAlign = alignof ( Element . self ) & - 1 <nl> + let heapAlign = MemoryLayout < _HeapObject > . alignment & - 1 <nl> + let valueAlign = MemoryLayout < Value > . alignment & - 1 <nl> + let elementAlign = MemoryLayout < Element > . alignment & - 1 <nl> return ( heapAlign < valueAlign <nl> ? ( valueAlign < elementAlign ? 
elementAlign : valueAlign ) <nl> : ( heapAlign < elementAlign ? elementAlign : heapAlign ) ) <nl> struct _HeapBuffer < Value , Element > : Equatable { <nl> / / / Returns the actual number of ` Elements ` we can possibly store . <nl> internal func _capacity ( ) - > Int { <nl> return ( _allocatedSize ( ) - _HeapBuffer . _elementOffset ( ) ) <nl> - / strideof ( Element . self ) <nl> + / MemoryLayout < Element > . stride <nl> } <nl> <nl> internal init ( ) { <nl> struct _HeapBuffer < Value , Element > : Equatable { <nl> ) <nl> <nl> let totalSize = _HeapBuffer . _elementOffset ( ) + <nl> - capacity * strideof ( Element . self ) <nl> + capacity * MemoryLayout < Element > . stride <nl> let alignMask = _HeapBuffer . _requiredAlignMask ( ) <nl> <nl> let object : AnyObject = _swift_bufferAllocate ( <nl> mmm a / stdlib / public / core / ManagedBuffer . swift <nl> ppp b / stdlib / public / core / ManagedBuffer . swift <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> bufferClass : AnyClass , <nl> minimumCapacity : Int , <nl> makingHeaderWith factory : <nl> - ( buffer : AnyObject , capacity : ( AnyObject ) - > Int ) throws - > Header <nl> + ( _ buffer : AnyObject , _ capacity : ( AnyObject ) - > Int ) throws - > Header <nl> ) rethrows { <nl> self = ManagedBufferPointer ( <nl> bufferClass : bufferClass , minimumCapacity : minimumCapacity ) <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> try withUnsafeMutablePointerToHeader { <nl> $ 0 . initialize ( to : <nl> try factory ( <nl> - buffer : self . buffer , <nl> - capacity : { <nl> + self . buffer , <nl> + { <nl> ManagedBufferPointer ( unsafeBufferObject : $ 0 ) . capacity <nl> } ) ) <nl> } <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> / / / idea to store this information in the " header " area when <nl> / / / an instance is created . <nl> public var capacity : Int { <nl> - return ( _capacityInBytes & - _My . _elementOffset ) / strideof ( Element . self ) <nl> + return ( _capacityInBytes & - _My . _elementOffset ) / MemoryLayout < Element > . stride <nl> } <nl> <nl> / / / Call ` body ` with an ` UnsafeMutablePointer ` to the stored <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> " ManagedBufferPointer must have non - negative capacity " ) <nl> <nl> let totalSize = _My . _elementOffset <nl> - + minimumCapacity * strideof ( Element . self ) <nl> + + minimumCapacity * MemoryLayout < Element > . stride <nl> <nl> let newBuffer : AnyObject = _swift_bufferAllocate ( <nl> bufferType : _uncheckedBufferClass , <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> _ bufferClass : AnyClass , creating : Bool = false <nl> ) { <nl> _debugPrecondition ( <nl> - _class_getInstancePositiveExtentSize ( bufferClass ) = = sizeof ( _HeapObject . self ) <nl> + _class_getInstancePositiveExtentSize ( bufferClass ) = = MemoryLayout < _HeapObject > . size <nl> | | ( <nl> ! creating <nl> & & _class_getInstancePositiveExtentSize ( bufferClass ) <nl> - = = _headerOffset + sizeof ( Header . self ) ) , <nl> + = = _headerOffset + MemoryLayout < Header > . size ) , <nl> " ManagedBufferPointer buffer class has illegal stored properties " <nl> ) <nl> _debugPrecondition ( <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> _ bufferClass : AnyClass , creating : Bool = false <nl> ) { <nl> _sanityCheck ( <nl> - _class_getInstancePositiveExtentSize ( bufferClass ) = = sizeof ( _HeapObject . 
self ) <nl> + _class_getInstancePositiveExtentSize ( bufferClass ) = = MemoryLayout < _HeapObject > . size <nl> | | ( <nl> ! creating <nl> & & _class_getInstancePositiveExtentSize ( bufferClass ) <nl> - = = _headerOffset + sizeof ( Header . self ) ) , <nl> + = = _headerOffset + MemoryLayout < Header > . size ) , <nl> " ManagedBufferPointer buffer class has illegal stored properties " <nl> ) <nl> _sanityCheck ( <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> / / / The required alignment for allocations of this type , minus 1 <nl> internal static var _alignmentMask : Int { <nl> return max ( <nl> - alignof ( _HeapObject . self ) , <nl> - max ( alignof ( Header . self ) , alignof ( Element . self ) ) ) & - 1 <nl> + MemoryLayout < _HeapObject > . alignment , <nl> + max ( MemoryLayout < Header > . alignment , MemoryLayout < Element > . alignment ) ) & - 1 <nl> } <nl> <nl> / / / The actual number of bytes allocated for this object . <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> internal static var _headerOffset : Int { <nl> _onFastPath ( ) <nl> return _roundUp ( <nl> - sizeof ( _HeapObject . self ) , <nl> - toAlignment : alignof ( Header . self ) ) <nl> + MemoryLayout < _HeapObject > . size , <nl> + toAlignment : MemoryLayout < Header > . alignment ) <nl> } <nl> <nl> / / / An * * unmanaged * * pointer to the storage for the ` Header ` <nl> public struct ManagedBufferPointer < Header , Element > : Equatable { <nl> internal static var _elementOffset : Int { <nl> _onFastPath ( ) <nl> return _roundUp ( <nl> - _headerOffset + sizeof ( Header . self ) , <nl> - toAlignment : alignof ( Element . self ) ) <nl> + _headerOffset + MemoryLayout < Header > . size , <nl> + toAlignment : MemoryLayout < Element > . alignment ) <nl> } <nl> <nl> internal mutating func _isUniqueOrPinnedReference ( ) - > Bool { <nl> mmm a / stdlib / public / core / Map . swift . gyb <nl> ppp b / stdlib / public / core / Map . swift . gyb <nl> public struct LazyMapSequence < Base : Sequence , Element > <nl> <nl> / / / Create an instance with elements ` transform ( x ) ` for each element <nl> / / / ` x ` of base . <nl> - internal init ( _base : Base , transform : ( Base . Iterator . Element ) - > Element ) { <nl> + internal init ( _base : Base , transform : @ escaping ( Base . Iterator . Element ) - > Element ) { <nl> self . _base = _base <nl> self . _transform = transform <nl> } <nl> public struct $ { Self } < <nl> <nl> / / / Create an instance with elements ` transform ( x ) ` for each element <nl> / / / ` x ` of base . <nl> - internal init ( _base : Base , transform : ( Base . Iterator . Element ) - > Element ) { <nl> + internal init ( _base : Base , transform : @ escaping ( Base . Iterator . Element ) - > Element ) { <nl> self . _base = _base <nl> self . _transform = transform <nl> } <nl> extension LazySequenceProtocol { <nl> / / / the result are computed lazily , each time they are read , by <nl> / / / calling ` transform ` function on a base element . <nl> public func map < U > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > U <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > U <nl> ) - > LazyMapSequence < Self . Elements , U > { <nl> return LazyMapSequence ( _base : self . elements , transform : transform ) <nl> } <nl> extension LazyCollectionProtocol <nl> / / / the result are computed lazily , each time they are read , by <nl> / / / calling ` transform ` function on a base element . 
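The laziness described above is exactly why these transforms now need @escaping: the closure is stored inside the returned wrapper and runs later, on element access, not during the call to map. A small usage sketch, not taken from the patch:

```swift
let numbers = [1, 2, 3, 4]
let doubled = numbers.lazy.map { (n: Int) -> Int in
    print("transforming \(n)")   // runs on access, not here
    return n * 2
}

// Nothing has been printed yet; the stored closure fires per element read.
print(doubled[2])                 // "transforming 3", then 6
print(Array(doubled.prefix(2)))   // transforms 1 and 2 on demand, then prints [2, 4]
```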
<nl> public func map < U > ( <nl> - _ transform : ( Elements . Iterator . Element ) - > U <nl> + _ transform : @ escaping ( Elements . Iterator . Element ) - > U <nl> ) - > LazyMap $ { collectionForTraversal ( Traversal ) } < Self . Elements , U > { <nl> return LazyMap $ { collectionForTraversal ( Traversal ) } ( <nl> _base : self . elements , <nl> new file mode 100644 <nl> index 000000000000 . . a3cb8af54e84 <nl> mmm / dev / null <nl> ppp b / stdlib / public / core / MemoryLayout . swift <nl> <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + / / <nl> + / / This source file is part of the Swift . org open source project <nl> + / / <nl> + / / Copyright ( c ) 2014 - 2016 Apple Inc . and the Swift project authors <nl> + / / Licensed under Apache License v2 . 0 with Runtime Library Exception <nl> + / / <nl> + / / See http : / / swift . org / LICENSE . txt for license information <nl> + / / See http : / / swift . org / CONTRIBUTORS . txt for the list of Swift project authors <nl> + / / <nl> + / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> + <nl> + / / / Accesses the memory layout of ` T ` through its <nl> + / / / ` size ` , ` stride ` , and ` alignment ` properties <nl> + public enum MemoryLayout < T > { <nl> + <nl> + / / / Returns the contiguous memory footprint of ` T ` . <nl> + / / / <nl> + / / / Does not include any dynamically - allocated or " remote " <nl> + / / / storage . In particular , ` MemoryLayout < T > . size ` , when <nl> + / / / ` T ` is a class type , is the same regardless of how many <nl> + / / / stored properties ` T ` has . <nl> + @ _transparent <nl> + public static var size : Int { <nl> + return Int ( Builtin . sizeof ( T . self ) ) <nl> + } <nl> + <nl> + / / / For instances of ` T ` in an ` Array < T > ` , returns the number of <nl> + / / / bytes from the start of one instance to the start of the <nl> + / / / next . This is the same as the number of bytes moved when an <nl> + / / / ` UnsafePointer < T > ` is incremented . ` T ` may have a lower minimal <nl> + / / / alignment that trades runtime performance for space <nl> + / / / efficiency . The result is always positive . <nl> + @ _transparent <nl> + public static var stride : Int { <nl> + return Int ( Builtin . strideof_nonzero ( T . self ) ) <nl> + } <nl> + <nl> + / / / Returns the default memory alignment of ` T ` . <nl> + @ _transparent <nl> + public static var alignment : Int { <nl> + return Int ( Builtin . alignof ( T . self ) ) <nl> + } <nl> + } <nl> + <nl> + extension MemoryLayout { <nl> + @ _transparent <nl> + public / / @ testable <nl> + static func _ofInstance ( _ : @ autoclosure ( ) - > T ) - > MemoryLayout < T > . Type { <nl> + return MemoryLayout < T > . self <nl> + } <nl> + } <nl> mmm a / stdlib / public / core / Pointer . swift <nl> ppp b / stdlib / public / core / Pointer . swift <nl> func _convertConstArrayToPointerArgument < <nl> if let addr = opaquePointer { <nl> validPointer = ToPointer ( addr . _rawValue ) <nl> } else { <nl> - let lastAlignedValue = ~ ( alignof ( FromElement . self ) - 1 ) <nl> + let lastAlignedValue = ~ ( MemoryLayout < FromElement > . alignment - 1 ) <nl> let lastAlignedPointer = UnsafeRawPointer ( bitPattern : lastAlignedValue ) ! <nl> validPointer = ToPointer ( lastAlignedPointer . _rawValue ) <nl> } <nl> mmm a / stdlib / public / core / Runtime . swift . gyb <nl> ppp b / stdlib / public / core / Runtime . swift . 
gyb <nl> func _float $ { bits } ToString ( _ value : Float $ { bits } , debug : Bool ) - > String { <nl> } <nl> } <nl> <nl> - _sanityCheck ( sizeof ( _Buffer32 . self ) = = 32 ) <nl> - _sanityCheck ( sizeof ( _Buffer72 . self ) = = 72 ) <nl> + _sanityCheck ( MemoryLayout < _Buffer32 > . size = = 32 ) <nl> + _sanityCheck ( MemoryLayout < _Buffer72 > . size = = 72 ) <nl> <nl> var buffer = _Buffer32 ( ) <nl> return buffer . withBytes { ( bufferPtr ) in <nl> func _rawPointerToString ( _ value : Builtin . RawPointer ) - > String { <nl> radix : 16 , <nl> uppercase : false <nl> ) <nl> - for _ in 0 . . < ( 2 * sizeof ( UnsafeRawPointer . self ) - result . utf16 . count ) { <nl> + for _ in 0 . . < ( 2 * MemoryLayout < UnsafeRawPointer > . size - result . utf16 . count ) { <nl> result = " 0 " + result <nl> } <nl> return " 0x " + result <nl> mmm a / stdlib / public / core / SequenceAlgorithms . swift . gyb <nl> ppp b / stdlib / public / core / SequenceAlgorithms . swift . gyb <nl> extension Sequence { <nl> public func reduce < Result > ( <nl> _ initialResult : Result , <nl> _ nextPartialResult : <nl> - @ noescape ( partialResult : Result , $ { GElement } ) throws - > Result <nl> + @ noescape ( _ partialResult : Result , $ { GElement } ) throws - > Result <nl> ) rethrows - > Result { <nl> var accumulator = initialResult <nl> for element in self { <nl> - accumulator = try nextPartialResult ( partialResult : accumulator , element ) <nl> + accumulator = try nextPartialResult ( accumulator , element ) <nl> } <nl> return accumulator <nl> } <nl> mmm a / stdlib / public / core / Sort . swift . gyb <nl> ppp b / stdlib / public / core / Sort . swift . gyb <nl> public / / @ testable <nl> func _introSort < C > ( <nl> _ elements : inout C , <nl> subRange range : Range < C . Index > <nl> - $ { " , by areInIncreasingOrder : ( C . Iterator . Element , C . Iterator . Element ) - > Bool " if p else " " } <nl> + $ { " , by areInIncreasingOrder : @ escaping ( C . Iterator . Element , C . Iterator . Element ) - > Bool " if p else " " } <nl> ) where <nl> C : MutableCollection & RandomAccessCollection <nl> $ { " " if p else " , C . Iterator . Element : Comparable " } { <nl> mmm a / stdlib / public / core / StringUTF8 . swift <nl> ppp b / stdlib / public / core / StringUTF8 . swift <nl> extension _StringCore { <nl> if _fastPath ( elementWidth = = 1 ) { <nl> / / How many UTF - 16 code units might we use before we ' ve filled up <nl> / / our _UTF8Chunk with UTF - 8 code units ? <nl> - let utf16Count = Swift . min ( sizeof ( _UTF8Chunk . self ) , count - i ) <nl> + let utf16Count = Swift . min ( MemoryLayout < _UTF8Chunk > . size , count - i ) <nl> <nl> var result : _UTF8Chunk = ~ 0 / / Start with all bits set <nl> <nl> extension String { <nl> <nl> / / / A Buffer value with the high byte set <nl> internal static var _bufferHiByte : Buffer { <nl> - return 0xFF < < numericCast ( ( sizeof ( Buffer . self ) & - 1 ) & * 8 ) <nl> + return 0xFF < < numericCast ( ( MemoryLayout < Buffer > . size & - 1 ) & * 8 ) <nl> } <nl> <nl> / / / Consume a byte of the given buffer : shift out the low byte <nl> mmm a / stdlib / public / core / SwiftNativeNSArray . swift <nl> ppp b / stdlib / public / core / SwiftNativeNSArray . swift <nl> extension _SwiftNativeNSArrayWithContiguousStorage : _NSArrayCore { <nl> / / counting while correctly aliasing with all other pointer types . <nl> UnsafeMutableRawPointer ( aBuffer ) . copyBytes ( <nl> from : objects . baseAddress ! + range . location , <nl> - count : range . length * strideof ( AnyObject . 
self ) ) <nl> + count : range . length * MemoryLayout < AnyObject > . stride ) <nl> } <nl> } <nl> <nl> mmm a / stdlib / public / core / UnfoldSequence . swift <nl> ppp b / stdlib / public / core / UnfoldSequence . swift <nl> <nl> / / / value returned by passing the previous element to ` next ` . <nl> / / / <nl> / / / - SeeAlso : ` sequence ( state : next : ) ` <nl> - public func sequence < T > ( first : T , next : ( T ) - > T ? ) - > UnfoldFirstSequence < T > { <nl> + public func sequence < T > ( first : T , next : @ escaping ( T ) - > T ? ) - > UnfoldFirstSequence < T > { <nl> / / The trivial implementation where the state is the next value to return <nl> / / has the downside of being unnecessarily eager ( it evaluates ` next ` one <nl> / / step in advance ) . We solve this by using a boolean value to disambiguate <nl> public func sequence < T > ( first : T , next : ( T ) - > T ? ) - > UnfoldFirstSequence < T > { <nl> / / / - Returns : A sequence that yields each successive value from ` next ` . <nl> / / / <nl> / / / - SeeAlso : ` sequence ( first : next : ) ` <nl> - public func sequence < T , State > ( state : State , next : ( inout State ) - > T ? ) <nl> + public func sequence < T , State > ( state : State , next : @ escaping ( inout State ) - > T ? ) <nl> - > UnfoldSequence < T , State > { <nl> return UnfoldSequence ( _state : state , _next : next ) <nl> } <nl> public struct UnfoldSequence < Element , State > : Sequence , IteratorProtocol { <nl> } <nl> } <nl> <nl> - internal init ( _state : State , _next : ( inout State ) - > Element ? ) { <nl> + internal init ( _state : State , _next : @ escaping ( inout State ) - > Element ? ) { <nl> self . _state = _state <nl> self . _next = _next <nl> } <nl> mmm a / stdlib / public / core / Unicode . swift <nl> ppp b / stdlib / public / core / Unicode . swift <nl> internal func _transcodeSomeUTF16AsUTF8 < Input > ( <nl> typealias _UTF8Chunk = _StringCore . _UTF8Chunk <nl> <nl> let endIndex = input . endIndex <nl> - let utf8Max = sizeof ( _UTF8Chunk . self ) <nl> + let utf8Max = MemoryLayout < _UTF8Chunk > . size <nl> var result : _UTF8Chunk = 0 <nl> var utf8Count = 0 <nl> var nextIndex = startIndex <nl> internal func _transcodeSomeUTF16AsUTF8 < Input > ( <nl> nextIndex = input . index ( nextIndex , offsetBy : utf16Length ) <nl> } <nl> / / FIXME : Annoying check , courtesy of < rdar : / / problem / 16740169 > <nl> - if utf8Count < sizeofValue ( result ) { <nl> + if utf8Count < MemoryLayout . _ofInstance ( result ) . size { <nl> result | = ~ 0 < < numericCast ( utf8Count * 8 ) <nl> } <nl> return ( nextIndex , result ) <nl> extension UTF16 { <nl> destination : UnsafeMutablePointer < U > , <nl> count : Int <nl> ) { <nl> - if strideof ( T . self ) = = strideof ( U . self ) { <nl> + if MemoryLayout < T > . stride = = MemoryLayout < U > . stride { <nl> _memcpy ( <nl> dest : UnsafeMutablePointer ( destination ) , <nl> src : UnsafeMutablePointer ( source ) , <nl> - size : UInt ( count ) * UInt ( strideof ( U . self ) ) ) <nl> + size : UInt ( count ) * UInt ( MemoryLayout < U > . stride ) ) <nl> } <nl> else { <nl> for i in 0 . . < count { <nl> mmm a / stdlib / public / core / UnsafeBufferPointer . swift . gyb <nl> ppp b / stdlib / public / core / UnsafeBufferPointer . swift . gyb <nl> public struct UnsafeBufferPointerIterator < Element > <nl> / / / contiguously in memory , presenting a collection interface to the <nl> / / / underlying elements . <nl> / / / <nl> - / / / The pointer should be aligned to ` alignof ( Element . self ) ` . 
<nl> + / / / The pointer should be aligned to ` MemoryLayout < Element > . alignment ` . <nl> public struct Unsafe $ { Mutable } BufferPointer < Element > <nl> : $ { Mutable } Indexable , $ { Mutable } Collection , RandomAccessCollection { <nl> <nl> mmm a / stdlib / public / core / UnsafePointer . swift . gyb <nl> ppp b / stdlib / public / core / UnsafePointer . swift . gyb <nl> <nl> / / / provides no automated memory management , and therefore must <nl> / / / be handled with great care to ensure safety . <nl> / / / <nl> - / / / Instances must be aligned to ` alignof ( Pointee . self ) ` , i . e . <nl> - / / / ` ( UnsafePointer < Int8 > ( self ) - nil ) % alignof ( Pointee . self ) = = 0 ` <nl> + / / / Instances must be aligned to ` MemoryLayout < Pointee > . alignment ` , i . e . <nl> + / / / ` ( UnsafePointer < Int8 > ( self ) - nil ) % MemoryLayout < Pointee > . alignment = = 0 ` <nl> / / / <nl> / / / The memory referenced by an instance can be in one of the following states : <nl> / / / <nl> public struct $ { Self } < Pointee > <nl> / / / - Postcondition : The pointee is allocated , but not initialized . <nl> static public func allocate ( capacity count : Int ) <nl> - > UnsafeMutablePointer < Pointee > { <nl> - let size = strideof ( Pointee . self ) * count <nl> + let size = MemoryLayout < Pointee > . stride * count <nl> let rawPtr = <nl> Builtin . allocRaw ( size . _builtinWordValue , Builtin . alignof ( Pointee . self ) ) <nl> Builtin . bindMemory ( rawPtr , count . _builtinWordValue , Pointee . self ) <nl> public struct $ { Self } < Pointee > <nl> / / / <nl> / / / - Postcondition : The memory has been deallocated . <nl> public func deallocate ( capacity : Int ) { <nl> - let size = strideof ( Pointee . self ) * capacity <nl> + let size = MemoryLayout < Pointee > . stride * capacity <nl> Builtin . deallocRaw ( <nl> _rawValue , size . _builtinWordValue , Builtin . alignof ( Pointee . self ) ) <nl> } <nl> public struct $ { Self } < Pointee > <nl> / / / <nl> / / / - Precondition : Type ' T ' is layout compatible with type ' Pointee ' . <nl> / / / <nl> - / / / - Precondition : The memory ` self . . < self + count * strideof ( T . self ) ` <nl> + / / / - Precondition : The memory ` self . . < self + count * MemoryLayout < T > . stride ` <nl> / / / is bound to ` Pointee ` . <nl> public func withMemoryRebound < T , Result > ( to : T . Type , capacity count : Int , <nl> _ body : @ noescape ( UnsafeMutablePointer < T > ) throws - > Result <nl> public func < < Pointee > ( lhs : $ { Self } < Pointee > , rhs : $ { Self } < Pointee > ) - > Bool { <nl> @ _transparent <nl> public func + < Pointee > ( lhs : $ { Self } < Pointee > , rhs : Int ) - > $ { Self } < Pointee > { <nl> return $ { Self } ( Builtin . gep_Word ( <nl> - lhs . _rawValue , ( rhs & * strideof ( Pointee . self ) ) . _builtinWordValue ) ) <nl> + lhs . _rawValue , ( rhs & * MemoryLayout < Pointee > . stride ) . _builtinWordValue ) ) <nl> } <nl> <nl> @ _transparent <nl> public func - < Pointee > ( lhs : $ { Self } < Pointee > , rhs : $ { Self } < Pointee > ) - > Int { <nl> return <nl> Int ( Builtin . sub_Word ( Builtin . ptrtoint_Word ( lhs . _rawValue ) , <nl> Builtin . ptrtoint_Word ( rhs . _rawValue ) ) ) <nl> - / strideof ( Pointee . self ) <nl> + / MemoryLayout < Pointee > . stride <nl> } <nl> <nl> @ _transparent <nl> mmm a / stdlib / public / core / UnsafeRawPointer . swift . gyb <nl> ppp b / stdlib / public / core / UnsafeRawPointer . swift . 
gyb <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / - Precondition : The memory is uninitialized . <nl> / / / <nl> / / / - Postcondition : The memory is bound to ' T ' starting at ` self ` continuing <nl> - / / / through ` self ` + ` count ` * ` strideof ( T . self ) ` <nl> + / / / through ` self ` + ` count ` * ` MemoryLayout < T > . stride ` <nl> / / / <nl> / / / - Warning : Binding memory to a type is potentially undefined if the <nl> / / / memory is ever accessed as an unrelated type . <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> " UnsafeMutableRawPointer . initializeMemory : negative count " ) <nl> <nl> Builtin . bindMemory ( _rawValue , count . _builtinWordValue , type ) <nl> - var nextPtr = self + index & * strideof ( T . self ) <nl> + var nextPtr = self + index & * MemoryLayout < T > . stride <nl> for _ in 0 . . < count { <nl> Builtin . initialize ( value , nextPtr . _rawValue ) <nl> - nextPtr + = strideof ( T . self ) <nl> + nextPtr + = MemoryLayout < T > . stride <nl> } <nl> return UnsafeMutablePointer ( _rawValue ) <nl> } <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / - Precondition : ` count > = 0 ` <nl> / / / <nl> / / / - Precondition : The memory regions ` source . . < source + count ` and <nl> - / / / ` self . . < self + count * strideof ( T . self ) ` do not overlap . <nl> + / / / ` self . . < self + count * MemoryLayout < T > . stride ` do not overlap . <nl> / / / <nl> - / / / - Precondition : The memory at ` self . . < self + count * strideof ( T . self ) ` <nl> + / / / - Precondition : The memory at ` self . . < self + count * MemoryLayout < T > . stride ` <nl> / / / is uninitialized , and the ` T ` values at ` source . . < source + count ` are <nl> / / / initialized . <nl> / / / <nl> / / / - Precondition : The underlying pointer is properly aligned for <nl> / / / accessing ` T ` . <nl> / / / <nl> - / / / - Postcondition : The memory at ` self . . < self + count * strideof ( T . self ) ` <nl> + / / / - Postcondition : The memory at ` self . . < self + count * MemoryLayout < T > . stride ` <nl> / / / is bound to type ` T ` . <nl> / / / <nl> / / / - Postcondition : The ` T ` values at ` self . . < self + count * <nl> - / / / strideof ( T . self ) ` and ` source . . < source + count ` are initialized . <nl> + / / / MemoryLayout < T > . stride ` and ` source . . < source + count ` are initialized . <nl> @ discardableResult <nl> public func initializeMemory < T > ( <nl> as type : T . Type , from source : UnsafePointer < T > , count : Int <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> count > = 0 , <nl> " UnsafeMutableRawPointer . initializeMemory with negative count " ) <nl> _debugPrecondition ( <nl> - ( UnsafeRawPointer ( self + count * strideof ( T . self ) ) <nl> + ( UnsafeRawPointer ( self + count * MemoryLayout < T > . stride ) <nl> < = UnsafeRawPointer ( source ) ) <nl> | | UnsafeRawPointer ( source + count ) < = UnsafeRawPointer ( self ) , <nl> " UnsafeMutableRawPointer . initializeMemory overlapping range " ) <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / Returns an ` UnsafeMutablePointer < T > ` this memory . <nl> / / / <nl> / / / - Precondition : The memory at ` self . . < self + source . count * <nl> - / / / strideof ( T . self ) ` is uninitialized . <nl> + / / / MemoryLayout < T > . stride ` is uninitialized . 
<nl> / / / <nl> / / / - Postcondition : The memory at ` self . . < self + source . count * <nl> - / / / strideof ( T . self ) ` is bound to type ` T ` . <nl> + / / / MemoryLayout < T > . stride ` is bound to type ` T ` . <nl> / / / <nl> / / / - Postcondition : The ` T ` values at ` self . . < self + source . count * <nl> - / / / strideof ( T . self ) ` are initialized . <nl> + / / / MemoryLayout < T > . stride ` are initialized . <nl> / / / <nl> / / / TODO : Optimize where ` C ` is a ` ContiguousArrayBuffer ` . <nl> @ discardableResult <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / - Precondition : ` count > = 0 ` <nl> / / / <nl> / / / - Precondition : The memory at ` self . . < self + count * <nl> - / / / strideof ( T . self ) ` is uninitialized and the ` T ` values at <nl> + / / / MemoryLayout < T > . stride ` is uninitialized and the ` T ` values at <nl> / / / ` source . . < source + count ` are initialized . <nl> / / / <nl> / / / - Postcondition : The memory at ` self . . < self + count * <nl> - / / / strideof ( T . self ) ` is bound to type ` T ` . <nl> + / / / MemoryLayout < T > . stride ` is bound to type ` T ` . <nl> / / / <nl> / / / - Postcondition : The ` T ` values at ` self . . < self + count * <nl> - / / / strideof ( T . self ) ` are initialized and the memory at <nl> + / / / MemoryLayout < T > . stride ` are initialized and the memory at <nl> / / / ` source . . < source + count ` is uninitialized . <nl> @ discardableResult <nl> public func moveInitializeMemory < T > ( <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / such that ` T ` is layout compatible with ` U ` . <nl> public func load < T > ( fromByteOffset offset : Int = 0 , as type : T . Type ) - > T { <nl> _debugPrecondition ( 0 = = ( UInt ( bitPattern : self + offset ) <nl> - & ( UInt ( alignof ( T . self ) ) - 1 ) ) , <nl> + & ( UInt ( MemoryLayout < T > . alignment ) - 1 ) ) , <nl> " load from misaligned raw pointer " ) <nl> <nl> return Builtin . load ( ( self + offset ) . _rawValue ) <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> of value : T , toByteOffset offset : Int = 0 , as : T . Type <nl> ) { <nl> _debugPrecondition ( 0 = = ( UInt ( bitPattern : self + offset ) <nl> - & ( UInt ( alignof ( T . self ) ) - 1 ) ) , <nl> + & ( UInt ( MemoryLayout < T > . alignment ) - 1 ) ) , <nl> " storeBytes to misaligned raw pointer " ) <nl> <nl> var temp = value <nl> withUnsafeMutablePointer ( to : & temp ) { source in <nl> let rawSrc = UnsafeMutableRawPointer ( source ) . _rawValue <nl> - let zero : Int32 = 0 <nl> / / FIXME : to be replaced by _memcpy when conversions are implemented . <nl> Builtin . int_memcpy_RawPointer_RawPointer_Int64 ( <nl> - ( self + offset ) . _rawValue , rawSrc , UInt64 ( sizeof ( T . self ) ) . _value , <nl> - / * alignment : * / zero . _value , <nl> + ( self + offset ) . _rawValue , rawSrc , UInt64 ( MemoryLayout < T > . size ) . _value , <nl> + / * alignment : * / Int32 ( 0 ) . _value , <nl> / * volatile : * / false . _value ) <nl> } <nl> } <nl> public struct Unsafe $ { Mutable } RawPointer : Strideable , Hashable , _Pointer { <nl> / / / - Precondition : If the memory at ` self . . < self + count ` is bound to <nl> / / / a type ` U ` , then ` U ` is a trivial type , the underlying <nl> / / / pointers ` source ` and ` self ` are properly aligned for type <nl> - / / / ` U ` , and ` count ` is a multiple of ` strideof ( U . self ) ` . 
<nl> + / / / ` U ` , and ` count ` is a multiple of ` MemoryLayout < U > . stride ` . <nl> / / / <nl> / / / - Postcondition : The memory at ` self . . < self + count ` is <nl> / / / initialized to raw bytes . If the memory is bound to type ` U ` , <nl> mmm a / stdlib / public / core / VarArgs . swift <nl> ppp b / stdlib / public / core / VarArgs . swift <nl> public func getVaList ( _ args : [ CVarArg ] ) - > CVaListPointer { <nl> public func _encodeBitsAsWords < T : CVarArg > ( _ x : T ) - > [ Int ] { <nl> let result = [ Int ] ( <nl> repeating : 0 , <nl> - count : ( sizeof ( T . self ) + sizeof ( Int . self ) - 1 ) / sizeof ( Int . self ) ) <nl> + count : ( MemoryLayout < T > . size + MemoryLayout < Int > . size - 1 ) / MemoryLayout < Int > . size ) <nl> _sanityCheck ( result . count > 0 ) <nl> var tmp = x <nl> / / FIXME : use UnsafeMutablePointer . assign ( from : ) instead of memcpy . <nl> _memcpy ( dest : UnsafeMutablePointer ( result . _baseAddressIfContiguous ! ) , <nl> src : UnsafeMutablePointer ( Builtin . addressof ( & tmp ) ) , <nl> - size : UInt ( sizeof ( T . self ) ) ) <nl> + size : UInt ( MemoryLayout < T > . size ) ) <nl> return result <nl> } <nl> <nl> extension Int64 : CVarArg , _CVarArgAligned { <nl> / / / the value returned by ` _cVarArgEncoding ` . <nl> public var _cVarArgAlignment : Int { <nl> / / FIXME : alignof differs from the ABI alignment on some architectures <nl> - return alignofValue ( self ) <nl> + return MemoryLayout . _ofInstance ( self ) . alignment <nl> } <nl> } <nl> <nl> extension UInt64 : CVarArg , _CVarArgAligned { <nl> / / / the value returned by ` _cVarArgEncoding ` . <nl> public var _cVarArgAlignment : Int { <nl> / / FIXME : alignof differs from the ABI alignment on some architectures <nl> - return alignofValue ( self ) <nl> + return MemoryLayout . _ofInstance ( self ) . alignment <nl> } <nl> } <nl> <nl> extension Float : _CVarArgPassedAsDouble , _CVarArgAligned { <nl> / / / the value returned by ` _cVarArgEncoding ` . <nl> public var _cVarArgAlignment : Int { <nl> / / FIXME : alignof differs from the ABI alignment on some architectures <nl> - return alignofValue ( Double ( self ) ) <nl> + return MemoryLayout . _ofInstance ( Double ( self ) ) . alignment <nl> } <nl> } <nl> <nl> extension Double : _CVarArgPassedAsDouble , _CVarArgAligned { <nl> / / / the value returned by ` _cVarArgEncoding ` . <nl> public var _cVarArgAlignment : Int { <nl> / / FIXME : alignof differs from the ABI alignment on some architectures <nl> - return alignofValue ( self ) <nl> + return MemoryLayout . _ofInstance ( self ) . alignment <nl> } <nl> } <nl> <nl> final internal class _VaListBuilder { <nl> / / differs from ABI alignment on some architectures . <nl> # if arch ( arm ) & & ! os ( iOS ) <nl> if let arg = arg as ? _CVarArgAligned { <nl> - let alignmentInWords = arg . _cVarArgAlignment / sizeof ( Int . self ) <nl> + let alignmentInWords = arg . _cVarArgAlignment / MemoryLayout < Int > . size <nl> let misalignmentInWords = count % alignmentInWords <nl> if misalignmentInWords ! = 0 { <nl> let paddingInWords = alignmentInWords - misalignmentInWords <nl> final internal class _VaListBuilder { <nl> } <nl> <nl> func rawSizeAndAlignment ( _ wordCount : Int ) - > ( Builtin . Word , Builtin . Word ) { <nl> - return ( ( wordCount * strideof ( Int . self ) ) . _builtinWordValue , <nl> + return ( ( wordCount * MemoryLayout < Int > . stride ) . _builtinWordValue , <nl> requiredAlignmentInBytes . 
_builtinWordValue ) <nl> } <nl> <nl> final internal class _VaListBuilder { <nl> } <nl> <nl> / / FIXME : alignof differs from the ABI alignment on some architectures <nl> - let requiredAlignmentInBytes = alignof ( Double . self ) <nl> + let requiredAlignmentInBytes = MemoryLayout < Double > . alignment <nl> var count = 0 <nl> var allocated = 0 <nl> var storage : UnsafeMutablePointer < Int > ? = nil <nl> final internal class _VaListBuilder { <nl> <nl> struct Header { <nl> var gp_offset = CUnsignedInt ( 0 ) <nl> - var fp_offset = CUnsignedInt ( _x86_64CountGPRegisters * strideof ( Int . self ) ) <nl> + var fp_offset = CUnsignedInt ( _x86_64CountGPRegisters * MemoryLayout < Int > . stride ) <nl> var overflow_arg_area : UnsafeMutablePointer < Int > ? = nil <nl> var reg_save_area : UnsafeMutablePointer < Int > ? = nil <nl> } <nl> mmm a / stdlib / public / stubs / DispatchShims . mm <nl> ppp b / stdlib / public / stubs / DispatchShims . mm <nl> void SWIFT_DISPATCH_NOESCAPE ( ^ block ) ( long ) ) <nl> return dispatch_data_create ( buffer , size , cast ( queue ) , cast ( destructor ) ) ; <nl> } <nl> <nl> + unsigned int <nl> + swift : : _swift_dispatch_data_apply ( <nl> + __swift_shims_dispatch_data_t data , <nl> + __swift_shims_dispatch_data_applier SWIFT_DISPATCH_NOESCAPE applier ) <nl> + { <nl> + return dispatch_data_apply ( data , ^ bool ( dispatch_data_t data , size_t off , const void * loc , size_t size ) { <nl> + return applier ( data , off , loc , size ) ; <nl> + } ) ; <nl> + } <nl> mmm a / test / 1_stdlib / Character . swift <nl> ppp b / test / 1_stdlib / Character . swift <nl> CharacterTests . test ( " literal " ) { <nl> <nl> CharacterTests . test ( " sizeof " ) { <nl> / / FIXME : should be 8 . <nl> - / / < rdar : / / problem / 16754935 > sizeof ( Character . self ) is 9 , should be 8 <nl> + / / < rdar : / / problem / 16754935 > MemoryLayout < Character > . size is 9 , should be 8 <nl> <nl> - let size1 = sizeof ( Character . self ) <nl> + let size1 = MemoryLayout < Character > . size <nl> expectTrue ( size1 = = 8 | | size1 = = 9 ) <nl> <nl> var a : Character = " a " <nl> - let size2 = sizeofValue ( a ) <nl> + let size2 = MemoryLayout . _ofInstance ( a ) . size <nl> expectTrue ( size2 = = 8 | | size2 = = 9 ) <nl> <nl> expectEqual ( size1 , size2 ) <nl> mmm a / test / 1_stdlib / Dispatch . swift <nl> ppp b / test / 1_stdlib / Dispatch . swift <nl> DispatchAPI . test ( " dispatch_data_t enumeration " ) { <nl> _ = 1 <nl> } <nl> } <nl> + <nl> + DispatchAPI . test ( " dispatch_data_t deallocator " ) { <nl> + let q = DispatchQueue ( label : " dealloc queue " ) <nl> + var t = 0 <nl> + <nl> + autoreleasepool { <nl> + let size = 1024 <nl> + let p = UnsafeMutablePointer < UInt8 > . allocate ( capacity : size ) <nl> + let d = DispatchData ( bytesNoCopy : UnsafeBufferPointer ( start : p , count : size ) , deallocator : . custom ( q , { <nl> + t = 1 <nl> + } ) ) <nl> + } <nl> + <nl> + q . sync { <nl> + expectEqual ( 1 , t ) <nl> + } <nl> + } <nl> new file mode 100644 <nl> index 000000000000 . . a7ba3a60630c <nl> mmm / dev / null <nl> ppp b / test / 1_stdlib / DispatchDeprecationMacOS . swift <nl> <nl> + / / RUN : % swift - parse - target x86_64 - apple - macosx10 . 9 - verify - sdk % sdk % s <nl> + / / REQUIRES : OS = macosx <nl> + / / REQUIRES : objc_interop <nl> + <nl> + import Foundation <nl> + import Dispatch <nl> + <nl> + / / Don ' t warn because these APIs were deprecated in macOS 10 . 10 and the <nl> + / / minimum deployment target is 10 . 9 . <nl> + _ = DispatchQueue . GlobalQueuePriority . 
high / / no - warning <nl> + _ = DispatchQueue . GlobalQueuePriority . default / / no - warning <nl> + _ = DispatchQueue . GlobalQueuePriority . low / / no - warning <nl> + _ = DispatchQueue . GlobalQueuePriority . background / / no - warning <nl> + <nl> + _ = DispatchQueue . global ( priority : DispatchQueue . GlobalQueuePriority . background ) / / no - warning <nl> new file mode 100644 <nl> index 000000000000 . . e6756167d5d7 <nl> mmm / dev / null <nl> ppp b / test / 1_stdlib / DispatchDeprecationWatchOS . swift <nl> <nl> + / / RUN : % swift - parse - target i386 - apple - watchos2 . 0 - verify - sdk % sdk % s <nl> + / / REQUIRES : OS = watchos <nl> + / / REQUIRES : objc_interop <nl> + <nl> + import Foundation <nl> + import Dispatch <nl> + <nl> + / / These are deprecated on all versions of watchOS . <nl> + _ = DispatchQueue . GlobalQueuePriority . high / / expected - warning { { ' high ' is deprecated on watchOS : Use qos attributes instead } } <nl> + _ = DispatchQueue . GlobalQueuePriority . default / / expected - warning { { ' default ' is deprecated on watchOS : Use qos attributes instead } } <nl> + _ = DispatchQueue . GlobalQueuePriority . low / / expected - warning { { ' low ' is deprecated on watchOS : Use qos attributes instead } } <nl> + let b = DispatchQueue . GlobalQueuePriority . background / / expected - warning { { ' background ' is deprecated on watchOS : Use qos attributes instead } } <nl> + <nl> + _ = DispatchQueue . global ( priority : b ) / / expected - warning { { ' global ( priority : ) ' is deprecated on watchOS } } <nl> mmm a / test / 1_stdlib / Inputs / DictionaryKeyValueTypesObjC . swift <nl> ppp b / test / 1_stdlib / Inputs / DictionaryKeyValueTypesObjC . swift <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ a : NSArray , <nl> _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> _ useEnumerator : ( NSArray , NSFastEnumeration , ( AnyObject ) - > ( ) ) - > Void , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> let expectedContentsWithoutIdentity = <nl> _makeExpectedArrayContents ( expected ) <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkArrayFastEnumerationFromSwift ( <nl> _ expected : [ Int ] , <nl> _ a : NSArray , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkArrayFastEnumerationImpl ( <nl> expected , a , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkArrayFastEnumerationFromObjC ( <nl> _ expected : [ Int ] , <nl> _ a : NSArray , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkArrayFastEnumerationImpl ( <nl> expected , a , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ expected : [ Int ] , <nl> _ a : NSArray , <nl> maxFastEnumerationItems : Int , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkArrayFastEnumerationImpl ( <nl> expected , a , { a . 
objectEnumerator ( ) } , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ s : NSSet , <nl> _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> _ useEnumerator : ( NSSet , NSFastEnumeration , ( AnyObject ) - > ( ) ) - > Void , <nl> - _ convertMember : ( AnyObject ) - > Int <nl> + _ convertMember : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> let expectedContentsWithoutIdentity = <nl> _makeExpectedSetContents ( expected ) <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkSetFastEnumerationFromSwift ( <nl> _ expected : [ Int ] , <nl> _ s : NSSet , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertMember : ( AnyObject ) - > Int <nl> + _ convertMember : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkSetFastEnumerationImpl ( <nl> expected , s , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkSetFastEnumerationFromObjC ( <nl> _ expected : [ Int ] , <nl> _ s : NSSet , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertMember : ( AnyObject ) - > Int <nl> + _ convertMember : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkSetFastEnumerationImpl ( <nl> expected , s , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ expected : [ Int ] , <nl> _ s : NSSet , <nl> maxFastEnumerationItems : Int , <nl> - _ convertMember : ( AnyObject ) - > Int <nl> + _ convertMember : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkSetFastEnumerationImpl ( <nl> expected , s , { s . objectEnumerator ( ) } , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ d : NSDictionary , <nl> _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> _ useEnumerator : ( NSDictionary , NSFastEnumeration , ( AnyObjectTuple2 ) - > ( ) ) - > Void , <nl> - _ convertKey : ( AnyObject ) - > Int , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertKey : @ escaping ( AnyObject ) - > Int , <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> let expectedContentsWithoutIdentity = <nl> _makeExpectedDictionaryContents ( expected ) <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkDictionaryFastEnumerationFromSwift ( <nl> _ expected : [ ( Int , Int ) ] , <nl> _ d : NSDictionary , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertKey : ( AnyObject ) - > Int , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertKey : @ escaping ( AnyObject ) - > Int , <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkDictionaryFastEnumerationImpl ( <nl> expected , d , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> func checkDictionaryFastEnumerationFromObjC ( <nl> _ expected : [ ( Int , Int ) ] , <nl> _ d : NSDictionary , _ makeEnumerator : ( ) - > NSFastEnumeration , <nl> - _ convertKey : ( AnyObject ) - > Int , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertKey : @ escaping ( AnyObject ) - > Int , <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkDictionaryFastEnumerationImpl ( <nl> expected , d , makeEnumerator , <nl> typealias AnyObjectTuple2 = ( AnyObject , AnyObject ) <nl> _ expected : [ ( Int , Int ) ] , <nl> _ d : NSDictionary , <nl> maxFastEnumerationItems : Int , <nl> - _ convertKey : ( AnyObject ) - > Int , <nl> - _ convertValue : ( AnyObject ) - > Int <nl> + _ convertKey : @ escaping ( AnyObject ) - > Int , <nl> + _ convertValue : @ escaping ( AnyObject ) - > Int <nl> ) { <nl> _checkDictionaryFastEnumerationImpl ( 
<nl> expected , d , { d . keyEnumerator ( ) } , <nl> mmm a / test / 1_stdlib / Runtime . swift . gyb <nl> ppp b / test / 1_stdlib / Runtime . swift . gyb <nl> func computeCountLeadingZeroes ( _ x : Int64 ) - > Int64 { <nl> <nl> BitTwiddlingTestSuite . test ( " _pointerSize " ) { <nl> # if arch ( i386 ) | | arch ( arm ) <nl> - expectEqual ( 4 , sizeof ( Optional < AnyObject > . self ) ) <nl> + expectEqual ( 4 , MemoryLayout < Optional < AnyObject > > . size ) <nl> # elseif arch ( x86_64 ) | | arch ( arm64 ) | | arch ( powerpc64 ) | | arch ( powerpc64le ) <nl> - expectEqual ( 8 , sizeof ( Optional < AnyObject > . self ) ) <nl> + expectEqual ( 8 , MemoryLayout < Optional < AnyObject > > . size ) <nl> # else <nl> fatalError ( " implement " ) <nl> # endif <nl> mmm a / test / 1_stdlib / TestData . swift <nl> ppp b / test / 1_stdlib / TestData . swift <nl> class TestData : TestDataSuper { <nl> <nl> func testCopyBytes ( ) { <nl> let c = 10 <nl> - let underlyingBuffer = malloc ( c * strideof ( UInt16 . self ) ) ! <nl> + let underlyingBuffer = malloc ( c * MemoryLayout < UInt16 > . stride ) ! <nl> let u16Ptr = underlyingBuffer . bindMemory ( to : UInt16 . self , capacity : c ) <nl> let buffer = UnsafeMutableBufferPointer < UInt16 > ( start : u16Ptr , count : c ) <nl> <nl> buffer [ 0 ] = 0 <nl> buffer [ 1 ] = 0 <nl> <nl> - var data = Data ( capacity : c * strideof ( UInt16 . self ) ) <nl> - data . resetBytes ( in : 0 . . < c * strideof ( UInt16 . self ) ) <nl> + var data = Data ( capacity : c * MemoryLayout < UInt16 > . stride ) <nl> + data . resetBytes ( in : 0 . . < c * MemoryLayout < UInt16 > . stride ) <nl> data [ 0 ] = 0xFF <nl> data [ 1 ] = 0xFF <nl> let copiedCount = data . copyBytes ( to : buffer ) <nl> - expectEqual ( copiedCount , c * strideof ( UInt16 . self ) ) <nl> + expectEqual ( copiedCount , c * MemoryLayout < UInt16 > . stride ) <nl> <nl> expectEqual ( buffer [ 0 ] , 0xFFFF ) <nl> free ( underlyingBuffer ) <nl> class TestData : TestDataSuper { <nl> var data = a . withUnsafeBufferPointer { <nl> return Data ( buffer : $ 0 ) <nl> } <nl> - let expectedSize = strideof ( UInt8 . self ) * a . count <nl> + let expectedSize = MemoryLayout < UInt8 > . stride * a . count <nl> expectEqual ( expectedSize , data . count ) <nl> <nl> let underlyingBuffer = unsafeBitCast ( malloc ( expectedSize - 1 ) ! , to : UnsafeMutablePointer < UInt8 > . self ) <nl> class TestData : TestDataSuper { <nl> var data = a . withUnsafeBufferPointer { <nl> return Data ( buffer : $ 0 ) <nl> } <nl> - let expectedSize = strideof ( Int32 . self ) * a . count <nl> + let expectedSize = MemoryLayout < Int32 > . stride * a . count <nl> expectEqual ( expectedSize , data . count ) <nl> <nl> let underlyingBuffer = unsafeBitCast ( malloc ( expectedSize + 1 ) ! , to : UnsafeMutablePointer < UInt8 > . self ) <nl> class TestData : TestDataSuper { <nl> return Data ( buffer : $ 0 ) <nl> } <nl> <nl> - var expectedSize = strideof ( Int32 . self ) * a . count <nl> + var expectedSize = MemoryLayout < Int32 > . stride * a . count <nl> expectEqual ( expectedSize , data . count ) <nl> <nl> [ false , true ] . withUnsafeBufferPointer { <nl> data . append ( $ 0 ) <nl> } <nl> <nl> - expectedSize + = strideof ( Bool . self ) * 2 <nl> + expectedSize + = MemoryLayout < Bool > . stride * 2 <nl> expectEqual ( expectedSize , data . count ) <nl> <nl> let underlyingBuffer = unsafeBitCast ( malloc ( expectedSize ) ! , to : UnsafeMutablePointer < UInt8 > . 
self ) <nl> class TestData : TestDataSuper { <nl> return Data ( buffer : $ 0 ) <nl> } <nl> <nl> - expectEqual ( data . count , strideof ( MyStruct . self ) * 3 ) <nl> + expectEqual ( data . count , MemoryLayout < MyStruct > . stride * 3 ) <nl> <nl> <nl> / / append <nl> class TestData : TestDataSuper { <nl> data . append ( $ 0 ) <nl> } <nl> <nl> - expectEqual ( data . count , strideof ( MyStruct . self ) * 6 ) <nl> + expectEqual ( data . count , MemoryLayout < MyStruct > . stride * 6 ) <nl> <nl> / / copyBytes <nl> do { <nl> / / equal size <nl> - let underlyingBuffer = malloc ( 6 * strideof ( MyStruct . self ) ) ! <nl> + let underlyingBuffer = malloc ( 6 * MemoryLayout < MyStruct > . stride ) ! <nl> defer { free ( underlyingBuffer ) } <nl> <nl> let ptr = underlyingBuffer . bindMemory ( to : MyStruct . self , capacity : 6 ) <nl> let buffer = UnsafeMutableBufferPointer < MyStruct > ( start : ptr , count : 6 ) <nl> <nl> let byteCount = data . copyBytes ( to : buffer ) <nl> - expectEqual ( 6 * strideof ( MyStruct . self ) , byteCount ) <nl> + expectEqual ( 6 * MemoryLayout < MyStruct > . stride , byteCount ) <nl> } <nl> <nl> do { <nl> / / undersized <nl> - let underlyingBuffer = malloc ( 3 * strideof ( MyStruct . self ) ) ! <nl> + let underlyingBuffer = malloc ( 3 * MemoryLayout < MyStruct > . stride ) ! <nl> defer { free ( underlyingBuffer ) } <nl> <nl> let ptr = underlyingBuffer . bindMemory ( to : MyStruct . self , capacity : 3 ) <nl> let buffer = UnsafeMutableBufferPointer < MyStruct > ( start : ptr , count : 3 ) <nl> <nl> let byteCount = data . copyBytes ( to : buffer ) <nl> - expectEqual ( 3 * strideof ( MyStruct . self ) , byteCount ) <nl> + expectEqual ( 3 * MemoryLayout < MyStruct > . stride , byteCount ) <nl> } <nl> <nl> do { <nl> / / oversized <nl> - let underlyingBuffer = malloc ( 12 * strideof ( MyStruct . self ) ) ! <nl> + let underlyingBuffer = malloc ( 12 * MemoryLayout < MyStruct > . stride ) ! <nl> defer { free ( underlyingBuffer ) } <nl> <nl> let ptr = underlyingBuffer . bindMemory ( to : MyStruct . self , capacity : 6 ) <nl> let buffer = UnsafeMutableBufferPointer < MyStruct > ( start : ptr , count : 6 ) <nl> <nl> let byteCount = data . copyBytes ( to : buffer ) <nl> - expectEqual ( 6 * strideof ( MyStruct . self ) , byteCount ) <nl> + expectEqual ( 6 * MemoryLayout < MyStruct > . stride , byteCount ) <nl> } <nl> } <nl> <nl> mmm a / test / 1_stdlib / UnsafePointer . swift . gyb <nl> ppp b / test / 1_stdlib / UnsafePointer . swift . gyb <nl> class Missile { <nl> func checkPointerCorrectness ( _ check : Check , <nl> _ withMissiles : Bool = false , <nl> _ f : ( UnsafeMutablePointer < Missile > ) - > <nl> - ( UnsafeMutablePointer < Missile > , count : Int ) - > Void ) { <nl> + ( UnsafeMutablePointer < Missile > , _ count : Int ) - > Void ) { <nl> let ptr = UnsafeMutablePointer < Missile > . allocate ( capacity : 4 ) <nl> switch check { <nl> case . RightOverlap : <nl> func checkPointerCorrectness ( _ check : Check , <nl> if withMissiles { <nl> ( ptr + 2 ) . initialize ( to : Missile ( 3 ) ) <nl> } <nl> - f ( ptr + 1 ) ( ptr , count : 2 ) <nl> + f ( ptr + 1 ) ( ptr , 2 ) <nl> expectEqual ( 1 , ptr [ 1 ] . number ) <nl> expectEqual ( 2 , ptr [ 2 ] . number ) <nl> case . LeftOverlap : <nl> func checkPointerCorrectness ( _ check : Check , <nl> } <nl> ( ptr + 1 ) . initialize ( to : Missile ( 2 ) ) <nl> ( ptr + 2 ) . initialize ( to : Missile ( 3 ) ) <nl> - f ( ptr ) ( ptr + 1 , count : 2 ) <nl> + f ( ptr ) ( ptr + 1 , 2 ) <nl> expectEqual ( 2 , ptr [ 0 ] . 
number ) <nl> expectEqual ( 3 , ptr [ 1 ] . number ) <nl> case . Disjoint : <nl> func checkPointerCorrectness ( _ check : Check , <nl> } <nl> ( ptr + 2 ) . initialize ( to : Missile ( 2 ) ) <nl> ( ptr + 3 ) . initialize ( to : Missile ( 3 ) ) <nl> - f ( ptr ) ( ptr + 2 , count : 2 ) <nl> + f ( ptr ) ( ptr + 2 , 2 ) <nl> expectEqual ( 2 , ptr [ 0 ] . number ) <nl> expectEqual ( 3 , ptr [ 1 ] . number ) <nl> / / backwards <nl> func checkPointerCorrectness ( _ check : Check , <nl> ( ptr2 + 2 ) . initialize ( to : Missile ( 2 ) ) <nl> ( ptr2 + 3 ) . initialize ( to : Missile ( 3 ) ) <nl> } <nl> - f ( ptr2 + 2 ) ( ptr2 , count : 2 ) <nl> + f ( ptr2 + 2 ) ( ptr2 , 2 ) <nl> expectEqual ( 0 , ptr2 [ 2 ] . number ) <nl> expectEqual ( 1 , ptr2 [ 3 ] . number ) <nl> } <nl> } <nl> <nl> func checkPtr ( <nl> - _ f : ( ( UnsafeMutablePointer < Missile > ) - > ( UnsafeMutablePointer < Missile > , count : Int ) - > Void ) , <nl> + _ f : ( ( UnsafeMutablePointer < Missile > ) - > ( UnsafeMutablePointer < Missile > , _ count : Int ) - > Void ) , <nl> _ m : Bool <nl> ) - > ( Check ) - > Void { <nl> return { checkPointerCorrectness ( $ 0 , m , f ) } <nl> } <nl> <nl> func checkPtr ( <nl> - _ f : ( ( UnsafeMutablePointer < Missile > ) - > ( UnsafePointer < Missile > , count : Int ) - > Void ) , <nl> + _ f : ( ( UnsafeMutablePointer < Missile > ) - > ( UnsafePointer < Missile > , _ count : Int ) - > Void ) , <nl> _ m : Bool <nl> ) - > ( Check ) - > Void { <nl> return { <nl> checkPointerCorrectness ( $ 0 , m ) { destPtr in <nl> - return { f ( destPtr ) ( UnsafeMutablePointer ( $ 0 ) , count : $ 1 ) } <nl> + return { f ( destPtr ) ( UnsafeMutablePointer ( $ 0 ) , $ 1 ) } <nl> } <nl> } <nl> } <nl> mmm a / test / 1_stdlib / UnsafeRawPointer . swift <nl> ppp b / test / 1_stdlib / UnsafeRawPointer . swift <nl> class Missile { <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " initializeMemory " ) { <nl> Missile . missilesLaunched = 0 <nl> do { <nl> - let sizeInBytes = 3 * strideof ( Missile . self ) <nl> + let sizeInBytes = 3 * MemoryLayout < Missile > . stride <nl> var p1 = UnsafeMutableRawPointer . allocate ( <nl> - bytes : sizeInBytes , alignedTo : alignof ( Missile . self ) ) <nl> + bytes : sizeInBytes , alignedTo : MemoryLayout < Missile > . alignment ) <nl> defer { <nl> - p1 . deallocate ( bytes : sizeInBytes , alignedTo : alignof ( Missile . self ) ) <nl> + p1 . deallocate ( bytes : sizeInBytes , alignedTo : MemoryLayout < Missile > . alignment ) <nl> } <nl> var ptrM = p1 . initializeMemory ( as : Missile . self , to : Missile ( 1 ) ) <nl> p1 . initializeMemory ( as : Missile . self , at : 1 , count : 2 , to : Missile ( 2 ) ) <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " initializeMemory " ) { <nl> expectEqual ( 2 , ptrM [ 2 ] . number ) <nl> <nl> var p2 = UnsafeMutableRawPointer . allocate ( <nl> - bytes : sizeInBytes , alignedTo : alignof ( Missile . self ) ) <nl> + bytes : sizeInBytes , alignedTo : MemoryLayout < Missile > . alignment ) <nl> defer { <nl> - p2 . deallocate ( bytes : sizeInBytes , alignedTo : alignof ( Missile . self ) ) <nl> + p2 . deallocate ( bytes : sizeInBytes , alignedTo : MemoryLayout < Missile > . alignment ) <nl> } <nl> let ptrM2 = p2 . moveInitializeMemory ( as : Missile . self , from : ptrM , count : 3 ) <nl> defer { <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " initializeMemory " ) { <nl> } <nl> <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " bindMemory " ) { <nl> - let sizeInBytes = 3 * strideof ( Int . 
self ) <nl> + let sizeInBytes = 3 * MemoryLayout < Int > . stride <nl> var p1 = UnsafeMutableRawPointer . allocate ( <nl> - bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> defer { <nl> - p1 . deallocate ( bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + p1 . deallocate ( bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> } <nl> let ptrI = p1 . bindMemory ( to : Int . self , capacity : 3 ) <nl> ptrI . initialize ( from : 1 . . . 3 ) <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " bindMemory " ) { <nl> } <nl> <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " load / store " ) { <nl> - let sizeInBytes = 3 * strideof ( Int . self ) <nl> + let sizeInBytes = 3 * MemoryLayout < Int > . stride <nl> var p1 = UnsafeMutableRawPointer . allocate ( <nl> - bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> defer { <nl> - p1 . deallocate ( bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + p1 . deallocate ( bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> } <nl> let ptrI = p1 . initializeMemory ( as : Int . self , from : 1 . . . 3 ) <nl> defer { <nl> ptrI . deinitialize ( count : 3 ) <nl> } <nl> expectEqual ( 1 , p1 . load ( as : Int . self ) ) <nl> - expectEqual ( 2 , p1 . load ( fromByteOffset : strideof ( Int . self ) , as : Int . self ) ) <nl> - expectEqual ( 3 , p1 . load ( fromByteOffset : 2 * strideof ( Int . self ) , as : Int . self ) ) <nl> + expectEqual ( 2 , p1 . load ( fromByteOffset : MemoryLayout < Int > . stride , as : Int . self ) ) <nl> + expectEqual ( 3 , p1 . load ( fromByteOffset : 2 * MemoryLayout < Int > . stride , as : Int . self ) ) <nl> p1 . storeBytes ( of : 4 , as : Int . self ) <nl> - p1 . storeBytes ( of : 5 , toByteOffset : strideof ( Int . self ) , as : Int . self ) <nl> - p1 . storeBytes ( of : 6 , toByteOffset : 2 * strideof ( Int . self ) , as : Int . self ) <nl> + p1 . storeBytes ( of : 5 , toByteOffset : MemoryLayout < Int > . stride , as : Int . self ) <nl> + p1 . storeBytes ( of : 6 , toByteOffset : 2 * MemoryLayout < Int > . stride , as : Int . self ) <nl> expectEqual ( 4 , p1 . load ( as : Int . self ) ) <nl> - expectEqual ( 5 , p1 . load ( fromByteOffset : strideof ( Int . self ) , as : Int . self ) ) <nl> - expectEqual ( 6 , p1 . load ( fromByteOffset : 2 * strideof ( Int . self ) , as : Int . self ) ) <nl> + expectEqual ( 5 , p1 . load ( fromByteOffset : MemoryLayout < Int > . stride , as : Int . self ) ) <nl> + expectEqual ( 6 , p1 . load ( fromByteOffset : 2 * MemoryLayout < Int > . stride , as : Int . self ) ) <nl> } <nl> <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " copyBytes " ) { <nl> - let sizeInBytes = 4 * strideof ( Int . self ) <nl> + let sizeInBytes = 4 * MemoryLayout < Int > . stride <nl> var rawPtr = UnsafeMutableRawPointer . allocate ( <nl> - bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> defer { <nl> - rawPtr . deallocate ( bytes : sizeInBytes , alignedTo : alignof ( Int . self ) ) <nl> + rawPtr . deallocate ( bytes : sizeInBytes , alignedTo : MemoryLayout < Int > . alignment ) <nl> } <nl> let ptrI = rawPtr . initializeMemory ( as : Int . self , count : 4 , to : 42 ) <nl> defer { <nl> UnsafeMutableRawPointerExtraTestSuite . 
test ( " copyBytes " ) { <nl> / / Right overlap <nl> ptrI [ 0 ] = 1 <nl> ptrI [ 1 ] = 2 <nl> - ( rawPtr + strideof ( Int . self ) ) . copyBytes ( <nl> - from : roPtr , count : 2 * strideof ( Int . self ) ) <nl> + ( rawPtr + MemoryLayout < Int > . stride ) . copyBytes ( <nl> + from : roPtr , count : 2 * MemoryLayout < Int > . stride ) <nl> expectEqual ( 1 , ptrI [ 1 ] ) <nl> expectEqual ( 2 , ptrI [ 2 ] ) <nl> <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " copyBytes " ) { <nl> ptrI [ 1 ] = 2 <nl> ptrI [ 2 ] = 3 <nl> rawPtr . copyBytes ( <nl> - from : roPtr + strideof ( Int . self ) , count : 2 * strideof ( Int . self ) ) <nl> + from : roPtr + MemoryLayout < Int > . stride , count : 2 * MemoryLayout < Int > . stride ) <nl> expectEqual ( 2 , ptrI [ 0 ] ) <nl> expectEqual ( 3 , ptrI [ 1 ] ) <nl> <nl> UnsafeMutableRawPointerExtraTestSuite . test ( " copyBytes " ) { <nl> ptrI [ 2 ] = 2 <nl> ptrI [ 3 ] = 3 <nl> rawPtr . copyBytes ( <nl> - from : roPtr + 2 * strideof ( Int . self ) , count : 2 * strideof ( Int . self ) ) <nl> + from : roPtr + 2 * MemoryLayout < Int > . stride , count : 2 * MemoryLayout < Int > . stride ) <nl> expectEqual ( 2 , ptrI [ 0 ] ) <nl> expectEqual ( 3 , ptrI [ 1 ] ) <nl> <nl> / / Backwards <nl> ptrI [ 0 ] = 0 <nl> ptrI [ 1 ] = 1 <nl> - ( rawPtr + 2 * strideof ( Int . self ) ) . copyBytes ( <nl> - from : roPtr , count : 2 * strideof ( Int . self ) ) <nl> + ( rawPtr + 2 * MemoryLayout < Int > . stride ) . copyBytes ( <nl> + from : roPtr , count : 2 * MemoryLayout < Int > . stride ) <nl> expectEqual ( 0 , ptrI [ 2 ] ) <nl> expectEqual ( 1 , ptrI [ 3 ] ) <nl> } <nl> enum Check { <nl> } <nl> <nl> func checkRawPointerCorrectness ( _ check : Check , <nl> - _ f : ( UnsafeMutableRawPointer ) - > ( as : Int . Type , from : UnsafeMutablePointer < Int > , count : Int ) - > UnsafeMutablePointer < Int > ) { <nl> + _ f : ( UnsafeMutableRawPointer ) - > ( _ as : Int . Type , _ from : UnsafeMutablePointer < Int > , _ count : Int ) - > UnsafeMutablePointer < Int > ) { <nl> let ptr = UnsafeMutablePointer < Int > . allocate ( capacity : 4 ) <nl> switch check { <nl> case . RightOverlap : <nl> ptr . initialize ( to : 1 ) <nl> ( ptr + 1 ) . initialize ( to : 2 ) <nl> - _ = f ( UnsafeMutableRawPointer ( ptr + 1 ) ) ( as : Int . self , from : ptr , count : 2 ) <nl> + _ = f ( UnsafeMutableRawPointer ( ptr + 1 ) ) ( Int . self , ptr , 2 ) <nl> expectEqual ( 1 , ptr [ 1 ] ) <nl> expectEqual ( 2 , ptr [ 2 ] ) <nl> case . LeftOverlap : <nl> ( ptr + 1 ) . initialize ( to : 2 ) <nl> ( ptr + 2 ) . initialize ( to : 3 ) <nl> - _ = f ( UnsafeMutableRawPointer ( ptr ) ) ( as : Int . self , from : ptr + 1 , count : 2 ) <nl> + _ = f ( UnsafeMutableRawPointer ( ptr ) ) ( Int . self , ptr + 1 , 2 ) <nl> expectEqual ( 2 , ptr [ 0 ] ) <nl> expectEqual ( 3 , ptr [ 1 ] ) <nl> case . Disjoint : <nl> ( ptr + 2 ) . initialize ( to : 2 ) <nl> ( ptr + 3 ) . initialize ( to : 3 ) <nl> - _ = f ( UnsafeMutableRawPointer ( ptr ) ) ( as : Int . self , from : ptr + 2 , count : 2 ) <nl> + _ = f ( UnsafeMutableRawPointer ( ptr ) ) ( Int . self , ptr + 2 , 2 ) <nl> expectEqual ( 2 , ptr [ 0 ] ) <nl> expectEqual ( 3 , ptr [ 1 ] ) <nl> / / backwards <nl> let ptr2 = UnsafeMutablePointer < Int > . allocate ( capacity : 4 ) <nl> ptr2 . initialize ( to : 0 ) <nl> ( ptr2 + 1 ) . initialize ( to : 1 ) <nl> - _ = f ( UnsafeMutableRawPointer ( ptr2 + 2 ) ) ( as : Int . self , from : ptr2 , count : 2 ) <nl> + _ = f ( UnsafeMutableRawPointer ( ptr2 + 2 ) ) ( Int . 
self , ptr2 , 2 ) <nl> expectEqual ( 0 , ptr2 [ 2 ] ) <nl> expectEqual ( 1 , ptr2 [ 3 ] ) <nl> } <nl> func checkRawPointerCorrectness ( _ check : Check , <nl> <nl> func checkPtr ( <nl> _ f : ( ( UnsafeMutableRawPointer ) <nl> - - > ( as : Int . Type , from : UnsafeMutablePointer < Int > , count : Int ) <nl> + - > ( _ as : Int . Type , _ from : UnsafeMutablePointer < Int > , _ count : Int ) <nl> - > UnsafeMutablePointer < Int > ) <nl> ) - > ( Check ) - > Void { <nl> return { checkRawPointerCorrectness ( $ 0 , f ) } <nl> func checkPtr ( <nl> <nl> func checkPtr ( <nl> _ f : ( ( UnsafeMutableRawPointer ) <nl> - - > ( as : Int . Type , from : UnsafePointer < Int > , count : Int ) <nl> + - > ( _ as : Int . Type , _ from : UnsafePointer < Int > , _ count : Int ) <nl> - > UnsafeMutablePointer < Int > ) <nl> ) - > ( Check ) - > Void { <nl> return { <nl> checkRawPointerCorrectness ( $ 0 ) { destPtr in <nl> - return { f ( destPtr ) ( as : $ 0 , from : UnsafeMutablePointer ( $ 1 ) , count : $ 2 ) } <nl> + return { f ( destPtr ) ( $ 0 , UnsafeMutablePointer ( $ 1 ) , $ 2 ) } <nl> } <nl> } <nl> } <nl> mmm a / test / 1_stdlib / simd . swift . gyb <nl> ppp b / test / 1_stdlib / simd . swift . gyb <nl> var simdTestSuite = TestSuite ( " simd " ) <nl> <nl> simdTestSuite . test ( " sizes " ) { <nl> / / C interop requires that vector be the right size . <nl> - expectEqual ( 8 , sizeof ( float2 . self ) ) <nl> - expectEqual ( 16 , sizeof ( float3 . self ) ) <nl> - expectEqual ( 16 , sizeof ( float4 . self ) ) <nl> - expectEqual ( 8 , sizeof ( int2 . self ) ) <nl> - expectEqual ( 16 , sizeof ( int3 . self ) ) <nl> - expectEqual ( 16 , sizeof ( int4 . self ) ) <nl> - expectEqual ( 16 , sizeof ( double2 . self ) ) <nl> - expectEqual ( 32 , sizeof ( double3 . self ) ) <nl> - expectEqual ( 32 , sizeof ( double4 . self ) ) <nl> - <nl> - expectEqual ( 16 , sizeof ( float2x2 . self ) ) <nl> - expectEqual ( 32 , sizeof ( float2x3 . self ) ) <nl> - expectEqual ( 32 , sizeof ( float2x4 . self ) ) <nl> - expectEqual ( 24 , sizeof ( float3x2 . self ) ) <nl> - expectEqual ( 48 , sizeof ( float3x3 . self ) ) <nl> - expectEqual ( 48 , sizeof ( float3x4 . self ) ) <nl> - expectEqual ( 32 , sizeof ( float4x2 . self ) ) <nl> - expectEqual ( 64 , sizeof ( float4x3 . self ) ) <nl> - expectEqual ( 64 , sizeof ( float4x4 . self ) ) <nl> - <nl> - expectEqual ( 32 , sizeof ( double2x2 . self ) ) <nl> - expectEqual ( 64 , sizeof ( double2x3 . self ) ) <nl> - expectEqual ( 64 , sizeof ( double2x4 . self ) ) <nl> - expectEqual ( 48 , sizeof ( double3x2 . self ) ) <nl> - expectEqual ( 96 , sizeof ( double3x3 . self ) ) <nl> - expectEqual ( 96 , sizeof ( double3x4 . self ) ) <nl> - expectEqual ( 64 , sizeof ( double4x2 . self ) ) <nl> - expectEqual ( 128 , sizeof ( double4x3 . self ) ) <nl> - expectEqual ( 128 , sizeof ( double4x4 . self ) ) <nl> + expectEqual ( 8 , MemoryLayout < float2 > . size ) <nl> + expectEqual ( 16 , MemoryLayout < float3 > . size ) <nl> + expectEqual ( 16 , MemoryLayout < float4 > . size ) <nl> + expectEqual ( 8 , MemoryLayout < int2 > . size ) <nl> + expectEqual ( 16 , MemoryLayout < int3 > . size ) <nl> + expectEqual ( 16 , MemoryLayout < int4 > . size ) <nl> + expectEqual ( 16 , MemoryLayout < double2 > . size ) <nl> + expectEqual ( 32 , MemoryLayout < double3 > . size ) <nl> + expectEqual ( 32 , MemoryLayout < double4 > . size ) <nl> + <nl> + expectEqual ( 16 , MemoryLayout < float2x2 > . size ) <nl> + expectEqual ( 32 , MemoryLayout < float2x3 > . size ) <nl> + expectEqual ( 32 , MemoryLayout < float2x4 > . 
size ) <nl> + expectEqual ( 24 , MemoryLayout < float3x2 > . size ) <nl> + expectEqual ( 48 , MemoryLayout < float3x3 > . size ) <nl> + expectEqual ( 48 , MemoryLayout < float3x4 > . size ) <nl> + expectEqual ( 32 , MemoryLayout < float4x2 > . size ) <nl> + expectEqual ( 64 , MemoryLayout < float4x3 > . size ) <nl> + expectEqual ( 64 , MemoryLayout < float4x4 > . size ) <nl> + <nl> + expectEqual ( 32 , MemoryLayout < double2x2 > . size ) <nl> + expectEqual ( 64 , MemoryLayout < double2x3 > . size ) <nl> + expectEqual ( 64 , MemoryLayout < double2x4 > . size ) <nl> + expectEqual ( 48 , MemoryLayout < double3x2 > . size ) <nl> + expectEqual ( 96 , MemoryLayout < double3x3 > . size ) <nl> + expectEqual ( 96 , MemoryLayout < double3x4 > . size ) <nl> + expectEqual ( 64 , MemoryLayout < double4x2 > . size ) <nl> + expectEqual ( 128 , MemoryLayout < double4x3 > . size ) <nl> + expectEqual ( 128 , MemoryLayout < double4x4 > . size ) <nl> } <nl> <nl> simdTestSuite . test ( " vector init " ) { <nl> mmm a / test / ClangModules / Dispatch_test . swift <nl> ppp b / test / ClangModules / Dispatch_test . swift <nl> extension dispatch_queue_t { } / / expected - error { { ' dispatch_queue_t ' is unavaila <nl> <nl> / / Make sure you can extend a dispatch type via its common name . <nl> extension DispatchQueue { <nl> - func myAsync ( _ block : ( ) - > Void ) { <nl> + func myAsync ( _ block : @ escaping ( ) - > Void ) { <nl> async ( execute : block ) <nl> } <nl> } <nl> mmm a / test / ClangModules / blocks_parse . swift <nl> ppp b / test / ClangModules / blocks_parse . swift <nl> func testNoEscape ( f : @ noescape @ convention ( block ) ( ) - > Void , nsStr : NSString , <nl> / / rdar : / / problem / 19818617 <nl> nsStr . enumerateLines ( fStr ) / / okay due to @ noescape <nl> <nl> - _ = nsStr . enumerateLines as Int / / expected - error { { cannot convert value of type ' ( @ noescape ( String ) - > Void ) - > Void ' to type ' Int ' in coercion } } <nl> + _ = nsStr . enumerateLines as Int / / expected - error { { cannot convert value of type ' ( ( String ) - > Void ) - > Void ' to type ' Int ' in coercion } } <nl> } <nl> <nl> func checkTypeImpl < T > ( _ a : inout T , _ : T . Type ) { } <nl> mmm a / test / ClangModules / objc_bridging_custom . swift <nl> ppp b / test / ClangModules / objc_bridging_custom . swift <nl> class Base : NSObject { <nl> } <nl> <nl> class Sub : Base { <nl> - / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( a : Refrigerator , b : Refrigerator ) - > Refrigerator ? ' } } { { 25 - 40 = Refrigerator } } { { 45 - 61 = Refrigerator ? } } { { 66 - 81 = Refrigerator } } <nl> + / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( Refrigerator , Refrigerator ) - > Refrigerator ? ' } } { { 25 - 40 = Refrigerator } } { { 45 - 61 = Refrigerator ? } } { { 66 - 81 = Refrigerator } } <nl> override func test ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator { / / expected - error { { method does not override any method from its superclass } } { { none } } <nl> return a <nl> } <nl> - / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( a : ManufacturerInfo < NSString > , b : ManufacturerInfo < NSString > ) - > ManufacturerInfo < NSString > ? ' } } { { 32 - 62 = ManufacturerInfo < NSString > } } { { 67 - 98 = ManufacturerInfo < NSString > ? 
} } { { 103 - 133 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( ManufacturerInfo < NSString > , ManufacturerInfo < NSString > ) - > ManufacturerInfo < NSString > ? ' } } { { 32 - 62 = ManufacturerInfo < NSString > } } { { 67 - 98 = ManufacturerInfo < NSString > ? } } { { 103 - 133 = ManufacturerInfo < NSString > } } <nl> override func testGeneric ( a : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > { / / expected - error { { method does not override any method from its superclass } } { { none } } <nl> return a <nl> } <nl> class Sub : Base { <nl> <nl> override func testUnmigrated ( a : NSObject , b : NSObject , c : NSObject ) { } / / expected - error { { method does not override any method from its superclass } } { { none } } <nl> <nl> - / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( a : RuncingMode , b : Refrigerator ) - > ( ) ' } } { { 53 - 68 = Refrigerator } } <nl> + / / expected - note @ + 1 { { type does not match superclass instance method with type ' ( RuncingMode , Refrigerator ) - > ( ) ' } } { { 53 - 68 = Refrigerator } } <nl> override func testPartialMigrated ( a : NSObject , b : APPRefrigerator ) { } / / expected - error { { method does not override any method from its superclass } } { { none } } <nl> <nl> - / / expected - note @ + 1 { { type does not match superclass subscript with type ' ( a : Refrigerator , b : Refrigerator ) - > Refrigerator ? ' } } { { 27 - 42 = Refrigerator } } { { 49 - 65 = Refrigerator ? } } { { 70 - 85 = Refrigerator } } <nl> + / / expected - note @ + 1 { { type does not match superclass subscript with type ' ( Refrigerator , Refrigerator ) - > Refrigerator ? ' } } { { 27 - 42 = Refrigerator } } { { 49 - 65 = Refrigerator ? } } { { 70 - 85 = Refrigerator } } <nl> override subscript ( a a : APPRefrigerator , b b : APPRefrigerator ? ) - > APPRefrigerator { / / expected - error { { subscript does not override any subscript from its superclass } } { { none } } <nl> return a <nl> } <nl> - / / expected - note @ + 1 { { type does not match superclass subscript with type ' ( generic : ManufacturerInfo < NSString > , b : ManufacturerInfo < NSString > ) - > ManufacturerInfo < NSString > ? ' } } { { 33 - 63 = ManufacturerInfo < NSString > } } { { 70 - 101 = ManufacturerInfo < NSString > ? } } { { 106 - 136 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { type does not match superclass subscript with type ' ( ManufacturerInfo < NSString > , ManufacturerInfo < NSString > ) - > ManufacturerInfo < NSString > ? ' } } { { 33 - 63 = ManufacturerInfo < NSString > } } { { 70 - 101 = ManufacturerInfo < NSString > ? } } { { 106 - 136 = ManufacturerInfo < NSString > } } <nl> override subscript ( generic a : APPManufacturerInfo < AnyObject > , b b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > { / / expected - error { { subscript does not override any subscript from its superclass } } { { none } } <nl> return a <nl> } <nl> protocol TestProto { <nl> } <nl> <nl> class TestProtoImpl : NSObject , TestProto { / / expected - error { { type ' TestProtoImpl ' does not conform to protocol ' TestProto ' } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator ' } } { { 16 - 31 = Refrigerator } } { { 36 - 52 = Refrigerator ? 
} } { { 57 - 72 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPRefrigerator , APPRefrigerator ? ) - > APPRefrigerator ' } } { { 16 - 31 = Refrigerator } } { { 36 - 52 = Refrigerator ? } } { { 57 - 72 = Refrigerator } } <nl> func test ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator { <nl> return a <nl> } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 23 - 53 = ManufacturerInfo < NSString > } } { { 58 - 89 = ManufacturerInfo < NSString > ? } } { { 94 - 124 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPManufacturerInfo < AnyObject > , APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 23 - 53 = ManufacturerInfo < NSString > } } { { 58 - 89 = ManufacturerInfo < NSString > ? } } { { 94 - 124 = ManufacturerInfo < NSString > } } <nl> func testGeneric ( a : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > { <nl> return a <nl> } <nl> / / expected - note @ + 1 { { candidate has non - matching type ' ( inout APPRefrigerator ) - > ( ) ' } } { { 27 - 48 = inout Refrigerator } } <nl> class func testInout ( _ : inout APPRefrigerator ) { } <nl> <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : NSObject , b : NSObject , c : NSObject ) - > ( ) ' } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( NSObject , NSObject , NSObject ) - > ( ) ' } } <nl> func testUnmigrated ( a : NSObject , b : NSObject , c : NSObject ) { } <nl> <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : NSObject , b : APPRefrigerator ) - > ( ) ' } } { { 44 - 59 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( NSObject , APPRefrigerator ) - > ( ) ' } } { { 44 - 59 = Refrigerator } } <nl> func testPartialMigrated ( a : NSObject , b : APPRefrigerator ) { } <nl> <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator ' } } { { 18 - 33 = Refrigerator } } { { 40 - 56 = Refrigerator ? } } { { 61 - 76 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPRefrigerator , APPRefrigerator ? ) - > APPRefrigerator ' } } { { 18 - 33 = Refrigerator } } { { 40 - 56 = Refrigerator ? } } { { 61 - 76 = Refrigerator } } <nl> subscript ( a a : APPRefrigerator , b b : APPRefrigerator ? ) - > APPRefrigerator { <nl> return a <nl> } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( generic : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 24 - 54 = ManufacturerInfo < NSString > } } { { 61 - 92 = ManufacturerInfo < NSString > ? } } { { 97 - 127 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPManufacturerInfo < AnyObject > , APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 24 - 54 = ManufacturerInfo < NSString > } } { { 61 - 92 = ManufacturerInfo < NSString > ? } } { { 97 - 127 = ManufacturerInfo < NSString > } } <nl> subscript ( generic a : APPManufacturerInfo < AnyObject > , b b : APPManufacturerInfo < AnyObject > ? 
) - > APPManufacturerInfo < AnyObject > { <nl> return a <nl> } <nl> class TestProtoImpl : NSObject , TestProto { / / expected - error { { type ' TestProtoI <nl> <nl> class TestObjCProtoImpl : NSObject , TestObjCProto { <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator ' } } { { 16 - 31 = Refrigerator } } { { 36 - 52 = Refrigerator ? } } { { 57 - 72 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPRefrigerator , APPRefrigerator ? ) - > APPRefrigerator ' } } { { 16 - 31 = Refrigerator } } { { 36 - 52 = Refrigerator ? } } { { 57 - 72 = Refrigerator } } <nl> func test ( a : APPRefrigerator , b : APPRefrigerator ? ) - > APPRefrigerator { / / expected - warning { { instance method ' test ( a : b : ) ' nearly matches optional requirement ' test ( a : b : ) ' of protocol ' TestObjCProto ' } } <nl> return a <nl> } <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 23 - 53 = ManufacturerInfo < NSString > } } { { 58 - 89 = ManufacturerInfo < NSString > ? } } { { 94 - 124 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPManufacturerInfo < AnyObject > , APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 23 - 53 = ManufacturerInfo < NSString > } } { { 58 - 89 = ManufacturerInfo < NSString > ? } } { { 94 - 124 = ManufacturerInfo < NSString > } } <nl> func testGeneric ( a : APPManufacturerInfo < AnyObject > , b : APPManufacturerInfo < AnyObject > ? 
) - > APPManufacturerInfo < AnyObject > { / / expected - warning { { instance method ' testGeneric ( a : b : ) ' nearly matches optional requirement ' testGeneric ( a : b : ) ' of protocol ' TestObjCProto ' } } { { none } } <nl> return a <nl> } <nl> <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : NSObject , b : NSObject , c : NSObject ) - > ( ) ' } } { { none } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( NSObject , NSObject , NSObject ) - > ( ) ' } } { { none } } <nl> func testUnmigrated ( a : NSObject , b : NSObject , c : NSObject ) { } / / expected - warning { { instance method ' testUnmigrated ( a : b : c : ) ' nearly matches optional requirement ' testUnmigrated ( a : b : c : ) ' of protocol ' TestObjCProto ' } } <nl> <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : NSObject , b : APPRefrigerator ) - > ( ) ' } } { { 44 - 59 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( NSObject , APPRefrigerator ) - > ( ) ' } } { { 44 - 59 = Refrigerator } } <nl> func testPartialMigrated ( a : NSObject , b : APPRefrigerator ) { } / / expected - warning { { instance method ' testPartialMigrated ( a : b : ) ' nearly matches optional requirement ' testPartialMigrated ( a : b : ) ' of protocol ' TestObjCProto ' } } { { none } } <nl> <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( a : APPRefrigerator ? ) - > APPRefrigerator ' } } { { 18 - 34 = Refrigerator ? } } { { 39 - 54 = Refrigerator } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPRefrigerator ? ) - > APPRefrigerator ' } } { { 18 - 34 = Refrigerator ? } } { { 39 - 54 = Refrigerator } } <nl> subscript ( a a : APPRefrigerator ? ) - > APPRefrigerator { / / expected - warning { { subscript ' subscript ( a : ) ' nearly matches optional requirement ' subscript ( a : ) ' of protocol ' TestObjCProto ' } } { { none } } <nl> / / expected - note @ - 1 { { here } } <nl> return a ! <nl> } <nl> / / expected - note @ + 2 { { private } } expected - note @ + 2 { { @ nonobjc } } expected - note @ + 2 { { extension } } <nl> - / / expected - note @ + 1 { { candidate has non - matching type ' ( generic : APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 24 - 55 = ManufacturerInfo < NSString > ? } } { { 60 - 90 = ManufacturerInfo < NSString > } } <nl> + / / expected - note @ + 1 { { candidate has non - matching type ' ( APPManufacturerInfo < AnyObject > ? ) - > APPManufacturerInfo < AnyObject > ' } } { { 24 - 55 = ManufacturerInfo < NSString > ? } } { { 60 - 90 = ManufacturerInfo < NSString > } } <nl> subscript ( generic a : APPManufacturerInfo < AnyObject > ? 
) - > APPManufacturerInfo < AnyObject > { / / expected - warning { { subscript ' subscript ( generic : ) ' nearly matches optional requirement ' subscript ( generic : ) ' of protocol ' TestObjCProto ' } } { { none } } <nl> / / expected - error @ - 1 { { subscript getter with Objective - C selector ' objectForKeyedSubscript : ' conflicts with previous declaration with the same Objective - C selector } } <nl> return a ! <nl> mmm a / test / ClangModules / objc_parse . swift <nl> ppp b / test / ClangModules / objc_parse . swift <nl> func instanceMethodsInExtensions ( _ b : B ) { <nl> b . method ( 1 , separateExtMethod : 3 . 5 ) <nl> <nl> let m1 = b . method ( _ : onCat1 : ) <nl> - _ = m1 ( 1 , onCat1 : 2 . 5 ) <nl> + _ = m1 ( 1 , 2 . 5 ) <nl> <nl> let m2 = b . method ( _ : onExtA : ) <nl> - _ = m2 ( 1 , onExtA : 2 . 5 ) <nl> + _ = m2 ( 1 , 2 . 5 ) <nl> <nl> let m3 = b . method ( _ : onExtB : ) <nl> - _ = m3 ( 1 , onExtB : 2 . 5 ) <nl> + _ = m3 ( 1 , 2 . 5 ) <nl> <nl> let m4 = b . method ( _ : separateExtMethod : ) <nl> - _ = m4 ( 1 , separateExtMethod : 2 . 5 ) <nl> + _ = m4 ( 1 , 2 . 5 ) <nl> } <nl> <nl> func dynamicLookupMethod ( _ b : AnyObject ) { <nl> if let m5 = b . method ( _ : separateExtMethod : ) { <nl> - _ = m5 ( 1 , separateExtMethod : 2 . 5 ) <nl> + _ = m5 ( 1 , 2 . 5 ) <nl> } <nl> } <nl> <nl> new file mode 100644 <nl> index 000000000000 . . dcc902c21205 <nl> mmm / dev / null <nl> ppp b / test / ClangModules / overlay_with_submodule . swift <nl> <nl> + / / RUN : % target - swift - frontend - emit - module % s - sdk % S / Inputs - module - name HasSubmodule - I % S / Inputs / custom - modules - o % t <nl> + <nl> + / / REQUIRES : objc_interop <nl> + <nl> + @ _exported import HasSubmodule <nl> + @ _exported import HasSubmodule . Submodule <nl> + <nl> + func useAnOperator ( ) - > Int { <nl> + var x : Int <nl> + x = 1 + 2 / / Forces a lookup of precedence groups <nl> + return x <nl> + } <nl> mmm a / test / Constraints / array_literal . swift <nl> ppp b / test / Constraints / array_literal . swift <nl> func longArray ( ) { <nl> var _ = [ " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " , " 1 " ] <nl> } <nl> <nl> - [ 1 , 2 ] . map / / expected - error { { expression type ' ( @ noescape ( Int ) throws - > _ ) throws - > [ _ ] ' is ambiguous without more context } } <nl> + [ 1 , 2 ] . map / / expected - error { { expression type ' ( ( Int ) throws - > _ ) throws - > [ _ ] ' is ambiguous without more context } } <nl> <nl> <nl> / / < rdar : / / problem / 25563498 > Type checker crash assigning array literal to type conforming to _ArrayProtocol <nl> - func rdar25563498 < T : _ArrayProtocol > ( t : T ) { <nl> + func rdar25563498 < T : ExpressibleByArrayLiteral > ( t : T ) { <nl> var x : T = [ 1 ] / / expected - error { { contextual type ' T ' cannot be used with array literal } } <nl> } <nl> <nl> - func rdar25563498_ok < T : _ArrayProtocol > ( t : T ) - > T <nl> + func rdar25563498_ok < T : ExpressibleByArrayLiteral > ( t : T ) - > T <nl> where T . Element : ExpressibleByIntegerLiteral { <nl> let x : T = [ 1 ] <nl> return x <nl> mmm a / test / Constraints / closures . swift <nl> ppp b / test / Constraints / closures . 
swift <nl> var _ : ( Int , Int ) - > Int = { $ 0 + $ 1 + $ 2 } / / expected - error { { contextual closure typ <nl> / / Crash when re - typechecking bodies of non - single expression closures <nl> <nl> struct CC { } <nl> - / / expected - note @ + 1 { { in call to function ' callCC ' } } <nl> func callCC < U > ( _ f : ( CC ) - > U ) - > ( ) { } <nl> <nl> func typeCheckMultiStmtClosureCrash ( ) { <nl> - callCC { / / expected - error { { generic parameter ' U ' could not be inferred } } <nl> + callCC { / / expected - error { { unable to infer closure return type in current context } } <nl> _ = $ 0 <nl> return 1 <nl> } <nl> } <nl> <nl> / / SR - 832 - both these should be ok <nl> - func someFunc ( _ foo : ( ( String ) - > String ) ? , bar : ( String ) - > String ) { <nl> + func someFunc ( _ foo : ( @ escaping ( String ) - > String ) ? , <nl> + bar : @ escaping ( String ) - > String ) { <nl> let _ : ( String ) - > String = foo ! = nil ? foo ! : bar <nl> let _ : ( String ) - > String = foo ? ? bar <nl> } <nl> struct S < T > { <nl> / / Make sure we cannot infer an ( ) argument from an empty parameter list . <nl> func acceptNothingToInt ( _ : @ noescape ( ) - > Int ) { } <nl> func testAcceptNothingToInt ( ac1 : @ autoclosure ( ) - > Int ) { <nl> - / / expected - note @ - 1 { { parameter ' ac1 ' is implicitly @ noescape because it was declared @ autoclosure } } <nl> + / / expected - note @ - 1 { { parameter ' ac1 ' is implicitly non - escaping because it was declared @ autoclosure } } <nl> acceptNothingToInt ( { ac1 ( $ 0 ) } ) <nl> / / expected - error @ - 1 { { cannot convert value of type ' ( _ ) - > Int ' to expected argument type ' ( ) - > Int ' } } <nl> - / / FIXME : expected - error @ - 2 { { closure use of @ noescape parameter ' ac1 ' may allow it to escape } } <nl> + / / FIXME : expected - error @ - 2 { { closure use of non - escaping parameter ' ac1 ' may allow it to escape } } <nl> } <nl> + <nl> + / / < rdar : / / problem / 23570873 > QoI : Poor error calling map without being able to infer " U " ( closure result inference ) <nl> + struct Thing { <nl> + init ? ( ) { } <nl> + } <nl> + / / This throws a compiler error <nl> + let things = Thing ( ) . map { thing in / / expected - error { { unable to infer closure return type in current context } } <nl> + / / Commenting out this makes it compile <nl> + _ = thing <nl> + return thing <nl> + } <nl> + <nl> + <nl> + <nl> + <nl> mmm a / test / Constraints / diagnostics . swift <nl> ppp b / test / Constraints / diagnostics . 
swift <nl> extension Double : P { <nl> func f0 ( _ x : Int , <nl> _ y : Float ) { } <nl> <nl> - func f1 ( _ : ( Int , Float ) - > Int ) { } <nl> + func f1 ( _ : @ escaping ( Int , Float ) - > Int ) { } <nl> <nl> - func f2 ( _ : ( _ : ( Int ) - > Int ) ) - > Int { } <nl> + func f2 ( _ : ( _ : @ escaping ( Int ) - > Int ) ) - > Int { } <nl> <nl> - func f3 ( _ : ( _ : ( Int ) - > Float ) - > Int ) { } <nl> + func f3 ( _ : @ escaping ( _ : @ escaping ( Int ) - > Float ) - > Int ) { } <nl> <nl> func f4 ( _ x : Int ) - > Int { } <nl> <nl> f1 ( <nl> ) <nl> <nl> f3 ( <nl> - f2 / / expected - error { { cannot convert value of type ' ( ( ( Int ) - > Int ) ) - > Int ' to expected argument type ' ( ( Int ) - > Float ) - > Int ' } } <nl> + f2 / / expected - error { { cannot convert value of type ' ( ( @ escaping ( Int ) - > Int ) ) - > Int ' to expected argument type ' ( @ escaping ( Int ) - > Float ) - > Int ' } } <nl> ) <nl> <nl> f4 ( i , d ) / / expected - error { { extra argument in call } } <nl> func f7 ( ) - > ( c : Int , v : A ) { <nl> return f6 ( g ) / / expected - error { { cannot convert return expression of type ' ( c : Int , i : A ) ' to return type ' ( c : Int , v : A ) ' } } <nl> } <nl> <nl> - func f8 < T : P2 > ( _ n : T , _ f : ( T ) - > T ) { } <nl> + func f8 < T : P2 > ( _ n : T , _ f : @ escaping ( T ) - > T ) { } <nl> f8 ( 3 , f4 ) / / expected - error { { in argument type ' ( Int ) - > Int ' , ' Int ' does not conform to expected type ' P2 ' } } <nl> typealias Tup = ( Int , Double ) <nl> func f9 ( _ x : Tup ) - > Tup { return x } <nl> func r18800223 ( _ i : Int ) { <nl> } <nl> <nl> / / < rdar : / / problem / 21883806 > Bogus " ' _ ' can only appear in a pattern or on the left side of an assignment " is back <nl> - _ = { $ 0 } / / expected - error { { unable to infer closure return type in current context } } <nl> + _ = { $ 0 } / / expected - error { { unable to infer closure type in the current context } } <nl> <nl> <nl> <nl> func r20789423 ( ) { <nl> <nl> <nl> <nl> - func f7 ( _ a : Int ) - > ( b : Int ) - > Int { <nl> + func f7 ( _ a : Int ) - > ( _ b : Int ) - > Int { <nl> return { b in a + b } <nl> } <nl> <nl> - _ = f7 ( 1 ) ( b : 1 ) <nl> + _ = f7 ( 1 ) ( 1 ) <nl> f7 ( 1 . 0 ) ( 2 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> <nl> - f7 ( 1 ) ( 1 . 0 ) / / expected - error { { missing argument label ' b : ' in call } } <nl> - f7 ( 1 ) ( b : 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + f7 ( 1 ) ( 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + f7 ( 1 ) ( b : 1 . 0 ) / / expected - error { { extraneous argument label ' b : ' in call } } <nl> <nl> let f8 = f7 ( 2 ) <nl> - _ = f8 ( b : 1 ) <nl> - f8 ( 10 ) / / expected - error { { missing argument label ' b : ' in call } } { { 4 - 4 = b : } } <nl> - f8 ( 1 . 0 ) / / expected - error { { missing argument label ' b : ' in call } } <nl> - f8 ( b : 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + _ = f8 ( 1 ) <nl> + f8 ( 10 ) / / expected - warning { { result of call is unused , but produces ' Int ' } } <nl> + f8 ( 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + f8 ( b : 1 . 
0 ) / / expected - error { { extraneous argument label ' b : ' in call } } <nl> <nl> <nl> class CurriedClass { <nl> func method1 ( ) { } <nl> - func method2 ( _ a : Int ) - > ( b : Int ) - > ( ) { return { b in ( ) } } <nl> + func method2 ( _ a : Int ) - > ( _ b : Int ) - > ( ) { return { b in ( ) } } <nl> func method3 ( _ a : Int , b : Int ) { } <nl> } <nl> <nl> _ = c . method1 <nl> c . method1 ( 1 ) / / expected - error { { argument passed to call that takes no arguments } } <nl> _ = c . method2 ( 1 ) <nl> _ = c . method2 ( 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> - c . method2 ( 1 ) ( b : 2 ) <nl> - c . method2 ( 1 ) ( c : 2 ) / / expected - error { { incorrect argument label in call ( have ' c : ' , expected ' b : ' ) } } { { 14 - 15 = b } } <nl> - c . method2 ( 1 ) ( c : 2 . 0 ) / / expected - error { { incorrect argument label in call ( have ' c : ' , expected ' b : ' ) } } <nl> - c . method2 ( 1 ) ( b : 2 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> - c . method2 ( 1 . 0 ) ( b : 2 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> - c . method2 ( 1 . 0 ) ( b : 2 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + c . method2 ( 1 ) ( 2 ) <nl> + c . method2 ( 1 ) ( c : 2 ) / / expected - error { { extraneous argument label ' c : ' in call } } <nl> + c . method2 ( 1 ) ( c : 2 . 0 ) / / expected - error { { extraneous argument label ' c : ' in call } } <nl> + c . method2 ( 1 ) ( 2 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + c . method2 ( 1 . 0 ) ( 2 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + c . method2 ( 1 . 0 ) ( 2 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> <nl> CurriedClass . method1 ( c ) ( ) <nl> _ = CurriedClass . method1 ( c ) <nl> CurriedClass . method1 ( c ) ( 1 ) / / expected - error { { argument passed to call that takes no arguments } } <nl> CurriedClass . method1 ( 2 . 0 ) ( 1 ) / / expected - error { { use of instance member ' method1 ' on type ' CurriedClass ' ; did you mean to use a value of type ' CurriedClass ' instead ? } } <nl> <nl> - CurriedClass . method2 ( c ) ( 32 ) ( b : 1 ) <nl> + CurriedClass . method2 ( c ) ( 32 ) ( b : 1 ) / / expected - error { { extraneous argument label ' b : ' in call } } <nl> _ = CurriedClass . method2 ( c ) <nl> _ = CurriedClass . method2 ( c ) ( 32 ) <nl> _ = CurriedClass . method2 ( 1 , 2 ) / / expected - error { { use of instance member ' method2 ' on type ' CurriedClass ' ; did you mean to use a value of type ' CurriedClass ' instead ? } } <nl> CurriedClass . method2 ( c ) ( 1 . 0 ) ( b : 1 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> - CurriedClass . method2 ( c ) ( 1 ) ( b : 1 . 0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> - CurriedClass . method2 ( c ) ( 2 ) ( c : 1 . 0 ) / / expected - error { { incorrect argument label in call ( have ' c : ' , expected ' b : ' ) } } <nl> + CurriedClass . method2 ( c ) ( 1 ) ( 1 . 
0 ) / / expected - error { { cannot convert value of type ' Double ' to expected argument type ' Int ' } } <nl> + CurriedClass . method2 ( c ) ( 2 ) ( c : 1 . 0 ) / / expected - error { { extraneous argument label ' c : ' } } <nl> <nl> CurriedClass . method3 ( c ) ( 32 , b : 1 ) <nl> _ = CurriedClass . method3 ( c ) <nl> func read2 ( _ p : UnsafeMutableRawPointer , maxLength : Int ) { } <nl> func read < T : Integer > ( ) - > T ? { <nl> var buffer : T <nl> let n = withUnsafePointer ( to : & buffer ) { ( p ) in <nl> - read2 ( UnsafePointer ( p ) , maxLength : sizeof ( T ) ) / / expected - error { { cannot convert value of type ' UnsafePointer < _ > ' to expected argument type ' UnsafeMutableRawPointer ' } } <nl> + read2 ( UnsafePointer ( p ) , maxLength : MemoryLayout < T > . size ) / / expected - error { { cannot convert value of type ' UnsafePointer < _ > ' to expected argument type ' UnsafeMutableRawPointer ' } } <nl> } <nl> } <nl> <nl> mmm a / test / Constraints / fixes . swift <nl> ppp b / test / Constraints / fixes . swift <nl> func f6 ( _ a : A , _ : Int ) { } <nl> func createB ( ) - > B { } / / expected - note { { found this candidate } } <nl> func createB ( _ i : Int ) - > B { } / / expected - note { { found this candidate } } <nl> <nl> - func f7 ( _ a : A , _ : ( ) - > Int ) - > B { } <nl> + func f7 ( _ a : A , _ : @ escaping ( ) - > Int ) - > B { } <nl> func f7 ( _ a : A , _ : Int ) - > Int { } <nl> <nl> / / Forgot the ' ( ) ' to call a function . <nl> mmm a / test / Constraints / generics . swift <nl> ppp b / test / Constraints / generics . swift <nl> func r22459135 ( ) { <nl> g ( h ( [ 3 ] ) ) <nl> } <nl> <nl> - func f2 < TargetType : AnyObject > ( _ target : TargetType , handler : ( TargetType ) - > ( ) ) { <nl> + func f2 < TargetType : AnyObject > ( _ target : TargetType , handler : @ escaping ( TargetType ) - > ( ) ) { <nl> let _ : ( AnyObject ) - > ( ) = { internalTarget in <nl> handler ( internalTarget as ! TargetType ) <nl> } <nl> protocol Q19215114 { } <nl> protocol P19215114 { } <nl> <nl> / / expected - note @ + 1 { { in call to function ' body9215114 ' } } <nl> - func body9215114 < T : P19215114 , U : Q19215114 > ( _ t : T ) - > ( u : U ) - > ( ) { } <nl> + func body9215114 < T : P19215114 , U : Q19215114 > ( _ t : T ) - > ( _ u : U ) - > ( ) { } <nl> <nl> func test9215114 < T : P19215114 , U : Q19215114 > ( _ t : T ) - > ( U ) - > ( ) { <nl> / / Should complain about not being able to infer type of U . <nl> Whatever . foo ( a : 23 ) / / expected - error { { generic parameter ' A ' could not be infer <nl> / / < rdar : / / problem / 21718955 > Swift useless error : cannot invoke ' foo ' with no arguments <nl> Whatever . bar ( ) / / expected - error { { generic parameter ' A ' could not be inferred } } <nl> <nl> + / / < rdar : / / problem / 27515965 > Type checker doesn ' t enforce same - type constraint if associated type is Any <nl> + protocol P27515965 { <nl> + associatedtype R <nl> + func f ( ) - > R <nl> + } <nl> + <nl> + struct S27515965 : P27515965 { <nl> + func f ( ) - > Any { return self } <nl> + } <nl> + <nl> + struct V27515965 { <nl> + init < T : P27515965 > ( _ tp : T ) where T . R = = Float { } <nl> + } <nl> + <nl> + func test ( x : S27515965 ) - > V27515965 { <nl> + return V27515965 ( x ) / / expected - error { { generic parameter ' T ' could not be inferred } } <nl> + } <nl> mmm a / test / Constraints / keyword_arguments . swift <nl> ppp b / test / Constraints / keyword_arguments . swift <nl> mismatchOverloaded1 . 
method2 ( 5 ) { $ 0 } <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> / / Values of function type <nl> / / mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - <nl> - func testValuesOfFunctionType ( _ f1 : ( _ : Int , arg : Int ) - > ( ) ) { <nl> - f1 ( 3 , arg : 5 ) <nl> - f1 ( x : 3 , 5 ) / / expected - error { { incorrect argument labels in call ( have ' x : _ : ' , expected ' _ : arg : ' ) } } { { 6 - 9 = } } { { 12 - 12 = arg : } } <nl> - f1 ( 3 , 5 ) / / expected - error { { missing argument label ' arg : ' in call } } { { 9 - 9 = arg : } } <nl> + func testValuesOfFunctionType ( _ f1 : ( _ : Int , _ arg : Int ) - > ( ) ) { <nl> + f1 ( 3 , arg : 5 ) / / expected - error { { extraneous argument label ' arg : ' in call } } { { 9 - 14 = } } <nl> + f1 ( x : 3 , 5 ) / / expected - error { { extraneous argument label ' x : ' in call } } { { 6 - 9 = } } <nl> + f1 ( 3 , 5 ) <nl> } <nl> <nl> <nl> mmm a / test / Constraints / lvalues . swift <nl> ppp b / test / Constraints / lvalues . swift <nl> func rdar23131768 ( ) { <nl> } <nl> <nl> / / < rdar : / / problem / 23331567 > Swift : Compiler crash related to closures with inout parameter . <nl> - func r23331567 ( _ fn : ( x : inout Int ) - > Void ) { <nl> + func r23331567 ( _ fn : ( _ x : inout Int ) - > Void ) { <nl> var a = 0 <nl> - fn ( x : & a ) <nl> + fn ( & a ) <nl> } <nl> r23331567 { $ 0 + = 1 } <nl> <nl> mmm a / test / Constraints / members . swift <nl> ppp b / test / Constraints / members . swift <nl> protocol Functional { <nl> func apply ( _ v : Vector ) - > Scalar <nl> } <nl> protocol Coalgebra { <nl> - func coproduct ( _ f : Functional ) - > ( v1 : Vector , v2 : Vector ) - > Scalar <nl> + func coproduct ( _ f : Functional ) - > ( _ v1 : Vector , _ v2 : Vector ) - > Scalar <nl> } <nl> <nl> / / Make sure existential is closed early when we partially apply <nl> mmm a / test / Constraints / patterns . swift <nl> ppp b / test / Constraints / patterns . swift <nl> func good ( _ a : A < EE > ) - > Int { <nl> } <nl> <nl> func bad ( _ a : A < EE > ) { <nl> - a . map { / / expected - error { { generic parameter ' T ' could not be inferred } } <nl> + a . map { / / expected - error { { unable to infer closure return type in current context } } <nl> let _ : EE = $ 0 <nl> return 1 <nl> } <nl> } <nl> <nl> func ugly ( _ a : A < EE > ) { <nl> - a . map { / / expected - error { { generic parameter ' T ' could not be inferred } } <nl> + a . map { / / expected - error { { unable to infer closure return type in current context } } <nl> switch $ 0 { <nl> case . A : <nl> return 1 <nl> mmm a / test / Constraints / tuple . swift <nl> ppp b / test / Constraints / tuple . swift <nl> var f : Float <nl> <nl> func f1 ( y : Float , rest : Int . . . 
) { } <nl> <nl> - func f2 ( _ : ( x : Int , y : Int ) - > Int ) { } <nl> + func f2 ( _ : ( _ x : Int , _ y : Int ) - > Int ) { } <nl> func f2xy ( x : Int , y : Int ) - > Int { } <nl> func f2ab ( a : Int , b : Int ) - > Int { } <nl> func f2yx ( y : Int , x : Int ) - > Int { } <nl> <nl> - func f3 ( _ x : ( x : Int , y : Int ) - > ( ) ) { } <nl> + func f3 ( _ x : ( _ x : Int , _ y : Int ) - > ( ) ) { } <nl> func f3a ( _ x : Int , y : Int ) { } <nl> func f3b ( _ : Int ) { } <nl> <nl> f4 ( 1 , 2 , 3 ) <nl> <nl> f2 ( f2xy ) <nl> f2 ( f2ab ) <nl> - f2 ( f2yx ) / / expected - error { { cannot convert value of type ' ( y : Int , x : Int ) - > Int ' to expected argument type ' ( x : Int , y : Int ) - > Int ' } } <nl> + f2 ( f2yx ) <nl> <nl> f3 ( f3a ) <nl> - f3 ( f3b ) / / expected - error { { cannot convert value of type ' ( Int ) - > ( ) ' to expected argument type ' ( x : Int , y : Int ) - > ( ) ' } } <nl> + f3 ( f3b ) / / expected - error { { cannot convert value of type ' ( Int ) - > ( ) ' to expected argument type ' ( Int , Int ) - > ( ) ' } } <nl> <nl> func getIntFloat ( ) - > ( int : Int , float : Float ) { } <nl> var values = getIntFloat ( ) <nl> extension Int : PosixErrorReturn { <nl> } <nl> <nl> func posixCantFail < A , T : Comparable & PosixErrorReturn > <nl> - ( _ f : ( A ) - > T ) - > ( args : A ) - > T <nl> + ( _ f : @ escaping ( A ) - > T ) - > ( _ args : A ) - > T <nl> { <nl> return { args in <nl> let result = f ( args ) <nl> func open ( _ name : String , oflag : Int ) - > Int { } <nl> <nl> var foo : Int = 0 <nl> <nl> - var fd = posixCantFail ( open ) ( args : ( " foo " , 0 ) ) <nl> + var fd = posixCantFail ( open ) ( ( " foo " , 0 ) ) <nl> <nl> / / Tuples and lvalues <nl> class C { <nl> func makeRequest ( ) { <nl> } <nl> } <nl> <nl> + / / < rdar : / / problem / 25271859 > QoI : Misleading error message when expression result can ' t be inferred from closure <nl> + struct r25271859 < T > { <nl> + } <nl> + <nl> + extension r25271859 { <nl> + func map < U > ( f : ( T ) - > U ) - > r25271859 < U > { <nl> + } <nl> + <nl> + func andThen < U > ( f : ( T ) - > r25271859 < U > ) { <nl> + } <nl> + } <nl> + <nl> + func f ( a : r25271859 < ( Float , Int ) > ) { <nl> + a . map { $ 0 . 0 } <nl> + . andThen { _ in / / expected - error { { generic parameter ' U ' could not be inferred } } <nl> + print ( " hello " ) / / comment this out and it runs , leave any form of print in and it doesn ' t <nl> + return Task < String > ( ) <nl> + } <nl> + } <nl> <nl> mmm a / test / DebugInfo / closure . swift <nl> ppp b / test / DebugInfo / closure . swift <nl> <nl> <nl> func markUsed < T > ( _ t : T ) { } <nl> <nl> - func foldl1 < T > ( _ list : [ T ] , _ function : ( a : T , b : T ) - > T ) - > T { <nl> + func foldl1 < T > ( _ list : [ T ] , _ function : ( _ a : T , _ b : T ) - > T ) - > T { <nl> assert ( list . count > 1 ) <nl> var accumulator = list [ 0 ] <nl> for i in 1 . . < list . count { <nl> - accumulator = function ( a : accumulator , b : list [ i ] ) <nl> + accumulator = function ( accumulator , list [ i ] ) <nl> } <nl> return accumulator <nl> } <nl> mmm a / test / DebugInfo / linetable . swift <nl> ppp b / test / DebugInfo / linetable . swift <nl> class MyClass <nl> } <nl> } <nl> <nl> - func call_me ( _ code : ( ) - > Void ) <nl> + func call_me ( _ code : @ escaping ( ) - > Void ) <nl> { <nl> code ( ) <nl> } <nl> mmm a / test / FixCode / fixits - apply . swift <nl> ppp b / test / FixCode / fixits - apply . 
swift <nl> func ftest1 ( ) { <nl> let myvar = 0 <nl> } <nl> <nl> - func ftest2 ( x x : Int - > Int ) { } <nl> + func ftest2 ( x x : @ escaping Int - > Int ) { } <nl> <nl> protocol SomeProt { <nl> func protMeth ( p : Int ) <nl> mmm a / test / FixCode / fixits - apply . swift . result <nl> ppp b / test / FixCode / fixits - apply . swift . result <nl> func ftest1 ( ) { <nl> let myvar = 0 <nl> } <nl> <nl> - func ftest2 ( x : ( Int ) - > Int ) { } <nl> + func ftest2 ( x : @ escaping ( Int ) - > Int ) { } <nl> <nl> protocol SomeProt { <nl> func protMeth ( p : Int ) <nl> mmm a / test / Generics / same_type_constraints . swift <nl> ppp b / test / Generics / same_type_constraints . swift <nl> struct SatisfySameTypeAssocTypeRequirementDependent < T > <nl> public struct GeneratorOf < T > : IteratorProtocol , Sequence { <nl> <nl> / / / Construct an instance whose ` next ( ) ` method calls ` nextElement ` . <nl> - public init ( _ nextElement : ( ) - > T ? ) { <nl> + public init ( _ nextElement : @ escaping ( ) - > T ? ) { <nl> self . _next = nextElement <nl> } <nl> <nl> public struct LazySequenceOf < S : Sequence , A > : Sequence where S . Iterator . Elemen <nl> public subscript ( i : A ) - > A { return i } <nl> } <nl> <nl> - public func iterate < A > ( _ f : ( A ) - > A ) - > ( x : A ) - > LazySequenceOf < Iterate < A > , A > ? { <nl> + public func iterate < A > ( _ f : @ escaping ( A ) - > A ) - > ( _ x : A ) - > LazySequenceOf < Iterate < A > , A > ? { <nl> return { x in nil } <nl> } <nl> <nl> public final class IterateGenerator < A > : IteratorProtocol { <nl> / / rdar : / / problem / 18475138 <nl> public protocol Observable : class { <nl> associatedtype Output <nl> - func addObserver ( _ obj : ( Output ) - > Void ) <nl> + func addObserver ( _ obj : @ escaping ( Output ) - > Void ) <nl> } <nl> <nl> public protocol Bindable : class { <nl> infix operator < - : AssignmentPrecedence <nl> <nl> func < - < <nl> Right : Observable <nl> - > ( lhs : ( Right . Output ) - > Void , rhs : Right ) - > Composed < SideEffect < Right > , Right > ? <nl> + > ( lhs : @ escaping ( Right . Output ) - > Void , rhs : Right ) - > Composed < SideEffect < Right > , Right > ? <nl> { <nl> return nil <nl> } <nl> class Cow : Animal { <nl> <nl> struct SpecificAnimal < F : Food > : Animal { <nl> typealias EdibleFood = F <nl> - let _eat : ( f : F ) - > ( ) <nl> + let _eat : ( _ f : F ) - > ( ) <nl> <nl> init < A : Animal > ( _ selfie : A ) where A . EdibleFood = = F { <nl> _eat = { selfie . eat ( $ 0 ) } <nl> } <nl> func eat ( _ f : F ) { <nl> - _eat ( f : f ) <nl> + _eat ( f ) <nl> } <nl> } <nl> <nl> mmm a / test / IDE / Inputs / mock - sdk / Foo . annotated . txt <nl> ppp b / test / IDE / Inputs / mock - sdk / Foo . annotated . txt <nl> var < loc > fooIntVar < / loc > : < ref : Struct > Int32 < / ref > < / decl > <nl> func < loc > fooFunc1 ( < decl : Param > _ a : < ref : Struct > Int32 < / ref > < / decl > ) < / loc > - > < ref : Struct > Int32 < / ref > < / decl > <nl> < decl : Func > func < loc > fooFunc1AnonymousParam ( < decl : Param > _ : < ref : Struct > Int32 < / ref > < / decl > ) < / loc > - > < ref : Struct > Int32 < / ref > < / decl > <nl> < decl : Func > func < loc > fooFunc3 ( < decl : Param > _ a : < ref : Struct > Int32 < / ref > < / decl > , < decl : Param > _ b : < ref : Struct > Float < / ref > < / decl > , < decl : Param > _ c : < ref : Struct > Double < / ref > < / decl > , < decl : Param > _ d : < ref : Struct > UnsafeMutablePointer < / ref > < < ref : Struct > Int32 < / ref > > ! 
< / decl > ) < / loc > - > < ref : Struct > Int32 < / ref > < / decl > <nl> - < decl : Func > func < loc > fooFuncWithBlock ( < decl : Param > _ blk : ( ( < ref : Struct > Float < / ref > ) - > < ref : Struct > Int32 < / ref > ) ! < / decl > ) < / loc > < / decl > <nl> - < decl : Func > func < loc > fooFuncWithFunctionPointer ( < decl : Param > _ fptr : ( @ convention ( c ) ( < ref : Struct > Float < / ref > ) - > < ref : Struct > Int32 < / ref > ) ! < / decl > ) < / loc > < / decl > <nl> + < decl : Func > func < loc > fooFuncWithBlock ( < decl : Param > _ blk : ( @ escaping ( < ref : Struct > Float < / ref > ) - > < ref : Struct > Int32 < / ref > ) ! < / decl > ) < / loc > < / decl > <nl> + < decl : Func > func < loc > fooFuncWithFunctionPointer ( < decl : Param > _ fptr : ( @ escaping @ convention ( c ) ( < ref : Struct > Float < / ref > ) - > < ref : Struct > Int32 < / ref > ) ! < / decl > ) < / loc > < / decl > <nl> < decl : Func > func < loc > fooFuncNoreturn1 ( ) < / loc > - > < ref : Enum > Never < / ref > < / decl > <nl> < decl : Func > func < loc > fooFuncNoreturn2 ( ) < / loc > - > < ref : Enum > Never < / ref > < / decl > <nl> <nl> mmm a / test / IDE / Inputs / mock - sdk / Foo . printed . recursive . txt <nl> ppp b / test / IDE / Inputs / mock - sdk / Foo . printed . recursive . txt <nl> var fooIntVar : Int32 <nl> func fooFunc1 ( _ a : Int32 ) - > Int32 <nl> func fooFunc1AnonymousParam ( _ : Int32 ) - > Int32 <nl> func fooFunc3 ( _ a : Int32 , _ b : Float , _ c : Double , _ d : UnsafeMutablePointer < Int32 > ! ) - > Int32 <nl> - func fooFuncWithBlock ( _ blk : ( ( Float ) - > Int32 ) ! ) <nl> - func fooFuncWithFunctionPointer ( _ fptr : ( @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithBlock ( _ blk : ( @ escaping ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithFunctionPointer ( _ fptr : ( @ escaping @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> func fooFuncNoreturn1 ( ) - > Never <nl> func fooFuncNoreturn2 ( ) - > Never <nl> <nl> mmm a / test / IDE / Inputs / mock - sdk / Foo . printed . txt <nl> ppp b / test / IDE / Inputs / mock - sdk / Foo . printed . txt <nl> func fooFunc3 ( _ a : Int32 , _ b : Float , _ c : Double , _ d : UnsafeMutablePointer < Int <nl> Very good <nl> fooFuncWithBlock function . <nl> * / <nl> - func fooFuncWithBlock ( _ blk : ( ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithBlock ( _ blk : ( @ escaping ( Float ) - > Int32 ) ! ) <nl> <nl> - func fooFuncWithFunctionPointer ( _ fptr : ( @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithFunctionPointer ( _ fptr : ( @ escaping @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> <nl> func fooFuncNoreturn1 ( ) - > Never <nl> func fooFuncNoreturn2 ( ) - > Never <nl> new file mode 100644 <nl> index 000000000000 . . 6f984ad19b23 <nl> mmm / dev / null <nl> ppp b / test / IDE / comment_inherited_protocol . swift <nl> <nl> + / / RUN : % target - swift - ide - test - print - comments - source - filename % s | FileCheck % s <nl> + <nl> + protocol ParentProtocol1 { <nl> + / / / ParentProtocol1 . onlyParent1 ( ) <nl> + func onlyParent1 ( ) <nl> + / / CHECK : Func / ParentProtocol1 . onlyParent1 { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > onlyParent1 ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol111onlyParent1FT_T_ < / USR > < Declaration > func onlyParent1 ( ) < / Declaration > < Abstract > < Para > ParentProtocol1 . 
onlyParent1 ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol1 . commonParentRequirement ( ) <nl> + func commonParentRequirement ( ) <nl> + / / CHECKL : Func / ParentProtocol1 . commonParentRequirement { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonParentRequirement ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol123commonParentRequirementFT_T_ < / USR > < Declaration > func commonParentRequirement ( ) < / Declaration > < Abstract > < Para > ParentProtocol1 . commonParentRequirement ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol1 . commonRequirementWithDocComment ( ) <nl> + func commonRequirementWithDocComment ( ) <nl> + / / CHECK : Func / ParentProtocol1 . commonRequirementWithDocComment { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonRequirementWithDocComment ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol131commonRequirementWithDocCommentFT_T_ < / USR > < Declaration > func commonRequirementWithDocComment ( ) < / Declaration > < Abstract > < Para > ParentProtocol1 . commonRequirementWithDocComment ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol1 . commonRequirementWithoutDocComment ( ) <nl> + func commonRequirementWithoutDocComment ( ) <nl> + / / CHECK : Func / ParentProtocol1 . commonRequirementWithoutDocComment { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonRequirementWithoutDocComment ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol134commonRequirementWithoutDocCommentFT_T_ < / USR > < Declaration > func commonRequirementWithoutDocComment ( ) < / Declaration > < Abstract > < Para > ParentProtocol1 . commonRequirementWithoutDocComment ( ) < / Para > < / Abstract > < / Function > ] <nl> + } <nl> + <nl> + protocol ParentProtocol2 { <nl> + / / / ParentProtocol2 . onlyParent2 ( ) <nl> + func onlyParent2 ( ) <nl> + / / CHECK : Func / ParentProtocol2 . onlyParent2 { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > onlyParent2 ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol211onlyParent2FT_T_ < / USR > < Declaration > func onlyParent2 ( ) < / Declaration > < Abstract > < Para > ParentProtocol2 . onlyParent2 ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol2 . commonParentRequirement ( ) <nl> + func commonParentRequirement ( ) <nl> + / / CHECK : Func / ParentProtocol2 . commonParentRequirement { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonParentRequirement ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol223commonParentRequirementFT_T_ < / USR > < Declaration > func commonParentRequirement ( ) < / Declaration > < Abstract > < Para > ParentProtocol2 . commonParentRequirement ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol2 . commonRequirementWithDocComment ( ) <nl> + func commonRequirementWithDocComment ( ) <nl> + / / CHECK : Func / ParentProtocol2 . commonRequirementWithDocComment { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . 
* } } " > < Name > commonRequirementWithDocComment ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol231commonRequirementWithDocCommentFT_T_ < / USR > < Declaration > func commonRequirementWithDocComment ( ) < / Declaration > < Abstract > < Para > ParentProtocol2 . commonRequirementWithDocComment ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / / ParentProtocol2 . commonRequirementWithoutDocComment ( ) <nl> + func commonRequirementWithoutDocComment ( ) <nl> + / / CHECK : Func / ParentProtocol2 . commonRequirementWithoutDocComment { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonRequirementWithoutDocComment ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol234commonRequirementWithoutDocCommentFT_T_ < / USR > < Declaration > func commonRequirementWithoutDocComment ( ) < / Declaration > < Abstract > < Para > ParentProtocol2 . commonRequirementWithoutDocComment ( ) < / Para > < / Abstract > < / Function > ] <nl> + } <nl> + <nl> + protocol ChildProtocol : ParentProtocol1 , ParentProtocol2 { <nl> + / / / ChildProtocol . commonRequirementWithDocComment ( ) <nl> + func commonRequirementWithDocComment ( ) <nl> + / / CHECK : Func / ChildProtocol . commonRequirementWithDocComment { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > commonRequirementWithDocComment ( ) < / Name > < USR > s : FP14swift_ide_test13ChildProtocol31commonRequirementWithDocCommentFT_T_ < / USR > < Declaration > func commonRequirementWithDocComment ( ) < / Declaration > < Abstract > < Para > ChildProtocol . commonRequirementWithDocComment ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / This should show nothing because there are two inherited requirements . <nl> + func commonRequirementWithoutDocComment ( ) <nl> + / / CHECK : Func / ChildProtocol . commonRequirementWithoutDocComment { { . * } } DocCommentAsXML = none <nl> + } <nl> + <nl> + / / Test that ChildProtocol ' s default implementation for requirements <nl> + / / come from the right place . <nl> + extension ChildProtocol { <nl> + / / Should come from ParentProtocol1 . <nl> + func onlyParent1 ( ) { } <nl> + / / CHECK : Func / onlyParent1 { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > onlyParent1 ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol111onlyParent1FT_T_ < / USR > < Declaration > func onlyParent1 ( ) < / Declaration > < Abstract > < Para > ParentProtocol1 . onlyParent1 ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / Should come from ParentProtocol2 . <nl> + func onlyParent2 ( ) { } <nl> + / / CHECK : Func / onlyParent2 { { . * } } DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > onlyParent2 ( ) < / Name > < USR > s : FP14swift_ide_test15ParentProtocol211onlyParent2FT_T_ < / USR > < Declaration > func onlyParent2 ( ) < / Declaration > < Abstract > < Para > ParentProtocol2 . onlyParent2 ( ) < / Para > < / Abstract > < / Function > ] <nl> + <nl> + / / Should show nothing because the requirement is in both parents . <nl> + func commonParentRequirement ( ) { } <nl> + / / CHECK : Func / commonParentRequirement { { . * } } DocCommentAsXML = none <nl> + <nl> + / / Should show nothing because the requirement is in both parents . 
<nl> + func commonRequirementWithDocComment ( ) { } <nl> + / / CHECK : Func / commonRequirementWithDocComment { { . * } } DocCommentAsXML = none <nl> + <nl> + / / Should show nothing because there are multiple requirements . <nl> + func commonRequirementWithoutDocComment ( ) { } <nl> + / / CHECK : Func / commonRequirementWithoutDocComment { { . * } } DocCommentAsXML = none <nl> + } <nl> + <nl> mmm a / test / IDE / comment_to_xml . swift <nl> ppp b / test / IDE / comment_to_xml . swift <nl> <nl> / / REQUIRES : no_asan <nl> / / WRONG - NOT : CommentXMLInvalid <nl> <nl> + / / REQUIRES : se_0111_complete <nl> mmm a / test / IDE / complete_call_arg . swift <nl> ppp b / test / IDE / complete_call_arg . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN - FIXME : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = ARG1 | FileCheck % s - check - prefix = EXPECT_OINT <nl> / / RUN - FIXME : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = ARG2 | FileCheck % s - check - prefix = ARG - NAME1 <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = ARG3 | FileCheck % s - check - prefix = ARG - NAME2 <nl> mmm a / test / IDE / complete_crashes . swift <nl> ppp b / test / IDE / complete_crashes . swift <nl> protocol Fooable { <nl> <nl> / / rdar : / / problem / 22688199 <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = RDAR_22688199 | FileCheck % s - check - prefix = FLIP_CURRIED <nl> - func curried ( _ a : Int ) ( b1 : Int , b2 : Int ) { } <nl> + func curried ( _ a : Int ) ( _ b1 : Int , _ b2 : Int ) { } <nl> func flip < A , B , C > ( _ f : A - > B - > C ) - > B - > A - > C { } <nl> func rdar22688199 ( ) { <nl> let f = flip ( curried ) ( # ^ RDAR_22688199 ^ # <nl> } <nl> - / / FLIP_CURRIED : Pattern / ExprSpecific : [ ' ( ' ] { # b1 : Int # } , { # b2 : Int # } ) [ # ( Int ) - > ( ) # ] <nl> + / / FLIP_CURRIED : Pattern / ExprSpecific : [ ' ( ' ] { # Int # } , { # Int # } ) [ # ( Int ) - > ( ) # ] <nl> <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = RDAR_22836263 <nl> func rdar22836263 ( ) { <nl> mmm a / test / IDE / complete_default_arguments . swift <nl> ppp b / test / IDE / complete_default_arguments . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN : sed - n - e ' 1 , / NO_ERRORS_UP_TO_HERE $ / p ' % s > % t_no_errors . swift <nl> / / RUN : % target - swift - frontend - verify - parse % t_no_errors . swift <nl> <nl> func testDefaultArgs2 ( ) { <nl> } <nl> / / DEFAULT_ARGS_2 : Begin completions <nl> / / DEFAULT_ARGS_2 - DAG : Pattern / ExprSpecific : ( { # ( a ) : Int # } ) [ # Void # ] { { ; name = . + $ } } <nl> - / / DEFAULT_ARGS_2 - DAG : Pattern / ExprSpecific : ( { # ( a ) : Int # } , { # b : Int # } ) [ # Void # ] { { ; name = . + $ } } <nl> + / / DEFAULT_ARGS_2 - DAG : Pattern / ExprSpecific : ( { # ( a ) : Int # } , { # ( b ) : Int # } ) [ # Void # ] { { ; name = . + $ } } <nl> / / DEFAULT_ARGS_2 : End completions <nl> <nl> func testDefaultArgs3 ( ) { <nl> mmm a / test / IDE / complete_enum_elements . swift <nl> ppp b / test / IDE / complete_enum_elements . swift <nl> enum BarEnum { <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar1 [ # BarEnum # ] { { ; name = . 
+ $ } } <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar2 ( ) [ # ( ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar3 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( a : Int , b : Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , b : ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , b : ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( a : Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( a : Int , b : ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( Int , ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar9 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar10 ( { # Int # } , { # Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_TYPE_CONTEXT - DAG : Decl [ EnumElement ] / ExprSpecific : . Bar11 ( { # Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> enum BarEnum { <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar1 [ # BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar2 ( ) [ # ( ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar3 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( a : Int , b : Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . 
Bar5 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , b : ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( a : Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( a : Int , b : ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar5 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar6 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( Int , ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar9 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar10 ( { # Int # } , { # Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_NO_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : . Bar11 ( { # Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> enum BarEnum { <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar1 [ # BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar2 ( ) [ # ( ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar3 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( a : Int , b : Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar5 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar6 ( { # a : Int # } , { # Float # } ) [ # ( a : Int , b : ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> - / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( a : Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . 
+ $ } } <nl> - / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( a : Int , b : ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar4 ( { # a : Int # } , { # b : Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar5 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar6 ( { # a : Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar7 ( { # a : Int # } , ( { # b : Float # } , { # c : Double # } ) ) [ # ( Int , ( b : Float , c : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> + / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar8 ( { # a : Int # } , b : ( { # c : Float # } , { # d : Double # } ) ) [ # ( Int , ( c : Float , d : Double ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar9 ( { # Int # } ) [ # ( Int ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar10 ( { # Int # } , { # Float # } ) [ # ( Int , Float ) - > BarEnum # ] { { ; name = . + $ } } <nl> / / BAR_ENUM_DOT - NEXT : Decl [ EnumElement ] / CurrNominal : Bar11 ( { # Int # } , { # Float # } ) [ # ( Int , ( Float ) ) - > BarEnum # ] { { ; name = . + $ } } <nl> mmm a / test / IDE / complete_from_stdlib . swift <nl> ppp b / test / IDE / complete_from_stdlib . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = PLAIN_TOP_LEVEL_1 > % t . toplevel . txt <nl> / / RUN : FileCheck % s - check - prefix = PLAIN_TOP_LEVEL < % t . toplevel . txt <nl> / / RUN : FileCheck % s - check - prefix = NO_STDLIB_PRIVATE < % t . toplevel . txt <nl> mmm a / test / IDE / complete_override . swift <nl> ppp b / test / IDE / complete_override . swift <nl> protocol HasThrowingProtocol { <nl> <nl> class HasThrowing { <nl> func bar ( ) throws { } <nl> - func baz ( x : ( ) throws - > ( ) ) rethrows { } <nl> + func baz ( x : @ escaping ( ) throws - > ( ) ) rethrows { } <nl> init ( ) throws { } <nl> } <nl> class TestClassWithThrows : HasThrowing , HasThrowingProtocol { <nl> class TestClassWithThrows : HasThrowing , HasThrowingProtocol { <nl> / / HAS_THROWING : Begin completions <nl> / / HAS_THROWING - DAG : Decl [ InstanceMethod ] / Super : func foo ( ) throws { | } ; name = foo ( ) throws <nl> / / HAS_THROWING - DAG : Decl [ InstanceMethod ] / Super : override func bar ( ) throws { | } ; name = bar ( ) throws <nl> - / / HAS_THROWING - DAG : Decl [ InstanceMethod ] / Super : override func baz ( x : ( ) throws - > ( ) ) rethrows { | } ; name = baz ( x : ( ) throws - > ( ) ) rethrows <nl> + / / FIXME : SR - 2214 make the below require printing @ escaping <nl> + / / HAS_THROWING - DAG : Decl [ InstanceMethod ] / Super : override func baz ( x : { { ( @ escaping ) ? } } ( ) throws - > ( ) ) rethrows { | } ; name = baz ( x : { { ( @ escaping ) ? 
} } ( ) throws - > ( ) ) rethrows <nl> / / HAS_THROWING - DAG : Decl [ Constructor ] / Super : init ( ) throws { | } ; name = init ( ) throws <nl> / / HAS_THROWING : End completions <nl> <nl> mmm a / test / IDE / complete_unresolved_members . swift <nl> ppp b / test / IDE / complete_unresolved_members . swift <nl> <nl> <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = OTHER_FILE_1 % S / Inputs / EnumFromOtherFile . swift | FileCheck % s - check - prefix = OTHER_FILE_1 <nl> <nl> + / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = NON_OPT_SET_1 | FileCheck % s - check - prefix = NON_OPT_SET_1 <nl> + / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = NON_OPT_SET_2 | FileCheck % s - check - prefix = NON_OPT_SET_1 <nl> + / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = NON_OPT_SET_3 | FileCheck % s - check - prefix = NON_OPT_SET_1 <nl> + <nl> enum SomeEnum1 { <nl> case South <nl> case North <nl> func testAvail1 ( x : EnumAvail1 ) { <nl> func testAvail2 ( x : OptionsAvail1 ) { <nl> testAvail2 ( . # ^ OPTIONS_AVAIL_1 ^ # ) <nl> } <nl> - / / OPTIONS_AVAIL_1 : Begin completions , 2 items <nl> + / / OPTIONS_AVAIL_1 : Begin completions , 3 items <nl> / / ENUM_AVAIL_1 - NOT : AAA <nl> / / OPTIONS_AVAIL_1 - DAG : Decl [ StaticVar ] / CurrNominal : aaa [ # OptionsAvail1 # ] ; <nl> / / OPTIONS_AVAIL_1 - DAG : Decl [ StaticVar ] / CurrNominal / NotRecommended : BBB [ # OptionsAvail1 # ] ; <nl> + / / OPTIONS_AVAIL_1 - DAG : Decl [ Constructor ] / CurrNominal : init ( { # rawValue : Int # } ) [ # OptionsAvail1 # ] <nl> / / ENUM_AVAIL_1 - NOT : AAA <nl> / / OPTIONS_AVAIL_1 : End completions <nl> <nl> func enumFromOtherFile ( ) - > EnumFromOtherFile { <nl> / / OTHER_FILE_1 - DAG : Decl [ EnumElement ] / ExprSpecific : a ( { # Int # } ) [ # ( Int ) - > EnumFromOtherFile # ] ; <nl> / / OTHER_FILE_1 - DAG : Decl [ EnumElement ] / ExprSpecific : c [ # EnumFromOtherFile # ] ; <nl> / / OTHER_FILE_1 : End completions <nl> + <nl> + struct NonOptSet { <nl> + static let a = NonOptSet ( ) <nl> + static let wrongType = 1 <nl> + let notStatic = NonOptSet ( ) <nl> + init ( x : Int , y : Int ) { } <nl> + init ( ) { } <nl> + static func b ( ) - > NonOptSet { return NonOptSet ( ) } <nl> + static func wrongType ( ) - > Int { return 0 } <nl> + func notStatic ( ) - > NonOptSet { return NonOptSet ( ) } <nl> + } <nl> + <nl> + func testNonOptSet ( ) { <nl> + let x : NonOptSet <nl> + x = . # ^ NON_OPT_SET_1 ^ # <nl> + } <nl> + / / NON_OPT_SET_1 : Begin completions , 4 items <nl> + / / NON_OPT_SET_1 - DAG : Decl [ StaticVar ] / CurrNominal : a [ # NonOptSet # ] <nl> + / / NON_OPT_SET_1 - DAG : Decl [ Constructor ] / CurrNominal : init ( { # x : Int # } , { # y : Int # } ) [ # NonOptSet # ] <nl> + / / NON_OPT_SET_1 - DAG : Decl [ Constructor ] / CurrNominal : init ( ) [ # NonOptSet # ] <nl> + / / NON_OPT_SET_1 - DAG : Decl [ StaticMethod ] / CurrNominal : b ( ) [ # NonOptSet # ] <nl> + / / NON_OPT_SET_1 : End completions <nl> + <nl> + func testNonOptSet ( ) { <nl> + let x : NonOptSet = . # ^ NON_OPT_SET_2 ^ # <nl> + } <nl> + <nl> + func testNonOptSet ( ) - > NonOptSet { <nl> + return . # ^ NON_OPT_SET_3 ^ # <nl> + } <nl> mmm a / test / IDE / complete_value_expr . swift <nl> ppp b / test / IDE / complete_value_expr . 
swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = FOO_OBJECT_DOT_1 | FileCheck % s - check - prefix = FOO_OBJECT_DOT <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = FOO_OBJECT_DOT_2 | FileCheck % s - check - prefix = FOO_OBJECT_DOT <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = FOO_OBJECT_DOT_3 | FileCheck % s - check - prefix = FOO_OBJECT_DOT <nl> mmm a / test / IDE / complete_vararg . swift <nl> ppp b / test / IDE / complete_vararg . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = TOP_LEVEL_1 | FileCheck % s - check - prefix = TOP_LEVEL_1 <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = OBJ_DOT_1 | FileCheck % s - check - prefix = OBJ_DOT_1 <nl> / / RUN : % target - swift - ide - test - code - completion - source - filename % s - code - completion - token = FREE_FUNC_1 | FileCheck % s - check - prefix = FREE_FUNC_1 <nl> mmm a / test / IDE / print_ast_tc_decls . swift <nl> ppp b / test / IDE / print_ast_tc_decls . swift <nl> public func ParamAttrs2 ( a : @ autoclosure ( escaping ) ( ) - > ( ) ) { <nl> a ( ) <nl> } <nl> <nl> - / / PASS_PRINT_AST : public func ParamAttrs3 ( a : @ noescape ( ) - > ( ) ) <nl> - public func ParamAttrs3 ( a : @ noescape ( ) - > ( ) ) { <nl> + / / PASS_PRINT_AST : public func ParamAttrs3 ( a : ( ) - > ( ) ) <nl> + public func ParamAttrs3 ( a : ( ) - > ( ) ) { <nl> a ( ) <nl> } <nl> <nl> mmm a / test / IDE / print_clang_bool_bridging . swift <nl> ppp b / test / IDE / print_clang_bool_bridging . 
swift <nl> typealias CBoolBlock = ( Bool ) - > Bool <nl> typealias ObjCBoolBlock = ( Bool ) - > Bool <nl> typealias DarwinBooleanBlock = ( Bool ) - > Bool <nl> <nl> - func testCBoolFnToBlock ( _ : @ convention ( c ) ( Bool ) - > Bool ) - > ( Bool ) - > Bool <nl> - func testObjCBoolFnToBlock ( _ : @ convention ( c ) ( ObjCBool ) - > ObjCBool ) - > ( Bool ) - > Bool <nl> - func testDarwinBooleanFnToBlock ( _ : @ convention ( c ) ( DarwinBoolean ) - > DarwinBoolean ) - > ( Bool ) - > Bool <nl> + func testCBoolFnToBlock ( _ : @ escaping @ convention ( c ) ( Bool ) - > Bool ) - > ( Bool ) - > Bool <nl> + func testObjCBoolFnToBlock ( _ : @ escaping @ convention ( c ) ( ObjCBool ) - > ObjCBool ) - > ( Bool ) - > Bool <nl> + func testDarwinBooleanFnToBlock ( _ : @ escaping @ convention ( c ) ( DarwinBoolean ) - > DarwinBoolean ) - > ( Bool ) - > Bool <nl> <nl> func testCBoolFnToBlockTypedef ( _ : CBoolFn ) - > CBoolBlock <nl> func testObjCBoolFnToBlockTypedef ( _ : ObjCBoolFn ) - > ObjCBoolBlock <nl> class Test : NSObject { <nl> var propObjCBoolBlock : ( Bool ) - > Bool <nl> var propDarwinBooleanBlock : ( Bool ) - > Bool <nl> <nl> - func testCBoolFn ( toBlock fp : @ convention ( c ) ( Bool ) - > Bool ) - > ( Bool ) - > Bool <nl> - func testObjCBoolFn ( toBlock fp : @ convention ( c ) ( ObjCBool ) - > ObjCBool ) - > ( Bool ) - > Bool <nl> - func testDarwinBooleanFn ( toBlock fp : @ convention ( c ) ( DarwinBoolean ) - > DarwinBoolean ) - > ( Bool ) - > Bool <nl> + func testCBoolFn ( toBlock fp : @ escaping @ convention ( c ) ( Bool ) - > Bool ) - > ( Bool ) - > Bool <nl> + func testObjCBoolFn ( toBlock fp : @ escaping @ convention ( c ) ( ObjCBool ) - > ObjCBool ) - > ( Bool ) - > Bool <nl> + func testDarwinBooleanFn ( toBlock fp : @ escaping @ convention ( c ) ( DarwinBoolean ) - > DarwinBoolean ) - > ( Bool ) - > Bool <nl> <nl> - func produceCBoolBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ convention ( block ) ( Bool ) - > Bool ) ? > ) <nl> - func produceObjCBoolBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ convention ( block ) ( ObjCBool ) - > ObjCBool ) ? > ) <nl> - func produceDarwinBooleanBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ convention ( block ) ( DarwinBoolean ) - > DarwinBoolean ) ? > ) <nl> + func produceCBoolBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ escaping @ convention ( block ) ( Bool ) - > Bool ) ? > ) <nl> + func produceObjCBoolBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ escaping @ convention ( block ) ( ObjCBool ) - > ObjCBool ) ? > ) <nl> + func produceDarwinBooleanBlockTypedef ( _ outBlock : AutoreleasingUnsafeMutablePointer < ( @ escaping @ convention ( block ) ( DarwinBoolean ) - > DarwinBoolean ) ? > ) <nl> <nl> init ( ) <nl> } <nl> mmm a / test / IDE / print_clang_decls . swift <nl> ppp b / test / IDE / print_clang_decls . swift <nl> <nl> / / CHECK - NULLABILITY : class SomeClass { <nl> / / CHECK - NULLABILITY : class func methodA ( _ obj : SomeClass ? ) - > Any { { $ } } <nl> / / CHECK - NULLABILITY : func methodA ( _ obj : SomeClass ? ) - > Any { { $ } } <nl> - / / CHECK - NULLABILITY : class func methodB ( _ block : ( ( Int32 , Int32 ) - > Int32 ) ? = nil ) - > Any { { $ } } <nl> - / / CHECK - NULLABILITY : func methodB ( _ block : ( ( Int32 , Int32 ) - > Int32 ) ? = nil ) - > Any { { $ } } <nl> + / / CHECK - NULLABILITY : class func methodB ( _ block : ( @ escaping ( Int32 , Int32 ) - > Int32 ) ? 
= nil ) - > Any { { $ } } <nl> + / / CHECK - NULLABILITY : func methodB ( _ block : ( @ escaping ( Int32 , Int32 ) - > Int32 ) ? = nil ) - > Any { { $ } } <nl> / / CHECK - NULLABILITY : func methodC ( ) - > Any ? <nl> / / CHECK - NULLABILITY : var property : Any ? <nl> / / CHECK - NULLABILITY : func stringMethod ( ) - > String { { $ } } <nl> mmm a / test / IDE / print_clang_swift_name . swift <nl> ppp b / test / IDE / print_clang_swift_name . swift <nl> class TestError : NSObject { <nl> convenience init ( aa x : Any ? , error : ( ) ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( aa : error : ) ' " ) <nl> class func err2 ( _ x : Any ? ) throws - > Self <nl> - convenience init ( aa x : Any ? , error : ( ) , block : ( ) - > Void ) throws <nl> + convenience init ( aa x : Any ? , error : ( ) , block : @ escaping ( ) - > Void ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( aa : error : block : ) ' " ) <nl> - class func err3 ( _ x : Any ? , callback block : ( ) - > Void ) throws - > Self <nl> - convenience init ( error : ( ) , block : ( ) - > Void ) throws <nl> + class func err3 ( _ x : Any ? , callback block : @ escaping ( ) - > Void ) throws - > Self <nl> + convenience init ( error : ( ) , block : @ escaping ( ) - > Void ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( error : block : ) ' " ) <nl> - class func err4 ( callback block : ( ) - > Void ) throws - > Self <nl> + class func err4 ( callback block : @ escaping ( ) - > Void ) throws - > Self <nl> <nl> convenience init ( aa x : Any ? ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( aa : ) ' " ) <nl> class func err5 ( _ x : Any ? ) throws - > Self <nl> - convenience init ( aa x : Any ? , block : ( ) - > Void ) throws <nl> + convenience init ( aa x : Any ? , block : @ escaping ( ) - > Void ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( aa : block : ) ' " ) <nl> - class func err6 ( _ x : Any ? , callback block : ( ) - > Void ) throws - > Self <nl> - convenience init ( block : ( ) - > Void ) throws <nl> + class func err6 ( _ x : Any ? , callback block : @ escaping ( ) - > Void ) throws - > Self <nl> + convenience init ( block : @ escaping ( ) - > Void ) throws <nl> @ available ( * , unavailable , message : " use object construction ' TestError ( block : ) ' " ) <nl> - class func err7 ( callback block : ( ) - > Void ) throws - > Self <nl> + class func err7 ( callback block : @ escaping ( ) - > Void ) throws - > Self <nl> <nl> / / Would - be initializers . <nl> class func ww ( _ x : Any ? ) throws - > Self <nl> class TestSub : Test { <nl> class TestErrorSub : TestError { <nl> convenience init ( error : ( ) ) throws <nl> convenience init ( aa x : Any ? , error : ( ) ) throws <nl> - convenience init ( aa x : Any ? , error : ( ) , block : ( ) - > Void ) throws <nl> - convenience init ( error : ( ) , block : ( ) - > Void ) throws <nl> + convenience init ( aa x : Any ? , error : ( ) , block : @ escaping ( ) - > Void ) throws <nl> + convenience init ( error : ( ) , block : @ escaping ( ) - > Void ) throws <nl> convenience init ( aa x : Any ? ) throws <nl> - convenience init ( aa x : Any ? , block : ( ) - > Void ) throws <nl> - convenience init ( block : ( ) - > Void ) throws <nl> + convenience init ( aa x : Any ? 
, block : @ escaping ( ) - > Void ) throws <nl> + convenience init ( block : @ escaping ( ) - > Void ) throws <nl> init ( ) <nl> } <nl> mmm a / test / IDE / print_module_without_deinit . swift <nl> ppp b / test / IDE / print_module_without_deinit . swift <nl> public class ImplicitOptionalInitContainer { <nl> public class AttributeContainer1 { <nl> / / ATTR1 : func m1 ( a : @ autoclosure ( ) - > Int ) <nl> public func m1 ( a : @ autoclosure ( ) - > Int ) { } <nl> - / / ATTR1 : func m2 ( a : @ noescape ( ) - > Int ) <nl> - public func m2 ( a : @ noescape ( ) - > Int ) { } <nl> + / / ATTR1 : func m2 ( a : ( ) - > Int ) <nl> + public func m2 ( a : @ noescape ( ) - > Int ) { } / / TODO : drop @ noescape <nl> + / / ATTR1 : func m3 ( a : @ escaping ( ) - > Int ) <nl> + public func m3 ( a : @ escaping ( ) - > Int ) { } <nl> } <nl> mmm a / test / IDE / print_omit_needless_words . swift <nl> ppp b / test / IDE / print_omit_needless_words . swift <nl> <nl> / / CHECK - FOUNDATION - NEXT : case binary <nl> <nl> / / Note : Make sure NSURL works in various places <nl> - / / CHECK - FOUNDATION : open ( _ : NSURL ! , completionHandler : ( ( Bool ) - > Void ) ! ) <nl> + / / CHECK - FOUNDATION : open ( _ : NSURL ! , completionHandler : ( @ escaping ( Bool ) - > Void ) ! ) <nl> <nl> / / Note : property name stripping property type . <nl> / / CHECK - FOUNDATION : var uppercased : String <nl> <nl> / / CHECK - FOUNDATION : static var reverse : EnumerationOptions <nl> <nl> / / Note : usingBlock - > body <nl> - / / CHECK - FOUNDATION : func enumerateObjects ( _ : ( ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ! ) <nl> - / / CHECK - FOUNDATION : func enumerateObjects ( options : EnumerationOptions = [ ] , using : ( ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ! ) <nl> + / / CHECK - FOUNDATION : func enumerateObjects ( _ : ( @ escaping ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ! ) <nl> + / / CHECK - FOUNDATION : func enumerateObjects ( options : EnumerationOptions = [ ] , using : ( @ escaping ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ! ) <nl> <nl> / / Note : WithBlock - > body , nullable closures default to nil . <nl> - / / CHECK - FOUNDATION : func enumerateObjectsRandomly ( block : ( ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ? = nil ) <nl> + / / CHECK - FOUNDATION : func enumerateObjectsRandomly ( block : ( @ escaping ( Any ? , Int , UnsafeMutablePointer < ObjCBool > ? ) - > Void ) ? = nil ) <nl> <nl> / / Note : id < Proto > treated as " Proto " . <nl> / / CHECK - FOUNDATION : func doSomething ( with : NSCopying ) <nl> <nl> / / CHECK - FOUNDATION : func doSomethingElse ( with : NSCopying & NSObjectProtocol ) <nl> <nl> / / Note : Function type - > " Function " . <nl> - / / CHECK - FOUNDATION : func sort ( _ : @ convention ( c ) ( Any , Any ) - > Int ) <nl> + / / CHECK - FOUNDATION : func sort ( _ : @ escaping @ convention ( c ) ( Any , Any ) - > Int ) <nl> <nl> / / Note : Plural : NSArray without type arguments - > " Objects " . <nl> / / CHECK - FOUNDATION : func remove ( _ : [ Any ] ) <nl> mmm a / test / IDE / print_types . swift <nl> ppp b / test / IDE / print_types . swift <nl> typealias MyInt = Int <nl> / / FULL : TypeAliasDecl ' ' ' MyInt ' ' ' swift_ide_test . MyInt . 
Type { { $ } } <nl> <nl> func testVariableTypes ( _ param : Int , param2 : inout Double ) { <nl> - / / CHECK : FuncDecl ' ' ' testVariableTypes ' ' ' ( Int , param2 : inout Double ) - > ( ) { { $ } } <nl> - / / FULL : FuncDecl ' ' ' testVariableTypes ' ' ' ( Swift . Int , param2 : inout Swift . Double ) - > ( ) { { $ } } <nl> + / / CHECK : FuncDecl ' ' ' testVariableTypes ' ' ' ( Int , inout Double ) - > ( ) { { $ } } <nl> + / / FULL : FuncDecl ' ' ' testVariableTypes ' ' ' ( Swift . Int , inout Swift . Double ) - > ( ) { { $ } } <nl> <nl> var a1 = 42 <nl> / / CHECK : VarDecl ' ' ' a1 ' ' ' Int { { $ } } <nl> func testFuncType6 ( ) - > ( Int , Int ) { } <nl> / / FULL : FuncDecl ' ' ' testFuncType6 ' ' ' ( ) - > ( Swift . Int , Swift . Int ) { { $ } } <nl> <nl> func testFuncType7 ( _ a : Int , withFloat b : Float ) { } <nl> - / / CHECK : FuncDecl ' ' ' testFuncType7 ' ' ' ( Int , withFloat : Float ) - > ( ) { { $ } } <nl> - / / FULL : FuncDecl ' ' ' testFuncType7 ' ' ' ( Swift . Int , withFloat : Swift . Float ) - > ( ) { { $ } } <nl> + / / CHECK : FuncDecl ' ' ' testFuncType7 ' ' ' ( Int , Float ) - > ( ) { { $ } } <nl> + / / FULL : FuncDecl ' ' ' testFuncType7 ' ' ' ( Swift . Int , Swift . Float ) - > ( ) { { $ } } <nl> <nl> func testVariadicFuncType ( _ a : Int , b : Float . . . ) { } <nl> - / / CHECK : FuncDecl ' ' ' testVariadicFuncType ' ' ' ( Int , b : Float . . . ) - > ( ) { { $ } } <nl> - / / FULL : FuncDecl ' ' ' testVariadicFuncType ' ' ' ( Swift . Int , b : Swift . Float . . . ) - > ( ) { { $ } } <nl> + / / CHECK : FuncDecl ' ' ' testVariadicFuncType ' ' ' ( Int , Float . . . ) - > ( ) { { $ } } <nl> + / / FULL : FuncDecl ' ' ' testVariadicFuncType ' ' ' ( Swift . Int , Swift . Float . . . ) - > ( ) { { $ } } <nl> <nl> - func testCurriedFuncType1 ( _ a : Int ) - > ( b : Float ) - > ( ) { } <nl> - / / CHECK : FuncDecl ' ' ' testCurriedFuncType1 ' ' ' ( Int ) - > ( b : Float ) - > ( ) { { $ } } <nl> - / / FULL : FuncDecl ' ' ' testCurriedFuncType1 ' ' ' ( Swift . Int ) - > ( b : Swift . Float ) - > ( ) { { $ } } <nl> + func testCurriedFuncType1 ( _ a : Int ) - > ( _ b : Float ) - > ( ) { } <nl> + / / CHECK : FuncDecl ' ' ' testCurriedFuncType1 ' ' ' ( Int ) - > ( Float ) - > ( ) { { $ } } <nl> + / / FULL : FuncDecl ' ' ' testCurriedFuncType1 ' ' ' ( Swift . Int ) - > ( Swift . Float ) - > ( ) { { $ } } <nl> <nl> protocol FooProtocol { } <nl> protocol BarProtocol { } <nl> mmm a / test / IRGen / builtins . swift <nl> ppp b / test / IRGen / builtins . swift <nl> func testCondFail ( _ b : Bool , c : Bool ) { <nl> / / CHECK - objc : [ [ IS_DONE : % . * ] ] = icmp eq [ [ WORD ] ] [ [ PRED ] ] , - 1 <nl> / / CHECK - objc : call void @ llvm . assume ( i1 [ [ IS_DONE ] ] ) <nl> <nl> - func testOnce ( _ p : Builtin . RawPointer , f : @ convention ( thin ) ( ) - > ( ) ) { <nl> + func testOnce ( _ p : Builtin . RawPointer , f : @ escaping @ convention ( thin ) ( ) - > ( ) ) { <nl> Builtin . once ( p , f ) <nl> } <nl> <nl> struct S { } <nl> protocol P { } <nl> <nl> / / CHECK - LABEL : define hidden void @ _TF8builtins10canBeClass <nl> - func canBeClass < T > ( _ f : ( Builtin . Int8 ) - > ( ) , _ : T ) { <nl> + func canBeClass < T > ( _ f : @ escaping ( Builtin . Int8 ) - > ( ) , _ : T ) { <nl> / / CHECK : call void { { % . * } } ( i8 1 <nl> f ( Builtin . canBeClass ( O . self ) ) <nl> / / CHECK : call void { { % . * } } ( i8 1 <nl> mmm a / test / IRGen / objc_factory_method . sil <nl> ppp b / test / IRGen / objc_factory_method . 
sil <nl> sil @ _TFCSo4HiveCfMS_FT5queenGSQCSo3Bee__S_ : $ @ convention ( thin ) ( @ owned Implici <nl> bb0 ( % 0 : $ ImplicitlyUnwrappedOptional < Bee > , % 1 : $ @ thick Hive . Type ) : <nl> % 2 = thick_to_objc_metatype % 1 : $ @ thick Hive . Type to $ @ objc_metatype Hive . Type / / users : % 3 , % 4 <nl> / / CHECK : load i8 * , i8 * * @ " \ 01L_selector ( hiveWithQueen : ) " <nl> - % 3 = class_method % 2 : $ @ objc_metatype Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( queen : ImplicitlyUnwrappedOptional < Bee > ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > / / user : % 4 <nl> + % 3 = class_method % 2 : $ @ objc_metatype Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( ImplicitlyUnwrappedOptional < Bee > ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > / / user : % 4 <nl> / / CHECK : call { { . * } } @ objc_msgSend <nl> % 4 = apply % 3 ( % 0 , % 2 ) : $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > / / users : % 5 , % 6 <nl> / / CHECK : call { { . * } } @ objc_autorelease <nl> mmm a / test / IRGen / partial_apply . sil <nl> ppp b / test / IRGen / partial_apply . sil <nl> entry ( % f : $ @ callee_owned ( Builtin . Word ) - > ( ) , % x : $ Builtin . Word ) : <nl> <nl> sil @ objc_partial_apply : $ @ convention ( thin ) ObjCClass - > @ callee_owned Int - > ( ) { <nl> entry ( % c : $ ObjCClass ) : <nl> - % m = class_method [ volatile ] % c : $ ObjCClass , # ObjCClass . method ! 1 . foreign : ( ObjCClass ) - > ( x : Int ) - > ( ) , $ @ convention ( objc_method ) ( Int , ObjCClass ) - > ( ) <nl> + % m = class_method [ volatile ] % c : $ ObjCClass , # ObjCClass . method ! 1 . foreign : ( ObjCClass ) - > ( Int ) - > ( ) , $ @ convention ( objc_method ) ( Int , ObjCClass ) - > ( ) <nl> % p = partial_apply % m ( % c ) : $ @ convention ( objc_method ) ( Int , ObjCClass ) - > ( ) <nl> return % p : $ @ callee_owned Int - > ( ) <nl> } <nl> mmm a / test / Inputs / comment_to_something_conversion . swift <nl> ppp b / test / Inputs / comment_to_something_conversion . swift <nl> public enum A012_AttachToEntities { <nl> / / / - Parameter rhs : The right - hand side of the operator <nl> / / / - Returns : A result . <nl> / / / - Throws : Nothing . <nl> - @ objc public func closureParameterExplodedExploded ( a : Int , combine : ( lhs : Int , rhs : Int ) - > Int ) { } <nl> + @ objc public func closureParameterExplodedExploded ( a : Int , combine : ( _ lhs : Int , _ rhs : Int ) - > Int ) { } <nl> / / CHECK : DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > closureParameterExplodedExploded ( a : combine : ) < / Name > < USR > s : FC14swift_ide_test16ClosureContainer32closureParameterExplodedExplodedFT1aSi7combineFT3lhsSi3rhsSi_Si_T_ < / USR > < Declaration > @ objc public func closureParameterExplodedExploded ( a : Int , combine : ( lhs : Int , rhs : Int ) - & gt ; Int ) < / Declaration > < Abstract > < Para > Partially applies a binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > a < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side to partially apply . 
< / Para > < / Discussion > < / Parameter > < Parameter > < Name > combine < / Name > < Direction isExplicit = " 0 " > in < / Direction > < ClosureParameter > < Abstract > < Para > A binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > lhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side of the operator < / Para > < / Discussion > < / Parameter > < Parameter > < Name > rhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The right - hand side of the operator < / Para > < / Discussion > < / Parameter > < / Parameters > < ResultDiscussion > < Para > A result . < / Para > < / ResultDiscussion > < ThrowsDiscussion > < Para > Nothing . < / Para > < / ThrowsDiscussion > < / ClosureParameter > < / Parameter > < / Parameters > < / Function > ] <nl> <nl> / / / Partially applies a binary operator . <nl> public enum A012_AttachToEntities { <nl> / / / - Parameter rhs : The right - hand side of the operator <nl> / / / - Returns : A result . <nl> / / / - Throws : Nothing . <nl> - @ objc public func closureParameterOutlineExploded ( a : Int , combine : ( lhs : Int , rhs : Int ) - > Int ) { } <nl> + @ objc public func closureParameterOutlineExploded ( a : Int , combine : ( _ lhs : Int , _ rhs : Int ) - > Int ) { } <nl> / / CHECK : DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . * } } " > < Name > closureParameterOutlineExploded ( a : combine : ) < / Name > < USR > s : FC14swift_ide_test16ClosureContainer31closureParameterOutlineExplodedFT1aSi7combineFT3lhsSi3rhsSi_Si_T_ < / USR > < Declaration > @ objc public func closureParameterOutlineExploded ( a : Int , combine : ( lhs : Int , rhs : Int ) - & gt ; Int ) < / Declaration > < Abstract > < Para > Partially applies a binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > a < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side to partially apply . < / Para > < / Discussion > < / Parameter > < Parameter > < Name > combine < / Name > < Direction isExplicit = " 0 " > in < / Direction > < ClosureParameter > < Abstract > < Para > A binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > lhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side of the operator < / Para > < / Discussion > < / Parameter > < Parameter > < Name > rhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The right - hand side of the operator < / Para > < / Discussion > < / Parameter > < / Parameters > < ResultDiscussion > < Para > A result . < / Para > < / ResultDiscussion > < ThrowsDiscussion > < Para > Nothing . < / Para > < / ThrowsDiscussion > < / ClosureParameter > < / Parameter > < / Parameters > < / Function > ] <nl> <nl> / / / Partially applies a binary operator . <nl> public enum A012_AttachToEntities { <nl> / / / - rhs : The right - hand side of the operator <nl> / / / - Returns : A result . <nl> / / / - Throws : Nothing . <nl> - @ objc public func closureParameterOutlineOutline ( a : Int , combine : ( lhs : Int , rhs : Int ) - > Int ) { } <nl> + @ objc public func closureParameterOutlineOutline ( a : Int , combine : ( _ lhs : Int , _ rhs : Int ) - > Int ) { } <nl> / / CHECK : DocCommentAsXML = [ < Function file = " { { . * } } " line = " { { . * } } " column = " { { . 
* } } " > < Name > closureParameterOutlineOutline ( a : combine : ) < / Name > < USR > s : FC14swift_ide_test16ClosureContainer30closureParameterOutlineOutlineFT1aSi7combineFT3lhsSi3rhsSi_Si_T_ < / USR > < Declaration > @ objc public func closureParameterOutlineOutline ( a : Int , combine : ( lhs : Int , rhs : Int ) - & gt ; Int ) < / Declaration > < Abstract > < Para > Partially applies a binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > a < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side to partially apply . < / Para > < / Discussion > < / Parameter > < Parameter > < Name > combine < / Name > < Direction isExplicit = " 0 " > in < / Direction > < ClosureParameter > < Abstract > < Para > A binary operator . < / Para > < / Abstract > < Parameters > < Parameter > < Name > lhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The left - hand side of the operator < / Para > < / Discussion > < / Parameter > < Parameter > < Name > rhs < / Name > < Direction isExplicit = " 0 " > in < / Direction > < Discussion > < Para > The right - hand side of the operator < / Para > < / Discussion > < / Parameter > < / Parameters > < ResultDiscussion > < Para > A result . < / Para > < / ResultDiscussion > < ThrowsDiscussion > < Para > Nothing . < / Para > < / ThrowsDiscussion > < / ClosureParameter > < / Parameter > < / Parameters > < / Function > ] <nl> } <nl> <nl> mmm a / test / Interpreter / FunctionConversion . swift <nl> ppp b / test / Interpreter / FunctionConversion . swift <nl> func generic1 < T > ( t : Parent ) - > ( T , Trivial ) { <nl> return ( t as ! T , Trivial ( n : 0 ) ) <nl> } <nl> <nl> - func generic2 < T : Parent > ( f : ( Parent ) - > ( T , Trivial ) , t : T ) - > ( Child ) - > ( Parent , Trivial ? ) { <nl> + func generic2 < T : Parent > ( f : @ escaping ( Parent ) - > ( T , Trivial ) , t : T ) - > ( Child ) - > ( Parent , Trivial ? ) { <nl> return f <nl> } <nl> <nl> mmm a / test / Interpreter / SDK / Accelerate . swift <nl> ppp b / test / Interpreter / SDK / Accelerate . swift <nl> extension vU1024 : ExpressibleByIntegerLiteral , CustomStringConvertible , Equatabl <nl> public init ( integerLiteral : Int ) { <nl> var integerLiteral = integerLiteral <nl> self . init ( ) <nl> - memcpy ( & self , & integerLiteral , sizeof ( Int . self ) ) <nl> + memcpy ( & self , & integerLiteral , MemoryLayout < Int > . size ) <nl> } <nl> <nl> init ( _ int : Int ) { <nl> extension Int { <nl> var u1024 = u1024 <nl> / / NB : Doesn ' t overflow check <nl> self . init ( ) <nl> - memcpy ( & self , & u1024 , sizeof ( Int . self ) ) <nl> + memcpy ( & self , & u1024 , MemoryLayout < Int > . size ) <nl> } <nl> } <nl> <nl> func quorem ( _ x : vU1024 , _ y : vU1024 ) - > ( vU1024 , vU1024 ) { <nl> public func = = ( x : vU1024 , y : vU1024 ) - > Bool { <nl> var x = x <nl> var y = y <nl> - return memcmp ( & x , & y , sizeof ( vU1024 . self ) ) = = 0 <nl> + return memcmp ( & x , & y , MemoryLayout < vU1024 > . size ) = = 0 <nl> } <nl> <nl> func factorial ( _ x : Int ) - > vU1024 { <nl> mmm a / test / Interpreter / SDK / c_pointers . swift <nl> ppp b / test / Interpreter / SDK / c_pointers . swift <nl> print ( " < \ ( r ) \ ( g ) \ ( b ) \ ( a ) > " ) / / CHECK - NEXT : < 1 . 0 0 . 0 0 . 0 1 . 0 > <nl> <nl> / / FIXME : Array type annotation should not be required <nl> let data = NSData ( bytes : [ 1 . 5 , 2 . 25 , 3 . 125 ] as [ Double ] , <nl> - length : sizeof ( Double . 
self ) * 3 ) <nl> + length : MemoryLayout < Double > . size * 3 ) <nl> var fromData = [ 0 . 25 , 0 . 25 , 0 . 25 ] <nl> let notFromData = fromData <nl> - data . getBytes ( & fromData , length : sizeof ( Double . self ) * 3 ) <nl> + data . getBytes ( & fromData , length : MemoryLayout < Double > . size * 3 ) <nl> <nl> / / CHECK - LABEL : Data is : <nl> print ( " Data is : " ) <nl> puts ( s ) <nl> / / <nl> <nl> var unsorted = [ 3 , 14 , 15 , 9 , 2 , 6 , 5 ] <nl> - qsort ( & unsorted , unsorted . count , sizeofValue ( unsorted [ 0 ] ) ) { a , b in <nl> + qsort ( & unsorted , unsorted . count , MemoryLayout . _ofInstance ( unsorted [ 0 ] ) . size ) { a , b in <nl> return Int32 ( a ! . load ( as : Int . self ) - b ! . load ( as : Int . self ) ) <nl> } <nl> / / CHECK - NEXT : [ 2 , 3 , 5 , 6 , 9 , 14 , 15 ] <nl> mmm a / test / Interpreter / SDK / objc_cast . swift <nl> ppp b / test / Interpreter / SDK / objc_cast . swift <nl> if let strArr = objImplicitOpt as ? [ String ] { <nl> / / CHECK : Numbers - as - doubles cast produces [ 3 . 9375 , 2 . 71828 , 0 . 0 ] <nl> obj = ( [ 3 . 9375 , 2 . 71828 , 0 ] as [ Double ] ) as AnyObject <nl> if let doubleArr = obj as ? [ Double ] { <nl> - print ( sizeof ( Double . self ) ) <nl> + print ( MemoryLayout < Double > . size ) <nl> print ( " Numbers - as - doubles cast produces \ ( doubleArr ) " ) <nl> } else { <nl> print ( " Numbers - as - doubles failed " ) <nl> if let doubleArr = obj as ? [ Double ] { <nl> <nl> / / CHECK : Numbers - as - floats cast produces [ 3 . 9375 , 2 . 71828 { { . * } } , 0 . 0 ] <nl> if let floatArr = obj as ? [ Float ] { <nl> - print ( sizeof ( Float . self ) ) <nl> + print ( MemoryLayout < Float > . size ) <nl> print ( " Numbers - as - floats cast produces \ ( floatArr ) " ) <nl> } else { <nl> print ( " Numbers - as - floats failed " ) <nl> mmm a / test / Interpreter / builtin_bridge_object . swift <nl> ppp b / test / Interpreter / builtin_bridge_object . swift <nl> func hitOptionalSpecifically ( _ x : Builtin . BridgeObject ? ) { <nl> <nl> if true { <nl> / / CHECK - NEXT : true <nl> - print ( sizeof ( Optional < Builtin . BridgeObject > . self ) <nl> - = = sizeof ( Builtin . BridgeObject . self ) ) <nl> + print ( MemoryLayout < Optional < Builtin . BridgeObject > > . size <nl> + = = MemoryLayout < Builtin . BridgeObject > . size ) <nl> <nl> var bo : Builtin . BridgeObject ? = nil <nl> <nl> mmm a / test / Interpreter / closures . swift <nl> ppp b / test / Interpreter / closures . swift <nl> func localFunc ( _ x : Int ) - > Int { <nl> return addToX ( 1 ) <nl> } <nl> <nl> - func localFunc2 ( _ x : Int ) - > ( y : Int ) - > Int { <nl> + func localFunc2 ( _ x : Int ) - > ( _ y : Int ) - > Int { <nl> func addToX ( _ y : Int ) - > Int { <nl> return x + y <nl> } <nl> func test ( ) { <nl> / / CHECK : 3 <nl> print ( localFunc ( 2 ) ) <nl> / / CHECK : 5 <nl> - print ( localFunc2 ( 2 ) ( y : 3 ) ) <nl> + print ( localFunc2 ( 2 ) ( 3 ) ) <nl> <nl> var lf = localFunc <nl> / / CHECK : 8 <nl> func test ( ) { <nl> var lf2 = localFunc2 <nl> var lf2_ = lf2 ( 5 ) <nl> / / CHECK : 13 <nl> - print ( lf2_ ( y : 8 ) ) <nl> + print ( lf2_ ( 8 ) ) <nl> } <nl> <nl> test ( ) <nl> mmm a / test / Interpreter / currying_generics . swift <nl> ppp b / test / Interpreter / currying_generics . 
swift <nl> <nl> / / RUN : % target - run - simple - swift | FileCheck % s <nl> / / REQUIRES : executable_test <nl> <nl> - func curry < T , U , V > ( _ f : ( T , U ) - > V ) - > ( T ) - > ( U ) - > V { <nl> + func curry < T , U , V > ( _ f : @ escaping ( T , U ) - > V ) - > @ escaping ( T ) - > @ escaping ( U ) - > V { <nl> return { x in { y in f ( x , y ) } } <nl> } <nl> <nl> - func curry < T1 , T2 , T3 , T4 > ( _ f : ( T1 , T2 , T3 ) - > T4 ) - > ( T1 ) - > ( T2 ) - > ( T3 ) - > T4 { <nl> + func curry < T1 , T2 , T3 , T4 > ( _ f : @ escaping ( T1 , T2 , T3 ) - > T4 ) - > @ escaping ( T1 ) - > @ escaping ( T2 ) - > @ escaping ( T3 ) - > T4 { <nl> return { x in { y in { z in f ( x , y , z ) } } } <nl> } <nl> <nl> print ( test_compose_closure ( 20 ) ) / / CHECK - NEXT : 21 <nl> <nl> / / rdar : / / problem / 18988428 <nl> <nl> - func clamp < T : Comparable > ( _ minValue : T , _ maxValue : T ) - > ( n : T ) - > T { <nl> + func clamp < T : Comparable > ( _ minValue : T , _ maxValue : T ) - > @ escaping ( _ n : T ) - > T { <nl> return { n in max ( minValue , min ( n , maxValue ) ) } <nl> } <nl> <nl> let clampFoo2 = clamp ( 10 . 0 , 30 . 0 ) <nl> <nl> - print ( clampFoo2 ( n : 3 . 0 ) ) / / CHECK - NEXT : 10 . 0 <nl> + print ( clampFoo2 ( 3 . 0 ) ) / / CHECK - NEXT : 10 . 0 <nl> <nl> / / rdar : / / problem / 19195470 <nl> <nl> func pair < T , U > ( _ a : T ) - > ( U ) - > ( T , U ) { <nl> return { b in ( a , b ) } <nl> } <nl> <nl> - func pair_ < T , U > ( _ a : T ) - > ( b : U ) - > ( T , U ) { <nl> + func pair_ < T , U > ( _ a : T ) - > ( _ b : U ) - > ( T , U ) { <nl> return { b in ( a , b ) } <nl> } <nl> <nl> infix operator < + > { } <nl> - func < + > < T , U , V > ( lhs : T ? , rhs : ( T ) - > ( U ) - > V ) - > ( U ) - > V ? { <nl> + func < + > < T , U , V > ( lhs : T ? , rhs : @ escaping ( T ) - > @ escaping ( U ) - > V ) - > @ escaping ( U ) - > V ? { <nl> if let x = lhs { <nl> return { y in . some ( rhs ( x ) ( y ) ) } <nl> } else { <nl> print ( ( b < + > pair_ ) ( a ! ) ) / / CHECK - NEXT : ( 42 , 23 ) <nl> struct Identity < A > { let value : A } <nl> struct Const < A , B > { let value : A } <nl> <nl> - func fmap < A , B > ( _ f : ( A ) - > B ) - > ( Identity < A > ) - > Identity < B > { <nl> + func fmap < A , B > ( _ f : @ escaping ( A ) - > B ) - > @ escaping ( Identity < A > ) - > Identity < B > { <nl> return { identity in Identity ( value : f ( identity . 
value ) ) } <nl> } <nl> <nl> - func fmap < A , B > ( _ f : ( A ) - > B ) - > ( Const < A , B > ) - > Const < A , B > { <nl> + func fmap < A , B > ( _ f : @ escaping ( A ) - > B ) - > @ escaping ( Const < A , B > ) - > Const < A , B > { <nl> return { const in const } <nl> } <nl> <nl> func runIdentity < A > ( _ i : Identity < A > ) - > A { <nl> } <nl> <nl> <nl> - func view < S , A > ( _ lens : ( ( A ) - > Const < A , S > ) - > ( S ) - > ( ( ( A ) - > S ) - > ( Const < A , S > ) - > Const < A , S > ) - > Const < A , S > ) - > ( S ) - > A { <nl> + func view < S , A > ( _ lens : @ escaping ( @ escaping ( A ) - > Const < A , S > ) - > @ escaping ( S ) - > @ escaping ( @ escaping ( @ escaping ( A ) - > S ) - > @ escaping ( Const < A , S > ) - > Const < A , S > ) - > Const < A , S > ) - > @ escaping ( S ) - > A { <nl> return { s in getConst ( lens ( _Const ) ( s ) ( fmap ) ) } <nl> } <nl> <nl> - func over < S , A > ( _ lens : ( ( A ) - > Identity < A > ) - > ( S ) - > ( ( ( A ) - > S ) - > ( Identity < A > ) - > Identity < S > ) - > Identity < S > ) - > ( ( A ) - > A ) - > ( S ) - > S { <nl> + func over < S , A > ( _ lens : @ escaping ( @ escaping ( A ) - > Identity < A > ) - > @ escaping ( S ) - > @ escaping ( @ escaping ( @ escaping ( A ) - > S ) - > @ escaping ( Identity < A > ) - > Identity < S > ) - > Identity < S > ) - > @ escaping ( @ escaping ( A ) - > A ) - > @ escaping ( S ) - > S { <nl> return { f in { s in runIdentity ( lens ( { _Identity ( f ( $ 0 ) ) } ) ( s ) ( fmap ) ) } } <nl> } <nl> <nl> - func set < S , A > ( _ lens : ( ( A ) - > Identity < A > ) - > ( S ) - > ( ( ( A ) - > S ) - > ( Identity < A > ) - > Identity < S > ) - > Identity < S > ) - > ( A ) - > ( S ) - > S { <nl> + func set < S , A > ( _ lens : @ escaping ( @ escaping ( A ) - > Identity < A > ) - > @ escaping ( S ) - > @ escaping ( @ escaping ( @ escaping ( A ) - > S ) - > @ escaping ( Identity < A > ) - > Identity < S > ) - > Identity < S > ) - > @ escaping ( A ) - > @ escaping ( S ) - > S { <nl> return { x in { y in over ( lens ) ( const ( x ) ) ( y ) } } <nl> } <nl> <nl> - func _1 < A , B , C , D > ( _ f : ( A ) - > C ) - > ( A , B ) - > ( ( ( A ) - > ( A , B ) ) - > ( C ) - > D ) - > D { <nl> + func _1 < A , B , C , D > ( _ f : @ escaping ( A ) - > C ) - > @ escaping ( A , B ) - > @ escaping ( @ escaping ( @ escaping ( A ) - > ( A , B ) ) - > @ escaping ( C ) - > D ) - > D { <nl> return { ( x , y ) in { fmap in fmap ( { ( $ 0 , y ) } ) ( f ( x ) ) } } <nl> } <nl> <nl> - func _2 < A , B , C , D > ( _ f : ( B ) - > C ) - > ( A , B ) - > ( ( ( B ) - > ( A , B ) ) - > ( C ) - > D ) - > D { <nl> + func _2 < A , B , C , D > ( _ f : @ escaping ( B ) - > C ) - > @ escaping ( A , B ) - > @ escaping ( @ escaping ( @ escaping ( B ) - > ( A , B ) ) - > @ escaping ( C ) - > D ) - > D { <nl> return { ( x , y ) in { fmap in fmap ( { ( x , $ 0 ) } ) ( f ( y ) ) } } <nl> } <nl> <nl> <nl> - public func > > > < T , U , V > ( f : ( T ) - > U , g : ( U ) - > V ) - > ( T ) - > V { <nl> + public func > > > < T , U , V > ( f : @ escaping ( T ) - > U , g : @ escaping ( U ) - > V ) - > @ escaping ( T ) - > V { <nl> return { g ( f ( $ 0 ) ) } <nl> } <nl> <nl> - public func < < < < T , U , V > ( f : ( U ) - > V , g : ( T ) - > U ) - > ( T ) - > V { <nl> + public func < < < < T , U , V > ( f : @ escaping ( U ) - > V , g : @ escaping ( T ) - > U ) - > @ escaping ( T ) - > V { <nl> return { f ( g ( $ 0 ) ) } <nl> } <nl> <nl> mmm a / test / Interpreter / enum . swift <nl> ppp b / test / Interpreter / enum . 
swift <nl> struct OptionalTuple < T > { <nl> } <nl> } <nl> func test_optional_generic_tuple < T > ( _ a : OptionalTuple < T > ) - > T { <nl> - print ( " optional pair is same size as pair : \ ( sizeofValue ( a ) = = sizeof ( T ) * 2 ) " ) <nl> + print ( " optional pair is same size as pair : \ ( MemoryLayout . _ofInstance ( a ) . size = = MemoryLayout < T > . size * 2 ) " ) <nl> return a . value ! . 0 <nl> } <nl> print ( " Int result : \ ( test_optional_generic_tuple ( OptionalTuple < Int > ( ( 5 , 6 ) ) ) ) " ) <nl> mmm a / test / Interpreter / fractal . swift <nl> ppp b / test / Interpreter / fractal . swift <nl> func getMandelbrotIterations ( _ c : Complex , maxIterations : Int ) - > Int { <nl> return n <nl> } <nl> <nl> - func fractal ( _ densityFunc : ( c : Complex , maxIterations : Int ) - > Int , <nl> + func fractal ( _ densityFunc : ( _ c : Complex , _ maxIterations : Int ) - > Int , <nl> xMin : Double , xMax : Double , <nl> yMin : Double , yMax : Double , <nl> rows : Int , cols : Int , <nl> func fractal ( _ densityFunc : ( c : Complex , maxIterations : Int ) - > Int , <nl> for row in stride ( from : xMin , to : xMax , by : dX ) { <nl> for col in stride ( from : yMin , to : yMax , by : dY ) { <nl> var c = Complex ( real : col , imag : row ) <nl> - printDensity ( densityFunc ( c : c , maxIterations : maxIterations ) ) <nl> + printDensity ( densityFunc ( c , maxIterations ) ) <nl> } <nl> print ( " \ n " , terminator : " " ) <nl> } <nl> mmm a / test / Interpreter / layout_reabstraction . swift <nl> ppp b / test / Interpreter / layout_reabstraction . swift <nl> printMetatypeConditional ( any , Q . self ) <nl> / / of the size of a type . <nl> @ inline ( never ) <nl> func unspecializedSizeOf < T > ( _ t : T . Type ) - > Int { <nl> - return sizeof ( t ) <nl> + return MemoryLayout < T > . size <nl> } <nl> <nl> struct ContainsTrivialMetatype < T > { <nl> struct ContainsTupleOfTrivialMetatype < T > { <nl> } <nl> <nl> / / CHECK - NEXT : 8 <nl> - print ( sizeof ( ContainsTrivialMetatype < Int64 > . self ) ) <nl> + print ( MemoryLayout < ContainsTrivialMetatype < Int64 > > . size ) <nl> / / CHECK - NEXT : 8 <nl> print ( unspecializedSizeOf ( ContainsTrivialMetatype < Int64 > . self ) ) <nl> <nl> / / CHECK - NEXT : 8 <nl> - print ( sizeof ( ContainsTupleOfTrivialMetatype < Int64 > . self ) ) <nl> + print ( MemoryLayout < ContainsTupleOfTrivialMetatype < Int64 > > . size ) <nl> / / CHECK - NEXT : 8 <nl> print ( unspecializedSizeOf ( ContainsTupleOfTrivialMetatype < Int64 > . self ) ) <nl> <nl> struct ContainsTupleOfFunctions < T > { <nl> } <nl> <nl> / / CHECK - NEXT : 2 <nl> - print ( sizeof ( ContainsTupleOfFunctions < ( ) > . self ) / sizeof ( Int . self ) ) <nl> + print ( MemoryLayout < ContainsTupleOfFunctions < ( ) > > . size / MemoryLayout < Int > . size ) <nl> / / CHECK - NEXT : 2 <nl> - print ( unspecializedSizeOf ( ContainsTupleOfFunctions < ( ) > . self ) / sizeof ( Int . self ) ) <nl> + print ( unspecializedSizeOf ( ContainsTupleOfFunctions < ( ) > . self ) / MemoryLayout < Int > . size ) <nl> / / CHECK - NEXT : 3 <nl> - print ( sizeof ( ContainsTupleOfFunctions < Int > . self ) / sizeof ( Int . self ) ) <nl> + print ( MemoryLayout < ContainsTupleOfFunctions < Int > > . size / MemoryLayout < Int > . size ) <nl> / / CHECK - NEXT : 3 <nl> - print ( unspecializedSizeOf ( ContainsTupleOfFunctions < Int > . self ) / sizeof ( Int . self ) ) <nl> + print ( unspecializedSizeOf ( ContainsTupleOfFunctions < Int > . self ) / MemoryLayout < Int > . 
size ) <nl> <nl> let x = ContainsTupleOfFunctions ( x : ( 1 , { $ 0 + 1 } ) ) <nl> let y = ContainsTupleOfFunctions ( x : ( " foo " , { $ 0 + " bar " } ) ) <nl> mmm a / test / Interpreter / optional . swift <nl> ppp b / test / Interpreter / optional . swift <nl> class B : A { <nl> } <nl> <nl> func printA ( _ v : A ) { v . printA ( ) } <nl> - func printOpt < T > ( _ subprint : ( T ) - > ( ) ) - > ( T ? ) - > ( ) { <nl> + func printOpt < T > ( _ subprint : @ escaping ( T ) - > ( ) ) - > ( T ? ) - > ( ) { <nl> return { x in <nl> switch ( x ) { <nl> case . some ( let y ) : print ( " . some ( " , terminator : " " ) ; subprint ( y ) ; print ( " ) " , terminator : " " ) <nl> mmm a / test / Interpreter / repl . swift <nl> ppp b / test / Interpreter / repl . swift <nl> var _ : ( [ Int ] ) . Type = [ 4 ] . dynamicType <nl> / / CHECK : : ( [ Int ] ) . Type <nl> var _ : ( ( Int ) - > Int ) ? = . none <nl> / / CHECK : : ( ( Int ) - > Int ) ? <nl> - func chained ( f f : ( Int ) - > ( ) ) - > Int { return 0 } <nl> + func chained ( f f : @ escaping ( Int ) - > ( ) ) - > Int { return 0 } <nl> chained <nl> - / / CHECK : : ( f : ( Int ) - > ( ) ) - > Int <nl> + / / CHECK : : ( @ escaping ( Int ) - > ( ) ) - > Int <nl> [ chained ] <nl> - / / CHECK : : [ ( f : ( Int ) - > ( ) ) - > Int ] <nl> + / / CHECK : : [ ( @ escaping ( Int ) - > ( ) ) - > Int ] <nl> <nl> ( { 97210 } ( ) ) <nl> / / CHECK : = 97210 <nl> mmm a / test / Interpreter / selector_arguments . swift <nl> ppp b / test / Interpreter / selector_arguments . swift <nl> func divide ( _ a : Int , byDividend b : Int ) - > Int { return a / b } <nl> print ( divide ( 12 , byDividend : 4 ) ) / / CHECK : 3 <nl> print ( divide ( 12 , byDividend : 3 ) ) / / CHECK : 4 <nl> <nl> - var f : ( _ : Int , byDividend : Int ) - > Int = divide <nl> + var f : ( _ : Int , _ byDividend : Int ) - > Int = divide <nl> <nl> - print ( f ( 20 , byDividend : 2 ) ) / / CHECK : 10 <nl> + print ( f ( 20 , 2 ) ) / / CHECK : 10 <nl> <nl> func divide ( _ a : Int , byDividends b : Int , _ c : Int , thenAdd d : Int ) - > Int { <nl> return a / b / c + d <nl> func divide ( _ a : Int , byDividends b : Int , _ c : Int , _ d : Int , thenAdd e : Int ) - > <nl> print ( divide ( 60 , byDividends : 2 , 3 , thenAdd : 100 ) ) / / CHECK : 110 <nl> print ( divide ( 60 , byDividends : 2 , 3 , 5 , thenAdd : 100 ) ) / / CHECK : 102 <nl> <nl> - var g : ( _ : Int , byDividends : Int , _ : Int , thenAdd : Int ) - > Int = divide <nl> - var h : ( _ : Int , byDividends : Int , _ : Int , _ : Int , thenAdd : Int ) - > Int = divide <nl> + var g : ( _ : Int , _ byDividends : Int , _ : Int , _ thenAdd : Int ) - > Int = divide <nl> + var h : ( _ : Int , _ byDividends : Int , _ : Int , _ : Int , _ thenAdd : Int ) - > Int = divide <nl> <nl> - print ( g ( 60 , byDividends : 2 , 3 , thenAdd : 300 ) ) / / CHECK : 310 <nl> - print ( h ( 60 , byDividends : 2 , 3 , 5 , thenAdd : 300 ) ) / / CHECK : 302 <nl> + print ( g ( 60 , 2 , 3 , 300 ) ) / / CHECK : 310 <nl> + print ( h ( 60 , 2 , 3 , 5 , 300 ) ) / / CHECK : 302 <nl> mmm a / test / NameBinding / accessibility . swift <nl> ppp b / test / NameBinding / accessibility . 
swift <nl> struct ConformerByLocalType : TypeProto { <nl> } <nl> <nl> private struct PrivateConformerByLocalType : TypeProto { <nl> - private struct TheType { } / / okay <nl> + struct TheType { } / / okay <nl> + } <nl> + <nl> + private struct PrivateConformerByLocalTypeBad : TypeProto { <nl> + private struct TheType { } / / expected - error { { struct ' TheType ' must be as accessible as its enclosing type because it matches a requirement in protocol ' TypeProto ' } } { { 3 - 10 = fileprivate } } <nl> } <nl> # endif <nl> <nl> mmm a / test / NameBinding / name_lookup . swift <nl> ppp b / test / NameBinding / name_lookup . swift <nl> protocol MyProto { <nl> <nl> / / < rdar : / / problem / 14488311 > <nl> struct DefaultArgumentFromExtension { <nl> - func g ( _ x : ( DefaultArgumentFromExtension ) - > ( ) - > ( ) = f ) { <nl> + func g ( _ x : @ escaping ( DefaultArgumentFromExtension ) - > ( ) - > ( ) = f ) { <nl> let f = 42 <nl> var x2 = x <nl> x2 = f / / expected - error { { cannot assign value of type ' Int ' to type ' ( DefaultArgumentFromExtension ) - > ( ) - > ( ) ' } } <nl> mmm a / test / Parse / invalid . swift <nl> ppp b / test / Parse / invalid . swift <nl> func test1 ( inout var x : Int ) { } / / expected - error { { parameter may not have mul <nl> / / expected - error @ - 1 { { ' inout ' before a parameter name is not allowed , place it before the parameter type instead } } { { 12 - 17 = } } { { 26 - 26 = inout } } <nl> func test2 ( inout let x : Int ) { } / / expected - error { { parameter may not have multiple ' inout ' , ' var ' , or ' let ' specifiers } } { { 18 - 22 = } } <nl> / / expected - error @ - 1 { { ' inout ' before a parameter name is not allowed , place it before the parameter type instead } } { { 12 - 17 = } } { { 26 - 26 = inout } } <nl> - func test3 ( f : ( inout x : Int ) - > Void ) { } / / expected - error { { ' inout ' before a parameter name is not allowed , place it before the parameter type instead } } <nl> + func test3 ( f : ( inout _ x : Int ) - > Void ) { } / / expected - error { { ' inout ' before a parameter name is not allowed , place it before the parameter type instead } } <nl> <nl> func test3 ( ) { <nl> undeclared_func ( / / expected - error { { use of unresolved identifier ' undeclared_func ' } } expected - note { { to match this opening ' ( ' } } expected - error { { expected ' , ' separator } } { { 19 - 19 = , } } <nl> mmm a / test / PrintAsObjC / Inputs / comments - expected - output . h <nl> ppp b / test / PrintAsObjC / Inputs / comments - expected - output . h <nl> SWIFT_CLASS ( " _TtC8comments16ClosureContainer " ) <nl> \ a combine error : Nothing . <nl> <nl> * / <nl> - - ( void ) closureParameterExplodedExplodedWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger lhs , NSInteger rhs ) ) combine ; <nl> + - ( void ) closureParameterExplodedExplodedWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger , NSInteger ) ) combine ; <nl> / * * <nl> Partially applies a binary operator . <nl> \ param a The left - hand side to partially apply . <nl> SWIFT_CLASS ( " _TtC8comments16ClosureContainer " ) <nl> \ a combine error : Nothing . <nl> <nl> * / <nl> - - ( void ) closureParameterOutlineExplodedWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger lhs , NSInteger rhs ) ) combine ; <nl> + - ( void ) closureParameterOutlineExplodedWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger , NSInteger ) ) combine ; <nl> / * * <nl> Partially applies a binary operator . 
<nl> \ param a The left - hand side to partially apply . <nl> SWIFT_CLASS ( " _TtC8comments16ClosureContainer " ) <nl> \ a combine error : Nothing . <nl> <nl> * / <nl> - - ( void ) closureParameterOutlineOutlineWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger lhs , NSInteger rhs ) ) combine ; <nl> + - ( void ) closureParameterOutlineOutlineWithA : ( NSInteger ) a combine : ( NSInteger ( ^ _Nonnull ) ( NSInteger , NSInteger ) ) combine ; <nl> - ( nonnull instancetype ) init OBJC_DESIGNATED_INITIALIZER ; <nl> @ end <nl> <nl> mmm a / test / PrintAsObjC / blocks . swift <nl> ppp b / test / PrintAsObjC / blocks . swift <nl> typealias MyInt = Int <nl> / / CHECK - NEXT : - ( void ( ^ _Nullable ) ( NSObject * _Nonnull , NSObject * _Nonnull ) ) returnsBlockWithTwoInputs ; <nl> / / CHECK - NEXT : - ( void ) blockWithTypealias : ( NSInteger ( ^ _Nonnull ) ( NSInteger , id _Nullable ) ) input ; <nl> / / CHECK - NEXT : - ( void ) blockWithSimpleTypealias : ( NSInteger ( ^ _Nonnull ) ( NSInteger ) ) input ; <nl> - / / CHECK - NEXT : - ( void ) namedArguments : ( void ( ^ _Nonnull ) ( float f1 , float f2 , double d1 , double d2 ) ) input ; <nl> - / / CHECK - NEXT : - ( void ) blockTakesNamedBlock : ( void ( ^ _Nonnull ) ( void ( ^ _Nonnull block ) ( void ) ) ) input ; <nl> - / / CHECK - NEXT : - ( void ( ^ _Nullable ) ( NSObject * _Nonnull object ) ) returnsBlockWithNamedInput ; <nl> + / / CHECK - NEXT : - ( void ) namedArguments : ( void ( ^ _Nonnull ) ( float , float , double , double ) ) input ; <nl> + / / CHECK - NEXT : - ( void ) blockTakesNamedBlock : ( void ( ^ _Nonnull ) ( void ( ^ _Nonnull ) ( void ) ) ) input ; <nl> + / / CHECK - NEXT : - ( void ( ^ _Nullable ) ( NSObject * _Nonnull ) ) returnsBlockWithNamedInput ; <nl> / / CHECK - NEXT : - ( void ) blockWithTypealiasWithNames : ( NSInteger ( ^ _Nonnull ) ( NSInteger a , id _Nullable b ) ) input ; <nl> - / / CHECK - NEXT : - ( void ) blockWithKeyword : ( NSInteger ( ^ _Nonnull ) ( NSInteger class_ ) ) _Nullable_ ; <nl> + / / CHECK - NEXT : - ( void ) blockWithKeyword : ( NSInteger ( ^ _Nonnull ) ( NSInteger ) ) _Nullable_ ; <nl> / / CHECK - NEXT : - ( NSInteger ( * _Nonnull ) ( NSInteger ) ) functionPointers : ( NSInteger ( * _Nonnull ) ( NSInteger ) ) input ; <nl> / / CHECK - NEXT : - ( void ) functionPointerTakesAndReturnsFunctionPointer : ( NSInteger ( * _Nonnull ( ^ _Nonnull ( * _Nonnull ) ( NSInteger ) ) ( NSInteger ) ) ( NSInteger ) ) input ; <nl> - / / CHECK - NEXT : - ( NSInteger ( * _Nonnull ) ( NSInteger result ) ) functionPointersWithName : ( NSInteger ( * _Nonnull ) ( NSInteger value ) ) input ; <nl> + / / CHECK - NEXT : - ( NSInteger ( * _Nonnull ) ( NSInteger ) ) functionPointersWithName : ( NSInteger ( * _Nonnull ) ( NSInteger ) ) input ; <nl> / / CHECK - NEXT : @ property ( nonatomic , copy ) NSInteger ( ^ _Nullable savedBlock ) ( NSInteger ) ; <nl> - / / CHECK - NEXT : @ property ( nonatomic , copy ) NSInteger ( ^ _Nullable savedBlockWithName ) ( NSInteger x ) ; <nl> + / / CHECK - NEXT : @ property ( nonatomic , copy ) NSInteger ( ^ _Nullable savedBlockWithName ) ( NSInteger ) ; <nl> / / CHECK - NEXT : @ property ( nonatomic ) NSInteger ( * _Nonnull savedFunctionPointer ) ( NSInteger ) ; <nl> / / CHECK - NEXT : @ property ( nonatomic ) NSInteger ( * _Nullable savedFunctionPointer2 ) ( NSInteger ) ; <nl> - / / CHECK - NEXT : @ property ( nonatomic ) NSInteger ( * _Nonnull savedFunctionPointerWithName ) ( NSInteger x ) ; <nl> - / / CHECK - NEXT : @ property ( nonatomic , copy , getter = this , setter = 
setThis : ) NSInteger ( ^ _Nonnull this_ ) ( NSInteger block ) ; <nl> - / / CHECK - NEXT : @ property ( nonatomic , getter = class , setter = setClass : ) NSInteger ( * _Nonnull class_ ) ( NSInteger function ) ; <nl> + / / CHECK - NEXT : @ property ( nonatomic ) NSInteger ( * _Nonnull savedFunctionPointerWithName ) ( NSInteger ) ; <nl> + / / CHECK - NEXT : @ property ( nonatomic , copy , getter = this , setter = setThis : ) NSInteger ( ^ _Nonnull this_ ) ( NSInteger ) ; <nl> + / / CHECK - NEXT : @ property ( nonatomic , getter = class , setter = setClass : ) NSInteger ( * _Nonnull class_ ) ( NSInteger ) ; <nl> / / CHECK - NEXT : init <nl> / / CHECK - NEXT : @ end <nl> @ objc class Callbacks { <nl> - func voidBlocks ( _ input : ( ) - > ( ) ) - > ( ) - > ( ) { <nl> + func voidBlocks ( _ input : @ escaping ( ) - > ( ) ) - > ( ) - > ( ) { <nl> return input <nl> } <nl> - func manyArguments ( _ input : ( Float , Float , Double , Double ) - > ( ) ) { } <nl> + func manyArguments ( _ input : @ escaping ( Float , Float , Double , Double ) - > ( ) ) { } <nl> <nl> - func blockTakesBlock ( _ input : ( ( ) - > ( ) ) - > ( ) ) { } <nl> - func blockReturnsBlock ( _ input : ( ) - > ( ) - > ( ) ) { } <nl> + func blockTakesBlock ( _ input : @ escaping ( ( ) - > ( ) ) - > ( ) ) { } <nl> + func blockReturnsBlock ( _ input : @ escaping ( ) - > ( ) - > ( ) ) { } <nl> func blockTakesAndReturnsBlock ( _ input : <nl> ( ( Int16 ) - > ( UInt16 ) ) - > <nl> ( ( Int8 ) - > ( UInt8 ) ) ) { } <nl> typealias MyInt = Int <nl> return nil <nl> } <nl> <nl> - func blockWithTypealias ( _ input : ( MyTuple ) - > MyInt ) { } <nl> - func blockWithSimpleTypealias ( _ input : ( MyInt ) - > MyInt ) { } <nl> + func blockWithTypealias ( _ input : @ escaping ( MyTuple ) - > MyInt ) { } <nl> + func blockWithSimpleTypealias ( _ input : @ escaping ( MyInt ) - > MyInt ) { } <nl> <nl> - func namedArguments ( _ input : ( f1 : Float , f2 : Float , d1 : Double , d2 : Double ) - > ( ) ) { } <nl> - func blockTakesNamedBlock ( _ input : ( block : ( ) - > ( ) ) - > ( ) ) { } <nl> - func returnsBlockWithNamedInput ( ) - > ( ( object : NSObject ) - > ( ) ) ? { <nl> + func namedArguments ( _ input : @ escaping ( _ f1 : Float , _ f2 : Float , _ d1 : Double , _ d2 : Double ) - > ( ) ) { } <nl> + func blockTakesNamedBlock ( _ input : @ escaping ( _ block : ( ) - > ( ) ) - > ( ) ) { } <nl> + func returnsBlockWithNamedInput ( ) - > ( ( _ object : NSObject ) - > ( ) ) ? 
{ <nl> return nil <nl> } <nl> <nl> func blockWithTypealiasWithNames ( _ input : ( MyNamedTuple ) - > MyInt ) { } <nl> <nl> - func blockWithKeyword ( _ _Nullable : ( ` class ` : Int ) - > Int ) { } <nl> + func blockWithKeyword ( _ _Nullable : ( _ ` class ` : Int ) - > Int ) { } <nl> <nl> - func functionPointers ( _ input : @ convention ( c ) ( Int ) - > Int ) <nl> + func functionPointers ( _ input : @ escaping @ convention ( c ) ( Int ) - > Int ) <nl> - > @ convention ( c ) ( Int ) - > Int { <nl> return input <nl> } <nl> <nl> func functionPointerTakesAndReturnsFunctionPointer ( <nl> - _ input : @ convention ( c ) ( Int ) - > ( Int ) <nl> + _ input : @ escaping @ convention ( c ) ( Int ) - > ( Int ) <nl> - > @ convention ( c ) ( Int ) - > Int <nl> ) { <nl> } <nl> <nl> - func functionPointersWithName ( _ input : @ convention ( c ) ( value : Int ) - > Int ) <nl> - - > @ convention ( c ) ( result : Int ) - > Int { <nl> + func functionPointersWithName ( _ input : @ escaping @ convention ( c ) ( _ value : Int ) - > Int ) <nl> + - > @ convention ( c ) ( _ result : Int ) - > Int { <nl> return input <nl> } <nl> <nl> var savedBlock : ( ( Int ) - > Int ) ? <nl> - var savedBlockWithName : ( ( x : Int ) - > Int ) ? <nl> + var savedBlockWithName : ( ( _ x : Int ) - > Int ) ? <nl> var savedFunctionPointer : @ convention ( c ) ( Int ) - > Int = { $ 0 } <nl> var savedFunctionPointer2 : ( @ convention ( c ) ( Int ) - > Int ) ? = { $ 0 } <nl> - var savedFunctionPointerWithName : @ convention ( c ) ( x : Int ) - > Int = { $ 0 } <nl> + var savedFunctionPointerWithName : @ convention ( c ) ( _ x : Int ) - > Int = { $ 0 } <nl> <nl> / / The following uses a clang keyword as the name . <nl> - var this : ( block : Int ) - > Int = { $ 0 } <nl> - var ` class ` : @ convention ( c ) ( function : Int ) - > Int = { $ 0 } <nl> + var this : ( _ block : Int ) - > Int = { $ 0 } <nl> + var ` class ` : @ convention ( c ) ( _ function : Int ) - > Int = { $ 0 } <nl> } <nl> mmm a / test / Prototypes / CollectionTransformers . swift <nl> ppp b / test / Prototypes / CollectionTransformers . swift <nl> final class _ForkJoinWorkDeque < T > { <nl> <nl> func tryReplace ( <nl> _ value : T , <nl> - makeReplacement : ( ) - > T , <nl> - isEquivalent : ( T , T ) - > Bool <nl> + makeReplacement : @ escaping ( ) - > T , <nl> + isEquivalent : @ escaping ( T , T ) - > Bool <nl> ) - > Bool { <nl> return _dequeMutex . withLock { <nl> for i in _deque . indices { <nl> final public class ForkJoinTask < Result > : ForkJoinTaskBase , _Future { <nl> internal let _task : ( ) - > Result <nl> internal var _result : Result ? = nil <nl> <nl> - public init ( _task : ( ) - > Result ) { <nl> + public init ( _task : @ escaping ( ) - > Result ) { <nl> self . _task = _task <nl> } <nl> <nl> final public class ForkJoinPool { <nl> } <nl> } <nl> <nl> - internal func _compensateForBlockedWorkerThread ( _ blockingBody : ( ) - > ( ) ) { <nl> + internal func _compensateForBlockedWorkerThread ( _ blockingBody : @ escaping ( ) - > ( ) ) { <nl> / / FIXME : limit the number of compensating threads . <nl> let submissionQueue = _ForkJoinWorkDeque < ForkJoinTaskBase > ( ) <nl> let workDeque = _ForkJoinWorkDeque < ForkJoinTaskBase > ( ) <nl> final public class ForkJoinPool { <nl> } <nl> <nl> / / FIXME : return a Future instead ? 
<nl> - public func forkTask < Result > ( task : ( ) - > Result ) - > ForkJoinTask < Result > { <nl> + public func forkTask < Result > ( task : @ escaping ( ) - > Result ) - > ForkJoinTask < Result > { <nl> let forkJoinTask = ForkJoinTask ( _task : task ) <nl> forkTask ( forkJoinTask ) <nl> return forkJoinTask <nl> internal class _CollectionTransformerStep < PipelineInputElement_ , OutputElement_ > <nl> typealias PipelineInputElement = PipelineInputElement_ <nl> typealias OutputElement = OutputElement_ <nl> <nl> - func map < U > ( _ transform : ( OutputElement ) - > U ) <nl> + func map < U > ( _ transform : @ escaping ( OutputElement ) - > U ) <nl> - > _CollectionTransformerStep < PipelineInputElement , U > { <nl> <nl> fatalError ( " abstract method " ) <nl> } <nl> <nl> - func filter ( _ isIncluded : ( OutputElement ) - > Bool ) <nl> + func filter ( _ isIncluded : @ escaping ( OutputElement ) - > Bool ) <nl> - > _CollectionTransformerStep < PipelineInputElement , OutputElement > { <nl> <nl> fatalError ( " abstract method " ) <nl> } <nl> <nl> - func reduce < U > ( _ initial : U , _ combine : ( U , OutputElement ) - > U ) <nl> + func reduce < U > ( _ initial : U , _ combine : @ escaping ( U , OutputElement ) - > U ) <nl> - > _CollectionTransformerFinalizer < PipelineInputElement , U > { <nl> <nl> fatalError ( " abstract method " ) <nl> final internal class _CollectionTransformerStepCollectionSource < <nl> <nl> typealias InputElement = PipelineInputElement <nl> <nl> - override func map < U > ( _ transform : ( InputElement ) - > U ) <nl> + override func map < U > ( _ transform : @ escaping ( InputElement ) - > U ) <nl> - > _CollectionTransformerStep < PipelineInputElement , U > { <nl> <nl> return _CollectionTransformerStepOneToMaybeOne ( self ) { <nl> final internal class _CollectionTransformerStepCollectionSource < <nl> } <nl> } <nl> <nl> - override func filter ( _ isIncluded : ( InputElement ) - > Bool ) <nl> + override func filter ( _ isIncluded : @ escaping ( InputElement ) - > Bool ) <nl> - > _CollectionTransformerStep < PipelineInputElement , InputElement > { <nl> <nl> return _CollectionTransformerStepOneToMaybeOne ( self ) { <nl> final internal class _CollectionTransformerStepCollectionSource < <nl> } <nl> } <nl> <nl> - override func reduce < U > ( _ initial : U , _ combine : ( U , InputElement ) - > U ) <nl> + override func reduce < U > ( _ initial : U , _ combine : @ escaping ( U , InputElement ) - > U ) <nl> - > _CollectionTransformerFinalizer < PipelineInputElement , U > { <nl> <nl> return _CollectionTransformerFinalizerReduce ( self , initial , combine ) <nl> final internal class _CollectionTransformerStepOneToMaybeOne < <nl> let _input : InputStep <nl> let _transform : ( InputElement ) - > OutputElement ? <nl> <nl> - init ( _ input : InputStep , _ transform : ( InputElement ) - > OutputElement ? ) { <nl> + init ( _ input : InputStep , _ transform : @ escaping ( InputElement ) - > OutputElement ? ) { <nl> self . _input = input <nl> self . _transform = transform <nl> super . init ( ) <nl> } <nl> <nl> - override func map < U > ( _ transform : ( OutputElement ) - > U ) <nl> + override func map < U > ( _ transform : @ escaping ( OutputElement ) - > U ) <nl> - > _CollectionTransformerStep < PipelineInputElement , U > { <nl> <nl> / / Let the closure below capture only one variable , not the whole ` self ` . 
<nl> final internal class _CollectionTransformerStepOneToMaybeOne < <nl> } <nl> } <nl> <nl> - override func filter ( _ isIncluded : ( OutputElement ) - > Bool ) <nl> + override func filter ( _ isIncluded : @ escaping ( OutputElement ) - > Bool ) <nl> - > _CollectionTransformerStep < PipelineInputElement , OutputElement > { <nl> <nl> / / Let the closure below capture only one variable , not the whole ` self ` . <nl> final internal class _CollectionTransformerStepOneToMaybeOne < <nl> } <nl> } <nl> <nl> - override func reduce < U > ( _ initial : U , _ combine : ( U , OutputElement ) - > U ) <nl> + override func reduce < U > ( _ initial : U , _ combine : @ escaping ( U , OutputElement ) - > U ) <nl> - > _CollectionTransformerFinalizer < PipelineInputElement , U > { <nl> <nl> return _CollectionTransformerFinalizerReduce ( self , initial , combine ) <nl> struct _ElementCollectorOneToMaybeOne < <nl> <nl> init ( <nl> _ baseCollector : BaseCollector , <nl> - _ transform : ( Element ) - > BaseCollector . Element ? <nl> + _ transform : @ escaping ( Element ) - > BaseCollector . Element ? <nl> ) { <nl> self . _baseCollector = baseCollector <nl> self . _transform = transform <nl> final class _CollectionTransformerFinalizerReduce < <nl> var _initial : U <nl> var _combine : ( U , InputElementTy ) - > U <nl> <nl> - init ( _ input : InputStep , _ initial : U , _ combine : ( U , InputElementTy ) - > U ) { <nl> + init ( _ input : InputStep , _ initial : U , _ combine : @ escaping ( U , InputElementTy ) - > U ) { <nl> self . _input = input <nl> self . _initial = initial <nl> self . _combine = combine <nl> struct _ElementCollectorReduce < Element_ , Result > : _ElementCollector { <nl> var _current : Result <nl> var _combine : ( Result , Element ) - > Result <nl> <nl> - init ( _ initial : Result , _ combine : ( Result , Element ) - > Result ) { <nl> + init ( _ initial : Result , _ combine : @ escaping ( Result , Element ) - > Result ) { <nl> self . _current = initial <nl> self . _combine = combine <nl> } <nl> public struct CollectionTransformerPipeline < <nl> internal var _input : InputCollection <nl> internal var _step : _CollectionTransformerStep < InputCollection . Iterator . Element , T > <nl> <nl> - public func map < U > ( _ transform : ( T ) - > U ) <nl> + public func map < U > ( _ transform : @ escaping ( T ) - > U ) <nl> - > CollectionTransformerPipeline < InputCollection , U > { <nl> <nl> return CollectionTransformerPipeline < InputCollection , U > ( <nl> public struct CollectionTransformerPipeline < <nl> ) <nl> } <nl> <nl> - public func filter ( _ isIncluded : ( T ) - > Bool ) <nl> + public func filter ( _ isIncluded : @ escaping ( T ) - > Bool ) <nl> - > CollectionTransformerPipeline < InputCollection , T > { <nl> <nl> return CollectionTransformerPipeline < InputCollection , T > ( <nl> public struct CollectionTransformerPipeline < <nl> } <nl> <nl> public func reduce < U > ( <nl> - _ initial : U , _ combine : ( U , T ) - > U <nl> + _ initial : U , _ combine : @ escaping ( U , T ) - > U <nl> ) - > U { <nl> return _runCollectionTransformer ( _input , _step . reduce ( initial , combine ) ) <nl> } <nl> t . test ( " ForkJoinPool . forkTask / Fibonacci " ) { <nl> expectEqual ( 102334155 , t . waitAndGetResult ( ) ) <nl> } <nl> <nl> - func _parallelMap ( _ input : [ Int ] , transform : ( Int ) - > Int , range : Range < Int > ) <nl> + func _parallelMap ( _ input : [ Int ] , transform : @ escaping ( Int ) - > Int , range : Range < Int > ) <nl> - > Array < Int > . 
Builder { <nl> <nl> var builder = Array < Int > . Builder ( ) <nl> func _parallelMap ( _ input : [ Int ] , transform : ( Int ) - > Int , range : Range < Int > ) <nl> return builder <nl> } <nl> <nl> - func parallelMap ( _ input : [ Int ] , transform : ( Int ) - > Int ) - > [ Int ] { <nl> + func parallelMap ( _ input : [ Int ] , transform : @ escaping ( Int ) - > Int ) - > [ Int ] { <nl> let t = ForkJoinPool . commonPool . forkTask { <nl> _parallelMap ( <nl> input , <nl> mmm a / test / Reflection / Inputs / ConcreteTypes . swift <nl> ppp b / test / Reflection / Inputs / ConcreteTypes . swift <nl> public class C { <nl> public let aMetatype : C . Type <nl> public let aFunction : ( C , S , E , Int ) - > ( Int ) <nl> public let aFunctionWithVarArgs : ( C , S . . . ) - > ( ) <nl> - public init ( aClass : C , aStruct : S , anEnum : E , aTuple : ( C , S , E , Int ) , aTupleWithLabels : ( a : C , s : S , e : E ) , aMetatype : C . Type , aFunction : ( C , S , E , Int ) - > Int , aFunctionWithVarArgs : ( C , S . . . ) - > ( ) ) { <nl> + public init ( aClass : C , aStruct : S , anEnum : E , aTuple : ( C , S , E , Int ) , aTupleWithLabels : ( a : C , s : S , e : E ) , aMetatype : C . Type , aFunction : @ escaping ( C , S , E , Int ) - > Int , aFunctionWithVarArgs : @ escaping ( C , S . . . ) - > ( ) ) { <nl> self . aClass = aClass <nl> self . aStruct = aStruct <nl> self . anEnum = anEnum <nl> mmm a / test / Runtime / weak - reference - racetests . swift <nl> ppp b / test / Runtime / weak - reference - racetests . swift <nl> class WBox < T : AnyObject > { <nl> <nl> class WeakReferenceRaceData { <nl> let closure : ( ) - > Void <nl> - init ( _ closure : ( ) - > Void ) { <nl> + init ( _ closure : @ escaping ( ) - > Void ) { <nl> self . closure = closure <nl> } <nl> } <nl> mmm a / test / SIL / Parser / overloaded_member . sil <nl> ppp b / test / SIL / Parser / overloaded_member . sil <nl> bb0 ( % 0 : $ A , % 1 : $ X ) : <nl> store % 5 to % 2a : $ * X <nl> % 7 = load % 2a : $ * X <nl> strong_retain % 7 : $ X <nl> - / / CHECK : class_method [ volatile ] % { { [ 0 - 9 ] + } } : $ X , # X . init ! initializer . 1 . foreign : ( X . Type ) - > ( a1 : A , a2 : A ) - > X , $ @ convention ( objc_method ) ( A , A , @ owned X ) - > @ owned X <nl> - % 9 = class_method [ volatile ] % 7 : $ X , # X . init ! initializer . 1 . foreign : ( X . Type ) - > ( a1 : A , a2 : A ) - > X , $ @ convention ( objc_method ) ( A , A , @ owned X ) - > @ owned X <nl> + / / CHECK : class_method [ volatile ] % { { [ 0 - 9 ] + } } : $ X , # X . init ! initializer . 1 . foreign : ( X . Type ) - > ( A , A ) - > X , $ @ convention ( objc_method ) ( A , A , @ owned X ) - > @ owned X <nl> + % 9 = class_method [ volatile ] % 7 : $ X , # X . init ! initializer . 1 . foreign : ( X . Type ) - > ( A , A ) - > X , $ @ convention ( objc_method ) ( A , A , @ owned X ) - > @ owned X <nl> % 10 = load % 3a : $ * A <nl> % 11 = load % 3a : $ * A <nl> % 12 = apply % 9 ( % 10 , % 11 , % 7 ) : $ @ convention ( objc_method ) ( A , A , @ owned X ) - > @ owned X <nl> mmm a / test / SILGen / apply_abstraction_nested . swift <nl> ppp b / test / SILGen / apply_abstraction_nested . 
swift <nl> func baz < T : P > ( _ : inout T ) - > ( Int ) - > ( ) { return { _ in ( ) } } <nl> <nl> func ~ > < T : P , Args , Result > ( <nl> x : inout T , <nl> - m : ( x : inout T ) - > ( ( Args ) - > Result ) <nl> + m : ( _ x : inout T ) - > ( ( Args ) - > Result ) <nl> ) - > ( ( Args ) - > Result ) { <nl> - return m ( x : & x ) <nl> + return m ( & x ) <nl> } <nl> <nl> struct X : P { } <nl> new file mode 100644 <nl> index 000000000000 . . 03d20c105ac8 <nl> mmm / dev / null <nl> ppp b / test / SILGen / argument_labels . swift <nl> <nl> + / / RUN : % target - swift - frontend - emit - silgen - suppress - argument - labels - in - types % s | FileCheck % s <nl> + <nl> + public struct X { } <nl> + public struct Y { } <nl> + <nl> + public class Foo { <nl> + func doSomething ( x : X , y : Y ) { } <nl> + func doSomethingElse ( x : X ) { } <nl> + } <nl> + <nl> + / / CHECK - LABEL : sil hidden @ _TF15argument_labels7testFoo <nl> + func testFoo ( foo : Foo , x : X , y : Y ) { <nl> + / / CHECK : class_method % 0 : $ Foo , # Foo . doSomething ! 1 : ( Foo ) - > ( X , Y ) - > ( ) <nl> + foo . doSomething ( x : x , y : y ) <nl> + <nl> + / / CHECK : class_method % 0 : $ Foo , # Foo . doSomethingElse ! 1 : ( Foo ) - > ( X ) - > ( ) <nl> + foo . doSomethingElse ( x : x ) <nl> + } <nl> + <nl> mmm a / test / SILGen / c_function_pointers . swift <nl> ppp b / test / SILGen / c_function_pointers . swift <nl> <nl> / / RUN : % target - swift - frontend - emit - silgen - verify % s | FileCheck % s <nl> <nl> - func values ( _ arg : @ convention ( c ) ( Int ) - > Int ) - > @ convention ( c ) ( Int ) - > Int { <nl> + func values ( _ arg : @ escaping @ convention ( c ) ( Int ) - > Int ) - > @ convention ( c ) ( Int ) - > Int { <nl> return arg <nl> } <nl> / / CHECK - LABEL : sil hidden @ _TF19c_function_pointers6valuesFcSiSicSiSi <nl> mmm a / test / SILGen / capture_typed_boxes . swift <nl> ppp b / test / SILGen / capture_typed_boxes . swift <nl> func foo ( _ x : Int ) - > ( ) - > Int { <nl> / / CHECK - LABEL : sil shared @ _TFF19capture_typed_boxes3fooFSiFT_SiU_FT_Si : $ @ convention ( thin ) ( @ owned @ box Int ) - > Int { <nl> / / CHECK : bb0 ( % 0 : $ @ box Int ) : <nl> <nl> - func closure ( _ f : ( Int ) - > Int ) - > Int { <nl> + func closure ( _ f : @ escaping ( Int ) - > Int ) - > Int { <nl> var f = f <nl> func bar ( _ x : Int ) - > Int { <nl> return f ( x ) <nl> func closure ( _ f : ( Int ) - > Int ) - > Int { <nl> / / CHECK - LABEL : sil shared @ _TFF19capture_typed_boxes7closureFFSiSiSiL_3barfSiSi : $ @ convention ( thin ) ( Int , @ owned @ box @ callee_owned ( Int ) - > Int ) - > Int { <nl> / / CHECK : bb0 ( % 0 : $ Int , % 1 : $ @ box @ callee_owned ( Int ) - > Int ) : <nl> <nl> - func closure_generic < T > ( _ f : ( T ) - > T , x : T ) - > T { <nl> + func closure_generic < T > ( _ f : @ escaping ( T ) - > T , x : T ) - > T { <nl> var f = f <nl> func bar ( _ x : T ) - > T { <nl> return f ( x ) <nl> mmm a / test / SILGen / closures . swift <nl> ppp b / test / SILGen / closures . 
swift <nl> func small_closure_capture ( _ x : Int ) - > Int { <nl> <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF8closures35small_closure_capture_with_argument <nl> - func small_closure_capture_with_argument ( _ x : Int ) - > ( y : Int ) - > Int { <nl> + func small_closure_capture_with_argument ( _ x : Int ) - > ( _ y : Int ) - > Int { <nl> var x = x <nl> / / CHECK : [ [ XBOX : % [ 0 - 9 ] + ] ] = alloc_box $ Int <nl> <nl> func small_closure_capture_with_argument ( _ x : Int ) - > ( y : Int ) - > Int { <nl> / / CHECK : return [ [ RET ] ] <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF8closures24small_closure_no_capture <nl> - func small_closure_no_capture ( ) - > ( y : Int ) - > Int { <nl> - / / CHECK : [ [ ANON : % [ 0 - 9 ] + ] ] = function_ref @ [ [ CLOSURE_NAME : _TFF8closures24small_closure_no_captureFT_FT1ySi_SiU_FSiSi ] ] : $ @ convention ( thin ) ( Int ) - > Int <nl> + func small_closure_no_capture ( ) - > ( _ y : Int ) - > Int { <nl> + / / CHECK : [ [ ANON : % [ 0 - 9 ] + ] ] = function_ref @ [ [ CLOSURE_NAME : _TFF8closures24small_closure_no_captureFT_FSiSiU_FSiSi ] ] : $ @ convention ( thin ) ( Int ) - > Int <nl> / / CHECK : [ [ ANON_THICK : % [ 0 - 9 ] + ] ] = thin_to_thick_function [ [ ANON ] ] : $ { { . * } } to $ @ callee_owned ( Int ) - > Int <nl> / / CHECK : return [ [ ANON_THICK ] ] <nl> return { $ 0 } <nl> mmm a / test / SILGen / complete_object_init . swift <nl> ppp b / test / SILGen / complete_object_init . swift <nl> class A { <nl> / / CHECK : [ [ SELF : % [ 0 - 9 ] + ] ] = mark_uninitialized [ delegatingself ] [ [ PB ] ] : $ * A <nl> / / CHECK : store [ [ SELF_PARAM ] ] to [ [ SELF ] ] : $ * A <nl> / / CHECK : [ [ SELFP : % [ 0 - 9 ] + ] ] = load [ [ SELF ] ] : $ * A <nl> - / / CHECK : [ [ INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELFP ] ] : $ A , # A . init ! initializer . 1 : ( A . Type ) - > ( x : X ) - > A , $ @ convention ( method ) ( X , @ owned A ) - > @ owned A <nl> + / / CHECK : [ [ INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELFP ] ] : $ A , # A . init ! initializer . 1 : ( A . Type ) - > ( X ) - > A , $ @ convention ( method ) ( X , @ owned A ) - > @ owned A <nl> / / CHECK : [ [ X_INIT : % [ 0 - 9 ] + ] ] = function_ref @ _TFV20complete_object_init1XC { { . * } } : $ @ convention ( method ) ( @ thin X . Type ) - > X <nl> / / CHECK : [ [ X_META : % [ 0 - 9 ] + ] ] = metatype $ @ thin X . Type <nl> / / CHECK : [ [ X : % [ 0 - 9 ] + ] ] = apply [ [ X_INIT ] ] ( [ [ X_META ] ] ) : $ @ convention ( method ) ( @ thin X . Type ) - > X <nl> mmm a / test / SILGen / default_arguments . swift <nl> ppp b / test / SILGen / default_arguments . swift <nl> func testTakeDefaultArgUnnamed ( _ i : Int ) { <nl> takeDefaultArgUnnamed ( i ) <nl> } <nl> <nl> - func takeDSOHandle ( _ handle : UnsafeMutableRawPointer = # dsohandle ) { } <nl> + func takeDSOHandle ( _ handle : UnsafeRawPointer = # dsohandle ) { } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF17default_arguments13testDSOHandleFT_T_ <nl> func testDSOHandle ( ) { <nl> - / / CHECK : [ [ DSO_HANDLE : % [ 0 - 9 ] + ] ] = global_addr @ __dso_handle : $ * UnsafeMutableRawPointer <nl> + / / CHECK : [ [ DSO_HANDLE : % [ 0 - 9 ] + ] ] = global_addr @ __dso_handle : $ * Builtin . 
RawPointer <nl> takeDSOHandle ( ) <nl> } <nl> <nl> class ReabstractDefaultArgument < T > { <nl> / / CHECK - NEXT : apply [ [ INITFN ] ] < Int > ( % 7 , <nl> <nl> func testDefaultArgumentReabstraction ( ) { <nl> - ReabstractDefaultArgument < Int > ( ) <nl> + _ = ReabstractDefaultArgument < Int > ( ) <nl> } <nl> <nl> / / < rdar : / / problem / 20494437 > SILGen crash handling default arguments <nl> mmm a / test / SILGen / dependent_member_lowering . swift <nl> ppp b / test / SILGen / dependent_member_lowering . swift <nl> struct Foo < T > : P { <nl> struct Bar < T > : P { <nl> typealias A = ( Int ) - > T <nl> <nl> - func f ( _ t : ( Int ) - > T ) { } <nl> + func f ( _ t : @ escaping ( Int ) - > T ) { } <nl> / / CHECK - LABEL : sil hidden [ transparent ] [ thunk ] @ _TTWurGV25dependent_member_lowering3Barx_S_1PS_FS1_1 { { . * } } : $ @ convention ( witness_method ) < T > ( @ in @ callee_owned ( @ in Int ) - > @ out T , @ in_guaranteed Bar < T > ) - > ( ) <nl> / / CHECK : bb0 ( % 0 : $ * @ callee_owned ( @ in Int ) - > @ out T , % 1 : $ * Bar < T > ) : <nl> } <nl> mmm a / test / SILGen / dso_handle . swift <nl> ppp b / test / SILGen / dso_handle . swift <nl> <nl> / / RUN : % target - swift - frontend - Xllvm - sil - full - demangle - emit - silgen % s | FileCheck % s <nl> <nl> - / / CHECK : sil_global hidden_external @ __dso_handle : $ UnsafeMutableRawPointer <nl> + / / CHECK : sil_global hidden_external [ [ DSO : @ __dso_handle ] ] : $ Builtin . RawPointer <nl> <nl> / / CHECK - LABEL : sil @ main : $ @ convention ( c ) <nl> / / CHECK : bb0 <nl> - / / CHECK : [ [ DSO : % [ 0 - 9 ] + ] ] = global_addr @ __dso_handle : $ * UnsafeMutableRawPointer <nl> - / / CHECK : load [ [ DSO ] ] <nl> + / / CHECK : [ [ DSOAddr : % [ 0 - 9 ] + ] ] = global_addr [ [ DSO ] ] : $ * Builtin . RawPointer <nl> + / / CHECK - NEXT : [ [ DSOPtr : % [ 0 - 9 ] + ] ] = address_to_pointer [ [ DSOAddr ] ] : $ * Builtin . RawPointer to $ Builtin . RawPointer <nl> + / / CHECK - NEXT : [ [ DSOPtrStruct : [ 0 - 9 ] + ] ] = struct $ UnsafeRawPointer ( [ [ DSOPtr ] ] : $ Builtin . RawPointer ) <nl> <nl> - / / CHECK - LABEL : sil hidden @ _TIF10dso_handle14printDSOHandleFT3dsoSv_SvA_ <nl> - / / CHECK : [ [ DSO : % [ 0 - 9 ] + ] ] = global_addr @ __dso_handle : $ * UnsafeMutableRawPointer <nl> - / / CHECK : load [ [ DSO ] ] <nl> - func printDSOHandle ( dso : UnsafeMutableRawPointer = # dsohandle ) - > UnsafeMutableRawPointer { <nl> + <nl> + / / CHECK - LABEL : sil hidden @ _TIF10dso_handle14printDSOHandleFT3dsoSV_SVA_ <nl> + / / CHECK : [ [ DSOAddr : % [ 0 - 9 ] + ] ] = global_addr [ [ DSO ] ] : $ * Builtin . RawPointer <nl> + / / CHECK - NEXT : [ [ DSOPtr : % [ 0 - 9 ] + ] ] = address_to_pointer [ [ DSOAddr ] ] : $ * Builtin . RawPointer to $ Builtin . RawPointer <nl> + / / CHECK - NEXT : [ [ DSOPtrStruct : % [ 0 - 9 ] + ] ] = struct $ UnsafeRawPointer ( [ [ DSOPtr ] ] : $ Builtin . RawPointer ) <nl> + / / CHECK - NEXT : return [ [ DSOPtrStruct ] ] : $ UnsafeRawPointer <nl> + func printDSOHandle ( dso : UnsafeRawPointer = # dsohandle ) - > UnsafeRawPointer { <nl> print ( dso ) <nl> + return dso <nl> } <nl> <nl> - printDSOHandle ( ) <nl> + _ = printDSOHandle ( ) <nl> <nl> mmm a / test / SILGen / dynamic_self . swift <nl> ppp b / test / SILGen / dynamic_self . swift <nl> class X : P , CP { <nl> <nl> / / CHECK - LABEL : sil hidden @ _TZFC12dynamic_self1X7factory { { . * } } : $ @ convention ( method ) ( Int , @ thick X . 
Type ) - > @ owned X <nl> / / CHECK : bb0 ( [ [ I : % [ 0 - 9 ] + ] ] : $ Int , [ [ SELF : % [ 0 - 9 ] + ] ] : $ @ thick X . Type ) : <nl> - / / CHECK : [ [ CTOR : % [ 0 - 9 ] + ] ] = class_method [ [ SELF ] ] : $ @ thick X . Type , # X . init ! allocator . 1 : ( X . Type ) - > ( int : Int ) - > X , $ @ convention ( method ) ( Int , @ thick X . Type ) - > @ owned X <nl> + / / CHECK : [ [ CTOR : % [ 0 - 9 ] + ] ] = class_method [ [ SELF ] ] : $ @ thick X . Type , # X . init ! allocator . 1 : ( X . Type ) - > ( Int ) - > X , $ @ convention ( method ) ( Int , @ thick X . Type ) - > @ owned X <nl> / / CHECK : apply [ [ CTOR ] ] ( [ [ I ] ] , [ [ SELF ] ] ) : $ @ convention ( method ) ( Int , @ thick X . Type ) - > @ owned X <nl> class func factory ( i : Int ) - > Self { return self . init ( int : i ) } <nl> } <nl> mmm a / test / SILGen / foreign_errors . swift <nl> ppp b / test / SILGen / foreign_errors . swift <nl> func testBridgedResult ( ) throws { <nl> let array = try ErrorProne . collection ( withCount : 0 ) <nl> } <nl> / / CHECK : sil hidden @ _TF14foreign_errors17testBridgedResultFzT_T_ : $ @ convention ( thin ) ( ) - > @ error Error { <nl> - / / CHECK : class_method [ volatile ] % 0 : $ @ thick ErrorProne . Type , # ErrorProne . collection ! 1 . foreign : ( ErrorProne . Type ) - > ( withCount : Int ) throws - > [ Any ] , $ @ convention ( objc_method ) ( Int , ImplicitlyUnwrappedOptional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , @ objc_metatype ErrorProne . Type ) - > @ autoreleased Optional < NSArray > <nl> + / / CHECK : class_method [ volatile ] % 0 : $ @ thick ErrorProne . Type , # ErrorProne . collection ! 1 . foreign : ( ErrorProne . Type ) - > ( Int ) throws - > [ Any ] , $ @ convention ( objc_method ) ( Int , ImplicitlyUnwrappedOptional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , @ objc_metatype ErrorProne . Type ) - > @ autoreleased Optional < NSArray > <nl> <nl> / / rdar : / / 20861374 <nl> / / Clear out the self box before delegating . <nl> class VeryErrorProne : ErrorProne { <nl> / / CHECK : [ [ MARKED_BOX : % . * ] ] = mark_uninitialized [ derivedself ] [ [ PB ] ] <nl> / / CHECK : [ [ T0 : % . * ] ] = load [ [ MARKED_BOX ] ] <nl> / / CHECK - NEXT : [ [ T1 : % . * ] ] = upcast [ [ T0 ] ] : $ VeryErrorProne to $ ErrorProne <nl> - / / CHECK - NEXT : [ [ T2 : % . * ] ] = super_method [ volatile ] [ [ T0 ] ] : $ VeryErrorProne , # ErrorProne . init ! initializer . 1 . foreign : ( ErrorProne . Type ) - > ( one : Any ? ) throws - > ErrorProne , $ @ convention ( objc_method ) ( Optional < AnyObject > , ImplicitlyUnwrappedOptional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , @ owned ErrorProne ) - > @ owned Optional < ErrorProne > <nl> + / / CHECK - NEXT : [ [ T2 : % . * ] ] = super_method [ volatile ] [ [ T0 ] ] : $ VeryErrorProne , # ErrorProne . init ! initializer . 1 . foreign : ( ErrorProne . Type ) - > ( Any ? ) throws - > ErrorProne , $ @ convention ( objc_method ) ( Optional < AnyObject > , ImplicitlyUnwrappedOptional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , @ owned ErrorProne ) - > @ owned Optional < ErrorProne > <nl> / / CHECK : { { $ } } <nl> / / CHECK - NOT : [ [ BOX ] ] { { ^ [ 0 - 9 ] } } <nl> / / CHECK - NOT : [ [ MARKED_BOX ] ] { { ^ [ 0 - 9 ] } } <nl> mmm a / test / SILGen / function_conversion . swift <nl> ppp b / test / SILGen / function_conversion . swift <nl> <nl> / / CHECK : [ [ THUNK : % . * ] ] = function_ref @ _TTRXFtCc_dSi_dSi_XFo_dSi_dSi_ <nl> / / CHECK : [ [ FUNC : % . 
* ] ] = partial_apply [ [ THUNK ] ] ( % 0 ) <nl> / / CHECK : return [ [ FUNC ] ] <nl> - func cToFunc ( _ arg : @ convention ( c ) ( Int ) - > Int ) - > ( Int ) - > Int { <nl> + func cToFunc ( _ arg : @ escaping @ convention ( c ) ( Int ) - > Int ) - > ( Int ) - > Int { <nl> return arg <nl> } <nl> <nl> func cToFunc ( _ arg : @ convention ( c ) ( Int ) - > Int ) - > ( Int ) - > Int { <nl> / / CHECK : [ [ BLOCK : % . * ] ] = init_block_storage_header [ [ BLOCK_STORAGE ] ] <nl> / / CHECK : [ [ COPY : % . * ] ] = copy_block [ [ BLOCK ] ] : $ @ convention ( block ) ( Int ) - > Int <nl> / / CHECK : return [ [ COPY ] ] <nl> - func cToBlock ( _ arg : @ convention ( c ) ( Int ) - > Int ) - > @ convention ( block ) ( Int ) - > Int { <nl> + func cToBlock ( _ arg : @ escaping @ convention ( c ) ( Int ) - > Int ) - > @ convention ( block ) ( Int ) - > Int { <nl> return arg <nl> } <nl> <nl> func cToBlock ( _ arg : @ convention ( c ) ( Int ) - > Int ) - > @ convention ( block ) ( Int ) - > <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion12funcToThrowsFFT_T_FzT_T_ : $ @ convention ( thin ) ( @ owned @ callee_owned ( ) - > ( ) ) - > @ owned @ callee_owned ( ) - > @ error Error <nl> / / CHECK : [ [ FUNC : % . * ] ] = convert_function % 0 : $ @ callee_owned ( ) - > ( ) to $ @ callee_owned ( ) - > @ error Error <nl> / / CHECK : return [ [ FUNC ] ] <nl> - func funcToThrows ( _ x : ( ) - > ( ) ) - > ( ) throws - > ( ) { <nl> + func funcToThrows ( _ x : @ escaping ( ) - > ( ) ) - > ( ) throws - > ( ) { <nl> return x <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion12thinToThrowsFXfT_T_XfzT_T_ : $ @ convention ( thin ) ( @ convention ( thin ) ( ) - > ( ) ) - > @ convention ( thin ) ( ) - > @ error Error <nl> / / CHECK : [ [ FUNC : % . * ] ] = convert_function % 0 : $ @ convention ( thin ) ( ) - > ( ) to $ @ convention ( thin ) ( ) - > @ error Error <nl> / / CHECK : return [ [ FUNC ] ] : $ @ convention ( thin ) ( ) - > @ error Error <nl> - func thinToThrows ( _ x : @ convention ( thin ) ( ) - > ( ) ) - > @ convention ( thin ) ( ) throws - > ( ) { <nl> + func thinToThrows ( _ x : @ escaping @ convention ( thin ) ( ) - > ( ) ) - > @ convention ( thin ) ( ) throws - > ( ) { <nl> return x <nl> } <nl> <nl> class Domesticated : Feral { } <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion12funcToUpcastFFT_CS_12DomesticatedFT_CS_5Feral : $ @ convention ( thin ) ( @ owned @ callee_owned ( ) - > @ owned Domesticated ) - > @ owned @ callee_owned ( ) - > @ owned Feral <nl> / / CHECK : [ [ FUNC : % . * ] ] = convert_function % 0 : $ @ callee_owned ( ) - > @ owned Domesticated to $ @ callee_owned ( ) - > @ owned Feral <nl> / / CHECK : return [ [ FUNC ] ] <nl> - func funcToUpcast ( _ x : ( ) - > Domesticated ) - > ( ) - > Feral { <nl> + func funcToUpcast ( _ x : @ escaping ( ) - > Domesticated ) - > ( ) - > Feral { <nl> return x <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion12funcToUpcastFFCS_5FeralT_FCS_12DomesticatedT_ : $ @ convention ( thin ) ( @ owned @ callee_owned ( @ owned Feral ) - > ( ) ) - > @ owned @ callee_owned ( @ owned Domesticated ) - > ( ) <nl> / / CHECK : [ [ FUNC : % . * ] ] = convert_function % 0 : $ @ callee_owned ( @ owned Feral ) - > ( ) to $ @ callee_owned ( @ owned Domesticated ) - > ( ) { { . 
* } } / / user : % 3 <nl> / / CHECK : return [ [ FUNC ] ] <nl> - func funcToUpcast ( _ x : ( Feral ) - > ( ) ) - > ( Domesticated ) - > ( ) { <nl> + func funcToUpcast ( _ x : @ escaping ( Feral ) - > ( ) ) - > ( Domesticated ) - > ( ) { <nl> return x <nl> } <nl> <nl> struct AddrOnly { <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion19convOptionalTrivialFFGSqVS_7Trivial_S0_T_ <nl> - func convOptionalTrivial ( _ t1 : ( Trivial ? ) - > Trivial ) { <nl> + func convOptionalTrivial ( _ t1 : @ escaping ( Trivial ? ) - > Trivial ) { <nl> / / CHECK : function_ref @ _TTRXFo_dGSqV19function_conversion7Trivial__dS0__XFo_dS0__dGSqS0___ <nl> / / CHECK : partial_apply <nl> let _ : ( Trivial ) - > Trivial ? = t1 <nl> func convOptionalTrivial ( _ t1 : ( Trivial ? ) - > Trivial ) { <nl> / / CHECK - NEXT : return <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion20convOptionalLoadableFFGSqVS_8Loadable_S0_T_ <nl> - func convOptionalLoadable ( _ l1 : ( Loadable ? ) - > Loadable ) { <nl> + func convOptionalLoadable ( _ l1 : @ escaping ( Loadable ? ) - > Loadable ) { <nl> / / CHECK : function_ref @ _TTRXFo_oGSqV19function_conversion8Loadable__oS0__XFo_oS0__oGSqS0___ <nl> / / CHECK : partial_apply <nl> let _ : ( Loadable ) - > Loadable ? = l1 <nl> func convOptionalLoadable ( _ l1 : ( Loadable ? ) - > Loadable ) { <nl> / / CHECK - NEXT : return <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion20convOptionalAddrOnlyFFGSqVS_8AddrOnly_S0_T_ <nl> - func convOptionalAddrOnly ( _ a1 : ( AddrOnly ? ) - > AddrOnly ) { <nl> + func convOptionalAddrOnly ( _ a1 : @ escaping ( AddrOnly ? ) - > AddrOnly ) { <nl> / / CHECK : function_ref @ _TTRXFo_iGSqV19function_conversion8AddrOnly__iS0__XFo_iGSqS0___iGSqS0___ <nl> / / CHECK : partial_apply <nl> let _ : ( AddrOnly ? ) - > AddrOnly ? = a1 <nl> extension Loadable : P { } <nl> extension AddrOnly : P { } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion22convExistentialTrivialFTFPS_1Q_VS_7Trivial2t3FGSqPS0___S1__T_ <nl> - func convExistentialTrivial ( _ t2 : ( Q ) - > Trivial , t3 : ( Q ? ) - > Trivial ) { <nl> + func convExistentialTrivial ( _ t2 : @ escaping ( Q ) - > Trivial , t3 : @ escaping ( Q ? ) - > Trivial ) { <nl> / / CHECK : function_ref @ _TTRXFo_iP19function_conversion1Q__dVS_7Trivial_XFo_dS1__iPS_1P__ <nl> / / CHECK : partial_apply <nl> let _ : ( Trivial ) - > P = t2 <nl> func convExistentialTrivial ( _ t2 : ( Q ) - > Trivial , t3 : ( Q ? ) - > Trivial ) { <nl> / / = = = = Existential metatypes <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion23convExistentialMetatypeFFGSqPMPS_1Q__MVS_7TrivialT_ <nl> - func convExistentialMetatype ( _ em : ( Q . Type ? ) - > Trivial . Type ) { <nl> + func convExistentialMetatype ( _ em : @ escaping ( Q . Type ? ) - > Trivial . Type ) { <nl> / / CHECK : function_ref @ _TTRXFo_dGSqPMP19function_conversion1Q___dXMtVS_7Trivial_XFo_dXMtS1__dXPMTPS_1P__ <nl> / / CHECK : partial_apply <nl> let _ : ( Trivial . Type ) - > P . Type = em <nl> class Child : Parent { } <nl> / / to be generated <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion18convUpcastMetatypeFTFTMCS_6ParentGSqVS_7Trivial__MCS_5Child2c5FTGSqMS0__GSqS1___MS2__T_ <nl> - func convUpcastMetatype ( _ c4 : ( Parent . Type , Trivial ? ) - > Child . Type , <nl> - c5 : ( Parent . Type ? , Trivial ? ) - > Child . Type ) { <nl> + func convUpcastMetatype ( _ c4 : @ escaping ( Parent . Type , Trivial ? ) - > Child . Type , <nl> + c5 : @ escaping ( Parent . Type ? , Trivial ? 
) - > Child . Type ) { <nl> / / CHECK : function_ref @ _TTRXFo_dXMTC19function_conversion6ParentdGSqVS_7Trivial__dXMTCS_5Child_XFo_dXMTS2_dS1__dXMTS0__ <nl> / / CHECK : partial_apply <nl> let _ : ( Child . Type , Trivial ) - > Parent . Type = c4 <nl> func convUpcastMetatype ( _ c4 : ( Parent . Type , Trivial ? ) - > Child . Type , <nl> / / = = = = Function to existential - - make sure we maximally abstract it <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion19convFuncExistentialFFP_FSiSiT_ : $ @ convention ( thin ) ( @ owned @ callee_owned ( @ in Any ) - > @ owned @ callee_owned ( Int ) - > Int ) - > ( ) <nl> - func convFuncExistential ( _ f1 : ( Any ) - > ( Int ) - > Int ) { <nl> + func convFuncExistential ( _ f1 : @ escaping ( Any ) - > ( Int ) - > Int ) { <nl> / / CHECK : function_ref @ _TTRXFo_iP__oXFo_dSi_dSi__XFo_oXFo_dSi_dSi__iP__ <nl> / / CHECK : partial_apply % 3 ( % 0 ) <nl> let _ : ( ( Int ) - > Int ) - > Any = f1 <nl> func convFuncExistential ( _ f1 : ( Any ) - > ( Int ) - > Int ) { <nl> / / = = = = Class - bound archetype upcast <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion29convClassBoundArchetypeUpcast <nl> - func convClassBoundArchetypeUpcast < T : Parent > ( _ f1 : ( Parent ) - > ( T , Trivial ) ) { <nl> + func convClassBoundArchetypeUpcast < T : Parent > ( _ f1 : @ escaping ( Parent ) - > ( T , Trivial ) ) { <nl> / / CHECK : function_ref @ _TTRGRxC19function_conversion6ParentrXFo_oS0__oxdVS_7Trivial_XFo_ox_oS0_dGSqS1___ <nl> / / CHECK : partial_apply <nl> let _ : ( T ) - > ( Parent , Trivial ? ) = f1 <nl> func convClassBoundArchetypeUpcast < T : Parent > ( _ f1 : ( Parent ) - > ( T , Trivial ) ) { <nl> / / CHECK - NEXT : return <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF19function_conversion37convClassBoundMetatypeArchetypeUpcast <nl> - func convClassBoundMetatypeArchetypeUpcast < T : Parent > ( _ f1 : ( Parent . Type ) - > ( T . Type , Trivial ) ) { <nl> + func convClassBoundMetatypeArchetypeUpcast < T : Parent > ( _ f1 : @ escaping ( Parent . Type ) - > ( T . Type , Trivial ) ) { <nl> / / CHECK : function_ref @ _TTRGRxC19function_conversion6ParentrXFo_dXMTS0__dXMTxdVS_7Trivial_XFo_dXMTx_dXMTS0_dGSqS1___ <nl> / / CHECK : partial_apply <nl> let _ : ( T . Type ) - > ( Parent . Type , Trivial ? ) = f1 <nl> func convClassBoundMetatypeArchetypeUpcast < T : Parent > ( _ f1 : ( Parent . Type ) - > ( T <nl> <nl> / / = = = = Make sure we destructure one - element tuples <nl> <nl> - / / CHECK - LABEL : sil hidden @ _TF19function_conversion15convTupleScalarFTFPS_1Q_T_2f2FT6parentPS0___T_2f3FT5tupleGSqTSiSi___T__T_ <nl> + / / CHECK - LABEL : sil hidden @ _TF19function_conversion15convTupleScalarFTFPS_1Q_T_2f2FPS0__T_2f3FGSqTSiSi__T__T_ <nl> / / CHECK : function_ref @ _TTRXFo_iP19function_conversion1Q___XFo_iPS_1P___ <nl> / / CHECK : function_ref @ _TTRXFo_iP19function_conversion1Q___XFo_iPS_1P___ <nl> / / CHECK : function_ref @ _TTRXFo_dGSqTSiSi____XFo_dSidSi__ <nl> func convClassBoundMetatypeArchetypeUpcast < T : Parent > ( _ f1 : ( Parent . Type ) - > ( T <nl> <nl> / / CHECK - LABEL : sil shared [ transparent ] [ reabstraction_thunk ] @ _TTRXFo_dGSqTSiSi____XFo_dSidSi__ : $ @ convention ( thin ) ( Int , Int , @ owned @ callee_owned ( Optional < ( Int , Int ) > ) - > ( ) ) - > ( ) <nl> <nl> - func convTupleScalar ( _ f1 : ( Q ) - > ( ) , <nl> - f2 : ( parent : Q ) - > ( ) , <nl> - f3 : ( tuple : ( Int , Int ) ? 
) - > ( ) ) { <nl> - let _ : ( parent : P ) - > ( ) = f1 <nl> + func convTupleScalar ( _ f1 : @ escaping ( Q ) - > ( ) , <nl> + f2 : @ escaping ( _ parent : Q ) - > ( ) , <nl> + f3 : @ escaping ( _ tuple : ( Int , Int ) ? ) - > ( ) ) { <nl> + let _ : ( P ) - > ( ) = f1 <nl> let _ : ( P ) - > ( ) = f2 <nl> let _ : ( Int , Int ) - > ( ) = f3 <nl> } <nl> <nl> - / / CHECK - LABEL : sil hidden @ _TF19function_conversion21convTupleScalarOpaqueurFFt4argsGSax__T_GSqFt4argsGSax__T__ <nl> - / / CHECK : function_ref @ _TTRGrXFo_oGSax___XFo_it4argsGSax___iT__ <nl> + / / CHECK - LABEL : sil hidden @ _TF19function_conversion21convTupleScalarOpaqueurFFtGSax__T_GSqFtGSax__T__ <nl> + / / CHECK : function_ref @ _TTRGrXFo_oGSax___XFo_itGSax___iT__ <nl> <nl> - / / CHECK - LABEL : sil shared [ transparent ] [ reabstraction_thunk ] @ _TTRGrXFo_oGSax___XFo_it4argsGSax___iT__ : $ @ convention ( thin ) < T > ( @ in ( args : T . . . ) , @ owned @ callee_owned ( @ owned Array < T > ) - > ( ) ) - > @ out ( ) <nl> + / / CHECK - LABEL : sil shared [ transparent ] [ reabstraction_thunk ] @ _TTRGrXFo_oGSax___XFo_itGSax___iT__ : $ @ convention ( thin ) < T > ( @ in ( T . . . ) , @ owned @ callee_owned ( @ owned Array < T > ) - > ( ) ) - > @ out ( ) <nl> <nl> - func convTupleScalarOpaque < T > ( _ f : ( args : T . . . ) - > ( ) ) - > ( ( args : T . . . ) - > ( ) ) ? { <nl> + func convTupleScalarOpaque < T > ( _ f : @ escaping ( T . . . ) - > ( ) ) - > ( ( _ args : T . . . ) - > ( ) ) ? { <nl> return f <nl> } <nl> mmm a / test / SILGen / function_conversion_objc . swift <nl> ppp b / test / SILGen / function_conversion_objc . swift <nl> import Foundation <nl> / / = = = = Metatype to object conversions <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF24function_conversion_objc20convMetatypeToObjectFFCSo8NSObjectMS0_T_ <nl> - func convMetatypeToObject ( _ f : ( NSObject ) - > NSObject . Type ) { <nl> + func convMetatypeToObject ( _ f : @ escaping ( NSObject ) - > NSObject . Type ) { <nl> / / CHECK : function_ref @ _TTRXFo_oCSo8NSObject_dXMTS__XFo_oS__oPs9AnyObject__ <nl> / / CHECK : partial_apply <nl> let _ : ( NSObject ) - > AnyObject = f <nl> func convMetatypeToObject ( _ f : ( NSObject ) - > NSObject . Type ) { <nl> @ objc protocol NSBurrito { } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF24function_conversion_objc31convExistentialMetatypeToObjectFFPS_9NSBurrito_PMPS0__T_ <nl> - func convExistentialMetatypeToObject ( _ f : ( NSBurrito ) - > NSBurrito . Type ) { <nl> + func convExistentialMetatypeToObject ( _ f : @ escaping ( NSBurrito ) - > NSBurrito . Type ) { <nl> / / CHECK : function_ref @ _TTRXFo_oP24function_conversion_objc9NSBurrito__dXPMTPS0___XFo_oPS0___oPs9AnyObject__ <nl> / / CHECK : partial_apply <nl> let _ : ( NSBurrito ) - > AnyObject = f <nl> func convExistentialMetatypeToObject ( _ f : ( NSBurrito ) - > NSBurrito . Type ) { <nl> / / CHECK : return <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF24function_conversion_objc28convProtocolMetatypeToObjectFFT_MPS_9NSBurrito_T_ <nl> - func convProtocolMetatypeToObject ( _ f : ( ) - > NSBurrito . Protocol ) { <nl> + func convProtocolMetatypeToObject ( _ f : @ escaping ( ) - > NSBurrito . Protocol ) { <nl> / / CHECK : function_ref @ _TTRXFo__dXMtP24function_conversion_objc9NSBurrito__XFo__oCSo8Protocol_ <nl> / / CHECK : partial_apply <nl> let _ : ( ) - > Protocol = f <nl> func convProtocolMetatypeToObject ( _ f : ( ) - > NSBurrito . Protocol ) { <nl> / / CHECK : [ [ BLOCK : % . * ] ] = init_block_storage_header [ [ BLOCK_STORAGE ] ] <nl> / / CHECK : [ [ COPY : % . 
* ] ] = copy_block [ [ BLOCK ] ] : $ @ convention ( block ) ( ) - > ( ) <nl> / / CHECK : return [ [ COPY ] ] <nl> - func funcToBlock ( _ x : ( ) - > ( ) ) - > @ convention ( block ) ( ) - > ( ) { <nl> + func funcToBlock ( _ x : @ escaping ( ) - > ( ) ) - > @ convention ( block ) ( ) - > ( ) { <nl> return x <nl> } <nl> <nl> func funcToBlock ( _ x : ( ) - > ( ) ) - > @ convention ( block ) ( ) - > ( ) { <nl> / / CHECK : [ [ THUNK : % . * ] ] = function_ref @ _TTRXFdCb___XFo___ <nl> / / CHECK : [ [ FUNC : % . * ] ] = partial_apply [ [ THUNK ] ] ( [ [ COPIED ] ] ) <nl> / / CHECK : return [ [ FUNC ] ] <nl> - func blockToFunc ( _ x : @ convention ( block ) ( ) - > ( ) ) - > ( ) - > ( ) { <nl> + func blockToFunc ( _ x : @ escaping @ convention ( block ) ( ) - > ( ) ) - > ( ) - > ( ) { <nl> return x <nl> } <nl> <nl> func blockToFunc ( _ x : @ convention ( block ) ( ) - > ( ) ) - > ( ) - > ( ) { <nl> / / CHECK : function_ref @ _TTRXFo__dSi_XFo__iP__ <nl> / / CHECK : partial_apply <nl> / / CHECK : return <nl> - func blockToFuncExistential ( _ x : @ convention ( block ) ( ) - > Int ) - > ( ) - > Any { <nl> + func blockToFuncExistential ( _ x : @ escaping @ convention ( block ) ( ) - > Int ) - > ( ) - > Any { <nl> return x <nl> } <nl> <nl> class A : NSObject { } <nl> class B : A { } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF24function_conversion_objc18cFuncPtrConversionFcCS_1AT_cCS_1BT_ <nl> - func cFuncPtrConversion ( _ x : @ convention ( c ) ( A ) - > ( ) ) - > @ convention ( c ) ( B ) - > ( ) { <nl> + func cFuncPtrConversion ( _ x : @ escaping @ convention ( c ) ( A ) - > ( ) ) - > @ convention ( c ) ( B ) - > ( ) { <nl> / / CHECK : convert_function % 0 : $ @ convention ( c ) ( A ) - > ( ) to $ @ convention ( c ) ( B ) - > ( ) <nl> / / CHECK : return <nl> return x <nl> func cFuncDeclConversion ( ) - > @ convention ( c ) ( B ) - > ( ) { <nl> return cFuncPtr <nl> } <nl> <nl> - func cFuncPtrConversionUnsupported ( _ x : @ convention ( c ) ( @ convention ( block ) ( ) - > ( ) ) - > ( ) ) <nl> + func cFuncPtrConversionUnsupported ( _ x : @ escaping @ convention ( c ) ( @ convention ( block ) ( ) - > ( ) ) - > ( ) ) <nl> - > @ convention ( c ) ( @ convention ( c ) ( ) - > ( ) ) - > ( ) { <nl> return x / / expected - error { { C function pointer signature ' @ convention ( c ) ( @ convention ( block ) ( ) - > ( ) ) - > ( ) ' is not compatible with expected type ' @ convention ( c ) ( @ convention ( c ) ( ) - > ( ) ) - > ( ) ' } } <nl> } <nl> mmm a / test / SILGen / functions . swift <nl> ppp b / test / SILGen / functions . swift <nl> func standalone_function ( _ x : Int , _ y : Int ) - > Int { <nl> return x <nl> } <nl> <nl> - func higher_order_function ( _ f : ( x : Int , y : Int ) - > Int , _ x : Int , _ y : Int ) - > Int { <nl> - return f ( x : x , y : y ) <nl> + func higher_order_function ( _ f : ( _ x : Int , _ y : Int ) - > Int , _ x : Int , _ y : Int ) - > Int { <nl> + return f ( x , y ) <nl> } <nl> <nl> func higher_order_function2 ( _ f : ( Int , Int ) - > Int , _ x : Int , _ y : Int ) - > Int { <nl> func calls ( _ i : Int , j : Int , k : Int ) { <nl> / / CHECK : % 2 = partial_apply % 1 ( % 0 ) <nl> / / CHECK : return % 2 <nl> <nl> - func return_func ( ) - > ( x : Builtin . Int64 , y : Builtin . Int64 ) - > Builtin . Int64 { <nl> + func return_func ( ) - > ( _ x : Builtin . Int64 , _ y : Builtin . Int64 ) - > Builtin . Int64 { <nl> / / CHECK : [ [ FUNC_THIN : % [ 0 - 9 ] + ] ] = function_ref @ _TF9functions19standalone_function { { . * } } : $ @ convention ( thin ) ( Builtin . Int64 , Builtin . 
Int64 ) - > Builtin . Int64 <nl> / / CHECK : [ [ FUNC_THICK : % [ 0 - 9 ] + ] ] = thin_to_thick_function [ [ FUNC_THIN ] ] <nl> / / CHECK : return [ [ FUNC_THICK ] ] <nl> func return_func ( ) - > ( x : Builtin . Int64 , y : Builtin . Int64 ) - > Builtin . Int64 { <nl> <nl> func standalone_generic < T > ( _ x : T , y : T ) - > T { return x } <nl> <nl> - / / CHECK - LABEL : sil hidden @ _TF9functions14return_genericFT_FT1xBi64_1yBi64__Bi64_ <nl> - func return_generic ( ) - > ( x : Builtin . Int64 , y : Builtin . Int64 ) - > Builtin . Int64 { <nl> + / / CHECK - LABEL : sil hidden @ _TF9functions14return_genericFT_FTBi64_Bi64__Bi64_ <nl> + func return_generic ( ) - > ( _ x : Builtin . Int64 , _ y : Builtin . Int64 ) - > Builtin . Int64 { <nl> / / CHECK : [ [ GEN : % . * ] ] = function_ref @ _TF9functions18standalone_generic { { . * } } : $ @ convention ( thin ) < τ_0_0 > ( @ in τ_0_0 , @ in τ_0_0 ) - > @ out τ_0_0 <nl> / / CHECK : [ [ SPEC : % . * ] ] = partial_apply [ [ GEN ] ] < Builtin . Int64 > ( ) <nl> / / CHECK : [ [ THUNK : % . * ] ] = function_ref @ { { . * } } : $ @ convention ( thin ) ( Builtin . Int64 , Builtin . Int64 , @ owned @ callee_owned ( @ in Builtin . Int64 , @ in Builtin . Int64 ) - > @ out Builtin . Int64 ) - > Builtin . Int64 <nl> func return_generic ( ) - > ( x : Builtin . Int64 , y : Builtin . Int64 ) - > Builtin . Int64 { <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF9functions20return_generic_tuple { { . * } } <nl> func return_generic_tuple ( ) <nl> - - > ( x : ( Builtin . Int64 , Builtin . Int64 ) , y : ( Builtin . Int64 , Builtin . Int64 ) ) - > ( Builtin . Int64 , Builtin . Int64 ) { <nl> + - > ( _ x : ( Builtin . Int64 , Builtin . Int64 ) , _ y : ( Builtin . Int64 , Builtin . Int64 ) ) - > ( Builtin . Int64 , Builtin . Int64 ) { <nl> / / CHECK : [ [ GEN : % . * ] ] = function_ref @ _TF9functions18standalone_generic { { . * } } : $ @ convention ( thin ) < τ_0_0 > ( @ in τ_0_0 , @ in τ_0_0 ) - > @ out τ_0_0 <nl> / / CHECK : [ [ SPEC : % . * ] ] = partial_apply [ [ GEN ] ] < ( Builtin . Int64 , Builtin . Int64 ) > ( ) <nl> / / CHECK : [ [ THUNK : % . * ] ] = function_ref @ { { . * } } : $ @ convention ( thin ) ( Builtin . Int64 , Builtin . Int64 , Builtin . Int64 , Builtin . Int64 , @ owned @ callee_owned ( @ in ( Builtin . Int64 , Builtin . Int64 ) , @ in ( Builtin . Int64 , Builtin . Int64 ) ) - > @ out ( Builtin . Int64 , Builtin . Int64 ) ) - > ( Builtin . Int64 , Builtin . Int64 ) <nl> final class r17828355Class { <nl> <nl> / / < rdar : / / problem / 19981118 > Swift 1 . 2 beta 2 : Closures nested in @ noescape closures copy , rather than reference , captured vars . <nl> func noescapefunc ( f : @ noescape ( ) - > ( ) ) { } <nl> - func escapefunc ( _ f : ( ) - > ( ) ) { } <nl> + func escapefunc ( _ f : @ escaping ( ) - > ( ) ) { } <nl> <nl> func testNoescape ( ) { <nl> / / " a " must be captured by - box into noescapefunc because the inner closure <nl> mmm a / test / SILGen / generic_closures . swift <nl> ppp b / test / SILGen / generic_closures . swift <nl> protocol HasClassAssoc { associatedtype Assoc : Class } <nl> / / CHECK : [ [ GENERIC_FN : % . * ] ] = function_ref @ _TFF16generic_closures34captures_class_constrained_genericuRxS_13HasClassAssocrFTx1fFwx5AssocwxS1__T_U_FT_FQQ_5AssocS2_ <nl> / / CHECK : [ [ CONCRETE_FN : % . * ] ] = partial_apply [ [ GENERIC_FN ] ] < T , T . Assoc > ( % 1 ) <nl> <nl> - func captures_class_constrained_generic < T : HasClassAssoc > ( _ x : T , f : ( T . Assoc ) - > T . 
Assoc ) { <nl> + func captures_class_constrained_generic < T : HasClassAssoc > ( _ x : T , f : @ escaping ( T . Assoc ) - > T . Assoc ) { <nl> let _ : ( ) - > ( T . Assoc ) - > T . Assoc = { f } <nl> } <nl> <nl> mmm a / test / SILGen / generic_signatures . swift <nl> ppp b / test / SILGen / generic_signatures . swift <nl> func concreteJungle < T where T : Fooable , T . Foo = = C > ( t : T . Foo ) - > C { <nl> return c <nl> } <nl> <nl> - func concreteJungle < T where T : Fooable , T . Foo = = C > ( f : ( T . Foo ) - > C ) - > T . Foo { <nl> + func concreteJungle < T where T : Fooable , T . Foo = = C > ( f : @ escaping ( T . Foo ) - > C ) - > T . Foo { <nl> let ff : ( C ) - > T . Foo = f <nl> return ff ( C ( ) ) <nl> } <nl> mmm a / test / SILGen / indirect_enum . swift <nl> ppp b / test / SILGen / indirect_enum . swift <nl> func TreeA_cases < T > ( _ t : T , l : TreeA < T > , r : TreeA < T > ) { <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF13indirect_enum16TreeA_reabstractFFSiSiT_ <nl> - func TreeA_reabstract ( _ f : ( Int ) - > Int ) { <nl> + func TreeA_reabstract ( _ f : @ escaping ( Int ) - > Int ) { <nl> <nl> / / CHECK : [ [ METATYPE : % . * ] ] = metatype $ @ thin TreeA < ( Int ) - > Int > . Type <nl> / / CHECK - NEXT : [ [ BOX : % . * ] ] = alloc_box $ @ callee_owned ( @ in Int ) - > @ out Int <nl> mmm a / test / SILGen / init_ref_delegation . swift <nl> ppp b / test / SILGen / init_ref_delegation . swift <nl> class C1 { <nl> / / CHECK : store [ [ ORIG_SELF ] ] to [ [ SELF ] ] : $ * C1 <nl> / / CHECK : [ [ SELF_FROM_BOX : % [ 0 - 9 ] + ] ] = load [ [ SELF ] ] : $ * C1 <nl> <nl> - / / CHECK : [ [ DELEG_INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELF_FROM_BOX ] ] : $ C1 , # C1 . init ! initializer . 1 : ( C1 . Type ) - > ( x1 : X , x2 : X ) - > C1 , $ @ convention ( method ) ( X , X , @ owned C1 ) - > @ owned C1 <nl> + / / CHECK : [ [ DELEG_INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELF_FROM_BOX ] ] : $ C1 , # C1 . init ! initializer . 1 : ( C1 . Type ) - > ( X , X ) - > C1 , $ @ convention ( method ) ( X , X , @ owned C1 ) - > @ owned C1 <nl> / / CHECK : [ [ SELFP : % [ 0 - 9 ] + ] ] = apply [ [ DELEG_INIT ] ] ( [ [ X ] ] , [ [ X ] ] , [ [ SELF_FROM_BOX ] ] ) : $ @ convention ( method ) ( X , X , @ owned C1 ) - > @ owned C1 <nl> / / CHECK : store [ [ SELFP ] ] to [ [ SELF ] ] : $ * C1 <nl> / / CHECK : [ [ SELFP : % [ 0 - 9 ] + ] ] = load [ [ SELF ] ] : $ * C1 <nl> class C1 { <nl> / / CHECK : store [ [ ORIG_SELF ] ] to [ [ UNINIT_SELF ] ] : $ * C2 <nl> / / CHECK : [ [ SELF : % [ 0 - 9 ] + ] ] = load [ [ UNINIT_SELF ] ] : $ * C2 <nl> <nl> - / / CHECK : [ [ DELEG_INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELF ] ] : $ C2 , # C2 . init ! initializer . 1 : ( C2 . Type ) - > ( x1 : X , x2 : X ) - > C2 , $ @ convention ( method ) ( X , X , @ owned C2 ) - > @ owned C2 <nl> + / / CHECK : [ [ DELEG_INIT : % [ 0 - 9 ] + ] ] = class_method [ [ SELF ] ] : $ C2 , # C2 . init ! initializer . 1 : ( C2 . 
Type ) - > ( X , X ) - > C2 , $ @ convention ( method ) ( X , X , @ owned C2 ) - > @ owned C2 <nl> / / CHECK : [ [ REPLACE_SELF : % [ 0 - 9 ] + ] ] = apply [ [ DELEG_INIT ] ] ( [ [ X ] ] , [ [ X ] ] , [ [ SELF ] ] ) : $ @ convention ( method ) ( X , X , @ owned C2 ) - > @ owned C2 <nl> / / CHECK : store [ [ REPLACE_SELF ] ] to [ [ UNINIT_SELF ] ] : $ * C2 <nl> / / CHECK : [ [ VAR_15 : % [ 0 - 9 ] + ] ] = load [ [ UNINIT_SELF ] ] : $ * C2 <nl> class C3 { <nl> convenience init ( ) { <nl> / / CHECK : mark_uninitialized [ delegatingself ] <nl> / / CHECK - NOT : integer_literal <nl> - / / CHECK : class_method [ [ SELF : % [ 0 - 9 ] + ] ] : $ C3 , # C3 . init ! initializer . 1 : ( C3 . Type ) - > ( x : X ) - > C3 , $ @ convention ( method ) ( X , @ owned C3 ) - > @ owned C3 <nl> + / / CHECK : class_method [ [ SELF : % [ 0 - 9 ] + ] ] : $ C3 , # C3 . init ! initializer . 1 : ( C3 . Type ) - > ( X ) - > C3 , $ @ convention ( method ) ( X , @ owned C3 ) - > @ owned C3 <nl> / / CHECK - NOT : integer_literal <nl> / / CHECK : return <nl> self . init ( x : x ) <nl> mmm a / test / SILGen / let_decls . swift <nl> ppp b / test / SILGen / let_decls . swift <nl> struct CloseOverAddressOnlyConstant < T > { <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ { { . * } } callThroughLet <nl> - func callThroughLet ( _ predicate : ( Int , Int ) - > Bool ) { <nl> + func callThroughLet ( _ predicate : @ escaping ( Int , Int ) - > Bool ) { <nl> let p = predicate <nl> if p ( 1 , 2 ) { <nl> } <nl> mmm a / test / SILGen / materializeForSet . swift <nl> ppp b / test / SILGen / materializeForSet . swift <nl> extension Derived : Abstractable { } <nl> / / CHECK - NEXT : function_ref <nl> / / CHECK - NEXT : [ [ REABSTRACTOR : % . * ] ] = function_ref @ _TTRXFo__iSi_XFo__dSi_ : $ @ convention ( thin ) ( @ owned @ callee_owned ( ) - > @ out Int ) - > Int <nl> / / CHECK - NEXT : [ [ NEWVALUE : % . * ] ] = partial_apply [ [ REABSTRACTOR ] ] ( [ [ VALUE ] ] ) <nl> - / / CHECK - NEXT : [ [ FN : % . * ] ] = class_method [ [ SELF ] ] : $ Base , # Base . storedFunction ! setter . 1 : ( Base ) - > ( ( ) - > Int ) - > ( ) <nl> + / / CHECK - NEXT : [ [ FN : % . * ] ] = class_method [ [ SELF ] ] : $ Base , # Base . storedFunction ! setter . 1 : ( Base ) - > ( @ escaping ( ) - > Int ) - > ( ) <nl> / / CHECK - NEXT : apply [ [ FN ] ] ( [ [ NEWVALUE ] ] , [ [ SELF ] ] ) <nl> / / CHECK - NEXT : tuple ( ) <nl> / / CHECK - NEXT : return <nl> mmm a / test / SILGen / metatype_abstraction . swift <nl> ppp b / test / SILGen / metatype_abstraction . swift <nl> func existential_metatype_of_metatype ( _ x : Any ) - > Any . Type . Type { <nl> } <nl> * / <nl> <nl> - func function_metatype_of_metatype ( _ x : ( ) - > ( ) ) - > ( ( ) - > ( ) ) . Type . Type { <nl> + func function_metatype_of_metatype ( _ x : @ escaping ( ) - > ( ) ) - > ( ( ) - > ( ) ) . Type . Type { <nl> return x . dynamicType . dynamicType <nl> } <nl> mmm a / test / SILGen / objc_blocks_bridging . swift <nl> ppp b / test / SILGen / objc_blocks_bridging . swift <nl> import Foundation <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF20objc_blocks_bridging10callBlocks <nl> func callBlocks ( _ x : Foo , <nl> - f : ( Int ) - > Int , <nl> - g : ( String ) - > String , <nl> - h : ( String ? ) - > String ? <nl> + f : @ escaping ( Int ) - > Int , <nl> + g : @ escaping ( String ) - > String , <nl> + h : @ escaping ( String ? ) - > String ? <nl> ) - > ( Int , String , String ? , String ? ) { <nl> / / CHECK : [ [ FOO : % . * ] ] = class_method [ volatile ] % 0 : $ Foo , # Foo . foo ! 1 . 
foreign <nl> / / CHECK : [ [ F_BLOCK_STORAGE : % . * ] ] = alloc_stack $ @ block_storage <nl> mmm a / test / SILGen / objc_init_ref_delegation . swift <nl> ppp b / test / SILGen / objc_init_ref_delegation . swift <nl> extension Gizmo { <nl> / / CHECK : [ [ SELFMUI : % [ 0 - 9 ] + ] ] = mark_uninitialized [ delegatingself ] [ [ PB ] ] : $ * Gizmo <nl> / / CHECK : store [ [ ORIG_SELF ] ] to [ [ SELFMUI ] ] : $ * Gizmo <nl> / / CHECK : [ [ SELF : % [ 0 - 9 ] + ] ] = load [ [ SELFMUI ] ] : $ * Gizmo <nl> - / / CHECK : [ [ INIT_DELEG : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ SELF ] ] : $ Gizmo , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( bellsOn : Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> + / / CHECK : [ [ INIT_DELEG : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ SELF ] ] : $ Gizmo , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> / / CHECK : [ [ SELF_RET : % [ 0 - 9 ] + ] ] = apply [ [ INIT_DELEG ] ] ( [ [ I ] ] , [ [ SELF ] ] ) : $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> / / CHECK : strong_retain [ [ SELF4 : % [ 0 - 9 ] + ] ] : $ Gizmo <nl> / / CHECK : strong_release [ [ SELF_BOX : % [ 0 - 9 ] + ] ] : $ @ box Gizmo <nl> mmm a / test / SILGen / objc_thunks . swift <nl> ppp b / test / SILGen / objc_thunks . swift <nl> class Hoozit : Gizmo { <nl> / / CHECK : [ [ PB : % . * ] ] = project_box [ [ SELF_BOX ] ] <nl> / / CHECK : [ [ SELFMUI : % [ 0 - 9 ] + ] ] = mark_uninitialized [ derivedself ] [ [ PB ] ] <nl> / / CHECK : [ [ GIZMO : % [ 0 - 9 ] + ] ] = upcast [ [ SELF : % [ 0 - 9 ] + ] ] : $ Hoozit to $ Gizmo <nl> - / / CHECK : [ [ SUPERMETHOD : % [ 0 - 9 ] + ] ] = super_method [ volatile ] [ [ SELF ] ] : $ Hoozit , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( bellsOn : Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> + / / CHECK : [ [ SUPERMETHOD : % [ 0 - 9 ] + ] ] = super_method [ volatile ] [ [ SELF ] ] : $ Hoozit , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> / / CHECK - NEXT : [ [ SELF_REPLACED : % [ 0 - 9 ] + ] ] = apply [ [ SUPERMETHOD ] ] ( % 0 , [ [ X : % [ 0 - 9 ] + ] ] ) : $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> / / CHECK - NOT : unconditional_checked_cast downcast [ [ SELF_REPLACED ] ] : $ Gizmo to $ Hoozit <nl> / / CHECK : unchecked_ref_cast <nl> extension Hoozit { <nl> / / CHECK : [ [ SELFMUI : % [ 0 - 9 ] + ] ] = mark_uninitialized [ delegatingself ] [ [ PB ] ] <nl> / / CHECK : [ [ X_BOX : % [ 0 - 9 ] + ] ] = alloc_box $ X <nl> var x = X ( ) <nl> - / / CHECK : [ [ CTOR : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ SELF : % [ 0 - 9 ] + ] ] : $ Hoozit , # Hoozit . init ! initializer . 1 . foreign : ( Hoozit . Type ) - > ( int : Int ) - > Hoozit , $ @ convention ( objc_method ) ( Int , @ owned Hoozit ) - > @ owned Hoozit <nl> + / / CHECK : [ [ CTOR : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ SELF : % [ 0 - 9 ] + ] ] : $ Hoozit , # Hoozit . init ! initializer . 1 . foreign : ( Hoozit . 
Type ) - > ( Int ) - > Hoozit , $ @ convention ( objc_method ) ( Int , @ owned Hoozit ) - > @ owned Hoozit <nl> / / CHECK : [ [ NEW_SELF : % [ 0 - 9 ] + ] ] = apply [ [ CTOR ] ] <nl> / / CHECK : store [ [ NEW_SELF ] ] to [ [ SELFMUI ] ] : $ * Hoozit <nl> / / CHECK : [ [ NONNULL : % [ 0 - 9 ] + ] ] = is_nonnull [ [ NEW_SELF ] ] : $ Hoozit <nl> class DesignatedOverrides : Gizmo { <nl> <nl> / / CHECK - LABEL : sil hidden @ _TFC11objc_thunks19DesignatedOverridesc { { . * } } <nl> / / CHECK : function_ref @ _TFSiC { { . * } } <nl> - / / CHECK : super_method [ volatile ] [ [ SELF : % [ 0 - 9 ] + ] ] : $ DesignatedOverrides , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( bellsOn : Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> + / / CHECK : super_method [ volatile ] [ [ SELF : % [ 0 - 9 ] + ] ] : $ DesignatedOverrides , # Gizmo . init ! initializer . 1 . foreign : ( Gizmo . Type ) - > ( Int ) - > Gizmo ! , $ @ convention ( objc_method ) ( Int , @ owned Gizmo ) - > @ owned ImplicitlyUnwrappedOptional < Gizmo > <nl> / / CHECK : return <nl> } <nl> <nl> mmm a / test / SILGen / optional_lvalue . swift <nl> ppp b / test / SILGen / optional_lvalue . swift <nl> func assign_iuo_lvalue_implicit ( _ s : inout S ! , _ y : Int ) { <nl> / / CHECK : [ [ REABSTRACTED : % . * ] ] = partial_apply [ [ REABSTRACT ] ] <nl> / / CHECK : assign [ [ REABSTRACTED ] ] to { { % . * } } : $ * @ callee_owned ( @ in Int ) - > @ out Int <nl> func assign_optional_lvalue_reabstracted ( _ x : inout ( ( Int ) - > Int ) ? , <nl> - _ y : ( Int ) - > Int ) { <nl> + _ y : @ escaping ( Int ) - > Int ) { <nl> x ! = y <nl> } <nl> <nl> mmm a / test / SILGen / property_abstraction . swift <nl> ppp b / test / SILGen / property_abstraction . swift <nl> func getF ( _ x : Foo < Int , Int > ) - > ( Int ) - > Int { <nl> / / CHECK : [ [ F_ORIG : % . * ] ] = partial_apply [ [ REABSTRACT_FN ] ] ( { { % . * } } ) <nl> / / CHECK : [ [ F_ADDR : % . * ] ] = struct_element_addr { { % . * } } : $ * Foo < Int , Int > , # Foo . f <nl> / / CHECK : assign [ [ F_ORIG ] ] to [ [ F_ADDR ] ] <nl> - func setF ( _ x : inout Foo < Int , Int > , f : ( Int ) - > Int ) { <nl> + func setF ( _ x : inout Foo < Int , Int > , f : @ escaping ( Int ) - > Int ) { <nl> x . f = f <nl> } <nl> <nl> func getF ( _ x : Bar < Int , Int > ) - > ( Int ) - > Int { <nl> } <nl> } <nl> <nl> - func makeF ( _ f : ( Int ) - > Int ) - > Bar < Int , Int > { <nl> + func makeF ( _ f : @ escaping ( Int ) - > Int ) - > Bar < Int , Int > { <nl> return Bar . F ( f ) <nl> } <nl> <nl> mmm a / test / SILGen / reabstract . swift <nl> ppp b / test / SILGen / reabstract . swift <nl> func notFun ( _ c : inout C , i : Int ) { } <nl> func testInoutOpaque ( _ c : C , i : Int ) { <nl> var c = c <nl> let box = Box ( t : notFun ) <nl> - box . t ( & c , i : i ) <nl> + box . t ( & c , i ) <nl> } <nl> <nl> / / CHECK - LABEL : sil hidden @ _TF10reabstract15testInoutOpaqueFTCS_1C1iSi_T_ <nl> mmm a / test / SILGen / vtable_thunks . swift <nl> ppp b / test / SILGen / vtable_thunks . swift <nl> class Y : X < D > { <nl> / / optional . <nl> <nl> class Foo { <nl> - func foo ( x : ( Int ) - > Int ) - > ( ( Int ) - > Int ) ? { } <nl> + func foo ( x : @ escaping ( Int ) - > Int ) - > ( ( Int ) - > Int ) ? { } <nl> } <nl> <nl> class Bar : Foo { <nl> mmm a / test / SILGen / vtable_thunks_reabstraction . swift <nl> ppp b / test / SILGen / vtable_thunks_reabstraction . 
swift <nl> class Opaque < T > { <nl> func inAndOut ( x : T ) - > T { return x } <nl> func inAndOutGeneric < U > ( x : T , y : U ) - > U { return y } <nl> func inAndOutMetatypes ( x : T . Type ) - > T . Type { return x } <nl> - func inAndOutFunctions ( x : ( T ) - > T ) - > ( T ) - > T { return x } <nl> + func inAndOutFunctions ( x : @ escaping ( T ) - > T ) - > ( T ) - > T { return x } <nl> func inAndOutTuples ( x : ObnoxiousTuple ) - > ObnoxiousTuple { return x } <nl> func variantOptionality ( x : T ) - > T ? { return x } <nl> func variantOptionalityMetatypes ( x : T . Type ) - > T . Type ? { return x } <nl> - func variantOptionalityFunctions ( x : ( T ) - > T ) - > ( ( T ) - > T ) ? { return x } <nl> + func variantOptionalityFunctions ( x : @ escaping ( T ) - > T ) - > ( ( T ) - > T ) ? { return x } <nl> func variantOptionalityTuples ( x : ObnoxiousTuple ) - > ObnoxiousTuple ? { return x } <nl> } <nl> <nl> class ConcreteValue < X > : Opaque < S > { <nl> override func inAndOut ( x : S ) - > S { return x } <nl> override func inAndOutGeneric < Z > ( x : S , y : Z ) - > Z { return y } <nl> override func inAndOutMetatypes ( x : S . Type ) - > S . Type { return x } <nl> - override func inAndOutFunctions ( x : ( S ) - > S ) - > ( S ) - > S { return x } <nl> + override func inAndOutFunctions ( x : @ escaping ( S ) - > S ) - > ( S ) - > S { return x } <nl> override func inAndOutTuples ( x : ObnoxiousTuple ) - > ObnoxiousTuple { return x } <nl> override func variantOptionality ( x : S ? ) - > S { return x ! } <nl> override func variantOptionalityMetatypes ( x : S . Type ? ) - > S . Type { return x ! } <nl> - override func variantOptionalityFunctions ( x : ( ( S ) - > S ) ? ) - > ( S ) - > S { return x ! } <nl> + override func variantOptionalityFunctions ( x : ( @ escaping ( S ) - > S ) ? ) - > ( S ) - > S { return x ! } <nl> override func variantOptionalityTuples ( x : ObnoxiousTuple ? ) - > ObnoxiousTuple { return x ! } <nl> } <nl> <nl> class ConcreteClass < X > : Opaque < C > { <nl> override func inAndOut ( x : C ) - > C { return x } <nl> override func inAndOutMetatypes ( x : C . Type ) - > C . Type { return x } <nl> - override func inAndOutFunctions ( x : ( C ) - > C ) - > ( C ) - > C { return x } <nl> + override func inAndOutFunctions ( x : @ escaping ( C ) - > C ) - > ( C ) - > C { return x } <nl> override func inAndOutTuples ( x : ObnoxiousTuple ) - > ObnoxiousTuple { return x } <nl> override func variantOptionality ( x : C ? ) - > C { return x ! } <nl> override func variantOptionalityMetatypes ( x : C . Type ? ) - > C . Type { return x ! } <nl> - override func variantOptionalityFunctions ( x : ( ( C ) - > C ) ? ) - > ( C ) - > C { return x ! } <nl> + override func variantOptionalityFunctions ( x : ( @ escaping ( C ) - > C ) ? ) - > ( C ) - > C { return x ! } <nl> override func variantOptionalityTuples ( x : ObnoxiousTuple ? ) - > ObnoxiousTuple { return x ! } <nl> } <nl> <nl> class ConcreteTuple < X > : Opaque < ( S , S ) > { <nl> } <nl> <nl> class OpaqueFunction < U , V > : Opaque < ( U ) - > V > { <nl> - override func inAndOut ( x : ( U ) - > V ) - > ( U ) - > V { return x } <nl> + override func inAndOut ( x : @ escaping ( U ) - > V ) - > ( U ) - > V { return x } <nl> override func variantOptionality ( x : ( ( U ) - > V ) ? ) - > ( U ) - > V { return x ! 
} <nl> } <nl> <nl> class ConcreteFunction < X > : Opaque < ( S ) - > S > { <nl> - override func inAndOut ( x : ( S ) - > S ) - > ( S ) - > S { return x } <nl> + override func inAndOut ( x : @ escaping ( S ) - > S ) - > ( S ) - > S { return x } <nl> override func variantOptionality ( x : ( ( S ) - > S ) ? ) - > ( S ) - > S { return x ! } <nl> } <nl> <nl> mmm a / test / SILGen / weak . swift <nl> ppp b / test / SILGen / weak . swift <nl> class C { <nl> func f ( ) - > Int { return 42 } <nl> } <nl> <nl> - func takeClosure ( fn : ( ) - > Int ) { } <nl> + func takeClosure ( fn : @ escaping ( ) - > Int ) { } <nl> <nl> struct A { <nl> weak var x : C ? <nl> mmm a / test / SILOptimizer / allocbox_to_stack_not_crash . swift <nl> ppp b / test / SILOptimizer / allocbox_to_stack_not_crash . swift <nl> <nl> infix operator ~ > <nl> protocol Target { } <nl> <nl> - func ~ > < Target , Arg0 , Result > ( x : inout Target , f : ( _ : inout Target , _ : Arg0 ) - > Result ) - > ( Arg0 ) - > Result { <nl> + func ~ > < Target , Arg0 , Result > ( x : inout Target , f : @ escaping ( _ : inout Target , _ : Arg0 ) - > Result ) - > ( Arg0 ) - > Result { <nl> return { f ( & x , $ 0 ) } / / expected - error { { escaping closures can only capture inout parameters explicitly by value } } <nl> } <nl> <nl> - func ~ > ( x : inout Int , f : ( _ : inout Int , _ : Target ) - > Target ) - > ( Target ) - > Target { <nl> + func ~ > ( x : inout Int , f : @ escaping ( _ : inout Int , _ : Target ) - > Target ) - > ( Target ) - > Target { <nl> return { f ( & x , $ 0 ) } / / expected - error { { escaping closures can only capture inout parameters explicitly by value } } <nl> } <nl> <nl> mmm a / test / SILOptimizer / basic - callee - printer . sil <nl> ppp b / test / SILOptimizer / basic - callee - printer . sil <nl> private protocol private_proto_4 { <nl> } <nl> <nl> public class private_proto_public_class_private_method : private_proto_4 { <nl> - private func theMethod ( ) <nl> + fileprivate func theMethod ( ) <nl> } <nl> <nl> private func call_through_private_proto_4 < T : private_proto_4 > ( x : T ) <nl> mmm a / test / SILOptimizer / definite_init_objc_factory_init . swift <nl> ppp b / test / SILOptimizer / definite_init_objc_factory_init . swift <nl> import ImportAsMember . Class <nl> func testInstanceTypeFactoryMethod ( queen : Bee ) { <nl> / / CHECK : bb0 ( [ [ QUEEN : % [ 0 - 9 ] + ] ] : $ ImplicitlyUnwrappedOptional < Bee > , [ [ HIVE_META : % [ 0 - 9 ] + ] ] : $ @ thick Hive . Type ) : <nl> / / CHECK - NEXT : [ [ HIVE_META_OBJC : % [ 0 - 9 ] + ] ] = thick_to_objc_metatype [ [ HIVE_META ] ] : $ @ thick Hive . Type to $ @ objc_metatype Hive . Type <nl> - / / CHECK - NEXT : [ [ FACTORY : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ HIVE_META_OBJC ] ] : $ @ objc_metatype Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( queen : Bee ! ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> + / / CHECK - NEXT : [ [ FACTORY : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ HIVE_META_OBJC ] ] : $ @ objc_metatype Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( Bee ! ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . 
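
The test updates above track Swift 3's closure-parameter model (SE-0103): function-typed parameters are non-escaping by default, so any closure that is stored, returned, or otherwise outlives the call must now be annotated @escaping. A minimal illustration of that rule follows; the names are hypothetical and not part of the patch.

// Hypothetical sketch of the @escaping rule the hunks above exercise.
var storedHandler: (() -> Void)? = nil

func register(handler: @escaping () -> Void) {
    // The closure is stored for later execution, so it escapes and needs the annotation.
    storedHandler = handler
}

func runNow(body: () -> Void) {
    // Called before the function returns; non-escaping by default, no annotation needed.
    body()
}
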
Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> / / CHECK - NEXT : [ [ HIVE : % [ 0 - 9 ] + ] ] = apply [ [ FACTORY ] ] ( [ [ QUEEN ] ] , [ [ HIVE_META_OBJC ] ] ) : $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> / / CHECK - NEXT : release_value [ [ QUEEN ] ] <nl> / / CHECK - NEXT : return [ [ HIVE ] ] : $ ImplicitlyUnwrappedOptional < Hive > <nl> extension Hive { <nl> / / CHECK : [ [ SELF_ADDR : % [ 0 - 9 ] + ] ] = alloc_stack $ Hive <nl> / / CHECK : store [ [ OLD_SELF : % [ 0 - 9 ] + ] ] to [ [ SELF_ADDR ] ] <nl> / / CHECK : [ [ META : % [ 0 - 9 ] + ] ] = value_metatype $ @ thick Hive . Type , [ [ OLD_SELF ] ] : $ Hive <nl> - / / CHECK : [ [ FACTORY : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ META ] ] : $ @ thick Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( queen : Bee ! ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> + / / CHECK : [ [ FACTORY : % [ 0 - 9 ] + ] ] = class_method [ volatile ] [ [ META ] ] : $ @ thick Hive . Type , # Hive . init ! allocator . 1 . foreign : ( Hive . Type ) - > ( Bee ! ) - > Hive ! , $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> / / CHECK : [ [ OBJC_META : % [ 0 - 9 ] + ] ] = thick_to_objc_metatype [ [ META ] ] : $ @ thick Hive . Type to $ @ objc_metatype Hive . Type <nl> / / CHECK : apply [ [ FACTORY ] ] ( [ [ QUEEN : % [ 0 - 9 ] + ] ] , [ [ OBJC_META ] ] ) : $ @ convention ( objc_method ) ( ImplicitlyUnwrappedOptional < Bee > , @ objc_metatype Hive . Type ) - > @ autoreleased ImplicitlyUnwrappedOptional < Hive > <nl> / / CHECK : store [ [ NEW_SELF : % [ 0 - 9 ] + ] ] to [ [ SELF_ADDR ] ] <nl> mmm a / test / SILOptimizer / devirt_protocol_method_invocations . swift <nl> ppp b / test / SILOptimizer / devirt_protocol_method_invocations . swift <nl> protocol StaticP { <nl> static var size : Int { get } <nl> } <nl> struct HasStatic < T > : StaticP { <nl> - static var size : Int { return sizeof ( T . self ) } <nl> + static var size : Int { return MemoryLayout < T > . size } <nl> } <nl> public func testExMetatype ( ) - > Int { <nl> let type : StaticP . Type = HasStatic < Int > . self <nl> mmm a / test / SILOptimizer / function_order . sil <nl> ppp b / test / SILOptimizer / function_order . sil <nl> private protocol private_proto_4 { <nl> } <nl> <nl> public class private_proto_public_class_private_method : private_proto_4 { <nl> - private func theMethod ( ) <nl> + fileprivate func theMethod ( ) <nl> } <nl> <nl> private func call_through_private_proto_4 < T : private_proto_4 > ( x : T ) <nl> mmm a / test / SILOptimizer / return . swift <nl> ppp b / test / SILOptimizer / return . 
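
The devirt_protocol_method_invocations.swift hunk above swaps sizeof(T.self) for MemoryLayout<T>.size, the Swift 3 replacement for the old sizeof/strideof/alignof free functions (SE-0101). A minimal sketch of the new spellings, assuming a 64-bit platform for the commented values:

// MemoryLayout<T> exposes size, stride, and alignment as static properties.
let intSize = MemoryLayout<Int>.size           // 8 on 64-bit platforms
let intStride = MemoryLayout<Int>.stride       // 8
let intAlignment = MemoryLayout<Int>.alignment // 8
// A value-based query is also available:
let fromValue = MemoryLayout.size(ofValue: 42) // 8
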
swift <nl> func singleBlock2 ( ) - > Int { <nl> } / / expected - error { { missing return in a function expected to return ' Int ' } } <nl> <nl> class MyClassWithClosure { <nl> - var f : ( s : String ) - > String = { ( s : String ) - > String in } / / expected - error { { missing return in a closure expected to return ' String ' } } <nl> + var f : ( _ s : String ) - > String = { ( _ s : String ) - > String in } / / expected - error { { missing return in a closure expected to return ' String ' } } <nl> } <nl> <nl> func multipleBlocksSingleMissing ( b : Bool ) - > ( String , Int ) { <nl> mmm a / test / Sema / accessibility . swift <nl> ppp b / test / Sema / accessibility . swift <nl> public struct PublicStruct : PublicProto , InternalProto , FilePrivateProto , Privat <nl> private func publicReq ( ) { } / / expected - error { { method ' publicReq ( ) ' must be declared public because it matches a requirement in public protocol ' PublicProto ' } } { { 3 - 10 = public } } <nl> private func internalReq ( ) { } / / expected - error { { method ' internalReq ( ) ' must be declared internal because it matches a requirement in internal protocol ' InternalProto ' } } { { 3 - 10 = internal } } <nl> private func filePrivateReq ( ) { } / / expected - error { { method ' filePrivateReq ( ) ' must be declared fileprivate because it matches a requirement in fileprivate protocol ' FilePrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> - private func privateReq ( ) { } <nl> + private func privateReq ( ) { } / / expected - error { { method ' privateReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> <nl> public var publicVar = 0 <nl> } <nl> internal struct InternalStruct : PublicProto , InternalProto , FilePrivateProto , Pr <nl> private func publicReq ( ) { } / / expected - error { { method ' publicReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PublicProto ' } } { { 3 - 10 = internal } } <nl> private func internalReq ( ) { } / / expected - error { { method ' internalReq ( ) ' must be declared internal because it matches a requirement in internal protocol ' InternalProto ' } } { { 3 - 10 = internal } } <nl> private func filePrivateReq ( ) { } / / expected - error { { method ' filePrivateReq ( ) ' must be declared fileprivate because it matches a requirement in fileprivate protocol ' FilePrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> - private func privateReq ( ) { } <nl> + private func privateReq ( ) { } / / expected - error { { method ' privateReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> <nl> public var publicVar = 0 <nl> } <nl> fileprivate struct FilePrivateStruct : PublicProto , InternalProto , FilePrivatePro <nl> private func publicReq ( ) { } / / expected - error { { method ' publicReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PublicProto ' } } { { 3 - 10 = fileprivate } } <nl> private func internalReq ( ) { } / / expected - error { { method ' internalReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' InternalProto ' } } { { 3 - 10 = fileprivate } } <nl> private func filePrivateReq ( ) { } / / expected - error { { method ' filePrivateReq ( ) ' must be declared fileprivate because it matches a requirement in fileprivate protocol ' 
FilePrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> - private func privateReq ( ) { } <nl> + private func privateReq ( ) { } / / expected - error { { method ' privateReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> <nl> public var publicVar = 0 <nl> } <nl> <nl> / / expected - note @ + 1 * { { type declared here } } <nl> private struct PrivateStruct : PublicProto , InternalProto , FilePrivateProto , PrivateProto { <nl> - private func publicReq ( ) { } <nl> - private func internalReq ( ) { } <nl> - private func filePrivateReq ( ) { } <nl> - private func privateReq ( ) { } <nl> + private func publicReq ( ) { } / / expected - error { { method ' publicReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PublicProto ' } } { { 3 - 10 = fileprivate } } <nl> + private func internalReq ( ) { } / / expected - error { { method ' internalReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' InternalProto ' } } { { 3 - 10 = fileprivate } } <nl> + private func filePrivateReq ( ) { } / / expected - error { { method ' filePrivateReq ( ) ' must be declared fileprivate because it matches a requirement in fileprivate protocol ' FilePrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> + private func privateReq ( ) { } / / expected - error { { method ' privateReq ( ) ' must be as accessible as its enclosing type because it matches a requirement in protocol ' PrivateProto ' } } { { 3 - 10 = fileprivate } } <nl> <nl> public var publicVar = 0 <nl> } <nl> mmm a / test / Sema / accessibility_private . swift <nl> ppp b / test / Sema / accessibility_private . swift <nl> class Sub : Container { <nl> var subInner : PrivateInner ? / / FIXME expected - error { { use of undeclared type ' PrivateInner ' } } <nl> var subInnerQualified : Container . PrivateInner ? / / FIXME expected - error { { ' PrivateInner ' is not a member type of ' Container ' } } <nl> } <nl> + <nl> + <nl> + protocol VeryImportantProto { <nl> + associatedtype Assoc <nl> + var value : Int { get set } / / expected - note { { protocol requires property ' value ' with type ' Int ' ; do you want to add a stub ? 
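
The expected-error annotations added to the accessibility tests above reflect the access-control rule for protocol witnesses: roughly, a witness must be at least as accessible as the less accessible of the conforming type and the protocol it satisfies, so with the SE-0025 private/fileprivate split a file-scoped private type needs fileprivate rather than private witnesses. A minimal sketch of the diagnosed pattern, with hypothetical names:

private protocol Requirement {
    func doIt()
}

private struct Conformer: Requirement {
    // A 'private' witness is rejected: it must be as accessible as its enclosing type,
    // which the fix-it spells 'fileprivate' here.
    // private func doIt() {}

    fileprivate func doIt() {}   // accepted
}
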
} } <nl> + } <nl> + <nl> + private struct VIPPrivateType : VeryImportantProto { <nl> + private typealias Assoc = Int / / expected - error { { type alias ' Assoc ' must be as accessible as its enclosing type because it matches a requirement in protocol ' VeryImportantProto ' } } <nl> + var value : Int <nl> + } <nl> + <nl> + private struct VIPPrivateProp : VeryImportantProto { <nl> + typealias Assoc = Int <nl> + private var value : Int / / expected - error { { property ' value ' must be as accessible as its enclosing type because it matches a requirement in protocol ' VeryImportantProto ' } } { { 3 - 10 = fileprivate } } <nl> + } <nl> + <nl> + private struct VIPPrivateSetProp : VeryImportantProto { <nl> + typealias Assoc = Int <nl> + private ( set ) var value : Int / / expected - error { { setter for property ' value ' must be as accessible as its enclosing type because it matches a requirement in protocol ' VeryImportantProto ' } } { { 3 - 10 = fileprivate } } <nl> + } <nl> + <nl> + private class VIPPrivateSetBase { <nl> + private var value : Int = 0 <nl> + } <nl> + private class VIPPrivateSetSub : VIPPrivateSetBase , VeryImportantProto { / / expected - error { { type ' VIPPrivateSetSub ' does not conform to protocol ' VeryImportantProto ' } } <nl> + typealias Assoc = Int <nl> + } <nl> + <nl> + private class VIPPrivateSetPropBase { <nl> + private ( set ) var value : Int = 0 / / expected - error { { setter for property ' value ' must be as accessible as its enclosing type because it matches a requirement in protocol ' VeryImportantProto ' } } { { 3 - 10 = fileprivate } } <nl> + } <nl> + private class VIPPrivateSetPropSub : VIPPrivateSetPropBase , VeryImportantProto { <nl> + typealias Assoc = Int <nl> + } <nl> mmm a / test / Sema / diag_invalid_inout_captures . swift <nl> ppp b / test / Sema / diag_invalid_inout_captures . swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> func no_escape ( _ you_say_price_of_my_love_is : @ noescape ( ) - > ( ) ) { } <nl> - func do_escape ( _ not_a_price_you_are_willing_to_pay : ( ) - > ( ) ) { } <nl> + func do_escape ( _ not_a_price_you_are_willing_to_pay : @ escaping ( ) - > ( ) ) { } <nl> <nl> struct you_cry_in_your_tea { <nl> mutating func which_you_hurl_in_the_sea_when_you_see_me_go_by ( ) { <nl> mmm a / test / SourceKit / CodeExpand / code - expand . swift <nl> ppp b / test / SourceKit / CodeExpand / code - expand . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> / / RUN : % sourcekitd - test - req = expand - placeholder % s | FileCheck % s <nl> <nl> foo ( x : < # T # # ( ) - > Void # > ) <nl> mmm a / test / SourceKit / CursorInfo / cursor_info . swift <nl> ppp b / test / SourceKit / CursorInfo / cursor_info . swift <nl> <nl> + / / REQUIRES : se_0111_complete <nl> + <nl> import Foo <nl> import FooSwiftModule <nl> <nl> mmm a / test / SourceKit / CursorInfo / cursor_stdlib . swift <nl> ppp b / test / SourceKit / CursorInfo / cursor_stdlib . 
swift <nl> func foo3 ( a : Float , b : Bool ) { } <nl> / / CHECK - REPLACEMENT1 : < Group > Collection / Array < / Group > <nl> / / CHECK - REPLACEMENT1 : < Declaration > func sorted ( ) - & gt ; [ < Type usr = " s : Si " > Int < / Type > ] < / Declaration > <nl> / / CHECK - REPLACEMENT1 : RELATED BEGIN <nl> - / / CHECK - REPLACEMENT1 : sorted ( by : @ noescape ( Int , Int ) - & gt ; Bool ) - & gt ; [ Int ] < / RelatedName > <nl> + / / CHECK - REPLACEMENT1 : sorted ( by : ( Int , Int ) - & gt ; Bool ) - & gt ; [ Int ] < / RelatedName > <nl> / / CHECK - REPLACEMENT1 : sorted ( ) - & gt ; [ Int ] < / RelatedName > <nl> - / / CHECK - REPLACEMENT1 : sorted ( by : @ noescape ( Int , Int ) - & gt ; Bool ) - & gt ; [ Int ] < / RelatedName > <nl> + / / CHECK - REPLACEMENT1 : sorted ( by : ( Int , Int ) - & gt ; Bool ) - & gt ; [ Int ] < / RelatedName > <nl> / / CHECK - REPLACEMENT1 : RELATED END <nl> <nl> / / RUN : % sourcekitd - test - req = cursor - pos = 9 : 8 % s - - % s % mcp_opt % clang - importer - sdk | FileCheck - check - prefix = CHECK - REPLACEMENT2 % s <nl> func foo3 ( a : Float , b : Bool ) { } <nl> <nl> / / RUN : % sourcekitd - test - req = cursor - pos = 15 : 10 % s - - % s % mcp_opt % clang - importer - sdk | FileCheck - check - prefix = CHECK - REPLACEMENT3 % s <nl> / / CHECK - REPLACEMENT3 : < Group > Collection / Array < / Group > <nl> - / / CHECK - REPLACEMENT3 : func sorted ( by areInIncreasingOrder : @ noescape ( < Type usr = " s : V13cursor_stdlib2S1 " > S1 < / Type > <nl> + / / CHECK - REPLACEMENT3 : func sorted ( by areInIncreasingOrder : ( < Type usr = " s : V13cursor_stdlib2S1 " > S1 < / Type > <nl> / / CHECK - REPLACEMENT3 : sorted ( ) - & gt ; [ S1 ] < / RelatedName > <nl> / / CHECK - REPLACEMENT3 : sorted ( ) - & gt ; [ S1 ] < / RelatedName > <nl> - / / CHECK - REPLACEMENT3 : sorted ( by : @ noescape ( S1 , S1 ) - & gt ; Bool ) - & gt ; [ S1 ] < / RelatedName > <nl> + / / CHECK - REPLACEMENT3 : sorted ( by : ( S1 , S1 ) - & gt ; Bool ) - & gt ; [ S1 ] < / RelatedName > <nl> <nl> / / RUN : % sourcekitd - test - req = cursor - pos = 18 : 8 % s - - % s % mcp_opt % clang - importer - sdk | FileCheck - check - prefix = CHECK - REPLACEMENT4 % s <nl> / / CHECK - REPLACEMENT4 : < Group > Collection / Array < / Group > <nl> mmm a / test / SourceKit / CursorInfo / cursor_usr . swift <nl> ppp b / test / SourceKit / CursorInfo / cursor_usr . swift <nl> func foo ( x : FooStruct1 ) - > S1 { } <nl> / / RUN : % sourcekitd - test - req = cursor - usr " s : F10cursor_usr3fooFVSC10FooStruct1VS_2S1 " % s - - - I % t - F % S / . . / Inputs / libIDE - mock - sdk % mcp_opt % s | FileCheck % s - check - prefix = CHECK3 <nl> / / CHECK3 : source . lang . swift . decl . function . free ( 9 : 6 - 9 : 24 ) <nl> / / CHECK3 : foo ( x : ) <nl> - / / CHECK3 : ( x : FooStruct1 ) - > S1 <nl> + / / CHECK3 : ( FooStruct1 ) - > S1 <nl> / / CHECK3 : < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > foo < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > x < / decl . var . parameter . argument_label > : < decl . var . parameter . type > < ref . struct usr = " c : @ S @ FooStruct1 " > FooStruct1 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) - & gt ; < decl . function . returntype > < ref . struct usr = " s : V10cursor_usr2S1 " > S1 < / ref . struct > < / decl . function . returntype > < / decl . function . free > <nl> new file mode 100644 <nl> index 000000000000 . . 
a68be5a659eb <nl> mmm / dev / null <nl> ppp b / test / SourceKit / DocSupport / Inputs / MyError . h <nl> <nl> + @ import Foundation ; <nl> + <nl> + # define NS_ERROR_ENUM ( _type , _name , _domain ) \ <nl> + enum _name : _type _name ; enum __attribute__ ( ( ns_error_domain ( _domain ) ) ) _name : _type <nl> + <nl> + @ class NSString ; <nl> + extern const NSString * const MyErrorDomain ; <nl> + / / / This is my cool error code . <nl> + typedef NS_ERROR_ENUM ( int , MyErrorCode , MyErrorDomain ) { <nl> + / / / This is first error . <nl> + MyErrFirst , <nl> + / / / This is second error . <nl> + MyErrSecond , <nl> + } ; <nl> new file mode 100644 <nl> index 000000000000 . . 155b69374f18 <nl> mmm / dev / null <nl> ppp b / test / SourceKit / DocSupport / Inputs / module . modulemap <nl> <nl> + module " MyError " { <nl> + header " MyError . h " <nl> + export * <nl> + } <nl> mmm a / test / SourceKit / DocSupport / doc_clang_module . swift . response <nl> ppp b / test / SourceKit / DocSupport / doc_clang_module . swift . response <nl> var fooIntVar : Int32 <nl> func fooFunc1 ( _ a : Int32 ) - > Int32 <nl> func fooFunc1AnonymousParam ( _ _ : Int32 ) - > Int32 <nl> func fooFunc3 ( _ a : Int32 , _ b : Float , _ c : Double , _ d : UnsafeMutablePointer < Int32 > ! ) - > Int32 <nl> - func fooFuncWithBlock ( _ blk : ( ( Float ) - > Int32 ) ! ) <nl> - func fooFuncWithFunctionPointer ( _ fptr : ( ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithBlock ( _ blk : ( @ escaping ( Float ) - > Int32 ) ! ) <nl> + func fooFuncWithFunctionPointer ( _ fptr : ( @ escaping ( Float ) - > Int32 ) ! ) <nl> func fooFuncNoreturn1 ( ) - > Never <nl> func fooFuncNoreturn2 ( ) - > Never <nl> func fooFuncWithComment1 ( ) <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . offset : 3763 , <nl> key . length : 3 <nl> } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . id , <nl> + key . offset : 3769 , <nl> + key . length : 9 <nl> + } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> + key . offset : 3770 , <nl> + key . length : 8 <nl> + } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Float " , <nl> key . usr : " s : Sf " , <nl> - key . offset : 3770 , <nl> + key . offset : 3780 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 3780 , <nl> + key . offset : 3790 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3789 , <nl> + key . offset : 3799 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3794 , <nl> + key . offset : 3804 , <nl> key . length : 26 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 3821 , <nl> + key . offset : 3831 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 3823 , <nl> + key . offset : 3833 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3823 , <nl> + key . offset : 3833 , <nl> key . length : 4 <nl> } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . id , <nl> + key . offset : 3840 , <nl> + key . length : 9 <nl> + } , <nl> + { <nl> + key . kind : source . 
lang . swift . syntaxtype . attribute . builtin , <nl> + key . offset : 3841 , <nl> + key . length : 8 <nl> + } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Float " , <nl> key . usr : " s : Sf " , <nl> - key . offset : 3831 , <nl> + key . offset : 3851 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 3841 , <nl> + key . offset : 3861 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3850 , <nl> + key . offset : 3870 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3855 , <nl> + key . offset : 3875 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . enum , <nl> key . name : " Never " , <nl> key . usr : " s : Os5Never " , <nl> - key . offset : 3877 , <nl> + key . offset : 3897 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3883 , <nl> + key . offset : 3903 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3888 , <nl> + key . offset : 3908 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . enum , <nl> key . name : " Never " , <nl> key . usr : " s : Os5Never " , <nl> - key . offset : 3910 , <nl> + key . offset : 3930 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3916 , <nl> + key . offset : 3936 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3921 , <nl> + key . offset : 3941 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3943 , <nl> + key . offset : 3963 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3948 , <nl> + key . offset : 3968 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3970 , <nl> + key . offset : 3990 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3975 , <nl> + key . offset : 3995 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3997 , <nl> + key . offset : 4017 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4002 , <nl> + key . offset : 4022 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4024 , <nl> + key . offset : 4044 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4029 , <nl> + key . offset : 4049 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4051 , <nl> + key . offset : 4071 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4056 , <nl> + key . 
offset : 4076 , <nl> key . length : 32 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 4089 , <nl> + key . offset : 4109 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 4091 , <nl> + key . offset : 4111 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4091 , <nl> + key . offset : 4111 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4094 , <nl> + key . offset : 4114 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4104 , <nl> + key . offset : 4124 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4110 , <nl> + key . offset : 4130 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4119 , <nl> + key . offset : 4139 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4142 , <nl> + key . offset : 4162 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4147 , <nl> + key . offset : 4167 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4167 , <nl> + key . offset : 4187 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4172 , <nl> + key . offset : 4192 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4213 , <nl> + key . offset : 4233 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4218 , <nl> + key . offset : 4238 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4259 , <nl> + key . offset : 4279 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4266 , <nl> + key . offset : 4286 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4271 , <nl> + key . offset : 4291 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4296 , <nl> + key . offset : 4316 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4300 , <nl> + key . offset : 4320 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4314 , <nl> + key . offset : 4334 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4322 , <nl> + key . offset : 4342 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . 
keyword , <nl> - key . offset : 4326 , <nl> + key . offset : 4346 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4337 , <nl> + key . offset : 4357 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4341 , <nl> + key . offset : 4361 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4355 , <nl> + key . offset : 4375 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4363 , <nl> + key . offset : 4383 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4367 , <nl> + key . offset : 4387 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4378 , <nl> + key . offset : 4398 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4382 , <nl> + key . offset : 4402 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4396 , <nl> + key . offset : 4416 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4404 , <nl> + key . offset : 4424 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4412 , <nl> + key . offset : 4432 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4421 , <nl> + key . offset : 4441 , <nl> key . length : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> key . name : " FooProtocolBase " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase " , <nl> - key . offset : 4442 , <nl> + key . offset : 4462 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4462 , <nl> + key . offset : 4482 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4468 , <nl> + key . offset : 4488 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4488 , <nl> + key . offset : 4508 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4493 , <nl> + key . offset : 4513 , <nl> key . length : 20 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4521 , <nl> + key . offset : 4541 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4526 , <nl> + key . offset : 4546 , <nl> key . length : 20 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 4547 , <nl> + key . offset : 4567 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 4549 , <nl> + key . offset : 4569 , <nl> key . 
length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4549 , <nl> + key . offset : 4569 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4559 , <nl> + key . offset : 4579 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 4568 , <nl> + key . offset : 4588 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4587 , <nl> + key . offset : 4607 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4600 , <nl> + key . offset : 4620 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4612 , <nl> + key . offset : 4632 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 4618 , <nl> + key . offset : 4638 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 4624 , <nl> + key . offset : 4644 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4618 , <nl> + key . offset : 4638 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4624 , <nl> + key . offset : 4644 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Float " , <nl> key . usr : " s : Sf " , <nl> - key . offset : 4627 , <nl> + key . offset : 4647 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4639 , <nl> + key . offset : 4659 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4644 , <nl> + key . offset : 4664 , <nl> key . length : 29 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4681 , <nl> + key . offset : 4701 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4687 , <nl> + key . offset : 4707 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4692 , <nl> + key . offset : 4712 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4717 , <nl> + key . offset : 4737 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4722 , <nl> + key . offset : 4742 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4742 , <nl> + key . offset : 4762 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4752 , <nl> + key . offset : 4772 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4757 , <nl> + key . offset : 4777 , <nl> key . 
length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4777 , <nl> + key . offset : 4797 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4787 , <nl> + key . offset : 4807 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4792 , <nl> + key . offset : 4812 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4813 , <nl> + key . offset : 4833 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4823 , <nl> + key . offset : 4843 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4828 , <nl> + key . offset : 4848 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4848 , <nl> + key . offset : 4868 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4855 , <nl> + key . offset : 4875 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4861 , <nl> + key . offset : 4881 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 4879 , <nl> + key . offset : 4899 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> key . name : " FooProtocolDerived " , <nl> key . usr : " c : objc ( pl ) FooProtocolDerived " , <nl> - key . offset : 4893 , <nl> + key . offset : 4913 , <nl> key . length : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4919 , <nl> + key . offset : 4939 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4923 , <nl> + key . offset : 4943 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4937 , <nl> + key . offset : 4957 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4948 , <nl> + key . offset : 4968 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4952 , <nl> + key . offset : 4972 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 4966 , <nl> + key . offset : 4986 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4977 , <nl> + key . offset : 4997 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4981 , <nl> + key . offset : 5001 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . 
offset : 4995 , <nl> + key . offset : 5015 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5003 , <nl> + key . offset : 5023 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5014 , <nl> + key . offset : 5034 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5019 , <nl> + key . offset : 5039 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5043 , <nl> + key . offset : 5063 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5048 , <nl> + key . offset : 5068 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 5065 , <nl> + key . offset : 5085 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 5067 , <nl> + key . offset : 5087 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5067 , <nl> + key . offset : 5087 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5070 , <nl> + key . offset : 5090 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5082 , <nl> + key . offset : 5102 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5087 , <nl> + key . offset : 5107 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 5104 , <nl> + key . offset : 5124 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 5106 , <nl> + key . offset : 5126 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5106 , <nl> + key . offset : 5126 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5109 , <nl> + key . offset : 5129 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 5116 , <nl> + key . offset : 5136 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 5122 , <nl> + key . offset : 5142 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5116 , <nl> + key . offset : 5136 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5122 , <nl> + key . offset : 5142 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5125 , <nl> + key . offset : 5145 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . 
syntaxtype . keyword , <nl> - key . offset : 5137 , <nl> + key . offset : 5157 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5142 , <nl> + key . offset : 5162 , <nl> key . length : 29 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5179 , <nl> + key . offset : 5199 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5185 , <nl> + key . offset : 5205 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5190 , <nl> + key . offset : 5210 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5211 , <nl> + key . offset : 5231 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5216 , <nl> + key . offset : 5236 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5236 , <nl> + key . offset : 5256 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5246 , <nl> + key . offset : 5266 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5251 , <nl> + key . offset : 5271 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5271 , <nl> + key . offset : 5291 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5281 , <nl> + key . offset : 5301 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5286 , <nl> + key . offset : 5306 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5307 , <nl> + key . offset : 5327 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5317 , <nl> + key . offset : 5337 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5322 , <nl> + key . offset : 5342 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5342 , <nl> + key . offset : 5362 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5349 , <nl> + key . offset : 5369 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5353 , <nl> + key . offset : 5373 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5366 , <nl> + key . offset : 5386 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5374 , <nl> + key . offset : 5394 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5380 , <nl> + key . 
offset : 5400 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5384 , <nl> + key . offset : 5404 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5397 , <nl> + key . offset : 5417 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5405 , <nl> + key . offset : 5425 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5411 , <nl> + key . offset : 5431 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5415 , <nl> + key . offset : 5435 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5428 , <nl> + key . offset : 5448 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5436 , <nl> + key . offset : 5456 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5442 , <nl> + key . offset : 5462 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5446 , <nl> + key . offset : 5466 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " UInt32 " , <nl> key . usr : " s : Vs6UInt32 " , <nl> - key . offset : 5459 , <nl> + key . offset : 5479 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5468 , <nl> + key . offset : 5488 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5474 , <nl> + key . offset : 5494 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5478 , <nl> + key . offset : 5498 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " UInt64 " , <nl> key . usr : " s : Vs6UInt64 " , <nl> - key . offset : 5491 , <nl> + key . offset : 5511 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5500 , <nl> + key . offset : 5520 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5506 , <nl> + key . offset : 5526 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5510 , <nl> + key . offset : 5530 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5529 , <nl> + key . offset : 5549 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5537 , <nl> + key . offset : 5557 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5543 , <nl> + key . 
offset : 5563 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5547 , <nl> + key . offset : 5567 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5566 , <nl> + key . offset : 5586 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5574 , <nl> + key . offset : 5594 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5580 , <nl> + key . offset : 5600 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5585 , <nl> + key . offset : 5605 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5604 , <nl> + key . offset : 5624 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5609 , <nl> + key . offset : 5629 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5633 , <nl> + key . offset : 5653 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5640 , <nl> + key . offset : 5660 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5663 , <nl> + key . offset : 5683 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5667 , <nl> + key . offset : 5687 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5670 , <nl> + key . offset : 5690 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5681 , <nl> + key . offset : 5701 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5693 , <nl> + key . offset : 5713 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 5698 , <nl> + key . offset : 5718 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 5700 , <nl> + key . offset : 5720 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5698 , <nl> + key . offset : 5718 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5700 , <nl> + key . offset : 5720 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 5703 , <nl> + key . offset : 5723 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5712 , <nl> + key . offset : 5732 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . 
name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 5722 , <nl> + key . offset : 5742 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5742 , <nl> + key . offset : 5762 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5747 , <nl> + key . offset : 5767 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5767 , <nl> + key . offset : 5787 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5774 , <nl> + key . offset : 5794 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 5784 , <nl> + key . offset : 5804 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5804 , <nl> + key . offset : 5824 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5809 , <nl> + key . offset : 5829 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5829 , <nl> + key . offset : 5849 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5839 , <nl> + key . offset : 5859 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5844 , <nl> + key . offset : 5864 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5865 , <nl> + key . offset : 5885 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5872 , <nl> + key . offset : 5892 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 5882 , <nl> + key . offset : 5902 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5902 , <nl> + key . offset : 5922 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5907 , <nl> + key . offset : 5927 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5927 , <nl> + key . offset : 5947 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5934 , <nl> + key . offset : 5954 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5943 , <nl> + key . offset : 5963 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5961 , <nl> + key . offset : 5981 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5967 , <nl> + key . offset : 5987 , <nl> key . 
length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> key . name : " _InternalProt " , <nl> key . usr : " c : objc ( pl ) _InternalProt " , <nl> - key . offset : 5991 , <nl> + key . offset : 6011 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6009 , <nl> + key . offset : 6029 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6015 , <nl> + key . offset : 6035 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 6043 , <nl> + key . offset : 6063 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6063 , <nl> + key . offset : 6083 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6067 , <nl> + key . offset : 6087 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6079 , <nl> + key . offset : 6099 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6089 , <nl> + key . offset : 6109 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6093 , <nl> + key . offset : 6113 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6111 , <nl> + key . offset : 6131 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6121 , <nl> + key . offset : 6141 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6125 , <nl> + key . offset : 6145 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6137 , <nl> + key . offset : 6157 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6147 , <nl> + key . offset : 6167 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6151 , <nl> + key . offset : 6171 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6162 , <nl> + key . offset : 6182 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6172 , <nl> + key . offset : 6192 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6176 , <nl> + key . offset : 6196 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6186 , <nl> + key . offset : 6206 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6196 , <nl> + key . offset : 6216 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6200 , <nl> + key . 
offset : 6220 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6209 , <nl> + key . offset : 6229 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6219 , <nl> + key . offset : 6239 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6223 , <nl> + key . offset : 6243 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 6231 , <nl> + key . offset : 6251 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6242 , <nl> + key . offset : 6262 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6247 , <nl> + key . offset : 6267 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6267 , <nl> + key . offset : 6287 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6277 , <nl> + key . offset : 6297 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6282 , <nl> + key . offset : 6302 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6302 , <nl> + key . offset : 6322 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6312 , <nl> + key . offset : 6332 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6317 , <nl> + key . offset : 6337 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6338 , <nl> + key . offset : 6358 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6348 , <nl> + key . offset : 6368 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6353 , <nl> + key . offset : 6373 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6373 , <nl> + key . offset : 6393 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6380 , <nl> + key . offset : 6400 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6384 , <nl> + key . offset : 6404 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6396 , <nl> + key . offset : 6416 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6402 , <nl> + key . offset : 6422 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 6426 , <nl> + key . 
offset : 6446 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6446 , <nl> + key . offset : 6466 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6458 , <nl> + key . offset : 6478 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 6464 , <nl> + key . offset : 6484 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 6468 , <nl> + key . offset : 6488 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6464 , <nl> + key . offset : 6484 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6468 , <nl> + key . offset : 6488 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 6471 , <nl> + key . offset : 6491 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6483 , <nl> + key . offset : 6503 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6489 , <nl> + key . offset : 6509 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6494 , <nl> + key . offset : 6514 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 6502 , <nl> + key . offset : 6522 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 6504 , <nl> + key . offset : 6524 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6504 , <nl> + key . offset : 6524 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 6507 , <nl> + key . offset : 6527 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooUnavailableMembers " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers " , <nl> - key . offset : 6517 , <nl> + key . offset : 6537 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6528 , <nl> + key . offset : 6548 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6533 , <nl> + key . offset : 6553 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6552 , <nl> + key . offset : 6572 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6557 , <nl> + key . offset : 6577 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6581 , <nl> + key . offset : 6601 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . 
lang . swift . syntaxtype . identifier , <nl> - key . offset : 6586 , <nl> + key . offset : 6606 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6604 , <nl> + key . offset : 6624 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6609 , <nl> + key . offset : 6629 , <nl> key . length : 22 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6639 , <nl> + key . offset : 6659 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6644 , <nl> + key . offset : 6664 , <nl> key . length : 22 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6674 , <nl> + key . offset : 6694 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6679 , <nl> + key . offset : 6699 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6708 , <nl> + key . offset : 6728 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6713 , <nl> + key . offset : 6733 , <nl> key . length : 23 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6744 , <nl> + key . offset : 6764 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6749 , <nl> + key . offset : 6769 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6782 , <nl> + key . offset : 6802 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6787 , <nl> + key . offset : 6807 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6820 , <nl> + key . offset : 6840 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6825 , <nl> + key . offset : 6845 , <nl> key . length : 24 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6857 , <nl> + key . offset : 6877 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6862 , <nl> + key . offset : 6882 , <nl> key . length : 26 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6896 , <nl> + key . offset : 6916 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6901 , <nl> + key . offset : 6921 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6921 , <nl> + key . offset : 6941 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6931 , <nl> + key . offset : 6951 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6936 , <nl> + key . offset : 6956 , <nl> key . 
length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6956 , <nl> + key . offset : 6976 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6966 , <nl> + key . offset : 6986 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6971 , <nl> + key . offset : 6991 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6992 , <nl> + key . offset : 7012 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7002 , <nl> + key . offset : 7022 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7007 , <nl> + key . offset : 7027 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7027 , <nl> + key . offset : 7047 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7034 , <nl> + key . offset : 7054 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7040 , <nl> + key . offset : 7060 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7054 , <nl> + key . offset : 7074 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7059 , <nl> + key . offset : 7079 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 7076 , <nl> + key . offset : 7096 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 7078 , <nl> + key . offset : 7098 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> key . name : " FooCFType " , <nl> key . usr : " c : Foo . h @ T @ FooCFTypeRef " , <nl> - key . offset : 7081 , <nl> + key . offset : 7101 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7093 , <nl> + key . offset : 7113 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7098 , <nl> + key . offset : 7118 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 7110 , <nl> + key . offset : 7130 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 7112 , <nl> + key . offset : 7132 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7112 , <nl> + key . offset : 7132 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . offset : 7115 , <nl> + key . offset : 7135 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int32 " , <nl> key . usr : " s : Vs5Int32 " , <nl> - key . 
offset : 7125 , <nl> + key . offset : 7145 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7131 , <nl> + key . offset : 7151 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7138 , <nl> + key . offset : 7158 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> key . name : " RawRepresentable " , <nl> key . usr : " s : Ps16RawRepresentable " , <nl> - key . offset : 7152 , <nl> + key . offset : 7172 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> key . name : " Equatable " , <nl> key . usr : " s : Ps9Equatable " , <nl> - key . offset : 7170 , <nl> + key . offset : 7190 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7187 , <nl> + key . offset : 7207 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 7192 , <nl> + key . offset : 7212 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 7194 , <nl> + key . offset : 7214 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7194 , <nl> + key . offset : 7214 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " UInt32 " , <nl> key . usr : " s : Vs6UInt32 " , <nl> - key . offset : 7204 , <nl> + key . offset : 7224 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7217 , <nl> + key . offset : 7237 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . argument , <nl> - key . offset : 7222 , <nl> + key . offset : 7242 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . parameter , <nl> - key . offset : 7231 , <nl> + key . offset : 7251 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7222 , <nl> + key . offset : 7242 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7231 , <nl> + key . offset : 7251 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " UInt32 " , <nl> key . usr : " s : Vs6UInt32 " , <nl> - key . offset : 7241 , <nl> + key . offset : 7261 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7254 , <nl> + key . offset : 7274 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7258 , <nl> + key . offset : 7278 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " UInt32 " , <nl> key . usr : " s : Vs6UInt32 " , <nl> - key . offset : 7268 , <nl> + key . offset : 7288 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7277 , <nl> + key . offset : 7297 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . 
identifier , <nl> - key . offset : 7281 , <nl> + key . offset : 7301 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " FooSubEnum1 " , <nl> key . usr : " c : @ E @ FooSubEnum1 " , <nl> - key . offset : 7295 , <nl> + key . offset : 7315 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7309 , <nl> + key . offset : 7329 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7315 , <nl> + key . offset : 7335 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7319 , <nl> + key . offset : 7339 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " FooSubEnum1 " , <nl> key . usr : " c : @ E @ FooSubEnum1 " , <nl> - key . offset : 7333 , <nl> + key . offset : 7353 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7347 , <nl> + key . offset : 7367 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 7353 , <nl> + key . offset : 7373 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7357 , <nl> + key . offset : 7377 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> key . name : " Int " , <nl> key . usr : " s : Si " , <nl> - key . offset : 7384 , <nl> + key . offset : 7404 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 7390 , <nl> + key . offset : 7410 , <nl> key . length : 3 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithBlock ( _ : ) " , <nl> key . usr : " c : @ F @ fooFuncWithBlock " , <nl> key . offset : 3739 , <nl> - key . length : 49 , <nl> - key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithBlock < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > blk < / decl . var . parameter . name > : < decl . var . parameter . type > ( ( < ref . struct usr = \ " s : Sf \ " > Float < / ref . struct > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > ) ! < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . free > " , <nl> + key . length : 59 , <nl> + key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithBlock < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > blk < / decl . var . parameter . name > : < decl . var . parameter . type > ( @ escaping ( < ref . struct usr = \ " s : Sf \ " > Float < / ref . struct > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > ) ! < / decl . var . parameter . 
type > < / decl . var . parameter > ) < / decl . function . free > " , <nl> key . entities : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " blk " , <nl> key . offset : 3768 , <nl> - key . length : 19 <nl> + key . length : 29 <nl> } <nl> ] <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " fooFuncWithFunctionPointer ( _ : ) " , <nl> key . usr : " c : @ F @ fooFuncWithFunctionPointer " , <nl> - key . offset : 3789 , <nl> - key . length : 60 , <nl> - key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithFunctionPointer < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > fptr < / decl . var . parameter . name > : < decl . var . parameter . type > ( ( < ref . struct usr = \ " s : Sf \ " > Float < / ref . struct > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > ) ! < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . free > " , <nl> + key . offset : 3799 , <nl> + key . length : 70 , <nl> + key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithFunctionPointer < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > fptr < / decl . var . parameter . name > : < decl . var . parameter . type > ( @ escaping ( < ref . struct usr = \ " s : Sf \ " > Float < / ref . struct > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > ) ! < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . free > " , <nl> key . entities : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " fptr " , <nl> - key . offset : 3829 , <nl> - key . length : 19 <nl> + key . offset : 3839 , <nl> + key . length : 29 <nl> } <nl> ] <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " fooFuncNoreturn1 ( ) " , <nl> key . usr : " c : @ F @ fooFuncNoreturn1 " , <nl> - key . offset : 3850 , <nl> + key . offset : 3870 , <nl> key . length : 32 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncNoreturn1 < / decl . name > ( ) - & gt ; < decl . function . returntype > < ref . enum usr = \ " s : Os5Never \ " > Never < / ref . enum > < / decl . function . returntype > < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " fooFuncNoreturn2 ( ) " , <nl> key . usr : " c : @ F @ fooFuncNoreturn2 " , <nl> - key . offset : 3883 , <nl> + key . offset : 3903 , <nl> key . length : 32 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncNoreturn2 < / decl . 
name > ( ) - & gt ; < decl . function . returntype > < ref . enum usr = \ " s : Os5Never \ " > Never < / ref . enum > < / decl . function . returntype > < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithComment1 ( ) " , <nl> key . usr : " c : @ F @ fooFuncWithComment1 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 88 \ " column = \ " 6 \ " > < Name > fooFuncWithComment1 < / Name > < USR > c : @ F @ fooFuncWithComment1 < / USR > < Declaration > func fooFuncWithComment1 ( ) < / Declaration > < Abstract > < Para > Aaa . fooFuncWithComment1 . Bbb . Ccc . < / Para > < / Abstract > < Discussion > < Para > Ddd . < / Para > < / Discussion > < / Function > " , <nl> - key . offset : 3916 , <nl> + key . offset : 3936 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithComment1 < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithComment2 ( ) " , <nl> key . usr : " c : @ F @ fooFuncWithComment2 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 93 \ " column = \ " 6 \ " > < Name > fooFuncWithComment2 < / Name > < USR > c : @ F @ fooFuncWithComment2 < / USR > < Declaration > func fooFuncWithComment2 ( ) < / Declaration > < Abstract > < Para > Aaa . fooFuncWithComment2 . Bbb . < / Para > < / Abstract > < / Function > " , <nl> - key . offset : 3943 , <nl> + key . offset : 3963 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithComment2 < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithComment3 ( ) " , <nl> key . usr : " c : @ F @ fooFuncWithComment3 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 101 \ " column = \ " 6 \ " > < Name > fooFuncWithComment3 < / Name > < USR > c : @ F @ fooFuncWithComment3 < / USR > < Declaration > func fooFuncWithComment3 ( ) < / Declaration > < Abstract > < Para > Aaa . fooFuncWithComment3 . Bbb . < / Para > < / Abstract > < Discussion > < Para > Ccc . < / Para > < / Discussion > < / Function > " , <nl> - key . offset : 3970 , <nl> + key . offset : 3990 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithComment3 < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithComment4 ( ) " , <nl> key . usr : " c : @ F @ fooFuncWithComment4 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 107 \ " column = \ " 6 \ " > < Name > fooFuncWithComment4 < / Name > < USR > c : @ F @ fooFuncWithComment4 < / USR > < Declaration > func fooFuncWithComment4 ( ) < / Declaration > < Abstract > < Para > Aaa . fooFuncWithComment4 . Bbb . < / Para > < / Abstract > < Discussion > < Para > Ddd . < / Para > < / Discussion > < / Function > " , <nl> - key . offset : 3997 , <nl> + key . offset : 4017 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithComment4 < / decl . name > ( ) < / decl . 
function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooFuncWithComment5 ( ) " , <nl> key . usr : " c : @ F @ fooFuncWithComment5 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 113 \ " column = \ " 6 \ " > < Name > fooFuncWithComment5 < / Name > < USR > c : @ F @ fooFuncWithComment5 < / USR > < Declaration > func fooFuncWithComment5 ( ) < / Declaration > < Abstract > < Para > Aaa . fooFuncWithComment5 . Bbb . Ccc . < / Para > < / Abstract > < Discussion > < Para > Ddd . < / Para > < / Discussion > < / Function > " , <nl> - key . offset : 4024 , <nl> + key . offset : 4044 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooFuncWithComment5 < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " redeclaredInMultipleModulesFunc1 ( _ : ) " , <nl> key . usr : " c : @ F @ redeclaredInMultipleModulesFunc1 " , <nl> key . doc . full_as_xml : " < Function file = Foo . h line = \ " 117 \ " column = \ " 5 \ " > < Name > redeclaredInMultipleModulesFunc1 < / Name > < USR > c : @ F @ redeclaredInMultipleModulesFunc1 < / USR > < Declaration > func redeclaredInMultipleModulesFunc1 ( _ a : Int32 ) - > Int32 < / Declaration > < Abstract > < Para > Aaa . redeclaredInMultipleModulesFunc1 . Bbb . < / Para > < / Abstract > < / Function > " , <nl> - key . offset : 4051 , <nl> + key . offset : 4071 , <nl> key . length : 58 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > redeclaredInMultipleModulesFunc1 < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > a < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > < / decl . function . free > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " a " , <nl> - key . offset : 4094 , <nl> + key . offset : 4114 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " FooProtocolBase " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase " , <nl> key . doc . full_as_xml : " < Other file = Foo . h line = \ " 120 \ " column = \ " 11 \ " > < Name > FooProtocolBase < / Name > < USR > c : objc ( pl ) FooProtocolBase < / USR > < Declaration > protocol FooProtocolBase < / Declaration > < Abstract > < Para > Aaa . FooProtocolBase . Bbb . < / Para > < / Abstract > < / Other > " , <nl> - key . offset : 4110 , <nl> + key . offset : 4130 , <nl> key . length : 301 , <nl> key . fully_annotated_decl : " < decl . protocol > < syntaxtype . keyword > protocol < / syntaxtype . keyword > < decl . name > FooProtocolBase < / decl . name > < / decl . protocol > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooProtoFunc ( ) " , <nl> key . 
usr : " c : objc ( pl ) FooProtocolBase ( im ) fooProtoFunc " , <nl> key . doc . full_as_xml : " < Function isInstanceMethod = \ " 1 \ " file = Foo . h line = \ " 124 \ " column = \ " 1 \ " > < Name > fooProtoFunc < / Name > < USR > c : objc ( pl ) FooProtocolBase ( im ) fooProtoFunc < / USR > < Declaration > func fooProtoFunc ( ) < / Declaration > < Abstract > < Para > Aaa . fooProtoFunc . Bbb . Ccc . < / Para > < / Abstract > < / Function > " , <nl> - key . offset : 4142 , <nl> + key . offset : 4162 , <nl> key . length : 19 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooProtoFunc < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooProtoFuncWithExtraIndentation1 ( ) " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( im ) fooProtoFuncWithExtraIndentation1 " , <nl> key . doc . full_as_xml : " < Function isInstanceMethod = \ " 1 \ " file = Foo . h line = \ " 128 \ " column = \ " 3 \ " > < Name > fooProtoFuncWithExtraIndentation1 < / Name > < USR > c : objc ( pl ) FooProtocolBase ( im ) fooProtoFuncWithExtraIndentation1 < / USR > < Declaration > func fooProtoFuncWithExtraIndentation1 ( ) < / Declaration > < Abstract > < Para > Aaa . fooProtoFuncWithExtraIndentation1 . Bbb . Ccc . < / Para > < / Abstract > < / Function > " , <nl> - key . offset : 4167 , <nl> + key . offset : 4187 , <nl> key . length : 40 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooProtoFuncWithExtraIndentation1 < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " fooProtoFuncWithExtraIndentation2 ( ) " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( im ) fooProtoFuncWithExtraIndentation2 " , <nl> key . doc . full_as_xml : " < Function isInstanceMethod = \ " 1 \ " file = Foo . h line = \ " 134 \ " column = \ " 3 \ " > < Name > fooProtoFuncWithExtraIndentation2 < / Name > < USR > c : objc ( pl ) FooProtocolBase ( im ) fooProtoFuncWithExtraIndentation2 < / USR > < Declaration > func fooProtoFuncWithExtraIndentation2 ( ) < / Declaration > < Abstract > < Para > Aaa . fooProtoFuncWithExtraIndentation2 . Bbb . Ccc . < / Para > < / Abstract > < / Function > " , <nl> - key . offset : 4213 , <nl> + key . offset : 4233 , <nl> key . length : 40 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooProtoFuncWithExtraIndentation2 < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . static , <nl> key . name : " fooProtoClassFunc ( ) " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( cm ) fooProtoClassFunc " , <nl> - key . offset : 4259 , <nl> + key . offset : 4279 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . function . method . static > < syntaxtype . keyword > static < / syntaxtype . keyword > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooProtoClassFunc < / decl . name > ( ) < / decl . function . method . static > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . 
instance , <nl> key . name : " fooProperty1 " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( py ) fooProperty1 " , <nl> - key . offset : 4296 , <nl> + key . offset : 4316 , <nl> key . length : 35 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty1 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " fooProperty2 " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( py ) fooProperty2 " , <nl> - key . offset : 4337 , <nl> + key . offset : 4357 , <nl> key . length : 35 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty2 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " fooProperty3 " , <nl> key . usr : " c : objc ( pl ) FooProtocolBase ( py ) fooProperty3 " , <nl> - key . offset : 4378 , <nl> + key . offset : 4398 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty3 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . protocol , <nl> key . name : " FooProtocolDerived " , <nl> key . usr : " c : objc ( pl ) FooProtocolDerived " , <nl> - key . offset : 4412 , <nl> + key . offset : 4432 , <nl> key . length : 49 , <nl> key . fully_annotated_decl : " < decl . protocol > < syntaxtype . keyword > protocol < / syntaxtype . keyword > < decl . name > FooProtocolDerived < / decl . name > : < ref . protocol usr = \ " c : objc ( pl ) FooProtocolBase \ " > FooProtocolBase < / ref . protocol > < / decl . protocol > " , <nl> key . conforms : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . class , <nl> key . name : " FooClassBase " , <nl> key . usr : " c : objc ( cs ) FooClassBase " , <nl> - key . offset : 4462 , <nl> + key . offset : 4482 , <nl> key . length : 392 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > FooClassBase < / decl . name > < / decl . class > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooBaseInstanceFunc0 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) fooBaseInstanceFunc0 " , <nl> - key . offset : 4488 , <nl> + key . offset : 4508 , <nl> key . length : 27 , <nl> key . 
fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooBaseInstanceFunc0 < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooBaseInstanceFunc1 ( _ : ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) fooBaseInstanceFunc1 : " , <nl> - key . offset : 4521 , <nl> + key . offset : 4541 , <nl> key . length : 60 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooBaseInstanceFunc1 < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > anObject < / decl . var . parameter . name > : < decl . var . parameter . type > Any ! < / decl . var . parameter . type > < / decl . var . parameter > ) - & gt ; < decl . function . returntype > < ref . class usr = \ " c : objc ( cs ) FooClassBase \ " > FooClassBase < / ref . class > ! < / decl . function . returntype > < / decl . function . method . instance > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " anObject " , <nl> - key . offset : 4559 , <nl> + key . offset : 4579 , <nl> key . length : 4 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) init " , <nl> - key . offset : 4587 , <nl> + key . offset : 4607 , <nl> key . length : 7 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > init < / syntaxtype . keyword > ! ( ) < / decl . function . constructor > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( float : ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) initWithFloat : " , <nl> - key . offset : 4600 , <nl> + key . offset : 4620 , <nl> key . length : 33 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > convenience < / syntaxtype . keyword > < syntaxtype . keyword > init < / syntaxtype . keyword > ! ( < decl . var . parameter > < decl . var . parameter . argument_label > float < / decl . var . parameter . argument_label > < decl . var . parameter . name > f < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Sf \ " > Float < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . constructor > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " float " , <nl> key . name : " f " , <nl> - key . offset : 4627 , <nl> + key . offset : 4647 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooBaseInstanceFuncOverridden ( ) " , <nl> key . 
usr : " c : objc ( cs ) FooClassBase ( im ) fooBaseInstanceFuncOverridden " , <nl> - key . offset : 4639 , <nl> + key . offset : 4659 , <nl> key . length : 36 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooBaseInstanceFuncOverridden < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . class , <nl> key . name : " fooBaseClassFunc0 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( cm ) fooBaseClassFunc0 " , <nl> - key . offset : 4681 , <nl> + key . offset : 4701 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooBaseClassFunc0 < / decl . name > ( ) < / decl . function . method . class > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth3 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 " , <nl> - key . offset : 4717 , <nl> + key . offset : 4737 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth3 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth2 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 " , <nl> - key . offset : 4752 , <nl> + key . offset : 4772 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth2 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " nonInternalMeth ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth " , <nl> - key . offset : 4787 , <nl> + key . offset : 4807 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > nonInternalMeth < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth1 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 " , <nl> - key . offset : 4823 , <nl> + key . offset : 4843 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth1 < / decl . name > ( ) - & gt ; < decl . function . 
returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " FooClassDerived " , <nl> key . usr : " c : objc ( cs ) FooClassDerived " , <nl> key . doc . full_as_xml : " < Other file = Foo . h line = \ " 157 \ " column = \ " 12 \ " > < Name > FooClassDerived < / Name > < USR > c : objc ( cs ) FooClassDerived < / USR > < Declaration > class FooClassDerived : FooClassBase , FooProtocolDerived < / Declaration > < Abstract > < Para > Aaa . FooClassDerived . Bbb . < / Para > < / Abstract > < / Other > " , <nl> - key . offset : 4855 , <nl> + key . offset : 4875 , <nl> key . length : 493 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > FooClassDerived < / decl . name > : < ref . class usr = \ " c : objc ( cs ) FooClassBase \ " > FooClassBase < / ref . class > , < ref . protocol usr = \ " c : objc ( pl ) FooProtocolDerived \ " > FooProtocolDerived < / ref . protocol > < / decl . class > " , <nl> key . inherits : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " fooProperty1 " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( py ) fooProperty1 " , <nl> - key . offset : 4919 , <nl> + key . offset : 4939 , <nl> key . length : 23 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty1 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " fooProperty2 " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( py ) fooProperty2 " , <nl> - key . offset : 4948 , <nl> + key . offset : 4968 , <nl> key . length : 23 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty2 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " fooProperty3 " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( py ) fooProperty3 " , <nl> - key . offset : 4977 , <nl> + key . offset : 4997 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > fooProperty3 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooInstanceFunc0 ( ) " , <nl> key . 
usr : " c : objc ( cs ) FooClassDerived ( im ) fooInstanceFunc0 " , <nl> - key . offset : 5014 , <nl> + key . offset : 5034 , <nl> key . length : 23 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooInstanceFunc0 < / decl . name > ( ) < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooInstanceFunc1 ( _ : ) " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( im ) fooInstanceFunc1 : " , <nl> - key . offset : 5043 , <nl> + key . offset : 5063 , <nl> key . length : 33 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooInstanceFunc1 < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > a < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . method . instance > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " a " , <nl> - key . offset : 5070 , <nl> + key . offset : 5090 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " fooInstanceFunc2 ( _ : withB : ) " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( im ) fooInstanceFunc2 : withB : " , <nl> - key . offset : 5082 , <nl> + key . offset : 5102 , <nl> key . length : 49 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooInstanceFunc2 < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > a < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > , < decl . var . parameter > < decl . var . parameter . argument_label > withB < / decl . var . parameter . argument_label > < decl . var . parameter . name > b < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . method . instance > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " a " , <nl> - key . offset : 5109 , <nl> + key . offset : 5129 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " withB " , <nl> key . name : " b " , <nl> - key . offset : 5125 , <nl> + key . offset : 5145 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . 
instance , <nl> key . name : " fooBaseInstanceFuncOverridden ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( im ) fooBaseInstanceFuncOverridden " , <nl> - key . offset : 5137 , <nl> + key . offset : 5157 , <nl> key . length : 36 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooBaseInstanceFuncOverridden < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . inherits : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . class , <nl> key . name : " fooClassFunc0 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassDerived ( cm ) fooClassFunc0 " , <nl> - key . offset : 5179 , <nl> + key . offset : 5199 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . function . method . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooClassFunc0 < / decl . name > ( ) < / decl . function . method . class > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth3 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 : : SYNTHESIZED : : c : objc ( cs ) FooClassDerived " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 " , <nl> - key . offset : 5211 , <nl> + key . offset : 5231 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth3 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth2 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 : : SYNTHESIZED : : c : objc ( cs ) FooClassDerived " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 " , <nl> - key . offset : 5246 , <nl> + key . offset : 5266 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth2 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " nonInternalMeth ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth : : SYNTHESIZED : : c : objc ( cs ) FooClassDerived " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth " , <nl> - key . offset : 5281 , <nl> + key . offset : 5301 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > nonInternalMeth < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth1 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 : : SYNTHESIZED : : c : objc ( cs ) FooClassDerived " , <nl> key . 
original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 " , <nl> - key . offset : 5317 , <nl> + key . offset : 5337 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth1 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_1 " , <nl> key . usr : " c : Foo . h @ 3647 @ macro @ FOO_MACRO_1 " , <nl> - key . offset : 5349 , <nl> + key . offset : 5369 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_1 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_2 " , <nl> key . usr : " c : Foo . h @ 3669 @ macro @ FOO_MACRO_2 " , <nl> - key . offset : 5380 , <nl> + key . offset : 5400 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_2 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_3 " , <nl> key . usr : " c : Foo . h @ 3691 @ macro @ FOO_MACRO_3 " , <nl> - key . offset : 5411 , <nl> + key . offset : 5431 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_3 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_4 " , <nl> key . usr : " c : Foo . h @ 3755 @ macro @ FOO_MACRO_4 " , <nl> - key . offset : 5442 , <nl> + key . offset : 5462 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_4 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs6UInt32 \ " > UInt32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_5 " , <nl> key . usr : " c : Foo . h @ 3787 @ macro @ FOO_MACRO_5 " , <nl> - key . offset : 5474 , <nl> + key . offset : 5494 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . 
keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_5 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs6UInt64 \ " > UInt64 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_REDEF_1 " , <nl> key . usr : " c : Foo . h @ 3937 @ macro @ FOO_MACRO_REDEF_1 " , <nl> - key . offset : 5506 , <nl> + key . offset : 5526 , <nl> key . length : 36 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_REDEF_1 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_MACRO_REDEF_2 " , <nl> key . usr : " c : Foo . h @ 3994 @ macro @ FOO_MACRO_REDEF_2 " , <nl> - key . offset : 5543 , <nl> + key . offset : 5563 , <nl> key . length : 36 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_MACRO_REDEF_2 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " theLastDeclInFoo ( ) " , <nl> key . usr : " c : @ F @ theLastDeclInFoo " , <nl> - key . offset : 5580 , <nl> + key . offset : 5600 , <nl> key . length : 23 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > theLastDeclInFoo < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " _internalTopLevelFunc ( ) " , <nl> key . usr : " c : @ F @ _internalTopLevelFunc " , <nl> - key . offset : 5604 , <nl> + key . offset : 5624 , <nl> key . length : 28 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalTopLevelFunc < / decl . name > ( ) < / decl . function . free > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . struct , <nl> key . name : " _InternalStruct " , <nl> key . usr : " c : @ S @ _InternalStruct " , <nl> - key . offset : 5633 , <nl> + key . offset : 5653 , <nl> key . length : 78 , <nl> key . fully_annotated_decl : " < decl . struct > < syntaxtype . keyword > struct < / syntaxtype . keyword > < decl . name > _InternalStruct < / decl . name > < / decl . struct > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " x " , <nl> key . usr : " c : @ S @ _InternalStruct @ FI @ x " , <nl> - key . offset : 5663 , <nl> + key . offset : 5683 , <nl> key . length : 12 , <nl> key . fully_annotated_decl : " < decl . var . 
instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > x < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( ) " , <nl> key . usr : " s : FVSC15_InternalStructcFT_S_ " , <nl> - key . offset : 5681 , <nl> + key . offset : 5701 , <nl> key . length : 6 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > init < / syntaxtype . keyword > ( ) < / decl . function . constructor > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( x : ) " , <nl> key . usr : " s : FVSC15_InternalStructcFT1xVs5Int32_S_ " , <nl> - key . offset : 5693 , <nl> + key . offset : 5713 , <nl> key . length : 16 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > init < / syntaxtype . keyword > ( < decl . var . parameter > < decl . var . parameter . argument_label > x < / decl . var . parameter . argument_label > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . constructor > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " x " , <nl> key . name : " x " , <nl> - key . offset : 5703 , <nl> + key . offset : 5723 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . extension . class , <nl> - key . offset : 5712 , <nl> + key . offset : 5732 , <nl> key . length : 61 , <nl> key . extends : { <nl> key . kind : source . lang . swift . ref . class , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth1 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 " , <nl> - key . offset : 5742 , <nl> + key . offset : 5762 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth1 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . extension . class , <nl> - key . offset : 5774 , <nl> + key . offset : 5794 , <nl> key . length : 97 , <nl> key . extends : { <nl> key . kind : source . lang . swift . ref . class , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth2 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 " , <nl> - key . offset : 5804 , <nl> + key . offset : 5824 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth2 < / decl . 
name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " nonInternalMeth ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth " , <nl> - key . offset : 5839 , <nl> + key . offset : 5859 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > nonInternalMeth < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . extension . class , <nl> - key . offset : 5872 , <nl> + key . offset : 5892 , <nl> key . length : 61 , <nl> key . extends : { <nl> key . kind : source . lang . swift . ref . class , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " _internalMeth3 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 " , <nl> - key . offset : 5902 , <nl> + key . offset : 5922 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth3 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . protocol , <nl> key . name : " _InternalProt " , <nl> key . usr : " c : objc ( pl ) _InternalProt " , <nl> - key . offset : 5934 , <nl> + key . offset : 5954 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . protocol > < syntaxtype . keyword > protocol < / syntaxtype . keyword > < decl . name > _InternalProt < / decl . name > < / decl . protocol > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . class , <nl> key . name : " ClassWithInternalProt " , <nl> key . usr : " c : objc ( cs ) ClassWithInternalProt " , <nl> - key . offset : 5961 , <nl> + key . offset : 5981 , <nl> key . length : 47 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > ClassWithInternalProt < / decl . name > : < ref . protocol usr = \ " c : objc ( pl ) _InternalProt \ " > _InternalProt < / ref . protocol > < / decl . class > " , <nl> key . conforms : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . class , <nl> key . name : " FooClassPropertyOwnership " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership " , <nl> - key . offset : 6009 , <nl> + key . offset : 6029 , <nl> key . length : 370 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > FooClassPropertyOwnership < / decl . name > : < ref . class usr = \ " c : objc ( cs ) FooClassBase \ " > FooClassBase < / ref . class > < / decl . class > " , <nl> key . inherits : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . 
instance , <nl> key . name : " assignable " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) assignable " , <nl> - key . offset : 6063 , <nl> + key . offset : 6083 , <nl> key . length : 20 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > assignable < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " unsafeAssignable " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) unsafeAssignable " , <nl> - key . offset : 6089 , <nl> + key . offset : 6109 , <nl> key . length : 26 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > unsafeAssignable < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " retainable " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) retainable " , <nl> - key . offset : 6121 , <nl> + key . offset : 6141 , <nl> key . length : 20 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > retainable < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " strongRef " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) strongRef " , <nl> - key . offset : 6147 , <nl> + key . offset : 6167 , <nl> key . length : 19 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > strongRef < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " copyable " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) copyable " , <nl> - key . offset : 6172 , <nl> + key . offset : 6192 , <nl> key . length : 18 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > copyable < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " weakRef " , <nl> key . 
usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) weakRef " , <nl> - key . offset : 6196 , <nl> + key . offset : 6216 , <nl> key . length : 17 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > weakRef < / decl . name > : < decl . var . type > Any ! < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " scalar " , <nl> key . usr : " c : objc ( cs ) FooClassPropertyOwnership ( py ) scalar " , <nl> - key . offset : 6219 , <nl> + key . offset : 6239 , <nl> key . length : 17 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > scalar < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > < syntaxtype . keyword > set < / syntaxtype . keyword > } < / decl . var . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth3 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 : : SYNTHESIZED : : c : objc ( cs ) FooClassPropertyOwnership " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 " , <nl> - key . offset : 6242 , <nl> + key . offset : 6262 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth3 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth2 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 : : SYNTHESIZED : : c : objc ( cs ) FooClassPropertyOwnership " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 " , <nl> - key . offset : 6277 , <nl> + key . offset : 6297 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth2 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " nonInternalMeth ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth : : SYNTHESIZED : : c : objc ( cs ) FooClassPropertyOwnership " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth " , <nl> - key . offset : 6312 , <nl> + key . offset : 6332 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > nonInternalMeth < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth1 ( ) " , <nl> key . 
usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 : : SYNTHESIZED : : c : objc ( cs ) FooClassPropertyOwnership " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 " , <nl> - key . offset : 6348 , <nl> + key . offset : 6368 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth1 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FOO_NIL " , <nl> key . usr : " c : Foo . h @ 4783 @ macro @ FOO_NIL " , <nl> - key . offset : 6380 , <nl> + key . offset : 6400 , <nl> key . length : 15 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FOO_NIL < / decl . name > : < decl . var . type > < tuple > ( ) < / tuple > < / decl . var . type > < / decl . var . global > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . class , <nl> key . name : " FooUnavailableMembers " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers " , <nl> - key . offset : 6396 , <nl> + key . offset : 6416 , <nl> key . length : 637 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > FooUnavailableMembers < / decl . name > : < ref . class usr = \ " c : objc ( cs ) FooClassBase \ " > FooClassBase < / ref . class > < / decl . class > " , <nl> key . inherits : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( int : ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( cm ) unavailableMembersWithInt : " , <nl> - key . offset : 6446 , <nl> + key . offset : 6466 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > convenience < / syntaxtype . keyword > < syntaxtype . keyword > init < / syntaxtype . keyword > ! ( < decl . var . parameter > < decl . var . parameter . argument_label > int < / decl . var . parameter . argument_label > < decl . var . parameter . name > i < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . constructor > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " int " , <nl> key . name : " i " , <nl> - key . offset : 6471 , <nl> + key . offset : 6491 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . class , <nl> key . name : " withInt ( _ : ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( cm ) unavailableMembersWithInt : " , <nl> - key . offset : 6483 , <nl> + key . offset : 6503 , <nl> key . length : 39 , <nl> key . fully_annotated_decl : " < decl . function . method . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < syntaxtype . keyword > func < / syntaxtype . 
keyword > < decl . name > withInt < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > i < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) - & gt ; < decl . function . returntype > < ref . class usr = \ " c : objc ( cs ) FooUnavailableMembers \ " > Self < / ref . class > ! < / decl . function . returntype > < / decl . function . method . class > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " i " , <nl> - key . offset : 6507 , <nl> + key . offset : 6527 , <nl> key . length : 5 <nl> } <nl> ] , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " unavailable ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) unavailable " , <nl> - key . offset : 6528 , <nl> + key . offset : 6548 , <nl> key . length : 18 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > unavailable < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " swiftUnavailable ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) swiftUnavailable " , <nl> - key . offset : 6552 , <nl> + key . offset : 6572 , <nl> key . length : 23 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > swiftUnavailable < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " deprecated ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) deprecated " , <nl> - key . offset : 6581 , <nl> + key . offset : 6601 , <nl> key . length : 17 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > deprecated < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityIntroduced ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityIntroduced " , <nl> - key . offset : 6604 , <nl> + key . offset : 6624 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityIntroduced < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityDeprecated ( ) " , <nl> key . 
usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityDeprecated " , <nl> - key . offset : 6639 , <nl> + key . offset : 6659 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityDeprecated < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityObsoleted ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityObsoleted " , <nl> - key . offset : 6674 , <nl> + key . offset : 6694 , <nl> key . length : 28 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityObsoleted < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityUnavailable ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityUnavailable " , <nl> - key . offset : 6708 , <nl> + key . offset : 6728 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityUnavailable < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityIntroducedMsg ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityIntroducedMsg " , <nl> - key . offset : 6744 , <nl> + key . offset : 6764 , <nl> key . length : 32 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityIntroducedMsg < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityDeprecatedMsg ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityDeprecatedMsg " , <nl> - key . offset : 6782 , <nl> + key . offset : 6802 , <nl> key . length : 32 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityDeprecatedMsg < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityObsoletedMsg ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityObsoletedMsg " , <nl> - key . offset : 6820 , <nl> + key . offset : 6840 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityObsoletedMsg < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . 
attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . name : " availabilityUnavailableMsg ( ) " , <nl> key . usr : " c : objc ( cs ) FooUnavailableMembers ( im ) availabilityUnavailableMsg " , <nl> - key . offset : 6857 , <nl> + key . offset : 6877 , <nl> key . length : 33 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > availabilityUnavailableMsg < / decl . name > ( ) < / decl . function . method . instance > " , <nl> key . attributes : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth3 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 : : SYNTHESIZED : : c : objc ( cs ) FooUnavailableMembers " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth3 " , <nl> - key . offset : 6896 , <nl> + key . offset : 6916 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth3 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth2 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 : : SYNTHESIZED : : c : objc ( cs ) FooUnavailableMembers " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth2 " , <nl> - key . offset : 6931 , <nl> + key . offset : 6951 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth2 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " nonInternalMeth ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth : : SYNTHESIZED : : c : objc ( cs ) FooUnavailableMembers " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) nonInternalMeth " , <nl> - key . offset : 6966 , <nl> + key . offset : 6986 , <nl> key . length : 30 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > nonInternalMeth < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . instance > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . name : " _internalMeth1 ( ) " , <nl> key . usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 : : SYNTHESIZED : : c : objc ( cs ) FooUnavailableMembers " , <nl> key . original_usr : " c : objc ( cs ) FooClassBase ( im ) _internalMeth1 " , <nl> - key . offset : 7002 , <nl> + key . offset : 7022 , <nl> key . length : 29 , <nl> key . fully_annotated_decl : " < decl . function . method . instance > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > _internalMeth1 < / decl . name > ( ) - & gt ; < decl . function . returntype > Any ! < / decl . function . returntype > < / decl . function . method . 
instance > " <nl> } <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . class , <nl> key . name : " FooCFType " , <nl> key . usr : " c : Foo . h @ T @ FooCFTypeRef " , <nl> - key . offset : 7034 , <nl> + key . offset : 7054 , <nl> key . length : 19 , <nl> key . fully_annotated_decl : " < decl . class > < syntaxtype . keyword > class < / syntaxtype . keyword > < decl . name > FooCFType < / decl . name > < / decl . class > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " FooCFTypeRelease ( _ : ) " , <nl> key . usr : " c : @ F @ FooCFTypeRelease " , <nl> - key . offset : 7054 , <nl> + key . offset : 7074 , <nl> key . length : 38 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > FooCFTypeRelease < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > : < decl . var . parameter . type > < ref . class usr = \ " c : Foo . h @ T @ FooCFTypeRef \ " > FooCFType < / ref . class > ! < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . free > " , <nl> key . entities : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> - key . offset : 7081 , <nl> + key . offset : 7101 , <nl> key . length : 10 <nl> } <nl> ] , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . name : " fooSubFunc1 ( _ : ) " , <nl> key . usr : " c : @ F @ fooSubFunc1 " , <nl> - key . offset : 7093 , <nl> + key . offset : 7113 , <nl> key . length : 37 , <nl> key . fully_annotated_decl : " < decl . function . free > < syntaxtype . keyword > func < / syntaxtype . keyword > < decl . name > fooSubFunc1 < / decl . name > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > a < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) - & gt ; < decl . function . returntype > < ref . struct usr = \ " s : Vs5Int32 \ " > Int32 < / ref . struct > < / decl . function . returntype > < / decl . function . free > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " a " , <nl> - key . offset : 7115 , <nl> + key . offset : 7135 , <nl> key . length : 5 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . struct , <nl> key . name : " FooSubEnum1 " , <nl> key . usr : " c : @ E @ FooSubEnum1 " , <nl> - key . offset : 7131 , <nl> + key . offset : 7151 , <nl> key . length : 145 , <nl> key . fully_annotated_decl : " < decl . struct > < syntaxtype . keyword > struct < / syntaxtype . keyword > < decl . name > FooSubEnum1 < / decl . name > : < ref . protocol usr = \ " s : Ps16RawRepresentable \ " > RawRepresentable < / ref . protocol > , < ref . protocol usr = \ " s : Ps9Equatable \ " > Equatable < / ref . protocol > < / decl . struct > " , <nl> key . conforms : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . 
decl . function . constructor , <nl> key . name : " init ( _ : ) " , <nl> key . usr : " s : FVSC11FooSubEnum1cFVs6UInt32S_ " , <nl> - key . offset : 7187 , <nl> + key . offset : 7207 , <nl> key . length : 24 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > init < / syntaxtype . keyword > ( < decl . var . parameter > < decl . var . parameter . argument_label > _ < / decl . var . parameter . argument_label > < decl . var . parameter . name > rawValue < / decl . var . parameter . name > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs6UInt32 \ " > UInt32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . constructor > " , <nl> key . entities : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " _ " , <nl> key . name : " rawValue " , <nl> - key . offset : 7204 , <nl> + key . offset : 7224 , <nl> key . length : 6 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . function . constructor , <nl> key . name : " init ( rawValue : ) " , <nl> key . usr : " s : FVSC11FooSubEnum1cFT8rawValueVs6UInt32_S_ " , <nl> - key . offset : 7217 , <nl> + key . offset : 7237 , <nl> key . length : 31 , <nl> key . fully_annotated_decl : " < decl . function . constructor > < syntaxtype . keyword > init < / syntaxtype . keyword > ( < decl . var . parameter > < decl . var . parameter . argument_label > rawValue < / decl . var . parameter . argument_label > : < decl . var . parameter . type > < ref . struct usr = \ " s : Vs6UInt32 \ " > UInt32 < / ref . struct > < / decl . var . parameter . type > < / decl . var . parameter > ) < / decl . function . constructor > " , <nl> key . conforms : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . local , <nl> key . keyword : " rawValue " , <nl> key . name : " rawValue " , <nl> - key . offset : 7241 , <nl> + key . offset : 7261 , <nl> key . length : 6 <nl> } <nl> ] <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . name : " rawValue " , <nl> key . usr : " s : vVSC11FooSubEnum18rawValueVs6UInt32 " , <nl> - key . offset : 7254 , <nl> + key . offset : 7274 , <nl> key . length : 20 , <nl> key . fully_annotated_decl : " < decl . var . instance > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > rawValue < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Vs6UInt32 \ " > UInt32 < / ref . struct > < / decl . var . type > < / decl . var . instance > " , <nl> key . conforms : [ <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FooSubEnum1X " , <nl> key . usr : " c : @ E @ FooSubEnum1 @ FooSubEnum1X " , <nl> - key . offset : 7277 , <nl> + key . offset : 7297 , <nl> key . length : 37 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FooSubEnum1X < / decl . name > : < decl . var . type > < ref . struct usr = \ " c : @ E @ FooSubEnum1 \ " > FooSubEnum1 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . 
global , <nl> key . name : " FooSubEnum1Y " , <nl> key . usr : " c : @ E @ FooSubEnum1 @ FooSubEnum1Y " , <nl> - key . offset : 7315 , <nl> + key . offset : 7335 , <nl> key . length : 37 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FooSubEnum1Y < / decl . name > : < decl . var . type > < ref . struct usr = \ " c : @ E @ FooSubEnum1 \ " > FooSubEnum1 < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } , <nl> var FooSubUnnamedEnumeratorA1 : Int { get } <nl> key . kind : source . lang . swift . decl . var . global , <nl> key . name : " FooSubUnnamedEnumeratorA1 " , <nl> key . usr : " c : @ Ea @ FooSubUnnamedEnumeratorA1 @ FooSubUnnamedEnumeratorA1 " , <nl> - key . offset : 7353 , <nl> + key . offset : 7373 , <nl> key . length : 42 , <nl> key . fully_annotated_decl : " < decl . var . global > < syntaxtype . keyword > var < / syntaxtype . keyword > < decl . name > FooSubUnnamedEnumeratorA1 < / decl . name > : < decl . var . type > < ref . struct usr = \ " s : Si \ " > Int < / ref . struct > < / decl . var . type > { < syntaxtype . keyword > get < / syntaxtype . keyword > } < / decl . var . global > " <nl> } <nl> new file mode 100644 <nl> index 000000000000 . . 4c46fd88d998 <nl> mmm / dev / null <nl> ppp b / test / SourceKit / DocSupport / doc_error_domain . swift <nl> <nl> + / / REQUIRES : OS = macosx <nl> + / / RUN : % sourcekitd - test - req = doc - info - module MyError - - - I % S / Inputs \ <nl> + / / RUN : % mcp_opt - sdk % sdk | % sed_clean > % t . response <nl> + / / RUN : FileCheck - input - file = % t . response % s <nl> + <nl> + / / CHECK : struct MyError { <nl> + / / CHECK : enum Code : Int32 { <nl> + / / CHECK : case errFirst <nl> + / / CHECK : case errSecond <nl> + / / CHECK : } <nl> + / / CHECK : static var errFirst : MyError . Code { get } <nl> + / / CHECK : static var errSecond : MyError . Code { get } <nl> + <nl> + / / CHECK : key . kind : source . lang . swift . decl . struct , <nl> + / / CHECK - NEXT : key . name : " MyError " , <nl> + / / CHECK - NEXT : key . usr : " s : VSC7MyError " , <nl> + / / CHECK - NEXT : This is my cool error code . <nl> + <nl> + / / CHECK : key . kind : source . lang . swift . decl . enum , <nl> + / / CHECK - NEXT : key . name : " Code " , <nl> + / / CHECK - NEXT : key . usr : " c : @ E @ MyErrorCode " , <nl> + / / CHECK - NEXT : This is my cool error code . <nl> + <nl> + / / CHECK : key . kind : source . lang . swift . decl . enumelement , <nl> + / / CHECK - NEXT : key . name : " errFirst " , <nl> + / / CHECK - NEXT : key . usr : " c : @ E @ MyErrorCode @ MyErrFirst " , <nl> + / / CHECK - NEXT : This is first error . <nl> + <nl> + / / CHECK : key . kind : source . lang . swift . decl . var . static , <nl> + / / CHECK - NEXT : key . name : " errFirst " , <nl> + / / CHECK - NEXT : key . usr : " s : ZvVSC7MyError8errFirstOS_4Code " , <nl> + / / CHECK - NEXT : This is first error . <nl> mmm a / test / SourceKit / InterfaceGen / gen_clang_module . swift . response <nl> ppp b / test / SourceKit / InterfaceGen / gen_clang_module . swift . response <nl> public func fooFunc3 ( _ a : Int32 , _ b : Float , _ c : Double , _ d : UnsafeMutablePoin <nl> Very good <nl> fooFuncWithBlock function . <nl> * / <nl> - public func fooFuncWithBlock ( _ blk : ( ( Float ) - > Int32 ) ! ) <nl> + public func fooFuncWithBlock ( _ blk : ( @ escaping ( Float ) - > Int32 ) ! 
) <nl> <nl> - public func fooFuncWithFunctionPointer ( _ fptr : ( @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> + public func fooFuncWithFunctionPointer ( _ fptr : ( @ escaping @ convention ( c ) ( Float ) - > Int32 ) ! ) <nl> <nl> public func fooFuncNoreturn1 ( ) - > Never <nl> public func fooFuncNoreturn2 ( ) - > Never <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . length : 3 <nl> } , <nl> { <nl> - key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> + key . kind : source . lang . swift . syntaxtype . attribute . id , <nl> + key . offset : 2301 , <nl> + key . length : 9 <nl> + } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> key . offset : 2302 , <nl> - key . length : 5 <nl> + key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . length : 5 <nl> } , <nl> { <nl> - key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> + key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> key . offset : 2322 , <nl> + key . length : 5 <nl> + } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> + key . offset : 2332 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2329 , <nl> + key . offset : 2339 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2334 , <nl> + key . offset : 2344 , <nl> key . length : 26 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2361 , <nl> + key . offset : 2371 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2363 , <nl> + key . offset : 2373 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . id , <nl> - key . offset : 2370 , <nl> + key . offset : 2380 , <nl> + key . length : 9 <nl> + } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> + key . offset : 2381 , <nl> + key . length : 8 <nl> + } , <nl> + { <nl> + key . kind : source . lang . swift . syntaxtype . attribute . id , <nl> + key . offset : 2390 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2371 , <nl> + key . offset : 2391 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2382 , <nl> + key . offset : 2402 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 2386 , <nl> + key . offset : 2406 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 2396 , <nl> + key . offset : 2416 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2406 , <nl> + key . offset : 2426 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2413 , <nl> + key . offset : 2433 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . 
syntaxtype . identifier , <nl> - key . offset : 2418 , <nl> + key . offset : 2438 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 2440 , <nl> + key . offset : 2460 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2446 , <nl> + key . offset : 2466 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2453 , <nl> + key . offset : 2473 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2458 , <nl> + key . offset : 2478 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 2480 , <nl> + key . offset : 2500 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2487 , <nl> + key . offset : 2507 , <nl> key . length : 62 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2550 , <nl> + key . offset : 2570 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2557 , <nl> + key . offset : 2577 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2562 , <nl> + key . offset : 2582 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . comment , <nl> - key . offset : 2585 , <nl> + key . offset : 2605 , <nl> key . length : 42 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2628 , <nl> + key . offset : 2648 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2635 , <nl> + key . offset : 2655 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2640 , <nl> + key . offset : 2660 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2663 , <nl> + key . offset : 2683 , <nl> key . length : 43 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2707 , <nl> + key . offset : 2727 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2723 , <nl> + key . offset : 2743 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2730 , <nl> + key . offset : 2750 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2735 , <nl> + key . offset : 2755 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2758 , <nl> + key . offset : 2778 , <nl> key . length : 43 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2802 , <nl> + key . offset : 2822 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . 
offset : 2811 , <nl> + key . offset : 2831 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2818 , <nl> + key . offset : 2838 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2823 , <nl> + key . offset : 2843 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2846 , <nl> + key . offset : 2866 , <nl> key . length : 37 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2883 , <nl> + key . offset : 2903 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2892 , <nl> + key . offset : 2912 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2896 , <nl> + key . offset : 2916 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2905 , <nl> + key . offset : 2925 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2912 , <nl> + key . offset : 2932 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 2917 , <nl> + key . offset : 2937 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 2940 , <nl> + key . offset : 2960 , <nl> key . length : 50 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 2990 , <nl> + key . offset : 3010 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 2997 , <nl> + key . offset : 3017 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3002 , <nl> + key . offset : 3022 , <nl> key . length : 32 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3035 , <nl> + key . offset : 3055 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3037 , <nl> + key . offset : 3057 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3040 , <nl> + key . offset : 3060 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3050 , <nl> + key . offset : 3070 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3057 , <nl> + key . offset : 3077 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3090 , <nl> + key . offset : 3110 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3097 , <nl> + key . offset : 3117 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3106 , <nl> + key . offset : 3126 , <nl> key . 
length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3134 , <nl> + key . offset : 3154 , <nl> key . length : 30 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3168 , <nl> + key . offset : 3188 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3181 , <nl> + key . offset : 3201 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3188 , <nl> + key . offset : 3208 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3193 , <nl> + key . offset : 3213 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3218 , <nl> + key . offset : 3238 , <nl> key . length : 51 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3273 , <nl> + key . offset : 3293 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3286 , <nl> + key . offset : 3306 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3293 , <nl> + key . offset : 3313 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3298 , <nl> + key . offset : 3318 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 3344 , <nl> + key . offset : 3364 , <nl> key . length : 77 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3426 , <nl> + key . offset : 3446 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3433 , <nl> + key . offset : 3453 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3438 , <nl> + key . offset : 3458 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3484 , <nl> + key . offset : 3504 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3491 , <nl> + key . offset : 3511 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3498 , <nl> + key . offset : 3518 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3503 , <nl> + key . offset : 3523 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3533 , <nl> + key . offset : 3553 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3540 , <nl> + key . offset : 3560 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3544 , <nl> + key . offset : 3564 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . 
syntaxtype . typeidentifier , <nl> - key . offset : 3558 , <nl> + key . offset : 3578 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3566 , <nl> + key . offset : 3586 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3570 , <nl> + key . offset : 3590 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3581 , <nl> + key . offset : 3601 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3588 , <nl> + key . offset : 3608 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3592 , <nl> + key . offset : 3612 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3606 , <nl> + key . offset : 3626 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3614 , <nl> + key . offset : 3634 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3618 , <nl> + key . offset : 3638 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3629 , <nl> + key . offset : 3649 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3636 , <nl> + key . offset : 3656 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3640 , <nl> + key . offset : 3660 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3654 , <nl> + key . offset : 3674 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3662 , <nl> + key . offset : 3682 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3671 , <nl> + key . offset : 3691 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3678 , <nl> + key . offset : 3698 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3687 , <nl> + key . offset : 3707 , <nl> key . length : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3708 , <nl> + key . offset : 3728 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3729 , <nl> + key . offset : 3749 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3736 , <nl> + key . offset : 3756 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3742 , <nl> + key . offset : 3762 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3762 , <nl> + key . 
offset : 3782 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3769 , <nl> + key . offset : 3789 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3774 , <nl> + key . offset : 3794 , <nl> key . length : 20 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3802 , <nl> + key . offset : 3822 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3809 , <nl> + key . offset : 3829 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3814 , <nl> + key . offset : 3834 , <nl> key . length : 20 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3835 , <nl> + key . offset : 3855 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3837 , <nl> + key . offset : 3857 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3847 , <nl> + key . offset : 3867 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3856 , <nl> + key . offset : 3876 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3875 , <nl> + key . offset : 3895 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3882 , <nl> + key . offset : 3902 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3895 , <nl> + key . offset : 3915 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3902 , <nl> + key . offset : 3922 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3914 , <nl> + key . offset : 3934 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3920 , <nl> + key . offset : 3940 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3926 , <nl> + key . offset : 3946 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 3929 , <nl> + key . offset : 3949 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3941 , <nl> + key . offset : 3961 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 3948 , <nl> + key . offset : 3968 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 3953 , <nl> + key . offset : 3973 , <nl> key . length : 29 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 3995 , <nl> + key . offset : 4015 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . 
kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4002 , <nl> + key . offset : 4022 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4008 , <nl> + key . offset : 4028 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4013 , <nl> + key . offset : 4033 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . doccomment , <nl> - key . offset : 4036 , <nl> + key . offset : 4056 , <nl> key . length : 33 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4069 , <nl> + key . offset : 4089 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4076 , <nl> + key . offset : 4096 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4082 , <nl> + key . offset : 4102 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4100 , <nl> + key . offset : 4120 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4114 , <nl> + key . offset : 4134 , <nl> key . length : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4145 , <nl> + key . offset : 4165 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4152 , <nl> + key . offset : 4172 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4156 , <nl> + key . offset : 4176 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4170 , <nl> + key . offset : 4190 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4181 , <nl> + key . offset : 4201 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4188 , <nl> + key . offset : 4208 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4192 , <nl> + key . offset : 4212 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4206 , <nl> + key . offset : 4226 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4217 , <nl> + key . offset : 4237 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4224 , <nl> + key . offset : 4244 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4228 , <nl> + key . offset : 4248 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4242 , <nl> + key . offset : 4262 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . 
identifier , <nl> - key . offset : 4250 , <nl> + key . offset : 4270 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . comment , <nl> - key . offset : 4266 , <nl> + key . offset : 4286 , <nl> key . length : 64 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4335 , <nl> + key . offset : 4355 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4342 , <nl> + key . offset : 4362 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4347 , <nl> + key . offset : 4367 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4371 , <nl> + key . offset : 4391 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4378 , <nl> + key . offset : 4398 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4383 , <nl> + key . offset : 4403 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4400 , <nl> + key . offset : 4420 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4402 , <nl> + key . offset : 4422 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4405 , <nl> + key . offset : 4425 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4417 , <nl> + key . offset : 4437 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4424 , <nl> + key . offset : 4444 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4429 , <nl> + key . offset : 4449 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4446 , <nl> + key . offset : 4466 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4448 , <nl> + key . offset : 4468 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4451 , <nl> + key . offset : 4471 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4458 , <nl> + key . offset : 4478 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4464 , <nl> + key . offset : 4484 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4467 , <nl> + key . offset : 4487 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4484 , <nl> + key . offset : 4504 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4491 , <nl> + key . 
offset : 4511 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4496 , <nl> + key . offset : 4516 , <nl> key . length : 29 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4538 , <nl> + key . offset : 4558 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4545 , <nl> + key . offset : 4565 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4551 , <nl> + key . offset : 4571 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4556 , <nl> + key . offset : 4576 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . comment , <nl> - key . offset : 4575 , <nl> + key . offset : 4595 , <nl> key . length : 31 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4607 , <nl> + key . offset : 4627 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4614 , <nl> + key . offset : 4634 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4618 , <nl> + key . offset : 4638 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4631 , <nl> + key . offset : 4651 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4639 , <nl> + key . offset : 4659 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4645 , <nl> + key . offset : 4665 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4652 , <nl> + key . offset : 4672 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4656 , <nl> + key . offset : 4676 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4669 , <nl> + key . offset : 4689 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4677 , <nl> + key . offset : 4697 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4683 , <nl> + key . offset : 4703 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4690 , <nl> + key . offset : 4710 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4694 , <nl> + key . offset : 4714 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4707 , <nl> + key . offset : 4727 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4715 , <nl> + key . offset : 4735 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . 
kind : source . lang . swift . syntaxtype . comment , <nl> - key . offset : 4721 , <nl> + key . offset : 4741 , <nl> key . length : 39 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4760 , <nl> + key . offset : 4780 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4767 , <nl> + key . offset : 4787 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4771 , <nl> + key . offset : 4791 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4784 , <nl> + key . offset : 4804 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4793 , <nl> + key . offset : 4813 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4799 , <nl> + key . offset : 4819 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4806 , <nl> + key . offset : 4826 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4810 , <nl> + key . offset : 4830 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4823 , <nl> + key . offset : 4843 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4832 , <nl> + key . offset : 4852 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4839 , <nl> + key . offset : 4859 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4846 , <nl> + key . offset : 4866 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4850 , <nl> + key . offset : 4870 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4869 , <nl> + key . offset : 4889 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4877 , <nl> + key . offset : 4897 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4884 , <nl> + key . offset : 4904 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4891 , <nl> + key . offset : 4911 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4895 , <nl> + key . offset : 4915 , <nl> key . length : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 4914 , <nl> + key . offset : 4934 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4922 , <nl> + key . offset : 4942 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . 
builtin , <nl> - key . offset : 4929 , <nl> + key . offset : 4949 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4936 , <nl> + key . offset : 4956 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4941 , <nl> + key . offset : 4961 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4961 , <nl> + key . offset : 4981 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 4968 , <nl> + key . offset : 4988 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 4973 , <nl> + key . offset : 4993 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 4998 , <nl> + key . offset : 5018 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5005 , <nl> + key . offset : 5025 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5012 , <nl> + key . offset : 5032 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5035 , <nl> + key . offset : 5055 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5042 , <nl> + key . offset : 5062 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5046 , <nl> + key . offset : 5066 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5049 , <nl> + key . offset : 5069 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5060 , <nl> + key . offset : 5080 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5067 , <nl> + key . offset : 5087 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5079 , <nl> + key . offset : 5099 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5086 , <nl> + key . offset : 5106 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5091 , <nl> + key . offset : 5111 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5094 , <nl> + key . offset : 5114 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5104 , <nl> + key . offset : 5124 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5114 , <nl> + key . offset : 5134 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5134 , <nl> + key . 
offset : 5154 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5141 , <nl> + key . offset : 5161 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5146 , <nl> + key . offset : 5166 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5166 , <nl> + key . offset : 5186 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . comment , <nl> - key . offset : 5174 , <nl> + key . offset : 5194 , <nl> key . length : 44 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5219 , <nl> + key . offset : 5239 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5229 , <nl> + key . offset : 5249 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5249 , <nl> + key . offset : 5269 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5256 , <nl> + key . offset : 5276 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5261 , <nl> + key . offset : 5281 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5281 , <nl> + key . offset : 5301 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5291 , <nl> + key . offset : 5311 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5298 , <nl> + key . offset : 5318 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5303 , <nl> + key . offset : 5323 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5324 , <nl> + key . offset : 5344 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5332 , <nl> + key . offset : 5352 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5342 , <nl> + key . offset : 5362 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5362 , <nl> + key . offset : 5382 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5369 , <nl> + key . offset : 5389 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5374 , <nl> + key . offset : 5394 , <nl> key . length : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5394 , <nl> + key . offset : 5414 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5402 , <nl> + key . offset : 5422 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . 
swift . syntaxtype . keyword , <nl> - key . offset : 5409 , <nl> + key . offset : 5429 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5418 , <nl> + key . offset : 5438 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5437 , <nl> + key . offset : 5457 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5444 , <nl> + key . offset : 5464 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5450 , <nl> + key . offset : 5470 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5474 , <nl> + key . offset : 5494 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5493 , <nl> + key . offset : 5513 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5500 , <nl> + key . offset : 5520 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5506 , <nl> + key . offset : 5526 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5534 , <nl> + key . offset : 5554 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5554 , <nl> + key . offset : 5574 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5561 , <nl> + key . offset : 5581 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5565 , <nl> + key . offset : 5585 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5577 , <nl> + key . offset : 5597 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5587 , <nl> + key . offset : 5607 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5594 , <nl> + key . offset : 5614 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5598 , <nl> + key . offset : 5618 , <nl> key . length : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5616 , <nl> + key . offset : 5636 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5626 , <nl> + key . offset : 5646 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5633 , <nl> + key . offset : 5653 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5637 , <nl> + key . offset : 5657 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5649 , <nl> + key . 
offset : 5669 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5659 , <nl> + key . offset : 5679 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5666 , <nl> + key . offset : 5686 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5670 , <nl> + key . offset : 5690 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5681 , <nl> + key . offset : 5701 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5691 , <nl> + key . offset : 5711 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5698 , <nl> + key . offset : 5718 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5702 , <nl> + key . offset : 5722 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5712 , <nl> + key . offset : 5732 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5722 , <nl> + key . offset : 5742 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5729 , <nl> + key . offset : 5749 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5733 , <nl> + key . offset : 5753 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5742 , <nl> + key . offset : 5762 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5752 , <nl> + key . offset : 5772 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5759 , <nl> + key . offset : 5779 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5763 , <nl> + key . offset : 5783 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5771 , <nl> + key . offset : 5791 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5780 , <nl> + key . offset : 5800 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5787 , <nl> + key . offset : 5807 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5793 , <nl> + key . offset : 5813 , <nl> key . length : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5817 , <nl> + key . offset : 5837 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5837 , <nl> + key . offset : 5857 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . 
kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5844 , <nl> + key . offset : 5864 , <nl> key . length : 11 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5856 , <nl> + key . offset : 5876 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5862 , <nl> + key . offset : 5882 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5866 , <nl> + key . offset : 5886 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 5869 , <nl> + key . offset : 5889 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5886 , <nl> + key . offset : 5906 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5900 , <nl> + key . offset : 5920 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5912 , <nl> + key . offset : 5932 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . string , <nl> - key . offset : 5921 , <nl> + key . offset : 5941 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5930 , <nl> + key . offset : 5950 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5937 , <nl> + key . offset : 5957 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 5942 , <nl> + key . offset : 5962 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5965 , <nl> + key . offset : 5985 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 5976 , <nl> + key . offset : 5996 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . number , <nl> - key . offset : 5980 , <nl> + key . offset : 6000 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 5993 , <nl> + key . offset : 6013 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6000 , <nl> + key . offset : 6020 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6005 , <nl> + key . offset : 6025 , <nl> key . length : 22 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6040 , <nl> + key . offset : 6060 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6051 , <nl> + key . offset : 6071 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6056 , <nl> + key . offset : 6076 , <nl> key . length : 10 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . number , <nl> - key . 
offset : 6068 , <nl> + key . offset : 6088 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6074 , <nl> + key . offset : 6094 , <nl> key . length : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . string , <nl> - key . offset : 6083 , <nl> + key . offset : 6103 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6092 , <nl> + key . offset : 6112 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6099 , <nl> + key . offset : 6119 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6104 , <nl> + key . offset : 6124 , <nl> key . length : 25 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6135 , <nl> + key . offset : 6155 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6142 , <nl> + key . offset : 6162 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6148 , <nl> + key . offset : 6168 , <nl> key . length : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6162 , <nl> + key . offset : 6182 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6169 , <nl> + key . offset : 6189 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6175 , <nl> + key . offset : 6195 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6202 , <nl> + key . offset : 6222 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6209 , <nl> + key . offset : 6229 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6214 , <nl> + key . offset : 6234 , <nl> key . length : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6221 , <nl> + key . offset : 6241 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6228 , <nl> + key . offset : 6248 , <nl> key . length : 5 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6234 , <nl> + key . offset : 6254 , <nl> key . length : 22 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 6259 , <nl> + key . offset : 6279 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . typeidentifier , <nl> - key . offset : 6263 , <nl> + key . offset : 6283 , <nl> key . length : 19 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6290 , <nl> + key . offset : 6310 , <nl> key . length : 8 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . attribute . builtin , <nl> - key . offset : 6299 , <nl> + key . 
offset : 6319 , <nl> key . length : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . keyword , <nl> - key . offset : 6306 , <nl> + key . offset : 6326 , <nl> key . length : 4 <nl> } , <nl> { <nl> key . kind : source . lang . swift . syntaxtype . identifier , <nl> - key . offset : 6311 , <nl> + key . offset : 6331 , <nl> key . length : 1 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 2302 , <nl> + key . offset : 2312 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 2312 , <nl> + key . offset : 2322 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 2386 , <nl> + key . offset : 2406 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 2396 , <nl> + key . offset : 2416 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . enum , <nl> - key . offset : 2440 , <nl> + key . offset : 2460 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . enum , <nl> - key . offset : 2480 , <nl> + key . offset : 2500 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3040 , <nl> + key . offset : 3060 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3050 , <nl> + key . offset : 3070 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3558 , <nl> + key . offset : 3578 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3606 , <nl> + key . offset : 3626 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3654 , <nl> + key . offset : 3674 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> - key . offset : 3708 , <nl> + key . offset : 3728 , <nl> key . length : 15 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 3856 , <nl> + key . offset : 3876 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 3929 , <nl> + key . offset : 3949 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 4100 , <nl> + key . offset : 4120 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> - key . offset : 4114 , <nl> + key . offset : 4134 , <nl> key . length : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4170 , <nl> + key . offset : 4190 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4206 , <nl> + key . offset : 4226 , <nl> key . 
length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4242 , <nl> + key . offset : 4262 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4405 , <nl> + key . offset : 4425 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4451 , <nl> + key . offset : 4471 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4467 , <nl> + key . offset : 4487 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4631 , <nl> + key . offset : 4651 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4669 , <nl> + key . offset : 4689 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4707 , <nl> + key . offset : 4727 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4784 , <nl> + key . offset : 4804 , <nl> key . length : 6 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4823 , <nl> + key . offset : 4843 , <nl> key . length : 6 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4869 , <nl> + key . offset : 4889 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 4914 , <nl> + key . offset : 4934 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 5049 , <nl> + key . offset : 5069 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 5094 , <nl> + key . offset : 5114 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 5114 , <nl> + key . offset : 5134 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 5229 , <nl> + key . offset : 5249 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 5342 , <nl> + key . offset : 5362 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . protocol , <nl> - key . offset : 5474 , <nl> + key . offset : 5494 , <nl> key . length : 13 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 5534 , <nl> + key . offset : 5554 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . struct , <nl> - key . offset : 5771 , <nl> + key . offset : 5791 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 5817 , <nl> + key . offset : 5837 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . 
struct , <nl> - key . offset : 5869 , <nl> + key . offset : 5889 , <nl> key . length : 5 , <nl> key . is_system : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . module , <nl> - key . offset : 6259 , <nl> + key . offset : 6279 , <nl> key . length : 3 <nl> } , <nl> { <nl> key . kind : source . lang . swift . ref . class , <nl> - key . offset : 6263 , <nl> + key . offset : 6283 , <nl> key . length : 19 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithBlock ( _ : ) " , <nl> key . offset : 2271 , <nl> - key . length : 49 , <nl> + key . length : 59 , <nl> key . nameoffset : 2276 , <nl> - key . namelength : 44 , <nl> + key . namelength : 54 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " blk " , <nl> key . offset : 2293 , <nl> - key . length : 26 , <nl> - key . typename : " ( ( Float ) - > Int32 ) ! " , <nl> + key . length : 36 , <nl> + key . typename : " ( @ escaping ( Float ) - > Int32 ) ! " , <nl> key . nameoffset : 0 , <nl> key . namelength : 0 <nl> } <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithFunctionPointer ( _ : ) " , <nl> - key . offset : 2329 , <nl> - key . length : 75 , <nl> - key . nameoffset : 2334 , <nl> - key . namelength : 70 , <nl> + key . offset : 2339 , <nl> + key . length : 85 , <nl> + key . nameoffset : 2344 , <nl> + key . namelength : 80 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " fptr " , <nl> - key . offset : 2361 , <nl> - key . length : 42 , <nl> - key . typename : " ( @ convention ( c ) ( Float ) - > Int32 ) ! " , <nl> + key . offset : 2371 , <nl> + key . length : 52 , <nl> + key . typename : " ( @ escaping @ convention ( c ) ( Float ) - > Int32 ) ! " , <nl> key . nameoffset : 0 , <nl> key . namelength : 0 <nl> } <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncNoreturn1 ( ) " , <nl> - key . offset : 2413 , <nl> + key . offset : 2433 , <nl> key . length : 32 , <nl> - key . nameoffset : 2418 , <nl> + key . nameoffset : 2438 , <nl> key . namelength : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncNoreturn2 ( ) " , <nl> - key . offset : 2453 , <nl> + key . offset : 2473 , <nl> key . length : 32 , <nl> - key . nameoffset : 2458 , <nl> + key . nameoffset : 2478 , <nl> key . namelength : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithComment1 ( ) " , <nl> - key . offset : 2557 , <nl> + key . offset : 2577 , <nl> key . length : 26 , <nl> - key . nameoffset : 2562 , <nl> + key . nameoffset : 2582 , <nl> key . namelength : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . 
name : " fooFuncWithComment2 ( ) " , <nl> - key . offset : 2635 , <nl> + key . offset : 2655 , <nl> key . length : 26 , <nl> - key . nameoffset : 2640 , <nl> + key . nameoffset : 2660 , <nl> key . namelength : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithComment3 ( ) " , <nl> - key . offset : 2730 , <nl> + key . offset : 2750 , <nl> key . length : 26 , <nl> - key . nameoffset : 2735 , <nl> + key . nameoffset : 2755 , <nl> key . namelength : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithComment4 ( ) " , <nl> - key . offset : 2818 , <nl> + key . offset : 2838 , <nl> key . length : 26 , <nl> - key . nameoffset : 2823 , <nl> + key . nameoffset : 2843 , <nl> key . namelength : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooFuncWithComment5 ( ) " , <nl> - key . offset : 2912 , <nl> + key . offset : 2932 , <nl> key . length : 26 , <nl> - key . nameoffset : 2917 , <nl> + key . nameoffset : 2937 , <nl> key . namelength : 21 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " redeclaredInMultipleModulesFunc1 ( _ : ) " , <nl> - key . offset : 2997 , <nl> + key . offset : 3017 , <nl> key . length : 58 , <nl> - key . nameoffset : 3002 , <nl> + key . nameoffset : 3022 , <nl> key . namelength : 44 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " a " , <nl> - key . offset : 3035 , <nl> + key . offset : 3055 , <nl> key . length : 10 , <nl> key . typename : " Int32 " , <nl> key . nameoffset : 0 , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . protocol , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooProtocolBase " , <nl> - key . offset : 3097 , <nl> + key . offset : 3117 , <nl> key . length : 572 , <nl> key . runtime_name : " _TtP4main15FooProtocolBase_ " , <nl> - key . nameoffset : 3106 , <nl> + key . nameoffset : 3126 , <nl> key . namelength : 15 , <nl> - key . bodyoffset : 3123 , <nl> + key . bodyoffset : 3143 , <nl> key . bodylength : 545 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProtoFunc ( ) " , <nl> - key . offset : 3188 , <nl> + key . offset : 3208 , <nl> key . length : 19 , <nl> - key . nameoffset : 3193 , <nl> + key . nameoffset : 3213 , <nl> key . namelength : 14 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProtoFuncWithExtraIndentation1 ( ) " , <nl> - key . offset : 3293 , <nl> + key . offset : 3313 , <nl> key . length : 40 , <nl> - key . nameoffset : 3298 , <nl> + key . nameoffset : 3318 , <nl> key . namelength : 35 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . 
accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProtoFuncWithExtraIndentation2 ( ) " , <nl> - key . offset : 3433 , <nl> + key . offset : 3453 , <nl> key . length : 40 , <nl> - key . nameoffset : 3438 , <nl> + key . nameoffset : 3458 , <nl> key . namelength : 35 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . static , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProtoClassFunc ( ) " , <nl> - key . offset : 3491 , <nl> + key . offset : 3511 , <nl> key . length : 31 , <nl> - key . nameoffset : 3503 , <nl> + key . nameoffset : 3523 , <nl> key . namelength : 19 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty1 " , <nl> - key . offset : 3540 , <nl> + key . offset : 3560 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 3544 , <nl> + key . nameoffset : 3564 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 3565 , <nl> + key . bodyoffset : 3585 , <nl> key . bodylength : 9 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty2 " , <nl> - key . offset : 3588 , <nl> + key . offset : 3608 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 3592 , <nl> + key . nameoffset : 3612 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 3613 , <nl> + key . bodyoffset : 3633 , <nl> key . bodylength : 9 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . var . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty3 " , <nl> - key . offset : 3636 , <nl> + key . offset : 3656 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 3640 , <nl> + key . nameoffset : 3660 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 3661 , <nl> + key . bodyoffset : 3681 , <nl> key . bodylength : 5 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . protocol , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooProtocolDerived " , <nl> - key . offset : 3678 , <nl> + key . offset : 3698 , <nl> key . length : 49 , <nl> key . runtime_name : " _TtP4main18FooProtocolDerived_ " , <nl> - key . nameoffset : 3687 , <nl> + key . nameoffset : 3707 , <nl> key . namelength : 18 , <nl> - key . bodyoffset : 3725 , <nl> + key . bodyoffset : 3745 , <nl> key . bodylength : 1 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 3708 , <nl> + key . offset : 3728 , <nl> key . length : 15 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooClassBase " , <nl> - key . offset : 3736 , <nl> + key . 
offset : 3756 , <nl> key . length : 298 , <nl> key . runtime_name : " _TtC4main12FooClassBase " , <nl> - key . nameoffset : 3742 , <nl> + key . nameoffset : 3762 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 3756 , <nl> + key . bodyoffset : 3776 , <nl> key . bodylength : 277 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooBaseInstanceFunc0 ( ) " , <nl> - key . offset : 3769 , <nl> + key . offset : 3789 , <nl> key . length : 27 , <nl> - key . nameoffset : 3774 , <nl> + key . nameoffset : 3794 , <nl> key . namelength : 22 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooBaseInstanceFunc1 ( _ : ) " , <nl> - key . offset : 3809 , <nl> + key . offset : 3829 , <nl> key . length : 60 , <nl> - key . nameoffset : 3814 , <nl> + key . nameoffset : 3834 , <nl> key . namelength : 38 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " anObject " , <nl> - key . offset : 3835 , <nl> + key . offset : 3855 , <nl> key . length : 16 , <nl> key . typename : " Any ! " , <nl> key . nameoffset : 0 , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " init ( ) " , <nl> - key . offset : 3882 , <nl> + key . offset : 3902 , <nl> key . length : 7 , <nl> - key . nameoffset : 3882 , <nl> + key . nameoffset : 3902 , <nl> key . namelength : 7 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " init ( float : ) " , <nl> - key . offset : 3914 , <nl> + key . offset : 3934 , <nl> key . length : 21 , <nl> - key . nameoffset : 3914 , <nl> + key . nameoffset : 3934 , <nl> key . namelength : 21 , <nl> key . attributes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " f " , <nl> - key . offset : 3920 , <nl> + key . offset : 3940 , <nl> key . length : 14 , <nl> key . typename : " Float " , <nl> - key . nameoffset : 3920 , <nl> + key . nameoffset : 3940 , <nl> key . namelength : 5 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooBaseInstanceFuncOverridden ( ) " , <nl> - key . offset : 3948 , <nl> + key . offset : 3968 , <nl> key . length : 36 , <nl> - key . nameoffset : 3953 , <nl> + key . nameoffset : 3973 , <nl> key . namelength : 31 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooBaseClassFunc0 ( ) " , <nl> - key . offset : 4002 , <nl> + key . offset : 4022 , <nl> key . length : 30 , <nl> - key . nameoffset : 4013 , <nl> + key . nameoffset : 4033 , <nl> key . namelength : 19 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . 
FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooClassDerived " , <nl> - key . offset : 4076 , <nl> + key . offset : 4096 , <nl> key . length : 497 , <nl> key . runtime_name : " _TtC4main15FooClassDerived " , <nl> - key . nameoffset : 4082 , <nl> + key . nameoffset : 4102 , <nl> key . namelength : 15 , <nl> - key . bodyoffset : 4134 , <nl> + key . bodyoffset : 4154 , <nl> key . bodylength : 438 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 4100 , <nl> + key . offset : 4120 , <nl> key . length : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 4114 , <nl> + key . offset : 4134 , <nl> key . length : 18 <nl> } <nl> ] , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty1 " , <nl> - key . offset : 4152 , <nl> + key . offset : 4172 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4156 , <nl> + key . nameoffset : 4176 , <nl> key . namelength : 12 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty2 " , <nl> - key . offset : 4188 , <nl> + key . offset : 4208 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4192 , <nl> + key . nameoffset : 4212 , <nl> key . namelength : 12 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooProperty3 " , <nl> - key . offset : 4224 , <nl> + key . offset : 4244 , <nl> key . length : 23 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4228 , <nl> + key . nameoffset : 4248 , <nl> key . namelength : 12 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooInstanceFunc0 ( ) " , <nl> - key . offset : 4342 , <nl> + key . offset : 4362 , <nl> key . length : 23 , <nl> - key . nameoffset : 4347 , <nl> + key . nameoffset : 4367 , <nl> key . namelength : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooInstanceFunc1 ( _ : ) " , <nl> - key . offset : 4378 , <nl> + key . offset : 4398 , <nl> key . length : 33 , <nl> - key . nameoffset : 4383 , <nl> + key . nameoffset : 4403 , <nl> key . namelength : 28 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " a " , <nl> - key . offset : 4400 , <nl> + key . offset : 4420 , <nl> key . length : 10 , <nl> key . typename : " Int32 " , <nl> key . 
nameoffset : 0 , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooInstanceFunc2 ( _ : withB : ) " , <nl> - key . offset : 4424 , <nl> + key . offset : 4444 , <nl> key . length : 49 , <nl> - key . nameoffset : 4429 , <nl> + key . nameoffset : 4449 , <nl> key . namelength : 44 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " a " , <nl> - key . offset : 4446 , <nl> + key . offset : 4466 , <nl> key . length : 10 , <nl> key . typename : " Int32 " , <nl> key . nameoffset : 0 , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " b " , <nl> - key . offset : 4458 , <nl> + key . offset : 4478 , <nl> key . length : 14 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4458 , <nl> + key . nameoffset : 4478 , <nl> key . namelength : 5 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooBaseInstanceFuncOverridden ( ) " , <nl> - key . offset : 4491 , <nl> + key . offset : 4511 , <nl> key . length : 36 , <nl> - key . nameoffset : 4496 , <nl> + key . nameoffset : 4516 , <nl> key . namelength : 31 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " fooClassFunc0 ( ) " , <nl> - key . offset : 4545 , <nl> + key . offset : 4565 , <nl> key . length : 26 , <nl> - key . nameoffset : 4556 , <nl> + key . nameoffset : 4576 , <nl> key . namelength : 15 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_1 " , <nl> - key . offset : 4614 , <nl> + key . offset : 4634 , <nl> key . length : 22 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4618 , <nl> + key . nameoffset : 4638 , <nl> key . namelength : 11 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_2 " , <nl> - key . offset : 4652 , <nl> + key . offset : 4672 , <nl> key . length : 22 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4656 , <nl> + key . nameoffset : 4676 , <nl> key . namelength : 11 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_3 " , <nl> - key . offset : 4690 , <nl> + key . offset : 4710 , <nl> key . length : 22 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4694 , <nl> + key . nameoffset : 4714 , <nl> key . namelength : 11 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . 
FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_4 " , <nl> - key . offset : 4767 , <nl> + key . offset : 4787 , <nl> key . length : 23 , <nl> key . typename : " UInt32 " , <nl> - key . nameoffset : 4771 , <nl> + key . nameoffset : 4791 , <nl> key . namelength : 11 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_5 " , <nl> - key . offset : 4806 , <nl> + key . offset : 4826 , <nl> key . length : 23 , <nl> key . typename : " UInt64 " , <nl> - key . nameoffset : 4810 , <nl> + key . nameoffset : 4830 , <nl> key . namelength : 11 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_REDEF_1 " , <nl> - key . offset : 4846 , <nl> + key . offset : 4866 , <nl> key . length : 28 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4850 , <nl> + key . nameoffset : 4870 , <nl> key . namelength : 17 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FOO_MACRO_REDEF_2 " , <nl> - key . offset : 4891 , <nl> + key . offset : 4911 , <nl> key . length : 28 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 4895 , <nl> + key . nameoffset : 4915 , <nl> key . namelength : 17 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " theLastDeclInFoo ( ) " , <nl> - key . offset : 4936 , <nl> + key . offset : 4956 , <nl> key . length : 23 , <nl> - key . nameoffset : 4941 , <nl> + key . nameoffset : 4961 , <nl> key . namelength : 18 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . free , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _internalTopLevelFunc ( ) " , <nl> - key . offset : 4968 , <nl> + key . offset : 4988 , <nl> key . length : 28 , <nl> - key . nameoffset : 4973 , <nl> + key . nameoffset : 4993 , <nl> key . namelength : 23 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . struct , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _InternalStruct " , <nl> - key . offset : 5005 , <nl> + key . offset : 5025 , <nl> key . length : 97 , <nl> - key . nameoffset : 5012 , <nl> + key . nameoffset : 5032 , <nl> key . namelength : 15 , <nl> - key . bodyoffset : 5029 , <nl> + key . bodyoffset : 5049 , <nl> key . bodylength : 72 , <nl> key . substructure : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " x " , <nl> - key . offset : 5042 , <nl> + key . offset : 5062 , <nl> key . length : 12 , <nl> key . 
typename : " Int32 " , <nl> - key . nameoffset : 5046 , <nl> + key . nameoffset : 5066 , <nl> key . namelength : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " init ( ) " , <nl> - key . offset : 5067 , <nl> + key . offset : 5087 , <nl> key . length : 6 , <nl> - key . nameoffset : 5067 , <nl> + key . nameoffset : 5087 , <nl> key . namelength : 6 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " init ( x : ) " , <nl> - key . offset : 5086 , <nl> + key . offset : 5106 , <nl> key . length : 14 , <nl> - key . nameoffset : 5086 , <nl> + key . nameoffset : 5106 , <nl> key . namelength : 14 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " x " , <nl> - key . offset : 5091 , <nl> + key . offset : 5111 , <nl> key . length : 8 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 5091 , <nl> + key . nameoffset : 5111 , <nl> key . namelength : 1 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . extension , <nl> key . name : " FooClassBase " , <nl> - key . offset : 5104 , <nl> + key . offset : 5124 , <nl> key . length : 68 , <nl> - key . nameoffset : 5114 , <nl> + key . nameoffset : 5134 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 5128 , <nl> + key . bodyoffset : 5148 , <nl> key . bodylength : 43 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _internalMeth1 ( ) " , <nl> - key . offset : 5141 , <nl> + key . offset : 5161 , <nl> key . length : 29 , <nl> - key . nameoffset : 5146 , <nl> + key . nameoffset : 5166 , <nl> key . namelength : 16 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . extension , <nl> key . name : " FooClassBase " , <nl> - key . offset : 5219 , <nl> + key . offset : 5239 , <nl> key . length : 111 , <nl> - key . nameoffset : 5229 , <nl> + key . nameoffset : 5249 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 5243 , <nl> + key . bodyoffset : 5263 , <nl> key . bodylength : 86 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _internalMeth2 ( ) " , <nl> - key . offset : 5256 , <nl> + key . offset : 5276 , <nl> key . length : 29 , <nl> - key . nameoffset : 5261 , <nl> + key . nameoffset : 5281 , <nl> key . namelength : 16 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " nonInternalMeth ( ) " , <nl> - key . offset : 5298 , <nl> + key . offset : 5318 , <nl> key . length : 30 , <nl> - key . nameoffset : 5303 , <nl> + key . nameoffset : 5323 , <nl> key . namelength : 17 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . extension , <nl> key . 
name : " FooClassBase " , <nl> - key . offset : 5332 , <nl> + key . offset : 5352 , <nl> key . length : 68 , <nl> - key . nameoffset : 5342 , <nl> + key . nameoffset : 5362 , <nl> key . namelength : 12 , <nl> - key . bodyoffset : 5356 , <nl> + key . bodyoffset : 5376 , <nl> key . bodylength : 43 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _internalMeth3 ( ) " , <nl> - key . offset : 5369 , <nl> + key . offset : 5389 , <nl> key . length : 29 , <nl> - key . nameoffset : 5374 , <nl> + key . nameoffset : 5394 , <nl> key . namelength : 16 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . protocol , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " _InternalProt " , <nl> - key . offset : 5409 , <nl> + key . offset : 5429 , <nl> key . length : 26 , <nl> key . runtime_name : " _TtP4main13_InternalProt_ " , <nl> - key . nameoffset : 5418 , <nl> + key . nameoffset : 5438 , <nl> key . namelength : 13 , <nl> - key . bodyoffset : 5433 , <nl> + key . bodyoffset : 5453 , <nl> key . bodylength : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " ClassWithInternalProt " , <nl> - key . offset : 5444 , <nl> + key . offset : 5464 , <nl> key . length : 47 , <nl> key . runtime_name : " _TtC4main21ClassWithInternalProt " , <nl> - key . nameoffset : 5450 , <nl> + key . nameoffset : 5470 , <nl> key . namelength : 21 , <nl> - key . bodyoffset : 5489 , <nl> + key . bodyoffset : 5509 , <nl> key . bodylength : 1 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 5474 , <nl> + key . offset : 5494 , <nl> key . length : 13 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooClassPropertyOwnership " , <nl> - key . offset : 5500 , <nl> + key . offset : 5520 , <nl> key . length : 278 , <nl> key . runtime_name : " _TtC4main25FooClassPropertyOwnership " , <nl> - key . nameoffset : 5506 , <nl> + key . nameoffset : 5526 , <nl> key . namelength : 25 , <nl> - key . bodyoffset : 5548 , <nl> + key . bodyoffset : 5568 , <nl> key . bodylength : 229 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 5534 , <nl> + key . offset : 5554 , <nl> key . length : 12 <nl> } <nl> ] , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " assignable " , <nl> - key . offset : 5561 , <nl> + key . offset : 5581 , <nl> key . length : 20 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5565 , <nl> + key . nameoffset : 5585 , <nl> key . 
namelength : 10 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " unsafeAssignable " , <nl> - key . offset : 5594 , <nl> + key . offset : 5614 , <nl> key . length : 26 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5598 , <nl> + key . nameoffset : 5618 , <nl> key . namelength : 16 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " retainable " , <nl> - key . offset : 5633 , <nl> + key . offset : 5653 , <nl> key . length : 20 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5637 , <nl> + key . nameoffset : 5657 , <nl> key . namelength : 10 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " strongRef " , <nl> - key . offset : 5666 , <nl> + key . offset : 5686 , <nl> key . length : 19 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5670 , <nl> + key . nameoffset : 5690 , <nl> key . namelength : 9 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " copyable " , <nl> - key . offset : 5698 , <nl> + key . offset : 5718 , <nl> key . length : 18 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5702 , <nl> + key . nameoffset : 5722 , <nl> key . namelength : 8 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " weakRef " , <nl> - key . offset : 5729 , <nl> + key . offset : 5749 , <nl> key . length : 17 , <nl> key . typename : " Any ! " , <nl> - key . nameoffset : 5733 , <nl> + key . nameoffset : 5753 , <nl> key . namelength : 7 <nl> } , <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . setter_accessibility : source . lang . swift . accessibility . public , <nl> key . name : " scalar " , <nl> - key . offset : 5759 , <nl> + key . offset : 5779 , <nl> key . length : 17 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 5763 , <nl> + key . nameoffset : 5783 , <nl> key . namelength : 6 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooUnavailableMembers " , <nl> - key . offset : 5787 , <nl> + key . offset : 5807 , <nl> key . length : 346 , <nl> key . runtime_name : " _TtC4main21FooUnavailableMembers " , <nl> - key . nameoffset : 5793 , <nl> + key . nameoffset : 5813 , <nl> key . namelength : 21 , <nl> - key . bodyoffset : 5831 , <nl> + key . bodyoffset : 5851 , <nl> key . 
bodylength : 301 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 5817 , <nl> + key . offset : 5837 , <nl> key . length : 12 <nl> } <nl> ] , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " init ( int : ) " , <nl> - key . offset : 5856 , <nl> + key . offset : 5876 , <nl> key . length : 19 , <nl> - key . nameoffset : 5856 , <nl> + key . nameoffset : 5876 , <nl> key . namelength : 19 , <nl> key . attributes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> { <nl> key . kind : source . lang . swift . decl . var . parameter , <nl> key . name : " i " , <nl> - key . offset : 5862 , <nl> + key . offset : 5882 , <nl> key . length : 12 , <nl> key . typename : " Int32 " , <nl> - key . nameoffset : 5862 , <nl> + key . nameoffset : 5882 , <nl> key . namelength : 3 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " deprecated ( ) " , <nl> - key . offset : 5937 , <nl> + key . offset : 5957 , <nl> key . length : 17 , <nl> - key . nameoffset : 5942 , <nl> + key . nameoffset : 5962 , <nl> key . namelength : 12 , <nl> key . attributes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " availabilityIntroduced ( ) " , <nl> - key . offset : 6000 , <nl> + key . offset : 6020 , <nl> key . length : 29 , <nl> - key . nameoffset : 6005 , <nl> + key . nameoffset : 6025 , <nl> key . namelength : 24 , <nl> key . attributes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " availabilityIntroducedMsg ( ) " , <nl> - key . offset : 6099 , <nl> + key . offset : 6119 , <nl> key . length : 32 , <nl> - key . nameoffset : 6104 , <nl> + key . nameoffset : 6124 , <nl> key . namelength : 27 , <nl> key . attributes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooCFType " , <nl> - key . offset : 6142 , <nl> + key . offset : 6162 , <nl> key . length : 19 , <nl> key . runtime_name : " _TtC4main9FooCFType " , <nl> - key . nameoffset : 6148 , <nl> + key . nameoffset : 6168 , <nl> key . namelength : 9 , <nl> - key . bodyoffset : 6159 , <nl> + key . bodyoffset : 6179 , <nl> key . bodylength : 1 <nl> } , <nl> { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooOverlayClassBase " , <nl> - key . offset : 6169 , <nl> + key . offset : 6189 , <nl> key . length : 50 , <nl> key . 
runtime_name : " _TtC4main19FooOverlayClassBase " , <nl> - key . nameoffset : 6175 , <nl> + key . nameoffset : 6195 , <nl> key . namelength : 19 , <nl> - key . bodyoffset : 6196 , <nl> + key . bodyoffset : 6216 , <nl> key . bodylength : 22 , <nl> key . substructure : [ <nl> { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " f ( ) " , <nl> - key . offset : 6209 , <nl> + key . offset : 6229 , <nl> key . length : 8 , <nl> - key . nameoffset : 6214 , <nl> + key . nameoffset : 6234 , <nl> key . namelength : 3 <nl> } <nl> ] <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . class , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " FooOverlayClassDerived " , <nl> - key . offset : 6228 , <nl> + key . offset : 6248 , <nl> key . length : 88 , <nl> key . runtime_name : " _TtC4main22FooOverlayClassDerived " , <nl> - key . nameoffset : 6234 , <nl> + key . nameoffset : 6254 , <nl> key . namelength : 22 , <nl> - key . bodyoffset : 6284 , <nl> + key . bodyoffset : 6304 , <nl> key . bodylength : 31 , <nl> key . inheritedtypes : [ <nl> { <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . elements : [ <nl> { <nl> key . kind : source . lang . swift . structure . elem . typeref , <nl> - key . offset : 6259 , <nl> + key . offset : 6279 , <nl> key . length : 23 <nl> } <nl> ] , <nl> public class FooOverlayClassDerived : Foo . FooOverlayClassBase { <nl> key . kind : source . lang . swift . decl . function . method . instance , <nl> key . accessibility : source . lang . swift . accessibility . public , <nl> key . name : " f ( ) " , <nl> - key . offset : 6306 , <nl> + key . offset : 6326 , <nl> key . length : 8 , <nl> - key . nameoffset : 6311 , <nl> + key . nameoffset : 6331 , <nl> key . namelength : 3 , <nl> key . attributes : [ <nl> { <nl> mmm a / test / TypeCoercion / overload_member . swift <nl> ppp b / test / TypeCoercion / overload_member . swift <nl> func test_mixed_overload_coerce ( _ a : A , x : inout X , y : Y , z : Z ) { <nl> func test_mixed_method_value_coerce ( _ a : A ) { <nl> var _ : ( X ) - > X = a . mixed <nl> var _ : ( Y ) - > Y = A . mixed <nl> - var _ : ( Y ) - > Y = a . mixed ; / / expected - error { { cannot convert value of type ' ( x : X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> + var _ : ( Y ) - > Y = a . mixed ; / / expected - error { { cannot convert value of type ' ( X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> var _ : ( A ) - > ( X ) - > X = A . mixed <nl> } <nl> <nl> extension A { <nl> <nl> func test_mixed_method_value_coerce ( ) { <nl> var _ : ( X ) - > X = mixed <nl> - var _ : ( Y ) - > Y = mixed ; / / expected - error { { cannot convert value of type ' ( x : X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> - var _ : ( Y ) - > Y = mixed ; / / expected - error { { cannot convert value of type ' ( x : X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> + var _ : ( Y ) - > Y = mixed ; / / expected - error { { cannot convert value of type ' ( X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> + var _ : ( Y ) - > Y = mixed ; / / expected - error { { cannot convert value of type ' ( X ) - > X ' to specified type ' ( Y ) - > Y ' } } <nl> var _ : ( A ) - > ( X ) - > X = A . mixed <nl> } <nl> <nl> mmm a / test / TypeCoercion / overload_noncall . 
swift <nl> ppp b / test / TypeCoercion / overload_noncall . swift <nl> func f0 ( _ x1 : X , x2 : X ) - > X { } / / expected - note { { found this candidate } } <nl> func f0 ( _ y1 : Y , y2 : Y ) - > Y { } / / expected - note { { found this candidate } } <nl> var f0 : X / / expected - note { { found this candidate } } expected - note { { ' f0 ' previously declared here } } <nl> func f0_init ( _ x : X , y : Y ) - > X { } <nl> - var f0 : ( x : X , y : Y ) - > X = f0_init / / expected - error { { invalid redeclaration } } <nl> + var f0 : ( _ x : X , _ y : Y ) - > X = f0_init / / expected - error { { invalid redeclaration } } <nl> func f1 ( _ x : X ) - > X { } <nl> <nl> - func f2 ( _ g : ( x : X ) - > X ) - > ( ( y : Y ) - > Y ) { } <nl> + func f2 ( _ g : ( _ x : X ) - > X ) - > ( ( _ y : Y ) - > Y ) { } <nl> <nl> func test_conv ( ) { <nl> - var _ : ( x1 : X , x2 : X ) - > X = f0 <nl> + var _ : ( _ x1 : X , _ x2 : X ) - > X = f0 <nl> var _ : ( X , X ) - > X = f0 <nl> var _ : ( Y , X ) - > X = f0 / / expected - error { { ambiguous reference to member ' f0 ( _ : x2 : ) ' } } <nl> var _ : ( X ) - > X = f1 <nl> var a7 : ( X ) - > ( X ) = f1 <nl> - var a8 : ( x2 : X ) - > ( X ) = f1 <nl> - var a9 : ( x2 : X ) - > ( ( X ) ) = f1 <nl> + var a8 : ( _ x2 : X ) - > ( X ) = f1 <nl> + var a9 : ( _ x2 : X ) - > ( ( X ) ) = f1 <nl> a7 = a8 <nl> a8 = a9 <nl> a9 = a7 <nl> <nl> var _ : ( ( X ) - > X ) - > ( ( Y ) - > Y ) = f2 <nl> - var _ : ( ( x2 : X ) - > ( X ) ) - > ( ( ( y2 : Y ) - > ( Y ) ) ) = f2 <nl> + var _ : ( ( _ x2 : X ) - > ( X ) ) - > ( ( ( _ y2 : Y ) - > ( Y ) ) ) = f2 <nl> <nl> typealias fp = ( ( X ) - > X ) - > ( ( Y ) - > Y ) <nl> var _ = f2 <nl> mmm a / test / TypeCoercion / subtyping . swift <nl> ppp b / test / TypeCoercion / subtyping . swift <nl> func p_to_ip1 ( _ p : CustomStringConvertible ) - > IsPrintable1 { } <nl> func protocolConformance ( ac1 : @ autoclosure ( ) - > CustomStringConvertible , <nl> ac2 : @ autoclosure ( ) - > FormattedPrintable , <nl> ip1 : @ autoclosure ( ) - > IsPrintable1 ) { <nl> - var f1 : ( fp : FormattedPrintable ) - > CustomStringConvertible = fp_to_p <nl> - var f2 : ( p : CustomStringConvertible ) - > FormattedPrintable = p_to_fp <nl> - let f3 : ( p : CustomStringConvertible ) - > IsPrintable1 = p_to_ip1 <nl> + var f1 : ( _ fp : FormattedPrintable ) - > CustomStringConvertible = fp_to_p <nl> + var f2 : ( _ p : CustomStringConvertible ) - > FormattedPrintable = p_to_fp <nl> + let f3 : ( _ p : CustomStringConvertible ) - > IsPrintable1 = p_to_ip1 <nl> <nl> / / FIXME : closures make ABI conversions explicit . 
rdar : / / problem / 19517003 <nl> - f1 = { f2 ( p : $ 0 ) } / / okay <nl> - f1 = { f3 ( p : $ 0 ) } / / okay <nl> - f2 = f1 / / expected - error { { cannot assign value of type ' ( fp : FormattedPrintable ) - > CustomStringConvertible ' to type ' ( p : CustomStringConvertible ) - > FormattedPrintable ' } } <nl> + f1 = { f2 ( $ 0 ) } / / okay <nl> + f1 = { f3 ( $ 0 ) } / / okay <nl> + f2 = f1 / / expected - error { { cannot assign value of type ' ( FormattedPrintable ) - > CustomStringConvertible ' to type ' ( CustomStringConvertible ) - > FormattedPrintable ' } } <nl> <nl> accept_creates_Printable ( ac1 ) <nl> accept_creates_Printable ( { ac2 ( ) } ) <nl> accept_creates_Printable ( { ip1 ( ) } ) <nl> - accept_creates_FormattedPrintable ( ac1 ) / / expected - error { { cannot convert value of type ' @ autoclosure ( ) - > CustomStringConvertible ' to expected argument type ' @ noescape ( ) - > FormattedPrintable ' } } <nl> + accept_creates_FormattedPrintable ( ac1 ) / / expected - error { { cannot convert value of type ' @ autoclosure ( ) - > CustomStringConvertible ' to expected argument type ' ( ) - > FormattedPrintable ' } } <nl> accept_creates_FormattedPrintable ( ac2 ) <nl> - accept_creates_FormattedPrintable ( ip1 ) / / expected - error { { cannot convert value of type ' @ autoclosure ( ) - > IsPrintable1 ' to expected argument type ' @ noescape ( ) - > FormattedPrintable ' } } <nl> + accept_creates_FormattedPrintable ( ip1 ) / / expected - error { { cannot convert value of type ' @ autoclosure ( ) - > IsPrintable1 ' to expected argument type ' ( ) - > FormattedPrintable ' } } <nl> } <nl> <nl> func p_gen_to_fp ( _ : ( ) - > CustomStringConvertible ) - > FormattedPrintable { } <nl> mmm a / test / attr / attr_autoclosure . swift <nl> ppp b / test / attr / attr_autoclosure . swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> / / Simple case . <nl> - var fn : @ autoclosure ( ) - > Int = 4 / / expected - error { { @ autoclosure may only be used on parameters } } expected - error { { cannot convert value of type ' Int ' to specified type ' @ noescape ( ) - > Int ' } } <nl> + var fn : @ autoclosure ( ) - > Int = 4 / / expected - error { { @ autoclosure may only be used on parameters } } expected - error { { cannot convert value of type ' Int ' to specified type ' ( ) - > Int ' } } <nl> <nl> @ autoclosure func func1 ( ) { } / / expected - error { { @ autoclosure may only be used on ' parameter ' declarations } } <nl> <nl> protocol P2 : P1 { <nl> associatedtype Element <nl> } <nl> <nl> - func overloadedEach < O : P1 > ( _ source : O , _ closure : ( ) - > ( ) ) { <nl> + func overloadedEach < O : P1 > ( _ source : O , _ closure : @ escaping ( ) - > ( ) ) { <nl> } <nl> <nl> - func overloadedEach < P : P2 > ( _ source : P , _ closure : ( ) - > ( ) ) { <nl> + func overloadedEach < P : P2 > ( _ source : P , _ closure : @ escaping ( ) - > ( ) ) { <nl> } <nl> <nl> struct S : P2 { <nl> class Sub : Super { <nl> override func f3 ( _ x : @ autoclosure ( escaping ) ( ) - > ( ) ) { } / / expected - error { { does not override any method } } <nl> } <nl> <nl> - func func12_sink ( _ x : ( ) - > Int ) { } <nl> + func func12_sink ( _ x : @ escaping ( ) - > Int ) { } <nl> <nl> func func12a ( _ x : @ autoclosure ( ) - > Int ) { <nl> func12_sink ( x ) / / expected - error { { invalid conversion from non - escaping function of type ' @ autoclosure ( ) - > Int ' to potentially escaping function type ' ( ) - > Int ' } } <nl> mmm a / test / attr / attr_availability . 
swift <nl> ppp b / test / attr / attr_availability . swift <nl> func testArgNames ( ) { <nl> <nl> unavailableInit ( a : 0 ) / / expected - error { { ' unavailableInit ( a : ) ' has been replaced by ' Int . init ( other : ) ' } } { { 3 - 18 = Int } } { { 19 - 20 = other } } <nl> let fn = unavailableInit / / expected - error { { ' unavailableInit ( a : ) ' has been replaced by ' Int . init ( other : ) ' } } { { 12 - 27 = Int . init } } <nl> - fn ( a : 1 ) <nl> + fn ( 1 ) <nl> <nl> unavailableNestedInit ( a : 0 ) / / expected - error { { ' unavailableNestedInit ( a : ) ' has been replaced by ' Foo . Bar . init ( other : ) ' } } { { 3 - 24 = Foo . Bar } } { { 25 - 26 = other } } <nl> let fn2 = unavailableNestedInit / / expected - error { { ' unavailableNestedInit ( a : ) ' has been replaced by ' Foo . Bar . init ( other : ) ' } } { { 13 - 34 = Foo . Bar . init } } <nl> - fn2 ( a : 1 ) <nl> + fn2 ( 1 ) <nl> } <nl> <nl> @ available ( * , unavailable , renamed : " shinyLabeledArguments ( ) " ) <nl> func trailingClosureArg ( _ value : Int , _ other : Int , fn : ( ) - > Void ) { } / / expect <nl> func trailingClosureArg2 ( _ value : Int , _ other : Int , fn : ( ) - > Void ) { } / / expected - note { { here } } <nl> <nl> func testInstanceTrailingClosure ( ) { <nl> - trailingClosure ( 0 ) { } / / expected - error { { ' trailingClosure ( _ : fn : ) ' has been replaced by instance method ' Int . foo ( execute : ) ' } } { { 3 - 18 = 0 . foo } } { { 19 - 20 = } } <nl> - trailingClosureArg ( 0 , 1 ) { } / / expected - error { { ' trailingClosureArg ( _ : _ : fn : ) ' has been replaced by instance method ' Int . foo ( bar : execute : ) ' } } { { 3 - 21 = 0 . foo } } { { 22 - 25 = } } { { 25 - 25 = bar : } } <nl> - trailingClosureArg2 ( 0 , 1 ) { } / / expected - error { { ' trailingClosureArg2 ( _ : _ : fn : ) ' has been replaced by instance method ' Int . foo ( bar : execute : ) ' } } { { 3 - 22 = 1 . foo } } { { 23 - 23 = bar : } } { { 24 - 27 = } } <nl> + / / FIXME : regression in fixit due to noescape - by - default <nl> + trailingClosure ( 0 ) { } / / expected - error { { ' trailingClosure ( _ : fn : ) ' has been replaced by instance method ' Int . foo ( execute : ) ' } } / / FIXME : { { 3 - 18 = 0 . foo } } { { 19 - 20 = } } <nl> + trailingClosureArg ( 0 , 1 ) { } / / expected - error { { ' trailingClosureArg ( _ : _ : fn : ) ' has been replaced by instance method ' Int . foo ( bar : execute : ) ' } } / / FIXME : { { 3 - 21 = 0 . foo } } { { 22 - 25 = } } { { 25 - 25 = bar : } } <nl> + trailingClosureArg2 ( 0 , 1 ) { } / / expected - error { { ' trailingClosureArg2 ( _ : _ : fn : ) ' has been replaced by instance method ' Int . foo ( bar : execute : ) ' } } / / FIXME : { { 3 - 22 = 1 . foo } } { { 23 - 23 = bar : } } { { 24 - 27 = } } <nl> } <nl> <nl> @ available ( * , unavailable , renamed : " + " ) <nl> mmm a / test / attr / attr_noescape . swift <nl> ppp b / test / attr / attr_noescape . 
swift <nl> <nl> <nl> func conflictingAttrs ( _ fn : @ noescape @ escaping ( ) - > Int ) { } / / expected - error { { @ escaping conflicts with @ noescape } } <nl> <nl> - func doesEscape ( _ fn : ( ) - > Int ) { } <nl> + func doesEscape ( _ fn : @ escaping ( ) - > Int ) { } <nl> <nl> func takesGenericClosure < T > ( _ a : Int , _ fn : @ noescape ( ) - > T ) { } <nl> <nl> <nl> - func takesNoEscapeClosure ( _ fn : @ noescape ( ) - > Int ) { <nl> + func takesNoEscapeClosure ( _ fn : ( ) - > Int ) { <nl> + / / expected - note @ - 1 { { parameter ' fn ' is implicitly non - escaping } } <nl> + / / expected - note @ - 2 { { parameter ' fn ' is implicitly non - escaping } } <nl> + / / expected - note @ - 3 { { parameter ' fn ' is implicitly non - escaping } } <nl> takesNoEscapeClosure { 4 } / / ok <nl> <nl> _ = fn ( ) / / ok <nl> <nl> - var x = fn / / expected - error { { @ noescape parameter ' fn ' may only be called } } <nl> + var x = fn / / expected - error { { non - escaping parameter ' fn ' may only be called } } <nl> <nl> / / This is ok , because the closure itself is noescape . <nl> takesNoEscapeClosure { fn ( ) } <nl> <nl> / / This is not ok , because it escapes the ' fn ' closure . <nl> - doesEscape { fn ( ) } / / expected - error { { closure use of @ noescape parameter ' fn ' may allow it to escape } } <nl> + doesEscape { fn ( ) } / / expected - error { { closure use of non - escaping parameter ' fn ' may allow it to escape } } <nl> <nl> / / This is not ok , because it escapes the ' fn ' closure . <nl> func nested_function ( ) { <nl> - _ = fn ( ) / / expected - error { { declaration closing over @ noescape parameter ' fn ' may allow it to escape } } <nl> + _ = fn ( ) / / expected - error { { declaration closing over non - escaping parameter ' fn ' may allow it to escape } } <nl> } <nl> <nl> takesNoEscapeClosure ( fn ) / / ok <nl> <nl> - doesEscape ( fn ) / / expected - error { { invalid conversion from non - escaping function of type ' @ noescape ( ) - > Int ' to potentially escaping function type ' ( ) - > Int ' } } <nl> + doesEscape ( fn ) / / expected - error { { invalid conversion from non - escaping function of type ' ( ) - > Int ' to potentially escaping function type ' ( ) - > Int ' } } <nl> takesGenericClosure ( 4 , fn ) / / ok <nl> takesGenericClosure ( 4 ) { fn ( ) } / / ok . <nl> } <nl> func takeNoEscapeTest2 ( _ fn : @ noescape ( ) - > ( ) ) { <nl> <nl> / / Autoclosure implies noescape , but produce nice diagnostics so people know <nl> / / why noescape problems happen . <nl> - func testAutoclosure ( _ a : @ autoclosure ( ) - > Int ) { / / expected - note { { parameter ' a ' is implicitly @ noescape because it was declared @ autoclosure } } <nl> - doesEscape { a ( ) } / / expected - error { { closure use of @ noescape parameter ' a ' may allow it to escape } } <nl> + func testAutoclosure ( _ a : @ autoclosure ( ) - > Int ) { / / expected - note { { parameter ' a ' is implicitly non - escaping because it was declared @ autoclosure } } <nl> + doesEscape { a ( ) } / / expected - error { { closure use of non - escaping parameter ' a ' may allow it to escape } } <nl> } <nl> <nl> <nl> protocol P2 : P1 { <nl> associatedtype Element <nl> } <nl> <nl> - func overloadedEach < O : P1 , T > ( _ source : O , _ transform : ( O . Element ) - > ( ) , _ : T ) { } <nl> + func overloadedEach < O : P1 , T > ( _ source : O , _ transform : @ escaping ( O . Element ) - > ( ) , _ : T ) { } <nl> <nl> - func overloadedEach < P : P2 , T > ( _ source : P , _ transform : ( P . 
Element ) - > ( ) , _ : T ) { } <nl> + func overloadedEach < P : P2 , T > ( _ source : P , _ transform : @ escaping ( P . Element ) - > ( ) , _ : T ) { } <nl> <nl> struct S : P2 { <nl> typealias Element = Int <nl> func each ( _ transform : @ noescape ( Int ) - > ( ) ) { <nl> - overloadedEach ( self , / / expected - error { { cannot invoke ' overloadedEach ' with an argument list of type ' ( S , @ noescape ( Int ) - > ( ) , Int ) ' } } <nl> + overloadedEach ( self , / / expected - error { { cannot invoke ' overloadedEach ' with an argument list of type ' ( S , ( Int ) - > ( ) , Int ) ' } } <nl> transform , 1 ) <nl> / / expected - note @ - 2 { { overloads for ' overloadedEach ' exist with these partially matching parameter lists : ( O , ( O . Element ) - > ( ) , T ) , ( P , ( P . Element ) - > ( ) , T ) } } <nl> } <nl> struct S : P2 { <nl> <nl> <nl> / / rdar : / / 19763676 - False positive in @ noescape analysis triggered by parameter label <nl> - func r19763676Callee ( _ f : @ noescape ( param : Int ) - > Int ) { } <nl> + func r19763676Callee ( _ f : @ noescape ( _ param : Int ) - > Int ) { } <nl> <nl> func r19763676Caller ( _ g : @ noescape ( Int ) - > Int ) { <nl> r19763676Callee ( { _ in g ( 1 ) } ) <nl> func curriedFlatMap2 < A , B > ( _ x : [ A ] ) - > ( @ noescape ( A ) - > [ B ] ) - > [ B ] { <nl> } <nl> } <nl> <nl> - func bad ( _ a : ( Int ) - > Int ) - > Int { return 42 } <nl> + func bad ( _ a : @ escaping ( Int ) - > Int ) - > Int { return 42 } <nl> func escapeNoEscapeResult ( _ x : [ Int ] ) - > ( @ noescape ( Int ) - > Int ) - > Int { <nl> return { f in <nl> - bad ( f ) / / expected - error { { invalid conversion from non - escaping function of type ' @ noescape ( Int ) - > Int ' to potentially escaping function type ' ( Int ) - > Int ' } } <nl> + bad ( f ) / / expected - error { { invalid conversion from non - escaping function of type ' ( Int ) - > Int ' to potentially escaping function type ' ( Int ) - > Int ' } } <nl> } <nl> } <nl> <nl> <nl> / / SR - 824 - @ noescape for Type Aliased Closures <nl> / / <nl> - typealias CompletionHandlerNE = @ noescape ( success : Bool ) - > ( ) <nl> - typealias CompletionHandler = ( success : Bool ) - > ( ) <nl> + typealias CompletionHandlerNE = @ noescape ( _ success : Bool ) - > ( ) <nl> + typealias CompletionHandler = ( _ success : Bool ) - > ( ) <nl> var escape : CompletionHandlerNE <nl> - func doThing1 ( _ completion : @ noescape ( success : Bool ) - > ( ) ) { <nl> - / / expected - error @ + 2 { { @ noescape value ' escape ' may only be called } } <nl> - / / expected - error @ + 1 { { @ noescape parameter ' completion ' may only be called } } <nl> - escape = completion / / expected - error { { declaration closing over @ noescape parameter ' escape ' may allow it to escape } } <nl> + func doThing1 ( _ completion : ( _ success : Bool ) - > ( ) ) { <nl> + / / expected - note @ - 1 { { parameter ' completion ' is implicitly non - escaping } } <nl> + / / expected - error @ + 2 { { non - escaping value ' escape ' may only be called } } <nl> + / / expected - error @ + 1 { { non - escaping parameter ' completion ' may only be called } } <nl> + escape = completion / / expected - error { { declaration closing over non - escaping parameter ' escape ' may allow it to escape } } <nl> } <nl> func doThing2 ( _ completion : CompletionHandlerNE ) { <nl> - / / expected - error @ + 2 { { @ noescape value ' escape ' may only be called } } <nl> - / / expected - error @ + 1 { { @ noescape parameter ' completion ' may only be called } } <nl> - escape = 
completion / / expected - error { { declaration closing over @ noescape parameter ' escape ' may allow it to escape } } <nl> + / / expected - note @ - 1 { { parameter ' completion ' is implicitly non - escaping } } <nl> + / / expected - error @ + 2 { { non - escaping value ' escape ' may only be called } } <nl> + / / expected - error @ + 1 { { non - escaping parameter ' completion ' may only be called } } <nl> + escape = completion / / expected - error { { declaration closing over non - escaping parameter ' escape ' may allow it to escape } } <nl> } <nl> <nl> / / < rdar : / / problem / 19997680 > @ noescape doesn ' t work on parameters of function type <nl> enum r19997577Type { <nl> } <nl> <nl> / / type attribute and decl attribute <nl> - func noescapeD ( @ noescape f : ( ) - > Bool ) { } / / expected - error { { @ noescape is now an attribute on a parameter type , instead of on the parameter itself } } { { 16 - 25 = } } { { 29 - 29 = @ noescape } } <nl> + func noescapeD ( @ noescape f : @ escaping ( ) - > Bool ) { } / / expected - error { { @ noescape is now an attribute on a parameter type , instead of on the parameter itself } } { { 16 - 25 = } } { { 29 - 29 = @ noescape } } <nl> func noescapeT ( f : @ noescape ( ) - > Bool ) { } / / ok <nl> func autoclosureD ( @ autoclosure f : ( ) - > Bool ) { } / / expected - error { { @ autoclosure is now an attribute on a parameter type , instead of on the parameter itself } } { { 19 - 31 = } } { { 35 - 35 = @ autoclosure } } <nl> func autoclosureT ( f : @ autoclosure ( ) - > Bool ) { } / / ok <nl> <nl> - func noescapeD_noescapeT ( @ noescape f : @ noescape ( ) - > Bool ) { } <nl> + func noescapeD_noescapeT ( @ noescape f : @ noescape ( ) - > Bool ) { } / / expected - error { { @ noescape is now an attribute on a parameter type , instead of on the parameter itself } } <nl> func autoclosureD_noescapeT ( @ autoclosure f : @ noescape ( ) - > Bool ) { } / / expected - error { { @ autoclosure is now an attribute on a parameter type , instead of on the parameter itself } } { { 29 - 41 = } } { { 45 - 45 = @ autoclosure } } <nl> mmm a / test / attr / attr_objc . swift <nl> ppp b / test / attr / attr_objc . 
swift <nl> class infer_instanceFunc1 { <nl> <nl> @ objc func func16_ ( a : AnyObject ) { } / / no - error <nl> <nl> - func func17 ( a : ( ) - > ( ) ) { } <nl> - / / CHECK - LABEL : { { ^ } } @ objc func func17 ( a : ( ) - > ( ) ) { <nl> + func func17 ( a : @ escaping ( ) - > ( ) ) { } <nl> + / / CHECK - LABEL : { { ^ } } @ objc func func17 ( a : @ escaping ( ) - > ( ) ) { <nl> <nl> - @ objc func func17_ ( a : ( ) - > ( ) ) { } <nl> + @ objc func func17_ ( a : @ escaping ( ) - > ( ) ) { } <nl> <nl> - func func18 ( a : ( Int ) - > ( ) , b : Int ) { } <nl> - / / CHECK - LABEL : { { ^ } } @ objc func func18 ( a : ( Int ) - > ( ) , b : Int ) <nl> + func func18 ( a : @ escaping ( Int ) - > ( ) , b : Int ) { } <nl> + / / CHECK - LABEL : { { ^ } } @ objc func func18 ( a : @ escaping ( Int ) - > ( ) , b : Int ) <nl> <nl> - @ objc func func18_ ( a : ( Int ) - > ( ) , b : Int ) { } <nl> + @ objc func func18_ ( a : @ escaping ( Int ) - > ( ) , b : Int ) { } <nl> <nl> - func func19 ( a : ( String ) - > ( ) , b : Int ) { } <nl> - / / CHECK - LABEL : { { ^ } } @ objc func func19 ( a : ( String ) - > ( ) , b : Int ) { <nl> + func func19 ( a : @ escaping ( String ) - > ( ) , b : Int ) { } <nl> + / / CHECK - LABEL : { { ^ } } @ objc func func19 ( a : @ escaping ( String ) - > ( ) , b : Int ) { <nl> <nl> - @ objc func func19_ ( a : ( String ) - > ( ) , b : Int ) { } <nl> + @ objc func func19_ ( a : @ escaping ( String ) - > ( ) , b : Int ) { } <nl> <nl> func func_FunctionReturn1 ( ) - > ( ) - > ( ) { } <nl> / / CHECK - LABEL : { { ^ } } @ objc func func_FunctionReturn1 ( ) - > ( ) - > ( ) { <nl> class ClassThrows1 { <nl> / / CHECK : { { ^ } } func methodReturnsOptionalObjCClass ( ) throws - > Class_ObjC1 ? <nl> func methodReturnsOptionalObjCClass ( ) throws - > Class_ObjC1 ? { return nil } <nl> <nl> - / / CHECK : @ objc func methodWithTrailingClosures ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int , fn3 : ( Int ) - > Int ) <nl> + / / CHECK : @ objc func methodWithTrailingClosures ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int , fn3 : @ escaping ( Int ) - > Int ) <nl> / / CHECK - DUMP : func_decl " methodWithTrailingClosures ( _ : fn1 : fn2 : fn3 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 1 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> - func methodWithTrailingClosures ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int , fn3 : ( Int ) - > Int ) throws { } <nl> + func methodWithTrailingClosures ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int , fn3 : @ escaping ( Int ) - > Int ) throws { } <nl> <nl> / / CHECK : @ objc init ( degrees : Double ) throws <nl> / / CHECK - DUMP : constructor_decl " init ( degrees : ) " { { . 
* } } foreign_error = NilResult , unowned , param = 1 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > <nl> class ClassThrows1 { <nl> <nl> / / CHECK - DUMP - LABEL : class_decl " SubclassImplicitClassThrows1 " <nl> @ objc class SubclassImplicitClassThrows1 : ImplicitClassThrows1 { <nl> - / / CHECK : @ objc override func methodWithTrailingClosures ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( ( Int ) - > Int ) , fn3 : ( ( Int ) - > Int ) ) <nl> + / / CHECK : @ objc override func methodWithTrailingClosures ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : ( @ escaping ( Int ) - > Int ) , fn3 : ( @ escaping ( Int ) - > Int ) ) <nl> / / CHECK - DUMP : func_decl " methodWithTrailingClosures ( _ : fn1 : fn2 : fn3 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 1 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> - override func methodWithTrailingClosures ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( ( Int ) - > Int ) , fn3 : ( ( Int ) - > Int ) ) throws { } <nl> + override func methodWithTrailingClosures ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : ( @ escaping ( Int ) - > Int ) , fn3 : ( @ escaping ( Int ) - > Int ) ) throws { } <nl> } <nl> <nl> class ThrowsRedecl1 { <nl> class ThrowsRedecl1 { <nl> @ objc func method2AndReturnError ( _ x : Int ) { } / / expected - note { { declared here } } <nl> @ objc func method2 ( ) throws { } / / expected - error { { with Objective - C selector ' method2AndReturnError : ' } } <nl> <nl> - @ objc func method3 ( _ x : Int , error : Int , closure : ( Int ) - > Int ) { } / / expected - note { { declared here } } <nl> + @ objc func method3 ( _ x : Int , error : Int , closure : @ escaping ( Int ) - > Int ) { } / / expected - note { { declared here } } <nl> @ objc func method3 ( _ x : Int , closure : ( Int ) - > Int ) throws { } / / expected - error { { with Objective - C selector ' method3 : error : closure : ' } } <nl> <nl> @ objc ( initAndReturnError : ) func initMethod1 ( error : Int ) { } / / expected - note { { declared here } } <nl> class ThrowsRedecl1 { <nl> @ objc ( initWithString : error : ) func initMethod2 ( string : String , error : Int ) { } / / expected - note { { declared here } } <nl> @ objc init ( string : String ) throws { } / / expected - error { { with Objective - C selector ' initWithString : error : ' } } <nl> <nl> - @ objc ( initAndReturnError : fn : ) func initMethod3 ( error : Int , fn : ( Int ) - > Int ) { } / / expected - note { { declared here } } <nl> + @ objc ( initAndReturnError : fn : ) func initMethod3 ( error : Int , fn : @ escaping ( Int ) - > Int ) { } / / expected - note { { declared here } } <nl> @ objc init ( fn : ( Int ) - > Int ) throws { } / / expected - error { { with Objective - C selector ' initAndReturnError : fn : ' } } <nl> } <nl> <nl> class ThrowsObjCName { <nl> - @ objc ( method4 : closure : error : ) func method4 ( x : Int , closure : ( Int ) - > Int ) throws { } <nl> + @ objc ( method4 : closure : error : ) func method4 ( x : Int , closure : @ escaping ( Int ) - > Int ) throws { } <nl> <nl> - @ objc ( method5AndReturnError : x : closure : ) func method5 ( x : Int , closure : ( Int ) - > Int ) throws { } <nl> + @ objc ( method5AndReturnError : x : closure : ) func method5 ( x : Int , closure : @ escaping ( Int ) - > Int ) throws { } <nl> <nl> @ objc ( method6 ) func method6 ( ) throws { } / / expected - error { { @ objc ' method name provides names for 0 
arguments , but method has one parameter ( the error parameter ) } } <nl> <nl> class ThrowsObjCName { <nl> <nl> / / CHECK - DUMP : func_decl " method8 ( _ : fn1 : fn2 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 2 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> @ objc ( method8 : fn1 : error : fn2 : ) <nl> - func method8 ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int ) throws { } <nl> + func method8 ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int ) throws { } <nl> <nl> / / CHECK - DUMP : func_decl " method9 ( _ : fn1 : fn2 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 0 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> @ objc ( method9AndReturnError : s : fn1 : fn2 : ) <nl> - func method9 ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int ) throws { } <nl> + func method9 ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int ) throws { } <nl> } <nl> <nl> class SubclassThrowsObjCName : ThrowsObjCName { <nl> / / CHECK - DUMP : func_decl " method8 ( _ : fn1 : fn2 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 2 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> - override func method8 ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int ) throws { } <nl> + override func method8 ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int ) throws { } <nl> <nl> / / CHECK - DUMP : func_decl " method9 ( _ : fn1 : fn2 : ) " { { . * } } foreign_error = ZeroResult , unowned , param = 0 , paramtype = Optional < AutoreleasingUnsafeMutablePointer < Optional < NSError > > > , resulttype = Bool <nl> - override func method9 ( _ s : String , fn1 : ( ( Int ) - > Int ) , fn2 : ( Int ) - > Int ) throws { } <nl> + override func method9 ( _ s : String , fn1 : ( @ escaping ( Int ) - > Int ) , fn2 : @ escaping ( Int ) - > Int ) throws { } <nl> } <nl> <nl> @ objc protocol ProtocolThrowsObjCName { <nl> mmm a / test / attr / attr_override . swift <nl> ppp b / test / attr / attr_override . swift <nl> class MismatchOptionalBase { <nl> <nl> func fixSeveralTypes ( a : Int ? , b : Int ! ) - > Int { return 0 } <nl> <nl> - func functionParam ( x : ( ( Int ) - > Int ) ? ) { } <nl> + func functionParam ( x : ( @ escaping ( Int ) - > Int ) ? ) { } <nl> func tupleParam ( x : ( Int , Int ) ? ) { } <nl> <nl> func nameAndTypeMismatch ( label : Int ? ) { } <nl> class MismatchOptional : MismatchOptionalBase { <nl> / / expected - error @ - 2 { { cannot override instance method parameter of type ' Int ! ' with non - optional type ' Int ' } } { { 47 - 47 = ? } } <nl> / / expected - error @ - 3 { { cannot override instance method result type ' Int ' with optional type ' Int ! ' } } { { 55 - 56 = } } <nl> <nl> - override func functionParam ( x : ( Int ) - > Int ) { } / / expected - error { { cannot override instance method parameter of type ' ( ( Int ) - > Int ) ? ' with non - optional type ' ( Int ) - > Int ' } } { { 34 - 34 = ( } } { { 46 - 46 = ) ? } } <nl> + override func functionParam ( x : @ escaping ( Int ) - > Int ) { } / / expected - error { { cannot override instance method parameter of type ' ( ( Int ) - > Int ) ? ' with non - optional type ' ( Int ) - > Int ' } } { { 34 - 34 = ( } } { { 56 - 56 = ) ? 
} } <nl> override func tupleParam ( x : ( Int , Int ) ) { } / / expected - error { { cannot override instance method parameter of type ' ( Int , Int ) ? ' with non - optional type ' ( Int , Int ) ' } } { { 41 - 41 = ? } } <nl> <nl> override func nameAndTypeMismatch ( _ : Int ) { } <nl> mmm a / test / decl / func / default - values . swift <nl> ppp b / test / decl / func / default - values . swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> - var func5 : ( fn : ( Int , Int ) - > ( ) ) - > ( ) <nl> + var func5 : ( _ fn : ( Int , Int ) - > ( ) ) - > ( ) <nl> <nl> / / Default arguments for functions . <nl> func foo3 ( a : Int = 2 , b : Int = 3 ) { } <nl> mmm a / test / decl / func / keyword - argument - defaults . swift <nl> ppp b / test / decl / func / keyword - argument - defaults . swift <nl> struct Subscripts2 { <nl> <nl> <nl> func f4 ( _ a : Int ) - > ( Int ) - > ( ) { return { b in ( ) } } <nl> - func f5 ( _ a : Int ) - > ( b : Int ) - > ( ) { return { b in ( ) } } <nl> + func f5 ( _ a : Int ) - > ( _ b : Int ) - > ( ) { return { b in ( ) } } <nl> <nl> func testFunctions ( _ i : Int , x : X ) { <nl> f4 ( i ) ( i ) <nl> f4 ( i ) ( b : i ) / / expected - error { { extraneous argument label ' b : ' in call } } { { 9 - 12 = } } <nl> - f5 ( i ) ( i ) / / expected - error { { missing argument label ' b : ' in call } } { { 9 - 9 = b : } } <nl> - f5 ( i ) ( b : i ) <nl> + f5 ( i ) ( i ) <nl> + f5 ( i ) ( b : i ) / / expected - error { { extraneous argument label ' b : ' in call } } { { 9 - 12 = } } <nl> } <nl> <nl> struct Y { <nl> func m0 ( _ a : Int ) - > ( Int ) - > ( ) { return { b in ( ) } } <nl> - func m1 ( _ a : Int ) - > ( b : Int ) - > ( ) { return { b in ( ) } } <nl> + func m1 ( _ a : Int ) - > ( _ b : Int ) - > ( ) { return { b in ( ) } } <nl> <nl> func m2 ( _ a : Int ) - > ( Int , Int ) - > ( ) { return { b , c in ( ) } } <nl> - func m3 ( _ a : Int ) - > ( b : Int , c2 : Int ) - > ( ) { return { b , c in ( ) } } <nl> + func m3 ( _ a : Int ) - > ( _ b : Int , _ c2 : Int ) - > ( ) { return { b , c in ( ) } } <nl> <nl> subscript ( x : Int ) - > Int { <nl> get { return x } <nl> struct Y { <nl> func testMethods ( _ i : Int , x : Y ) { <nl> x . m0 ( i ) ( i ) <nl> x . m0 ( i ) ( b : i ) / / expected - error { { extraneous argument label ' b : ' in call } } { { 11 - 14 = } } <nl> - x . m1 ( i ) ( i ) / / expected - error { { missing argument label ' b : ' in call } } { { 11 - 11 = b : } } <nl> - x . m1 ( i ) ( b : i ) <nl> + x . m1 ( i ) ( i ) <nl> + x . m1 ( i ) ( i ) <nl> x . m2 ( i ) ( i , c : i ) / / expected - error { { extraneous argument label ' c : ' in call } } { { 14 - 17 = } } <nl> x . m2 ( i ) ( i , i ) <nl> - x . m3 ( i ) ( b : i , i ) / / expected - error { { missing argument label ' c2 : ' in call } } { { 17 - 17 = c2 : } } <nl> - x . m3 ( i ) ( b : i , c2 : i ) <nl> + x . m3 ( i ) ( b : i , i ) / / expected - error { { extraneous argument label ' b : ' in call } } { { 11 - 14 = } } <nl> + x . 
m3 ( i ) ( b : i , c2 : i ) / / expected - error { { extraneous argument labels ' b : c2 : ' in call } } { { 11 - 14 = } } { { 17 - 21 = } } <nl> } <nl> <nl> func testSubscripts ( _ i : Int , s : String , x : Y ) { <nl> func testSubscripts ( _ i : Int , s : String , x : Y ) { <nl> func + ( _ a : String , <nl> b b : Double ) { } / / expected - error { { operator cannot have keyword arguments } } { { 8 - 10 = } } <nl> <nl> - func + ( a : Double , b : String ) - > ( Int ) - > ( d : Int ) - > ( ) { <nl> + func + ( a : Double , b : String ) - > ( Int ) - > ( _ d : Int ) - > ( ) { <nl> return { c in { e in ( ) } } <nl> } <nl> mmm a / test / decl / func / keyword - argument - labels . swift <nl> ppp b / test / decl / func / keyword - argument - labels . swift <nl> struct SomeType { <nl> class SomeClass { } <nl> <nl> / / Function types . <nl> - typealias functionType = ( in : SomeRange ) - > Bool <nl> + typealias functionType = ( _ in : SomeRange ) - > Bool <nl> <nl> / / Calls <nl> func testCalls ( _ range : SomeRange ) { <nl> mmm a / test / decl / func / throwing_functions . swift <nl> ppp b / test / decl / func / throwing_functions . swift <nl> func partialApply2 < T : Parallelogram > ( _ t : T ) { <nl> func barG < T > ( _ t : T ) throws - > T { return t } <nl> func fooG < T > ( _ t : T ) - > T { return t } <nl> <nl> - var bGE : ( i : Int ) - > Int = barG / / expected - error { { invalid conversion from throwing function of type ' ( _ ) throws - > _ ' to non - throwing function type ' ( i : Int ) - > Int ' } } <nl> - var bg : ( i : Int ) throws - > Int = barG <nl> - var fG : ( i : Int ) throws - > Int = fooG <nl> + var bGE : ( _ i : Int ) - > Int = barG / / expected - error { { invalid conversion from throwing function of type ' ( _ ) throws - > _ ' to non - throwing function type ' ( Int ) - > Int ' } } <nl> + var bg : ( _ i : Int ) throws - > Int = barG <nl> + var fG : ( _ i : Int ) throws - > Int = fooG <nl> <nl> func fred ( _ callback : ( UInt8 ) throws - > ( ) ) throws { } <nl> <nl> func testSubtypeResult2 ( _ x1 : ( String ) - > ( ( Int ) throws - > String ) , <nl> subtypeResult2 ( x2 ) <nl> } <nl> <nl> - func subtypeArgument1 ( _ x : ( fn : ( ( String ) - > Int ) ) - > Int ) { } <nl> - func testSubtypeArgument1 ( _ x1 : ( fn : ( ( String ) - > Int ) ) - > Int , <nl> - x2 : ( fn : ( ( String ) throws - > Int ) ) - > Int ) { <nl> + func subtypeArgument1 ( _ x : ( _ fn : ( ( String ) - > Int ) ) - > Int ) { } <nl> + func testSubtypeArgument1 ( _ x1 : ( _ fn : ( ( String ) - > Int ) ) - > Int , <nl> + x2 : ( _ fn : ( ( String ) throws - > Int ) ) - > Int ) { <nl> subtypeArgument1 ( x1 ) <nl> subtypeArgument1 ( x2 ) <nl> } <nl> <nl> - func subtypeArgument2 ( _ x : ( fn : ( ( String ) throws - > Int ) ) - > Int ) { } <nl> - func testSubtypeArgument2 ( _ x1 : ( fn : ( ( String ) - > Int ) ) - > Int , <nl> - x2 : ( fn : ( ( String ) throws - > Int ) ) - > Int ) { <nl> - subtypeArgument2 ( x1 ) / / expected - error { { cannot convert value of type ' ( fn : ( ( String ) - > Int ) ) - > Int ' to expected argument type ' ( fn : ( ( String ) throws - > Int ) ) - > Int ' } } <nl> + func subtypeArgument2 ( _ x : ( _ fn : ( ( String ) throws - > Int ) ) - > Int ) { } <nl> + func testSubtypeArgument2 ( _ x1 : ( _ fn : ( ( String ) - > Int ) ) - > Int , <nl> + x2 : ( _ fn : ( ( String ) throws - > Int ) ) - > Int ) { <nl> + subtypeArgument2 ( x1 ) / / expected - error { { cannot convert value of type ' ( ( @ escaping ( String ) - > Int ) ) - > Int ' to expected argument type ' ( ( @ escaping ( String ) 
throws - > Int ) ) - > Int ' } } <nl> subtypeArgument2 ( x2 ) <nl> } <nl> <nl> mmm a / test / decl / protocol / conforms / fixit_stub . swift <nl> ppp b / test / decl / protocol / conforms / fixit_stub . swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> protocol Protocol1 { <nl> - func foo ( arg1 : Int , arg2 : String ) - > String / / expected - note { { protocol requires function ' foo ( arg1 : arg2 : ) ' with type ' ( arg1 : Int , arg2 : String ) - > String ' ; do you want to add a stub ? } } { { 27 - 27 = \ n internal func foo ( arg1 : Int , arg2 : String ) - > String { \ n < # code # > \ n \ } \ n } } <nl> + func foo ( arg1 : Int , arg2 : String ) - > String / / expected - note { { protocol requires function ' foo ( arg1 : arg2 : ) ' with type ' ( Int , String ) - > String ' ; do you want to add a stub ? } } { { 27 - 27 = \ n internal func foo ( arg1 : Int , arg2 : String ) - > String { \ n < # code # > \ n \ } \ n } } <nl> func bar ( ) throws - > String / / expected - note { { protocol requires function ' bar ( ) ' with type ' ( ) throws - > String ' ; do you want to add a stub ? } } { { 27 - 27 = \ n internal func bar ( ) throws - > String { \ n < # code # > \ n \ } \ n } } <nl> init ( arg : Int ) / / expected - note { { protocol requires initializer ' init ( arg : ) ' with type ' ( arg : Int ) ' ; do you want to add a stub ? } } { { 27 - 27 = \ n internal init ( arg : Int ) { \ n < # code # > \ n \ } \ n } } <nl> var baz : Int { get } / / expected - note { { protocol requires property ' baz ' with type ' Int ' ; do you want to add a stub ? } } { { 27 - 27 = \ n internal var baz : Int \ n } } <nl> class Adopter : Protocol1 { / / expected - error { { type ' Adopter ' does not conform to <nl> <nl> <nl> protocol Protocol2 { <nl> - func foo ( arg1 : Int , arg2 : String ) - > String / / expected - note { { protocol requires function ' foo ( arg1 : arg2 : ) ' with type ' ( arg1 : Int , arg2 : String ) - > String ' ; do you want to add a stub ? } } { { 32 - 32 = \ n internal func foo ( arg1 : Int , arg2 : String ) - > String { \ n < # code # > \ n \ } \ n } } <nl> + func foo ( arg1 : Int , arg2 : String ) - > String / / expected - note { { protocol requires function ' foo ( arg1 : arg2 : ) ' with type ' ( Int , String ) - > String ' ; do you want to add a stub ? } } { { 32 - 32 = \ n internal func foo ( arg1 : Int , arg2 : String ) - > String { \ n < # code # > \ n \ } \ n } } <nl> func bar ( ) throws - > String / / expected - note { { protocol requires function ' bar ( ) ' with type ' ( ) throws - > String ' ; do you want to add a stub ? } } { { 32 - 32 = \ n internal func bar ( ) throws - > String { \ n < # code # > \ n \ } \ n } } <nl> init ( arg : Int ) / / expected - note { { protocol requires initializer ' init ( arg : ) ' with type ' ( arg : Int ) ' ; do you want to add a stub ? } } { { 32 - 32 = \ n internal init ( arg : Int ) { \ n < # code # > \ n \ } \ n } } <nl> var baz : Int { get } / / expected - note { { protocol requires property ' baz ' with type ' Int ' ; do you want to add a stub ? } } { { 32 - 32 = \ n internal var baz : Int { \ n < # code # > \ n \ } \ n } } <nl> extension Adopter4 : ProtocolWithAssocType2 { / / expected - error { { type ' Adopter4 ' d <nl> <nl> protocol ProtocolWithSelfRequirement { <nl> func foo ( ) - > Self / / expected - note { { protocol requires function ' foo ( ) ' with type ' ( ) - > Self ' ; do you want to add a stub ? 
} } { { 47 - 47 = \ n internal func foo ( ) - > Adopter5 { \ n < # code # > \ n \ } \ n } } <nl> - func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( lhs : Adopter5 , rhs : Adopter5 ) - > Self ' ; do you want to add a stub ? } } { { 47 - 47 = \ n internal func foo ( lhs : Adopter5 , rhs : Adopter5 ) - > Adopter5 { \ n < # code # > \ n \ } \ n } } <nl> + func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( Adopter5 , Adopter5 ) - > Self ' ; do you want to add a stub ? } } { { 47 - 47 = \ n internal func foo ( lhs : Adopter5 , rhs : Adopter5 ) - > Adopter5 { \ n < # code # > \ n \ } \ n } } <nl> } <nl> <nl> struct Adopter5 : ProtocolWithSelfRequirement { / / expected - error { { type ' Adopter5 ' does not conform to protocol ' ProtocolWithSelfRequirement ' } } <nl> struct Adopter5 : ProtocolWithSelfRequirement { / / expected - error { { type ' Adopter5 ' <nl> <nl> protocol ProtocolWithSelfRequirement2 { <nl> func foo ( ) - > Self / / expected - note { { protocol requires function ' foo ( ) ' with type ' ( ) - > Self ' ; do you want to add a stub ? } } { { 51 - 51 = \ n internal func foo ( ) - > Adopter6 { \ n < # code # > \ n \ } \ n } } <nl> - func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( lhs : Adopter6 , rhs : Adopter6 ) - > Self ' ; do you want to add a stub ? } } { { 51 - 51 = \ n internal func foo ( lhs : Adopter6 , rhs : Adopter6 ) - > Adopter6 { \ n < # code # > \ n \ } \ n } } <nl> + func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( Adopter6 , Adopter6 ) - > Self ' ; do you want to add a stub ? } } { { 51 - 51 = \ n internal func foo ( lhs : Adopter6 , rhs : Adopter6 ) - > Adopter6 { \ n < # code # > \ n \ } \ n } } <nl> } <nl> <nl> struct Adopter6 { } <nl> extension Adopter6 : ProtocolWithSelfRequirement2 { / / expected - error { { type ' Adopt <nl> <nl> protocol ProtocolWithSelfRequirement3 { <nl> func foo ( ) - > Self / / expected - note { { protocol requires function ' foo ( ) ' with type ' ( ) - > Self ' ; do you want to add a stub ? } } { { 47 - 47 = \ n internal func foo ( ) - > Self { \ n < # code # > \ n \ } \ n } } <nl> - func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( lhs : Adopter7 , rhs : Adopter7 ) - > Self ' ; do you want to add a stub ? } } { { 47 - 47 = \ n internal func foo ( lhs : Adopter7 , rhs : Adopter7 ) - > Self { \ n < # code # > \ n \ } \ n } } <nl> + func foo ( lhs : Self , rhs : Self ) - > Self / / expected - note { { protocol requires function ' foo ( lhs : rhs : ) ' with type ' ( Adopter7 , Adopter7 ) - > Self ' ; do you want to add a stub ? } } { { 47 - 47 = \ n internal func foo ( lhs : Adopter7 , rhs : Adopter7 ) - > Self { \ n < # code # > \ n \ } \ n } } <nl> } <nl> <nl> class Adopter7 : ProtocolWithSelfRequirement3 { / / expected - error { { type ' Adopter7 ' does not conform to protocol ' ProtocolWithSelfRequirement3 ' } } <nl> mmm a / test / decl / protocol / conforms / inherited . swift <nl> ppp b / test / decl / protocol / conforms / inherited . swift <nl> protocol P10 { <nl> <nl> / / Never inheritable : method with ' Self ' in curried position . 
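[Editor's note] The test updates around this point all reflect the Swift 3 rule that function types no longer carry argument labels, which is why the expected diagnostics now print types such as '( Int , String ) - > String' without labels. A minimal sketch of that rule follows, using hypothetical names that are not taken from the diff: parameter names inside a function type must be spelled with a leading underscore, and a value of such a type is called with unlabeled arguments.

func makeAdder() -> (_ x: Int, _ y: Int) -> Int {
    // 'x' and 'y' are purely documentary; the type itself is (Int, Int) -> Int.
    return { x, y in x + y }
}

let add = makeAdder()
let three = add(1, 2)   // calls through the value take no labels; 'add(x: 1, y: 2)' is rejected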
<nl> protocol P11 { <nl> - func f11 ( ) - > ( x : Self ) - > Int <nl> + func f11 ( ) - > ( _ x : Self ) - > Int <nl> } <nl> <nl> / / Inheritable : parameter is a function returning ' Self ' . <nl> class A : P1 , P2 , P3 , P4 , P5 , P6 , P7 , P8 , P9 , P10 { <nl> func f10 ( _ arr : [ A ] ) { } / / expected - error { { protocol ' P10 ' requirement ' f10 ' cannot be satisfied by a non - final class ( ' A ' ) because it uses ' Self ' in a non - parameter , non - result type position } } <nl> <nl> / / P11 <nl> - func f11 ( ) - > ( x : A ) - > Int { return { x in 5 } } <nl> + func f11 ( ) - > ( _ x : A ) - > Int { return { x in 5 } } <nl> } <nl> <nl> / / P9 <nl> final class A9 : P1 , P2 , P3 , P4 , P5 , P6 , P7 , P8 , P9 , P10 { <nl> func f10 ( _ arr : [ A9 ] ) { } <nl> <nl> / / P11 <nl> - func f11 ( ) - > ( x : A9 ) - > Int { return { x in 5 } } <nl> + func f11 ( ) - > ( _ x : A9 ) - > Int { return { x in 5 } } <nl> } <nl> <nl> / / P9 <nl> mmm a / test / decl / protocol / conforms / near_miss_objc . swift <nl> ppp b / test / decl / protocol / conforms / near_miss_objc . swift <nl> class C6a : P6 { <nl> class C7a : P7 { <nl> @ objc func method ( foo : Double ) { } <nl> / / expected - warning @ - 1 { { instance method ' method ( foo : ) ' nearly matches optional requirement ' method ( foo : ) ' of protocol ' P7 ' } } <nl> - / / expected - note @ - 2 { { candidate has non - matching type ' ( foo : Double ) - > ( ) ' } } <nl> + / / expected - note @ - 2 { { candidate has non - matching type ' ( Double ) - > ( ) ' } } <nl> / / expected - note @ - 3 { { move ' method ( foo : ) ' to an extension to silence this warning } } <nl> / / expected - note @ - 4 { { make ' method ( foo : ) ' private to silence this warning } } <nl> } <nl> mmm a / test / decl / protocol / protocols . swift <nl> ppp b / test / decl / protocol / protocols . swift <nl> class NotPrintableC : CustomStringConvertible , Any { } / / expected - error { { type ' N <nl> enum NotPrintableO : Any , CustomStringConvertible { } / / expected - error { { type ' NotPrintableO ' does not conform to protocol ' CustomStringConvertible ' } } <nl> <nl> struct NotFormattedPrintable : FormattedPrintable { / / expected - error { { type ' NotFormattedPrintable ' does not conform to protocol ' CustomStringConvertible ' } } <nl> - func print ( format : TestFormat ) { } / / expected - note { { candidate has non - matching type ' ( format : TestFormat ) - > ( ) ' } } <nl> + func print ( format : TestFormat ) { } / / expected - note { { candidate has non - matching type ' ( TestFormat ) - > ( ) ' } } <nl> } <nl> <nl> / / Circular protocols <nl> struct HasNoDefaultArg : ProtoWithDefaultArg { <nl> / / Variadic function requirements <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> protocol IntMaxable { <nl> - func intmax ( first : Int , rest : Int . . . ) - > Int / / expected - note 2 { { protocol requires function ' intmax ( first : rest : ) ' with type ' ( first : Int , rest : Int . . . ) - > Int ' } } <nl> + func intmax ( first : Int , rest : Int . . . ) - > Int / / expected - note 2 { { protocol requires function ' intmax ( first : rest : ) ' with type ' ( Int , Int . . . 
) - > Int ' } } <nl> } <nl> <nl> struct HasIntMax : IntMaxable { <nl> struct HasIntMax : IntMaxable { <nl> } <nl> <nl> struct NotIntMax1 : IntMaxable { / / expected - error { { type ' NotIntMax1 ' does not conform to protocol ' IntMaxable ' } } <nl> - func intmax ( first : Int , rest : [ Int ] ) - > Int { } / / expected - note { { candidate has non - matching type ' ( first : Int , rest : [ Int ] ) - > Int ' } } <nl> + func intmax ( first : Int , rest : [ Int ] ) - > Int { } / / expected - note { { candidate has non - matching type ' ( Int , [ Int ] ) - > Int ' } } <nl> } <nl> <nl> struct NotIntMax2 : IntMaxable { / / expected - error { { type ' NotIntMax2 ' does not conform to protocol ' IntMaxable ' } } <nl> - func intmax ( first : Int , rest : Int ) - > Int { } / / expected - note { { candidate has non - matching type ' ( first : Int , rest : Int ) - > Int ' } } <nl> + func intmax ( first : Int , rest : Int ) - > Int { } / / expected - note { { candidate has non - matching type ' ( Int , Int ) - > Int ' } } <nl> } <nl> <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> / / ' Self ' type <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> protocol IsEqualComparable { <nl> - func isEqual ( other : Self ) - > Bool / / expected - note { { protocol requires function ' isEqual ( other : ) ' with type ' ( other : WrongIsEqual ) - > Bool ' } } <nl> + func isEqual ( other : Self ) - > Bool / / expected - note { { protocol requires function ' isEqual ( other : ) ' with type ' ( WrongIsEqual ) - > Bool ' } } <nl> } <nl> <nl> struct HasIsEqual : IsEqualComparable { <nl> struct HasIsEqual : IsEqualComparable { <nl> } <nl> <nl> struct WrongIsEqual : IsEqualComparable { / / expected - error { { type ' WrongIsEqual ' does not conform to protocol ' IsEqualComparable ' } } <nl> - func isEqual ( other : Int ) - > Bool { } / / expected - note { { candidate has non - matching type ' ( other : Int ) - > Bool ' } } <nl> + func isEqual ( other : Int ) - > Bool { } / / expected - note { { candidate has non - matching type ' ( Int ) - > Bool ' } } <nl> } <nl> <nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> mmm a / test / decl / protocol / req / associated_type_inference . swift <nl> ppp b / test / decl / protocol / req / associated_type_inference . swift <nl> public protocol Thenable { <nl> } <nl> <nl> public class CorePromise < T > : Thenable { / / expected - error { { type ' CorePromise < T > ' does not conform to protocol ' Thenable ' } } <nl> - public func then ( _ success : ( t : T , _ : CorePromise < T > ) - > T ) - > Self { <nl> + public func then ( _ success : @ escaping ( _ t : T , _ : CorePromise < T > ) - > T ) - > Self { <nl> return self . then ( ) { ( t : T ) - > T in <nl> return success ( t : t , self ) <nl> } <nl> mmm a / test / decl / protocol / req / func . swift <nl> ppp b / test / decl / protocol / req / func . 
swift <nl> struct X2x : P2 { / / expected - error { { type ' X2x ' does not conform to protocol ' P2 <nl> / / Mismatch in parameter types <nl> struct X2y : P2 { / / expected - error { { type ' X2y ' does not conform to protocol ' P2 ' } } <nl> typealias Assoc = X1a <nl> - func f1 ( x : X1b ) { } / / expected - note { { candidate has non - matching type ' ( x : X1b ) - > ( ) ' } } <nl> + func f1 ( x : X1b ) { } / / expected - note { { candidate has non - matching type ' ( X1b ) - > ( ) ' } } <nl> } <nl> <nl> / / Ambiguous deduction <nl> extension Int : Crankable { } <nl> / / Invalid witnesses . <nl> protocol P6 { <nl> func foo ( _ x : Int ) <nl> - func bar ( x : Int ) / / expected - note { { protocol requires function ' bar ( x : ) ' with type ' ( x : Int ) - > ( ) ' } } <nl> + func bar ( x : Int ) / / expected - note { { protocol requires function ' bar ( x : ) ' with type ' ( Int ) - > ( ) ' } } <nl> } <nl> struct X6 : P6 { / / expected - error { { type ' X6 ' does not conform to protocol ' P6 ' } } <nl> func foo ( _ x : Missing ) { } / / expected - error { { use of undeclared type ' Missing ' } } <nl> mmm a / test / expr / closure / basic . swift <nl> ppp b / test / expr / closure / basic . swift <nl> func attrs ( ) { <nl> <nl> / / Closures with argument and parameter names . <nl> func argAndParamNames ( ) - > Int { <nl> - let _ : ( x : Int , y : Int ) - > Int = { ( a x , b y ) in x + y } / / expected - error 2 { { closure cannot have keyword arguments } } <nl> - let f1 : ( x : Int , y : Int ) - > Int = { ( x , y ) in x + y } <nl> - _ = f1 ( x : 1 , y : 2 ) <nl> - return f1 ( x : 1 , y : 2 ) <nl> + let _ : ( _ x : Int , _ y : Int ) - > Int = { ( a x , b y ) in x + y } / / expected - error 2 { { closure cannot have keyword arguments } } <nl> + let f1 : ( _ x : Int , _ y : Int ) - > Int = { ( x , y ) in x + y } <nl> + _ = f1 ( 1 , 2 ) <nl> + return f1 ( 1 , 2 ) <nl> } <nl> mmm a / test / expr / closure / closures . swift <nl> ppp b / test / expr / closure / closures . 
swift <nl> <nl> / / RUN : % target - parse - verify - swift <nl> <nl> - var func6 : ( fn : ( Int , Int ) - > Int ) - > ( ) <nl> + var func6 : ( _ fn : ( Int , Int ) - > Int ) - > ( ) <nl> var func6a : ( ( Int , Int ) - > Int ) - > ( ) <nl> var func6b : ( Int , ( Int , Int ) - > Int ) - > ( ) <nl> func func6c ( _ f : ( Int , Int ) - > Int , _ n : Int = 0 ) { } / / expected - warning { { prior to parameters } } <nl> func funcdecl5 ( _ a : Int , _ y : Int ) { <nl> funcdecl4 ( { funcdecl3 ( ) } , 12 ) / / expected - error { { contextual type for closure argument list expects 1 argument , which cannot be implicitly ignored } } { { 14 - 14 = _ in } } <nl> <nl> <nl> - func6 ( fn : { $ 0 + $ 1 } ) / / Closure with two named anonymous arguments <nl> - func6 ( fn : { ( $ 0 ) + $ 1 } ) / / Closure with sequence expr inferred type <nl> - func6 ( fn : { ( $ 0 ) + $ 0 } ) / / / / expected - error { { binary operator ' + ' cannot be applied to two ' ( Int , Int ) ' operands } } expected - note { { expected an argument list of type ' ( Int , Int ) ' } } <nl> + func6 ( { $ 0 + $ 1 } ) / / Closure with two named anonymous arguments <nl> + func6 ( { ( $ 0 ) + $ 1 } ) / / Closure with sequence expr inferred type <nl> + func6 ( { ( $ 0 ) + $ 0 } ) / / / / expected - error { { binary operator ' + ' cannot be applied to two ' ( Int , Int ) ' operands } } expected - note { { expected an argument list of type ' ( Int , Int ) ' } } <nl> <nl> <nl> var testfunc : ( ( ) , Int ) - > Int <nl> func funcdecl5 ( _ a : Int , _ y : Int ) { <nl> <nl> <nl> / / rdar : / / 12017658 - Infer some argument types from func6 . <nl> - func6 ( fn : { a , b - > Int in a + b } ) <nl> + func6 ( { a , b - > Int in a + b } ) <nl> / / Return type inference . <nl> - func6 ( fn : { a , b in a + b } ) <nl> + func6 ( { a , b in a + b } ) <nl> <nl> / / Infer incompatible type . <nl> - func6 ( fn : { a , b - > Float in 4 . 0 } ) / / expected - error { { declared closure result ' Float ' is incompatible with contextual type ' Int ' } } { { 21 - 26 = Int } } / / Pattern doesn ' t need to name arguments . <nl> - func6 ( fn : { _ , _ in 4 } ) <nl> + func6 ( { a , b - > Float in 4 . 0 } ) / / expected - error { { declared closure result ' Float ' is incompatible with contextual type ' Int ' } } { { 17 - 22 = Int } } / / Pattern doesn ' t need to name arguments . <nl> + func6 ( { _ , _ in 4 } ) <nl> <nl> - func6 ( fn : { a , b in 4 . 0 } ) / / expected - error { { cannot convert value of type ' Double ' to closure result type ' Int ' } } <nl> + func6 ( { a , b in 4 . 
0 } ) / / expected - error { { cannot convert value of type ' Double ' to closure result type ' Int ' } } <nl> <nl> / / TODO : This diagnostic can be improved : rdar : / / 22128205 <nl> - func6 ( fn : { ( a : Float , b ) in 4 } ) / / expected - error { { cannot convert value of type ' ( Float , _ ) - > Int ' to expected argument type ' ( Int , Int ) - > Int ' } } <nl> + func6 ( { ( a : Float , b ) in 4 } ) / / expected - error { { cannot convert value of type ' ( Float , _ ) - > Int ' to expected argument type ' ( Int , Int ) - > Int ' } } <nl> <nl> <nl> <nl> func anonymousClosureArgsInClosureWithArgs ( ) { <nl> var a3 = { ( z : Int ) in $ 0 } / / expected - error { { anonymous closure arguments cannot be used inside a closure that has explicit arguments } } <nl> } <nl> <nl> - func doStuff ( _ fn : ( ) - > Int ) { } <nl> - func doVoidStuff ( _ fn : ( ) - > ( ) ) { } <nl> + func doStuff ( _ fn : @ escaping ( ) - > Int ) { } <nl> + func doVoidStuff ( _ fn : @ escaping ( ) - > ( ) ) { } <nl> <nl> / / < rdar : / / problem / 16193162 > Require specifying self for locations in code where strong reference cycles are likely <nl> class ExplicitSelfRequiredTest { <nl> mmm a / test / expr / closure / default_args . swift <nl> ppp b / test / expr / closure / default_args . swift <nl> <nl> func simple_default_args ( ) { <nl> / / < rdar : / / problem / 22753605 > QoI : bad diagnostic when closure has default argument <nl> let _ : ( Int ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { default arguments are not allowed in closures } } { { 36 - 39 = } } <nl> - let _ : ( ) - > Int = { ( x : Int = 1 ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } expected - error { { default arguments are not allowed in closures } } { { 33 - 36 = } } <nl> - let _ : ( ) - > Int = { ( x : Int ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } <nl> + let _ : ( ) - > Int = { ( _ x : Int = 1 ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } expected - error { { default arguments are not allowed in closures } } { { 35 - 38 = } } <nl> + let _ : ( ) - > Int = { ( _ x : Int ) in x + 1 } / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } <nl> } <nl> <nl> func func_default_args ( ) { <nl> func has_default_args ( x : Int = 1 ) - > Int { return x + 1 } <nl> var _ : ( Int ) - > Int = has_default_args / / okay <nl> - var _ : ( ) - > Int = has_default_args / / expected - error { { cannot convert value of type ' ( x : Int ) - > Int ' to specified type ' ( ) - > Int ' } } <nl> + var _ : ( ) - > Int = has_default_args / / expected - error { { cannot convert value of type ' ( Int ) - > Int ' to specified type ' ( ) - > Int ' } } <nl> } <nl> <nl> mmm a / test / expr / closure / trailing . swift <nl> ppp b / test / expr / closure / trailing . swift <nl> func labeledArgumentAndTrailingClosure ( ) { <nl> <nl> / / rdar : / / problem / 17965209 <nl> func rdar17965209_f < T > ( _ t : T ) { } <nl> - func rdar17965209 ( x : Int = 0 , _ handler : ( y : Int ) - > ( ) ) { } <nl> + func rdar17965209 ( x : Int = 0 , _ handler : ( _ y : Int ) - > ( ) ) { } <nl> func rdar17965209_test ( ) { <nl> rdar17965209 ( ) { <nl> ( y ) - > ( ) in <nl> mmm a / test / expr / expressions . swift <nl> ppp b / test / expr / expressions . 
swift <nl> var func1 : ( ) - > ( ) / / No input , no output . <nl> var func2 : ( Int ) - > Int <nl> var func3 : ( ) - > ( ) - > ( ) / / Takes nothing , returns a fn . <nl> var func3a : ( ) - > ( ( ) - > ( ) ) / / same as func3 <nl> - var func6 : ( fn : ( Int , Int ) - > Int ) - > ( ) / / Takes a fn , returns nothing . <nl> + var func6 : ( _ fn : ( Int , Int ) - > Int ) - > ( ) / / Takes a fn , returns nothing . <nl> var func7 : ( ) - > ( Int , Int , Int ) / / Takes nothing , returns tuple . <nl> <nl> / / Top - Level expressions . These are ' main ' content . <nl> func test3 ( _ arg1 : Int , arg2 : Int ) - > Int { <nl> return 4 <nl> } <nl> <nl> - func test4 ( ) - > ( ( arg1 : Int , arg2 : Int ) - > Int ) { <nl> + func test4 ( ) - > ( ( _ arg1 : Int , _ arg2 : Int ) - > Int ) { <nl> return test3 <nl> } <nl> <nl> func w4 ( _ : Int ) - > Int { return 4 } <nl> func b1 ( ) { } <nl> <nl> func foo1 ( _ a : Int , b : Int ) - > Int { } <nl> - func foo2 ( _ a : Int ) - > ( b : Int ) - > Int { } <nl> + func foo2 ( _ a : Int ) - > ( _ b : Int ) - > Int { } <nl> func foo3 ( _ a : Int = 2 , b : Int = 3 ) { } <nl> <nl> prefix operator ^ ^ <nl> mmm a / test / expr / postfix / call / default_args . swift <nl> ppp b / test / expr / postfix / call / default_args . swift <nl> <nl> <nl> func foo ( x : Int = 0 ) { } <nl> let bar = foo <nl> - bar ( ) / / expected - error { { missing argument for parameter ' x ' } } <nl> + bar ( ) / / expected - error { { missing argument for parameter # 1 } } <nl> <nl> func foo2 ( _ x : Int = 0 ) { } <nl> let baz = foo2 <nl> mmm a / test / expr / primary / unqualified_name . swift <nl> ppp b / test / expr / primary / unqualified_name . swift <nl> class C0 { <nl> init ( x : Int , y : Int , z : Int ) { } <nl> <nl> convenience init ( all : Int ) { <nl> - self . init ( x : y : z : ) ( x : all , y : all , z : all ) <nl> + self . init ( x : y : z : ) ( all , all , all ) <nl> } <nl> <nl> func f0 ( _ x : Int , y : Int , z : Int ) { } <nl> class C0 { <nl> <nl> class C1 : C0 { <nl> init ( all : Int ) { <nl> - super . init ( x : y : z : ) ( x : all , y : all , z : all ) <nl> + super . init ( x : y : z : ) ( all , all , all ) <nl> } <nl> <nl> func testC0 ( ) { <nl> mmm a / tools / swift - ide - test / swift - ide - test . cpp <nl> ppp b / tools / swift - ide - test / swift - ide - test . cpp <nl> static llvm : : cl : : opt < bool > <nl> NoEmptyLineBetweenMembers ( " no - empty - line - between - members " , <nl> llvm : : cl : : desc ( " Print no empty line between members . " ) , <nl> llvm : : cl : : init ( false ) ) ; <nl> + <nl> + static llvm : : cl : : opt < bool > DebugConstraintSolver ( " debug - constraints " , <nl> + llvm : : cl : : desc ( " Enable verbose debugging from the constraint solver . " ) ) ; <nl> } / / namespace options <nl> <nl> static std : : unique_ptr < llvm : : MemoryBuffer > <nl> int main ( int argc , char * argv [ ] ) { <nl> InitInvok . getLangOptions ( ) . EnableObjCAttrRequiresFoundation = <nl> ! options : : DisableObjCAttrRequiresFoundationModule ; <nl> <nl> + InitInvok . getLangOptions ( ) . DebugConstraintSolver = <nl> + options : : DebugConstraintSolver ; <nl> + <nl> for ( auto ConfigName : options : : BuildConfigs ) <nl> InitInvok . getLangOptions ( ) . addCustomConditionalCompilationFlag ( ConfigName ) ; <nl> <nl> mmm a / utils / build - presets . ini <nl> ppp b / utils / build - presets . 
ini <nl> reconfigure <nl> compiler - vendor = apple <nl> swift - install - components = swift - remote - mirror <nl> install - destdir = % ( install_destdir ) s <nl> + darwin - install - extract - symbols = 1 <nl> skip - build - cmark <nl> skip - build - llvm <nl> skip - build - llbuild <nl> build - swift - dynamic - stdlib = 0 <nl> build - swift - static - sdk - overlay = 0 <nl> build - swift - dynamic - sdk - overlay = 0 <nl> swift - include - tests = 0 <nl> + llvm - include - tests = 0 <nl> <nl> [ preset : remote_mirror_ios_customization ] <nl> <nl> mmm a / utils / build - script - impl <nl> ppp b / utils / build - script - impl <nl> for host in " $ { ALL_HOSTS [ @ ] } " ; do <nl> - DCLANG_TOOL_CLANG_FORMAT_BUILD = NO <nl> ) <nl> fi <nl> + <nl> + if [ [ $ ( true_false " $ { LLVM_INCLUDE_TESTS } " ) = = " FALSE " ] ] ; then <nl> + cmake_options + = ( <nl> + - DLLVM_INCLUDE_TESTS = NO <nl> + - DCLANG_INCLUDE_TESTS = NO <nl> + ) <nl> + fi <nl> <nl> if [ [ $ ( is_cross_tools_host $ { host } ) ] ] ; then <nl> cmake_options = ( <nl> mmm a / validation - test / compiler_crashers_fixed / 00023 - getcallerdefaultarg . swift <nl> ppp b / validation - test / compiler_crashers_fixed / 00023 - getcallerdefaultarg . swift <nl> <nl> func a ( b : Int = 0 ) { <nl> } <nl> let c = a <nl> - c ( ) / / expected - error { { missing argument for parameter ' b ' in call } } <nl> + c ( ) / / expected - error { { missing argument for parameter # 1 in call } } <nl> new file mode 100644 <nl> index 000000000000 . . 0eb56d678a14 <nl> mmm / dev / null <nl> ppp b / validation - test / execution / Inputs / dsohandle - first . swift <nl> <nl> + public func getFirstDSOHandle ( ) - > UnsafeRawPointer { <nl> + return # dsohandle <nl> + } <nl> new file mode 100644 <nl> index 000000000000 . . fd01de74f535 <nl> mmm / dev / null <nl> ppp b / validation - test / execution / Inputs / dsohandle - second . swift <nl> <nl> + public func getSecondDSOHandle ( ) - > UnsafeRawPointer { <nl> + return # dsohandle <nl> + } <nl> new file mode 100644 <nl> index 000000000000 . . eef1a5d1d641 <nl> mmm / dev / null <nl> ppp b / validation - test / execution / dsohandle - multi - module . swift <nl> <nl> + / / RUN : rm - rf % t & & mkdir % t <nl> + <nl> + / / RUN : ( cd % t & & % target - build - swift % S / Inputs / dsohandle - first . swift - emit - library - emit - module - module - name first ) <nl> + / / RUN : ( cd % t & & % target - build - swift % S / Inputs / dsohandle - second . swift - emit - library - emit - module - module - name second ) <nl> + / / RUN : % target - build - swift - I % t - L % t - lfirst - lsecond % s - o % t / main <nl> + / / RUN : env LD_LIBRARY_PATH = % t DYLD_LIBRARY_PATH = % t % target - run % t / main <nl> + / / REQUIRES : executable_test <nl> + <nl> + import first <nl> + import second <nl> + <nl> + import StdlibUnittest <nl> + <nl> + let DSOHandleTests = TestSuite ( " DSOHandle " ) <nl> + <nl> + DSOHandleTests . test ( " Unique handles for different images " ) { <nl> + let firstHandle = getFirstDSOHandle ( ) <nl> + let secondHandle = getSecondDSOHandle ( ) <nl> + expectNotEqual ( firstHandle , secondHandle ) <nl> + } <nl> + <nl> + runAllTests ( ) <nl> mmm a / validation - test / stdlib / ArrayNew . swift . gyb <nl> ppp b / validation - test / stdlib / ArrayNew . swift . gyb <nl> var ArrayTestSuite = TestSuite ( " Array " ) <nl> / / FIXME : incomplete . 
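[Editor's note] Many of the signatures in this hunk gain '@ escaping' because Swift 3 makes closure parameters non-escaping by default. A minimal sketch of the distinction, with hypothetical names that are not part of the diff:

var savedHandlers: [() -> Void] = []

func register(_ handler: @escaping () -> Void) {
    // Storing the closure lets it outlive the call, so '@escaping' is required.
    savedHandlers.append(handler)
}

func runNow(_ handler: () -> Void) {
    // Only invoked before returning, so the default non-escaping convention suffices.
    handler()
}

Storing or assigning a non-escaping parameter, as in the 'doThing1' cases earlier in the diff, is exactly what the 'may only be called' family of diagnostics guards against.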
<nl> / / = = = mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm - = = = / / <nl> <nl> - func withInoutInt ( _ x : inout Int , body : ( x : inout Int ) - > Void ) { <nl> - body ( x : & x ) <nl> + func withInoutInt ( _ x : inout Int , body : ( _ x : inout Int ) - > Void ) { <nl> + body ( & x ) <nl> } <nl> <nl> - func withInoutT < T > ( _ x : inout T , body : ( x : inout T ) - > Void ) { <nl> - body ( x : & x ) <nl> + func withInoutT < T > ( _ x : inout T , body : ( _ x : inout T ) - > Void ) { <nl> + body ( & x ) <nl> } <nl> <nl> % for element_type in [ ' TestValueTy ' , ' TestBridgedValueTy ' ] : <nl> mmm a / validation - test / stdlib / Arrays . swift . gyb <nl> ppp b / validation - test / stdlib / Arrays . swift . gyb <nl> var ArrayTestSuite = TestSuite ( " Array " ) <nl> ArrayTestSuite . test ( " sizeof " ) { <nl> var a = [ 10 , 20 , 30 ] <nl> # if arch ( i386 ) | | arch ( arm ) <nl> - expectEqual ( 4 , sizeofValue ( a ) ) <nl> + expectEqual ( 4 , MemoryLayout . _ofInstance ( a ) . size ) <nl> # else <nl> - expectEqual ( 8 , sizeofValue ( a ) ) <nl> + expectEqual ( 8 , MemoryLayout . _ofInstance ( a ) . size ) <nl> # endif <nl> } <nl> <nl> mmm a / validation - test / stdlib / CoreAudio . swift <nl> ppp b / validation - test / stdlib / CoreAudio . swift <nl> CoreAudioTestSuite . test ( <nl> } <nl> <nl> CoreAudioTestSuite . test ( " AudioBufferList . sizeInBytes ( maximumBuffers : Int ) " ) { <nl> - expectEqual ( ablHeaderSize + strideof ( AudioBuffer . self ) , <nl> + expectEqual ( ablHeaderSize + MemoryLayout < AudioBuffer > . stride , <nl> AudioBufferList . sizeInBytes ( maximumBuffers : 1 ) ) <nl> - expectEqual ( ablHeaderSize + 16 * strideof ( AudioBuffer . self ) , <nl> + expectEqual ( ablHeaderSize + 16 * MemoryLayout < AudioBuffer > . stride , <nl> AudioBufferList . sizeInBytes ( maximumBuffers : 16 ) ) <nl> } <nl> <nl> new file mode 100644 <nl> index 000000000000 . . e6b43a878a88 <nl> mmm / dev / null <nl> ppp b / validation - test / stdlib / CryptoTokenKitTests . swift <nl> <nl> + / / RUN : % target - parse - verify - swift <nl> + <nl> + / / REQUIRES : objc_interop <nl> + / / UNSUPPORTED : OS = watchos <nl> + / / UNSUPPORTED : OS = tvos <nl> + / / UNSUPPORTED : OS = ios <nl> + <nl> + import CryptoTokenKit <nl> + import Security <nl> + <nl> + if # available ( OSX 10 . 12 , * ) { <nl> + struct TKSmartCardTest { <nl> + func t1 ( manager : TKSmartCardSlotManager , name : String ) { <nl> + manager . getSlot ( withName : name ) { ( slot : TKSmartCardSlot ? ) in <nl> + let _ : TKSmartCardSlot . State = slot ! . state <nl> + } <nl> + } <nl> + <nl> + let p1 = TKSmartCardPINFormat . Charset . numeric <nl> + let p2 = TKSmartCardPINFormat . Encoding . ascii <nl> + let p3 = TKSmartCardPINFormat . Encoding . bcd <nl> + let p4 = TKSmartCardPINFormat . Justification . left <nl> + let p5 = TKSmartCardUserInteractionForPINOperation . Completion . key <nl> + let p6 = TKSmartCardUserInteractionForSecurePINChange . Confirmation . current <nl> + let p7 = TKSmartCardProtocol . t0 <nl> + let p8 = TKSmartCardProtocol . t1 <nl> + let p9 = TKSmartCardProtocol . t15 <nl> + let p10 = TKSmartCardATR . InterfaceGroup ( ) <nl> + <nl> + func t2 ( card : TKSmartCard ) throws { <nl> + card . isSensitive = card . isValid <nl> + card . transmit ( Data ( ) ) { ( response : Data ? , error : Error ? ) in <nl> + } <nl> + <nl> + card . userInteractionForSecurePINVerification ( TKSmartCardPINFormat ( ) , <nl> + apdu : Data ( ) , pinByteOffset : 0 ) <nl> + card . 
userInteractionForSecurePINChange ( TKSmartCardPINFormat ( ) , <nl> + apdu : Data ( ) , currentPINByteOffset : 0 , newPINByteOffset : 0 ) <nl> + <nl> + card . send ( ins : 0xa4 , p1 : 0x04 , p2 : 0x00 , data : Data ( ) , le : 0 ) { <nl> + ( response : Data ? , sw : UInt16 , error : Error ? ) in <nl> + } <nl> + <nl> + card . send ( ins : 0xa4 , p1 : 0x04 , p2 : 0x00 , le : 0 ) { <nl> + ( response : Data ? , sw : UInt16 , error : Error ? ) in <nl> + } <nl> + <nl> + card . send ( ins : 0xa4 , p1 : 0x04 , p2 : 0x00 , data : Data ( ) ) { <nl> + ( response : Data ? , sw : UInt16 , error : Error ? ) in <nl> + } <nl> + <nl> + card . send ( ins : 0xa4 , p1 : 0x04 , p2 : 0x00 ) { <nl> + ( response : Data ? , sw : UInt16 , error : Error ? ) in <nl> + } <nl> + <nl> + let _ : Int = try card . withSession ( ) { <nl> + let ( _ , _ ) : ( UInt16 , Data ) = try card . send ( ins : 0xa4 , p1 : 0x04 , <nl> + p2 : 0x00 , data : Data ( ) , le : 0 ) <nl> + let ( _ , _ ) : ( UInt16 , Data ) = try card . send ( ins : 0xa4 , p1 : 0x04 , <nl> + p2 : 0x00 , le : 0 ) <nl> + let ( _ , _ ) : ( UInt16 , Data ) = try card . send ( ins : 0xa4 , p1 : 0x04 , <nl> + p2 : 0x00 , data : Data ( ) ) <nl> + let ( _ , _ ) : ( UInt16 , Data ) = try card . send ( ins : 0xa4 , p1 : 0x04 , <nl> + p2 : 0x00 ) <nl> + return 1 <nl> + } <nl> + } <nl> + } <nl> + <nl> + struct TKTokenTest { <nl> + func f1 ( session : TKTokenSession , sessionDelegate : TKTokenSessionDelegate , <nl> + algorithm : TKTokenSessionDelegate . KeyAlgorithm , <nl> + parameters : TKTokenSessionDelegate . KeyExchangeParameters ) throws { <nl> + let _ : Bool = sessionDelegate . tokenSession ! ( session , supports : . none , <nl> + keyObjectID : " " , algorithm : algorithm ) <nl> + let _ : Data = try sessionDelegate . tokenSession ! ( session , sign : Data ( ) , <nl> + keyObjectID : " " , algorithm : algorithm ) <nl> + let _ : Data = try sessionDelegate . tokenSession ! ( session , decrypt : Data ( ) , <nl> + keyObjectID : " " , algorithm : algorithm ) <nl> + let _ : Data = try sessionDelegate . tokenSession ! ( session , <nl> + performKeyExchange : Data ( ) , keyObjectID : " " , algorithm : algorithm , <nl> + parameters : parameters ) <nl> + let _ : Bool = algorithm . isAlgorithm ( . rsaSignatureDigestPKCS1v15SHA1 ) <nl> + let _ : Bool = algorithm . supportsAlgorithm ( . rsaSignatureDigestPKCS1v15SHA1 ) <nl> + } <nl> + <nl> + func f2 ( token : TKToken , delegate : TKTokenDelegate ) throws { <nl> + let _ : TKTokenSession = try delegate . createSession ( token ) <nl> + } <nl> + } <nl> + } <nl> mmm a / validation - test / stdlib / Dictionary . swift <nl> ppp b / validation - test / stdlib / Dictionary . swift <nl> <nl> / / RUN : % line - directive % t / main . swift - - % target - run % t / Dictionary <nl> / / REQUIRES : executable_test <nl> <nl> - / / rdar : / / 27547957 <nl> - / / XFAIL : swift_test_mode_optimize <nl> - <nl> # if os ( OSX ) | | os ( iOS ) | | os ( tvOS ) | | os ( watchOS ) <nl> import Darwin <nl> # else <nl> DictionaryTestSuite . test ( " AssociatedTypes " ) { <nl> DictionaryTestSuite . test ( " sizeof " ) { <nl> var dict = [ 1 : " meow " , 2 : " meow " ] <nl> # if arch ( i386 ) | | arch ( arm ) <nl> - expectEqual ( 4 , sizeofValue ( dict ) ) <nl> + expectEqual ( 4 , MemoryLayout . _ofInstance ( dict ) . size ) <nl> # else <nl> - expectEqual ( 8 , sizeofValue ( dict ) ) <nl> + expectEqual ( 8 , MemoryLayout . _ofInstance ( dict ) . size ) <nl> # endif <nl> } <nl> <nl> mmm a / validation - test / stdlib / FixedPointArithmeticTraps . swift . 
gyb <nl> ppp b / validation - test / stdlib / FixedPointArithmeticTraps . swift . gyb <nl> FixedPointArithmeticTraps . test ( " $ { description } / $ { IntTy } / TypeSize " ) { <nl> a = get $ { IntTy } ( a $ { operation } get $ { IntTy } ( 0 ) ) <nl> a = get $ { IntTy } ( a $ { operation } get $ { IntTy } ( 1 ) ) <nl> <nl> - let shiftAmount = $ { IntTy } ( sizeof ( $ { IntTy } . self ) * 8 ) <nl> + let shiftAmount = $ { IntTy } ( MemoryLayout < $ { IntTy } > . size * 8 ) <nl> <nl> / / Overflow in $ { description } . <nl> expectCrashLater ( ) <nl> FixedPointArithmeticTraps . test ( " $ { description } / $ { IntTy } / TypeSizePlusOne " ) { <nl> <nl> a = get $ { IntTy } ( a $ { operation } get $ { IntTy } ( 0 ) ) <nl> <nl> - let shiftAmount = $ { IntTy } ( sizeof ( $ { IntTy } . self ) * 8 + 1 ) <nl> + let shiftAmount = $ { IntTy } ( MemoryLayout < $ { IntTy } > . size * 8 + 1 ) <nl> <nl> / / Overflow in $ { description } . <nl> expectCrashLater ( ) <nl> mmm a / validation - test / stdlib / HashingAvalanche . swift <nl> ppp b / validation - test / stdlib / HashingAvalanche . swift <nl> import StdlibUnittest <nl> <nl> var HashingTestSuite = TestSuite ( " Hashing " ) <nl> <nl> - func avalancheTest ( _ bits : Int , _ hashUnderTest : ( UInt64 ) - > UInt64 , _ pValue : Double ) { <nl> + func avalancheTest ( <nl> + _ bits : Int , <nl> + _ hashUnderTest : @ escaping ( UInt64 ) - > UInt64 , <nl> + _ pValue : Double <nl> + ) { <nl> let testsInBatch = 100000 <nl> let testData = randArray64 ( testsInBatch ) <nl> let testDataHashed = Array ( testData . lazy . map { hashUnderTest ( $ 0 ) } ) <nl> mmm a / validation - test / stdlib / NSNumberBridging . swift . gyb <nl> ppp b / validation - test / stdlib / NSNumberBridging . swift . gyb <nl> extension $ { Self } { <nl> func toNSNumberByteArray ( ) - > [ UInt8 ] { <nl> var v = self <nl> var result : [ UInt8 ] = [ ] <nl> - for _ in 0 . . < sizeof ( $ { Self } . self ) { <nl> + for _ in 0 . . < MemoryLayout < $ { Self } > . size { <nl> result . append ( UInt8 ( v & 0xff ) ) <nl> v = v > > 8 <nl> } <nl> extension $ { Self } { <nl> func toNSNumberByteArray ( ) - > [ UInt8 ] { <nl> var v = self . bitPattern <nl> var result : [ UInt8 ] = [ ] <nl> - for _ in 0 . . < sizeof ( v . dynamicType ) { <nl> + for _ in 0 . . < MemoryLayout . _ofInstance ( v ) . size { <nl> result . append ( UInt8 ( v & 0xff ) ) <nl> v = v > > 8 <nl> } <nl> mmm a / validation - test / stdlib / NewArray . swift . gyb <nl> ppp b / validation - test / stdlib / NewArray . swift . gyb <nl> testCocoa ( ) <nl> # endif / / _runtime ( _ObjC ) <nl> <nl> extension ArraySlice { <nl> - mutating func qsort ( _ compare : ( Element , Element ) - > Bool ) { <nl> + mutating func qsort ( _ compare : @ escaping ( Element , Element ) - > Bool ) { <nl> _introSort ( & self , subRange : Range ( self . indices ) , by : compare ) <nl> } <nl> } <nl> let testWidth = 11 <nl> % arrayTypes = [ ' ContiguousArray ' , ' Array ' , ' ArraySlice ' ] <nl> % for A in arrayTypes : <nl> <nl> - func testReplace ( _ make : ( ) - > $ { A } < LifetimeTracked > ) { <nl> + func testReplace ( _ make : @ escaping ( ) - > $ { A } < LifetimeTracked > ) { <nl> <nl> checkRangeReplaceable ( <nl> make , { LifetimeTracked ( 100 ) . . 
< LifetimeTracked ( 100 + $ 0 ) } ) <nl> } <nl> <nl> func testReplace $ { A } ( <nl> - makeOne : ( ) - > $ { A } < LifetimeTracked > = { <nl> + makeOne : @ escaping ( ) - > $ { A } < LifetimeTracked > = { <nl> var x = $ { A } < LifetimeTracked > ( ) <nl> / / make sure some - but not all - replacements will have to grow the buffer <nl> x . reserveCapacity ( testWidth * 3 / 2 ) <nl> mmm a / validation - test / stdlib / OpenCLSDKOverlay . swift <nl> ppp b / validation - test / stdlib / OpenCLSDKOverlay . swift <nl> tests . test ( " clSetKernelArgsListAPPLE " ) { <nl> <nl> / / Create the input and output arrays in device memory for our calculation <nl> / / <nl> - guard var input = clCreateBuffer ( context , cl_mem_flags ( CL_MEM_READ_ONLY ) , sizeof ( Float . self ) * count , nil , nil ) , <nl> - var output = clCreateBuffer ( context , cl_mem_flags ( CL_MEM_WRITE_ONLY ) , sizeof ( Float . self ) * count , nil , nil ) else { <nl> + guard var input = clCreateBuffer ( context , cl_mem_flags ( CL_MEM_READ_ONLY ) , MemoryLayout < Float > . size * count , nil , nil ) , <nl> + var output = clCreateBuffer ( context , cl_mem_flags ( CL_MEM_WRITE_ONLY ) , MemoryLayout < Float > . size * count , nil , nil ) else { <nl> print ( " Error : Failed to allocate device memory ! " ) <nl> exit ( 1 ) <nl> } <nl> <nl> / / Write our data set into the input array in device memory <nl> / / <nl> - err = clEnqueueWriteBuffer ( commands , input , cl_bool ( CL_TRUE ) , 0 , sizeof ( Float . self ) * count , data , 0 , nil , nil ) <nl> + err = clEnqueueWriteBuffer ( commands , input , cl_bool ( CL_TRUE ) , 0 , MemoryLayout < Float > . size * count , data , 0 , nil , nil ) <nl> if ( err ! = CL_SUCCESS ) <nl> { <nl> print ( " Error : Failed to write to source array ! " ) <nl> tests . test ( " clSetKernelArgsListAPPLE " ) { <nl> countPtr in <nl> clSetKernelArgsListAPPLE ( <nl> kernel ! , 3 , <nl> - 0 , sizeof ( cl_mem . self ) , inputPtr , <nl> - 1 , sizeof ( cl_mem . self ) , outputPtr , <nl> - 2 , sizeofValue ( count ) , countPtr ) <nl> + 0 , MemoryLayout < cl_mem > . size , inputPtr , <nl> + 1 , MemoryLayout < cl_mem > . size , outputPtr , <nl> + 2 , MemoryLayout . _ofInstance ( count ) . size , countPtr ) <nl> } <nl> } <nl> } <nl> tests . test ( " clSetKernelArgsListAPPLE " ) { <nl> <nl> / / Get the maximum work group size for executing the kernel on the device <nl> / / <nl> - err = clGetKernelWorkGroupInfo ( kernel , device_id , cl_kernel_work_group_info ( CL_KERNEL_WORK_GROUP_SIZE ) , sizeofValue ( local ) , & local , nil ) <nl> + err = clGetKernelWorkGroupInfo ( kernel , device_id , cl_kernel_work_group_info ( CL_KERNEL_WORK_GROUP_SIZE ) , MemoryLayout . _ofInstance ( local ) . size , & local , nil ) <nl> if ( err ! = CL_SUCCESS ) <nl> { <nl> print ( " Error : Failed to retrieve kernel work group info ! \ ( err ) " ) <nl> tests . test ( " clSetKernelArgsListAPPLE " ) { <nl> <nl> / / Read back the results from the device to verify the output <nl> / / <nl> - err = clEnqueueReadBuffer ( commands , output , cl_bool ( CL_TRUE ) , 0 , sizeof ( Float . self ) * count , & results , cl_uint ( 0 ) , nil , nil ) <nl> + err = clEnqueueReadBuffer ( commands , output , cl_bool ( CL_TRUE ) , 0 , MemoryLayout < Float > . size * count , & results , cl_uint ( 0 ) , nil , nil ) <nl> if ( err ! = CL_SUCCESS ) <nl> { <nl> print ( " Error : Failed to read output array ! \ ( err ) " ) <nl> mmm a / validation - test / stdlib / Prototypes / PersistentVector . swift . gyb <nl> ppp b / validation - test / stdlib / Prototypes / PersistentVector . 
swift . gyb <nl> pointer to a child node . <nl> Define ` ChildNodeOrKey ` to be a type that is an unsafe union of ` Key ` and <nl> ` UnsafePointer < Node > ` : <nl> <nl> - sizeof ( ChildNodeOrKey ) = max ( sizeof ( UnsafePointer ) , sizeof ( Key ) ) <nl> - alignof ( ChildNodeOrKey ) = max ( alignof ( UnsafePointer ) , alignof ( Key ) ) <nl> - strideof ( ChildNodeOrKey ) = max ( strideof ( UnsafePointer ) , strideof ( Key ) ) <nl> + MemoryLayout < ChildNodeOrKey > . size = max ( MemoryLayout < UnsafePointer > . size , MemoryLayout < Key > . size ) <nl> + MemoryLayout < ChildNodeOrKey > . alignment = max ( MemoryLayout < UnsafePointer > . alignment , MemoryLayout < Key > . alignment ) <nl> + MemoryLayout < ChildNodeOrKey > . stride = max ( MemoryLayout < UnsafePointer > . stride , MemoryLayout < Key > . stride ) <nl> <nl> Memory layout : <nl> <nl> struct _PVSparseVectorNodePointer < Key : Hashable , Value > <nl> typealias _Self = _PVSparseVectorNodePointer <nl> <nl> static var _referenceCountSize : Int { <nl> - return sizeof ( Int . self ) <nl> + return MemoryLayout < Int > . size <nl> } <nl> static var _referenceCountAlignment : Int { <nl> - return alignof ( Int . self ) <nl> + return MemoryLayout < Int > . alignment <nl> } <nl> static var _referenceCountOffset : Int { <nl> return 0 <nl> } <nl> <nl> static var _childNodePopulationBitmapSize : Int { <nl> - return sizeof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . size <nl> } <nl> static var _childNodePopulationBitmapAlignment : Int { <nl> - return alignof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . alignment <nl> } <nl> static var _childNodePopulationBitmapOffset : Int { <nl> let padding = <nl> struct _PVSparseVectorNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static var _keyPopulationBitmapSize : Int { <nl> - return sizeof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . size <nl> } <nl> static var _keyPopulationBitmapAlignment : Int { <nl> - return alignof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . alignment <nl> } <nl> static var _keyPopulationBitmapOffset : Int { <nl> let padding = <nl> struct _PVSparseVectorNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static func _childNodeVectorSize ( layout : _PVSparseVectorNodeLayoutParameters ) - > Int { <nl> - return strideof ( UnsafePointer < UInt8 > . self ) * layout . childNodeCount <nl> + return MemoryLayout < UnsafePointer < UInt8 > > . stride * layout . childNodeCount <nl> } <nl> static var _childNodeVectorAlignment : Int { <nl> - return alignof ( UnsafePointer < UInt8 > . self ) <nl> + return MemoryLayout < UnsafePointer < UInt8 > > . alignment <nl> } <nl> static var _childNodeVectorOffset : Int { <nl> let padding = <nl> struct _PVSparseVectorNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static func _keyVectorSize ( layout : _PVSparseVectorNodeLayoutParameters ) - > Int { <nl> - return strideof ( Key . self ) * layout . keyCount <nl> + return MemoryLayout < Key > . stride * layout . keyCount <nl> } <nl> static var _keyVectorAlignment : Int { <nl> - return alignof ( Key . self ) <nl> + return MemoryLayout < Key > . alignment <nl> } <nl> static func _keyVectorOffset ( layout : _PVSparseVectorNodeLayoutParameters ) - > Int { <nl> let padding = <nl> struct _PVSparseVectorNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static func _valueVectorSize ( layout : _PVSparseVectorNodeLayoutParameters ) - > Int { <nl> - return strideof ( Value . self ) * layout . 
keyCount <nl> + return MemoryLayout < Value > . stride * layout . keyCount <nl> } <nl> static var _valueVectorAlignment : Int { <nl> - return alignof ( Value . self ) <nl> + return MemoryLayout < Value > . alignment <nl> } <nl> static func _valueVectorOffset ( layout : _PVSparseVectorNodeLayoutParameters ) - > Int { <nl> let padding = <nl> struct _PVArrayNodePointer < Key : Hashable , Value > <nl> <nl> static var _childNodeOrKeyStride : Int { <nl> return max ( <nl> - strideof ( UnsafePointer < UInt8 > . self ) , <nl> - strideof ( Key . self ) ) <nl> + MemoryLayout < UnsafePointer < UInt8 > > . stride , <nl> + MemoryLayout < Key > . stride ) <nl> } <nl> static var _childNodeOrKeyAlignment : Int { <nl> return max ( <nl> - alignof ( UnsafePointer < UInt8 > . self ) , <nl> - alignof ( Key . self ) ) <nl> + MemoryLayout < UnsafePointer < UInt8 > > . alignment , <nl> + MemoryLayout < Key > . alignment ) <nl> } <nl> <nl> static var _referenceCountSize : Int { <nl> - return sizeof ( Int . self ) <nl> + return MemoryLayout < Int > . size <nl> } <nl> static var _referenceCountAlignment : Int { <nl> - return alignof ( Int . self ) <nl> + return MemoryLayout < Int > . alignment <nl> } <nl> static var _referenceCountOffset : Int { <nl> return 0 <nl> } <nl> <nl> static var _childNodePopulationBitmapSize : Int { <nl> - return sizeof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . size <nl> } <nl> static var _childNodePopulationBitmapAlignment : Int { <nl> - return alignof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . alignment <nl> } <nl> static var _childNodePopulationBitmapOffset : Int { <nl> let padding = max ( <nl> struct _PVArrayNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static var _keyPopulationBitmapSize : Int { <nl> - return sizeof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . size <nl> } <nl> static var _keyPopulationBitmapAlignment : Int { <nl> - return alignof ( _Int32Bitmap . self ) <nl> + return MemoryLayout < _Int32Bitmap > . alignment <nl> } <nl> static var _keyPopulationBitmapOffset : Int { <nl> let padding = max ( <nl> struct _PVArrayNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static var _valueArraySize : Int { <nl> - return strideof ( Value . self ) * 32 <nl> + return MemoryLayout < Value > . stride * 32 <nl> } <nl> static var _valueArrayAlignment : Int { <nl> - return alignof ( Value . self ) <nl> + return MemoryLayout < Value > . alignment <nl> } <nl> static var _valueArrayOffset : Int { <nl> let padding = <nl> struct _PVCollisionNodePointer < Key : Hashable , Value > <nl> typealias _Self = _PVCollisionNodePointer <nl> <nl> static var _referenceCountSize : Int { <nl> - return sizeof ( Int . self ) <nl> + return MemoryLayout < Int > . size <nl> } <nl> static var _referenceCountAlignment : Int { <nl> - return alignof ( Int . self ) <nl> + return MemoryLayout < Int > . alignment <nl> } <nl> static var _referenceCountOffset : Int { <nl> return 0 <nl> } <nl> <nl> static var _countSize : Int { <nl> - return sizeof ( Int . self ) <nl> + return MemoryLayout < Int > . size <nl> } <nl> static var _countAlignment : Int { <nl> - return alignof ( Int . self ) <nl> + return MemoryLayout < Int > . alignment <nl> } <nl> static var _countOffset : Int { <nl> let padding = max ( <nl> struct _PVCollisionNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static func _keyArraySize ( layout : _PVCollisionNodePointerLayoutParameters ) - > Int { <nl> - return strideof ( Key . self ) * layout . 
keyCount <nl> + return MemoryLayout < Key > . stride * layout . keyCount <nl> } <nl> static var _keyArrayAlignment : Int { <nl> - return alignof ( Key . self ) <nl> + return MemoryLayout < Key > . alignment <nl> } <nl> static var _keyArrayOffset : Int { <nl> let padding = <nl> struct _PVCollisionNodePointer < Key : Hashable , Value > <nl> } <nl> <nl> static func _valueArraySize ( layout : _PVCollisionNodePointerLayoutParameters ) - > Int { <nl> - return strideof ( Value . self ) * layout . keyCount <nl> + return MemoryLayout < Value > . stride * layout . keyCount <nl> } <nl> static var _valueArrayAlignment : Int { <nl> - return alignof ( Value . self ) <nl> + return MemoryLayout < Value > . alignment <nl> } <nl> static func _valueArrayOffset ( layout : _PVCollisionNodePointerLayoutParameters ) - > Int { <nl> let padding = <nl> var PersistentVectorTests = TestSuite ( " PersistentVector " ) <nl> <nl> PersistentVectorTests . test ( " sizeof " ) { <nl> expectEqual ( <nl> - sizeof ( UnsafePointer < UInt8 > . self ) , <nl> - sizeof ( _PVSparseVectorNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > . self ) ) <nl> + MemoryLayout < UnsafePointer < UInt8 > > . size , <nl> + MemoryLayout < _PVSparseVectorNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > > . size ) <nl> expectEqual ( <nl> - sizeof ( UnsafePointer < UInt8 > . self ) , <nl> - sizeof ( _PVArrayNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > . self ) ) <nl> + MemoryLayout < UnsafePointer < UInt8 > > . size , <nl> + MemoryLayout < _PVArrayNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > > . size ) <nl> expectEqual ( <nl> - sizeof ( UnsafePointer < UInt8 > . self ) , <nl> - sizeof ( _PVAnyNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > . self ) ) <nl> + MemoryLayout < UnsafePointer < UInt8 > > . size , <nl> + MemoryLayout < _PVAnyNodePointer < MinimalHashableValue , OpaqueValue < Int32 > > > . size ) <nl> } <nl> <nl> % { <nl> mmm a / validation - test / stdlib / SceneKit . swift <nl> ppp b / validation - test / stdlib / SceneKit . swift <nl> func bytesFromNSData ( _ data : NSData ) - > [ UInt8 ] { <nl> <nl> func floatsFromNSData ( _ data : NSData ) - > [ Float ] { <nl> let floatPtr = data . bytes . bindMemory ( to : Float . self , capacity : data . length ) <nl> - return Array ( UnsafeBufferPointer ( start : floatPtr , count : data . length / sizeof ( Float ) ) ) <nl> + return Array ( UnsafeBufferPointer ( start : floatPtr , count : data . length / MemoryLayout < Float > . size ) ) <nl> } <nl> <nl> if # available ( iOS 8 . 0 , * ) { <nl> if # available ( iOS 8 . 0 , * ) { <nl> expectEqual ( source . vectorCount , 2 ) <nl> expectEqual ( source . componentsPerVector , 3 ) <nl> <nl> - expectEqual ( source . bytesPerComponent , sizeof ( Float ) ) <nl> + expectEqual ( source . bytesPerComponent , MemoryLayout < Float > . size ) <nl> let positions = floatsFromNSData ( source . data as NSData ) <nl> expectEqual ( positions [ 2 ] , 3 ) <nl> expectEqual ( positions [ 4 ] , 5 ) <nl> if # available ( iOS 8 . 0 , * ) { <nl> expectEqual ( source . vectorCount , 2 ) <nl> expectEqual ( source . componentsPerVector , 3 ) <nl> <nl> - expectEqual ( source . bytesPerComponent , sizeof ( Float ) ) <nl> + expectEqual ( source . bytesPerComponent , MemoryLayout < Float > . size ) <nl> let normals = floatsFromNSData ( source . data as NSData ) <nl> expectEqual ( normals [ 2 ] , 3 ) <nl> expectEqual ( normals [ 4 ] , 5 ) <nl> mmm a / validation - test / stdlib / Set . 
swift <nl> ppp b / validation - test / stdlib / Set . swift <nl> <nl> / / RUN : % line - directive % t / main . swift - - % target - run % t / Set <nl> / / REQUIRES : executable_test <nl> <nl> - / / rdar : / / 27547957 <nl> - / / XFAIL : swift_test_mode_optimize <nl> - <nl> import StdlibUnittest <nl> import StdlibCollectionUnittest <nl> <nl> SetTestSuite . test ( " AssociatedTypes " ) { <nl> SetTestSuite . test ( " sizeof " ) { <nl> var s = Set ( [ " Hello " , " world " ] ) <nl> # if arch ( i386 ) | | arch ( arm ) <nl> - expectEqual ( 4 , sizeofValue ( s ) ) <nl> + expectEqual ( 4 , MemoryLayout . _ofInstance ( s ) . size ) <nl> # else <nl> - expectEqual ( 8 , sizeofValue ( s ) ) <nl> + expectEqual ( 8 , MemoryLayout . _ofInstance ( s ) . size ) <nl> # endif <nl> } <nl> <nl> mmm a / validation - test / stdlib / String . swift <nl> ppp b / validation - test / stdlib / String . swift <nl> extension String { <nl> var StringTests = TestSuite ( " StringTests " ) <nl> <nl> StringTests . test ( " sizeof " ) { <nl> - expectEqual ( 3 * sizeof ( Int . self ) , sizeof ( String . self ) ) <nl> + expectEqual ( 3 * MemoryLayout < Int > . size , MemoryLayout < String > . size ) <nl> } <nl> <nl> StringTests . test ( " AssociatedTypes - UTF8View " ) { <nl> StringTests . test ( " toInt " ) { <nl> / / then print if the new String is or is not still an Int . <nl> func testConvertabilityOfStringWithModification ( <nl> _ initialValue : Int , <nl> - modification : ( chars : inout [ UTF8 . CodeUnit ] ) - > Void <nl> + modification : ( _ chars : inout [ UTF8 . CodeUnit ] ) - > Void <nl> ) { <nl> var chars = Array ( String ( initialValue ) . utf8 ) <nl> - modification ( chars : & chars ) <nl> + modification ( & chars ) <nl> let str = String . _fromWellFormedCodeUnitSequence ( UTF8 . self , input : chars ) <nl> expectEmpty ( Int ( str ) ) <nl> } <nl> mmm a / validation - test / stdlib / UnicodeUTFEncoders . swift <nl> ppp b / validation - test / stdlib / UnicodeUTFEncoders . swift <nl> final class CodecTest < Codec : TestableUnicodeCodec > { <nl> <nl> / / Use Cocoa to encode the scalar <nl> nsEncode ( scalar . value , Codec . encodingId ( ) , & nsEncodeBuffer , & used ) <nl> - let nsEncoded = nsEncodeBuffer [ 0 . . < ( used / sizeof ( CodeUnit . self ) ) ] <nl> + let nsEncoded = nsEncodeBuffer [ 0 . . < ( used / MemoryLayout < CodeUnit > . size ) ] <nl> var encodeIndex = encodeBuffer . startIndex <nl> let encodeOutput : ( CodeUnit ) - > Void = { <nl> self . encodeBuffer [ encodeIndex ] = $ 0 <nl> | Merge ' origin / master ' into new - integer - protocols | apple/swift | f06a9a1154c53eab6220ca218f463287d56320de | 2016-07-30T00:52:49Z |
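The apple/swift row above migrates validation tests from the old sizeof / sizeofValue / alignof / strideof free functions to the MemoryLayout API. The short Swift sketch below illustrates that API under stated assumptions: the Header struct and its field values are invented for the example, the printed layout numbers are only typical 64-bit results, and it uses the public MemoryLayout.size(ofValue:) spelling rather than the interim MemoryLayout._ofInstance(...).size form that appears in the diff.

    // Type-based queries replace sizeof(T.self), alignof(T.self), strideof(T.self).
    struct Header {               // made-up struct, used only for this sketch
        var tag: UInt8
        var length: Int32
    }

    print(MemoryLayout<Header>.size)       // bytes in one value (typically 8: 1 + 3 padding + 4)
    print(MemoryLayout<Header>.alignment)  // required byte alignment (typically 4)
    print(MemoryLayout<Header>.stride)     // distance between consecutive array elements (typically 8)

    // Value-based query replaces sizeofValue(x).
    let h = Header(tag: 1, length: 64)
    print(MemoryLayout.size(ofValue: h))

The same pattern covers the byte-count arithmetic in the OpenCL and SceneKit tests in this row, where MemoryLayout<Float>.size * count converts an element count into a buffer size in bytes.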
mmm a / src / ppc / full - codegen - ppc . cc <nl> ppp b / src / ppc / full - codegen - ppc . cc <nl> void FullCodeGenerator : : VisitArrayLiteral ( ArrayLiteral * expr ) { <nl> <nl> / / Emit code to evaluate all the non - constant subexpressions and to store <nl> / / them into the newly cloned array . <nl> - for ( int i = 0 ; i < length ; i + + ) { <nl> - Expression * subexpr = subexprs - > at ( i ) ; <nl> + int array_index = 0 ; <nl> + for ( ; array_index < length ; array_index + + ) { <nl> + Expression * subexpr = subexprs - > at ( array_index ) ; <nl> + if ( subexpr - > IsSpread ( ) ) break ; <nl> / / If the subexpression is a literal or a simple materialized literal it <nl> / / is already set in the cloned array . <nl> if ( CompileTimeValue : : IsCompileTimeValue ( subexpr ) ) continue ; <nl> void FullCodeGenerator : : VisitArrayLiteral ( ArrayLiteral * expr ) { <nl> VisitForAccumulatorValue ( subexpr ) ; <nl> <nl> if ( has_fast_elements ) { <nl> - int offset = FixedArray : : kHeaderSize + ( i * kPointerSize ) ; <nl> + int offset = FixedArray : : kHeaderSize + ( array_index * kPointerSize ) ; <nl> __ LoadP ( r8 , MemOperand ( sp , kPointerSize ) ) ; / / Copy of array literal . <nl> __ LoadP ( r4 , FieldMemOperand ( r8 , JSObject : : kElementsOffset ) ) ; <nl> __ StoreP ( result_register ( ) , FieldMemOperand ( r4 , offset ) , r0 ) ; <nl> void FullCodeGenerator : : VisitArrayLiteral ( ArrayLiteral * expr ) { <nl> kDontSaveFPRegs , EMIT_REMEMBERED_SET , <nl> INLINE_SMI_CHECK ) ; <nl> } else { <nl> - __ LoadSmiLiteral ( r6 , Smi : : FromInt ( i ) ) ; <nl> + __ LoadSmiLiteral ( r6 , Smi : : FromInt ( array_index ) ) ; <nl> StoreArrayLiteralElementStub stub ( isolate ( ) ) ; <nl> __ CallStub ( & stub ) ; <nl> } <nl> <nl> - PrepareForBailoutForId ( expr - > GetIdForElement ( i ) , NO_REGISTERS ) ; <nl> + PrepareForBailoutForId ( expr - > GetIdForElement ( array_index ) , NO_REGISTERS ) ; <nl> + } <nl> + <nl> + / / In case the array literal contains spread expressions it has two parts . The <nl> + / / first part is the " static " array which has a literal index is handled <nl> + / / above . The second part is the part after the first spread expression <nl> + / / ( inclusive ) and these elements gets appended to the array . Note that the <nl> + / / number elements an iterable produces is unknown ahead of time . <nl> + if ( array_index < length & & result_saved ) { <nl> + __ Drop ( 1 ) ; / / literal index <nl> + __ Pop ( r3 ) ; <nl> + result_saved = false ; <nl> + } <nl> + for ( ; array_index < length ; array_index + + ) { <nl> + Expression * subexpr = subexprs - > at ( array_index ) ; <nl> + <nl> + __ Push ( r3 ) ; <nl> + if ( subexpr - > IsSpread ( ) ) { <nl> + VisitForStackValue ( subexpr - > AsSpread ( ) - > expression ( ) ) ; <nl> + __ InvokeBuiltin ( Builtins : : CONCAT_ITERABLE_TO_ARRAY , CALL_FUNCTION ) ; <nl> + } else { <nl> + VisitForStackValue ( subexpr ) ; <nl> + __ CallRuntime ( Runtime : : kAppendElement , 2 ) ; <nl> + } <nl> + <nl> + PrepareForBailoutForId ( expr - > GetIdForElement ( array_index ) , NO_REGISTERS ) ; <nl> } <nl> <nl> if ( result_saved ) { <nl> - __ pop ( ) ; / / literal index <nl> + __ Drop ( 1 ) ; / / literal index <nl> context ( ) - > PlugTOS ( ) ; <nl> } else { <nl> context ( ) - > Plug ( r3 ) ; <nl> | PPC : [ es6 ] Spread in array literals | v8/v8 | fc146e9e91350eb455e1a5bc71c46733413dd054 | 2015-05-21T15:35:23Z |
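The v8/v8 row above adds ES6 spread support in array literals to the PPC full codegen by splitting the literal into two parts: a prefix whose elements sit at statically known literal indices, and everything from the first spread onward, which is appended at run time because an iterable can yield any number of elements. The Swift snippet below is only a conceptual sketch of that two-phase shape, not V8 code; the function name, generic signature, and sample values are invented for illustration.

    // Conceptual sketch of the diff's two-phase handling: fixed-index writes for the
    // static prefix, then appends for the spread tail whose length is unknown up front.
    func buildArrayLiteral<S: Sequence>(prefix: [Int], spreads: [S]) -> [Int] where S.Element == Int {
        var result = prefix                      // "static" part: indices known before execution
        for iterable in spreads {
            result.append(contentsOf: iterable)  // dynamic part: grows by whatever each iterable yields
        }
        return result
    }

    // Roughly corresponds to the JavaScript literal [1, 2, ...xs, ...ys].
    let xs = Array(stride(from: 3, through: 5, by: 1))
    let combined = buildArrayLiteral(prefix: [1, 2], spreads: [xs, [6, 7]])
    print(combined)  // [1, 2, 3, 4, 5, 6, 7]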